diff --git a/build/OWNERS b/build/OWNERS
new file mode 100644
index 00000000000..122b6e64c4a
--- /dev/null
+++ b/build/OWNERS
@@ -0,0 +1,12 @@
+agrieve@chromium.org
+dpranke@chromium.org
+jbudorick@chromium.org
+jochen@chromium.org
+scottmg@chromium.org
+thakis@chromium.org
+brucedawson@chromium.org
+
+per-file mac_toolchain.py=erikchen@chromium.org
+per-file mac_toolchain.py=justincohen@chromium.org
+per-file package_mac_toolchain.py=erikchen@chromium.org
+per-file package_mac_toolchain.py=justincohen@chromium.org
diff --git a/build/PRESUBMIT.py b/build/PRESUBMIT.py
new file mode 100644
index 00000000000..fca962f1caa
--- /dev/null
+++ b/build/PRESUBMIT.py
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+def _RunTests(input_api, output_api):
+  return (input_api.canned_checks.RunUnitTestsInDirectory(
+      input_api, output_api, '.', whitelist=[r'.+_test\.py$']))
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return _RunTests(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return _RunTests(input_api, output_api)
diff --git a/build/all.gyp b/build/all.gyp
new file mode 100644
index 00000000000..04cd9752f84
--- /dev/null
+++ b/build/all.gyp
@@ -0,0 +1,1342 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # A hook that can be overridden in other repositories to add additional
+    # compilation targets to 'All'.
+    'app_targets%': [],
+    # For Android-specific targets.
+    'android_app_targets%': [],
+  },
+  'includes': [
+    '../third_party/openh264/openh264_args.gypi',
+  ],
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'xcode_create_dependents_test_runner': 1,
+      'dependencies': [
+        '<@(app_targets)',
+        'some.gyp:*',
+        '../base/base.gyp:*',
+        '../components/components.gyp:*',
+        '../components/components_tests.gyp:*',
+        '../crypto/crypto.gyp:*',
+        '../net/net.gyp:*',
+        '../sdch/sdch.gyp:*',
+        '../sql/sql.gyp:*',
+        '../testing/gmock.gyp:*',
+        '../testing/gtest.gyp:*',
+        '../third_party/boringssl/boringssl.gyp:*',
+        '../third_party/icu/icu.gyp:*',
+        '../third_party/libxml/libxml.gyp:*',
+        '../third_party/sqlite/sqlite.gyp:*',
+        '../third_party/zlib/zlib.gyp:*',
+        '../ui/accessibility/accessibility.gyp:*',
+        '../ui/base/ui_base.gyp:*',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/native_theme/native_theme.gyp:native_theme_unittests',
+        '../ui/snapshot/snapshot.gyp:*',
+        '../url/url.gyp:*',
+      ],
+      'conditions': [
+        ['OS!="ios" and OS!="mac"', {
+          'dependencies': [
+            '../ui/touch_selection/ui_touch_selection.gyp:*',
+          ],
+        }],
+        ['OS=="ios"', {
+          'dependencies': [
+            '../ios/ios.gyp:*',
+            # NOTE: This list of targets is present because
+            # mojo_base.gyp:mojo_base cannot be built on iOS, as
+            # javascript-related targets cause v8 to be built.
+            # TODO(crbug.com/605508): http://crrev.com/1832703002 introduced
+            # a dependency on //third_party/WebKit that causes build failures
+            # when using the Xcode version of clang (loading the clang plugin
+            # fails).
+            # '../mojo/mojo_base.gyp:mojo_common_lib',
+            # '../mojo/mojo_base.gyp:mojo_common_unittests',
+            # '../mojo/mojo_edk.gyp:mojo_system_impl',
+            # '../mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests',
+            # '../mojo/mojo_edk_tests.gyp:mojo_public_system_unittests',
+            # '../mojo/mojo_edk_tests.gyp:mojo_system_unittests',
+            # '../mojo/mojo_public.gyp:mojo_cpp_bindings',
+            # '../mojo/mojo_public.gyp:mojo_public_test_utils',
+            # '../mojo/mojo_public.gyp:mojo_system',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+          ],
+        }, {  # 'OS!="ios"
+          'dependencies': [
+            '../content/content.gyp:*',
+            '../device/bluetooth/bluetooth.gyp:*',
+            '../device/device_tests.gyp:*',
+          ],
+        }],
+        ['OS=="android"', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:content_shell_apk',
+            '<@(android_app_targets)',
+            'android_builder_tests',
+            '../third_party/catapult/telemetry/telemetry.gyp:*#host',
+            # TODO(nyquist): This should instead be a target for sync when all
+            # of the sync-related code for Android has been upstreamed.
+            # See http://crbug.com/159203
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib',
+          ],
+          'conditions': [
+            ['chromecast==0', {
+              'dependencies': [
+                '../android_webview/android_webview.gyp:android_webview_apk',
+                '../android_webview/android_webview_shell.gyp:system_webview_shell_apk',
+                '../chrome/android/chrome_apk.gyp:chrome_public_apk',
+                '../chrome/android/chrome_apk.gyp:chrome_sync_shell_apk',
+              ],
+            }],
+            ['chromecast==0 and use_webview_internal_framework==0', {
+              'dependencies': [
+                '../android_webview/android_webview.gyp:system_webview_apk',
+              ],
+            }],
+            # TODO: Enable packed relocations for x64. See: b/20532404
+            ['target_arch != "x64"', {
+              'dependencies': [
+                '../third_party/android_platform/relocation_packer.gyp:android_relocation_packer_unittests#host',
+              ],
+            }],
+          ],
+        }, {
+          'dependencies': [
+            # TODO: This should build on Android and the target should move to the list above.
+ '../components/sync.gyp:*', + ], + 'conditions': [ + ['OS!="ios"', { + 'dependencies': [ + '../content/content_shell_and_tests.gyp:*', + ], + }], + ], + }], + ['OS!="ios" and OS!="android" and chromecast==0', { + 'dependencies': [ + '../third_party/re2/re2.gyp:re2', + '../chrome/chrome.gyp:*', + '../cc/blink/cc_blink_tests.gyp:*', + '../cc/cc_tests.gyp:*', + '../device/usb/usb.gyp:*', + '../extensions/extensions.gyp:*', + '../extensions/extensions_tests.gyp:*', + '../gin/gin.gyp:*', + '../gpu/gpu.gyp:*', + '../gpu/tools/tools.gyp:*', + '../ipc/ipc.gyp:*', + '../jingle/jingle.gyp:*', + '../media/capture/capture.gyp:*', + '../media/cast/cast.gyp:*', + '../media/media.gyp:*', + '../media/midi/midi.gyp:*', + '../mojo/mojo.gyp:*', + '../mojo/mojo_base.gyp:*', + '../ppapi/ppapi.gyp:*', + '../ppapi/ppapi_internal.gyp:*', + '../ppapi/tools/ppapi_tools.gyp:*', + '../services/shell/shell.gyp:*', + '../skia/skia.gyp:*', + '../components/sync/tools/sync_tools.gyp:*', + '../third_party/catapult/telemetry/telemetry.gyp:*', + '../third_party/WebKit/public/all.gyp:*', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:*', + '../third_party/codesighs/codesighs.gyp:*', + '../third_party/ffmpeg/ffmpeg.gyp:*', + '../third_party/iccjpeg/iccjpeg.gyp:*', + '../third_party/libpng/libpng.gyp:*', + '../third_party/libusb/libusb.gyp:*', + '../third_party/libwebp/libwebp.gyp:*', + '../third_party/libxslt/libxslt.gyp:*', + '../third_party/lzma_sdk/lzma_sdk.gyp:*', + '../third_party/mesa/mesa.gyp:*', + '../third_party/modp_b64/modp_b64.gyp:*', + '../third_party/ots/ots.gyp:*', + '../third_party/pdfium/samples/samples.gyp:*', + '../third_party/qcms/qcms.gyp:*', + '../tools/battor_agent/battor_agent.gyp:*', + '../tools/gn/gn.gyp:*', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../v8/src/v8.gyp:*', + '<(libjpeg_gyp_path):*', + ], + }], + ['OS=="win" or OS=="ios" or OS=="linux"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:*', + ], + }], + ['OS=="mac"', { + 'dependencies': [ + '../sandbox/sandbox.gyp:*', + '../third_party/crashpad/crashpad/crashpad.gyp:*', + '../third_party/ocmock/ocmock.gyp:*', + ], + 'conditions': [ + ['enable_ipc_fuzzer==1', { + 'dependencies': [ + '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*', + ], + }], + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../courgette/courgette.gyp:*', + '../sandbox/sandbox.gyp:*', + ], + 'conditions': [ + ['branding=="Chrome"', { + 'dependencies': [ + '../chrome/chrome.gyp:linux_packages_<(channel)', + ], + }], + ['enable_ipc_fuzzer==1', { + 'dependencies': [ + '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*', + ], + }], + ['use_dbus==1', { + 'dependencies': [ + '../dbus/dbus.gyp:*', + ], + }], + ], + }], + ['chromecast==1', { + 'dependencies': [ + '../chromecast/chromecast.gyp:*', + ], + }], + ['use_x11==1', { + 'dependencies': [ + '../tools/xdisplaycheck/xdisplaycheck.gyp:*', + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../chrome_elf/chrome_elf.gyp:*', + '../courgette/courgette.gyp:*', + '../rlz/rlz.gyp:*', + '../sandbox/sandbox.gyp:*', + '<(angle_path)/src/angle.gyp:*', + '../third_party/bspatch/bspatch.gyp:*', + '../tools/win/static_initializers/static_initializers.gyp:*', + ], + }], + ['toolkit_views==1', { + 'dependencies': [ + '../ui/views/controls/webview/webview.gyp:*', + '../ui/views/views.gyp:*', + ], + }], + ['use_aura==1', { + 'dependencies': [ + '../ash/ash.gyp:*', + '../ui/app_list/app_list.gyp:*', + '../ui/aura/aura.gyp:*', + '../ui/aura_extra/aura_extra.gyp:*', + ], + }], + ['remoting==1', { + 'dependencies': [ + 
'../remoting/remoting_all.gyp:remoting_all', + ], + }], + ['OS!="ios"', { + 'dependencies': [ + '../third_party/boringssl/boringssl_tests.gyp:*', + ], + }], + ['OS!="android" and OS!="ios"', { + 'dependencies': [ + '../google_apis/gcm/gcm.gyp:*', + ], + }], + ['(chromeos==1 or OS=="linux" or OS=="win" or OS=="mac") and chromecast==0', { + 'dependencies': [ + '../extensions/shell/app_shell.gyp:*', + ], + }], + ['envoy==1', { + 'dependencies': [ + '../envoy/envoy.gyp:*', + ], + }], + ['use_openh264==1', { + 'dependencies': [ + '../third_party/openh264/openh264.gyp:*', + ], + }], + ['enable_basic_printing==1 or enable_print_preview==1', { + 'dependencies': [ + '../printing/printing.gyp:*', + ], + }], + ], + }, # target_name: All + { + 'target_name': 'All_syzygy', + 'type': 'none', + 'conditions': [ + ['OS=="win" and fastbuild==0 and target_arch=="ia32" and ' + '(syzyasan==1 or syzygy_optimize==1)', { + 'dependencies': [ + '../chrome/installer/mini_installer_syzygy.gyp:*', + ], + }], + ], + }, # target_name: All_syzygy + { + # Note: Android uses android_builder_tests below. + # TODO: Consider merging that with this target. + 'target_name': 'chromium_builder_tests', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests', + '../components/components_tests.gyp:components_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../net/net.gyp:net_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/display/display.gyp:display_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../url/url.gyp:url_unittests', + ], + 'conditions': [ + ['OS!="ios"', { + 'dependencies': [ + '../ui/gl/gl_tests.gyp:gl_unittests', + '../url/ipc/url_ipc.gyp:url_ipc_unittests', + ], + }], + ['OS!="ios" and OS!="mac"', { + 'dependencies': [ + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + ], + }], + ['OS!="ios" and OS!="android"', { + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_shell', + '../content/content_shell_and_tests.gyp:content_unittests', + '../device/device_tests.gyp:device_unittests', + '../gin/gin.gyp:gin_unittests', + '../google_apis/google_apis.gyp:google_apis_unittests', + '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/capture/capture.gyp:capture_unittests', + '../media/cast/cast.gyp:cast_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../mojo/mojo.gyp:mojo', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../third_party/catapult/telemetry/telemetry.gyp:*', + '../third_party/WebKit/public/all.gyp:all_blink', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + ], + }], + ['OS!="ios" and OS!="android" and chromecast==0', { + 'dependencies': [ + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:chromedriver_tests', + 
'../chrome/chrome.gyp:chromedriver_unittests', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:unit_tests', + '../extensions/extensions_tests.gyp:extensions_browsertests', + '../extensions/extensions_tests.gyp:extensions_unittests', + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../chrome/chrome.gyp:installer_util_unittests', + '../chrome/chrome.gyp:install_static_unittests', + '../chrome/chrome.gyp:setup_unittests', + # ../chrome/test/mini_installer requires mini_installer. + '../chrome/installer/mini_installer.gyp:mini_installer', + '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests', + '../courgette/courgette.gyp:courgette_unittests', + '../sandbox/sandbox.gyp:sbox_integration_tests', + '../sandbox/sandbox.gyp:sbox_unittests', + '../sandbox/sandbox.gyp:sbox_validation_tests', + ], + 'conditions': [ + # remoting_host_installation uses lots of non-trivial GYP that tend + # to break because of differences between ninja and msbuild. Make + # sure this target is built by the builders on the main waterfall. + # See http://crbug.com/180600. + ['wix_exists == "True"', { + 'dependencies': [ + '../remoting/remoting.gyp:remoting_host_installation', + ], + }], + ['syzyasan==1', { + 'variables': { + # Disable incremental linking for all modules. + # 0: inherit, 1: disabled, 2: enabled. + 'msvs_debug_link_incremental': '1', + 'msvs_large_module_debug_link_mode': '1', + # Disable RTC. Syzygy explicitly doesn't support RTC + # instrumented binaries for now. + 'win_debug_RuntimeChecks': '0', + }, + 'defines': [ + # Disable iterator debugging (huge speed boost). + '_HAS_ITERATOR_DEBUGGING=0', + ], + 'msvs_settings': { + 'VCLinkerTool': { + # Enable profile information (necessary for SyzyAsan + # instrumentation). This is incompatible with incremental + # linking. 
+ 'Profile': 'true', + }, + } + }], + ['component!="shared_library" or target_arch!="ia32"', { + 'dependencies': [ + '../chrome/installer/mini_installer.gyp:next_version_mini_installer', + ], + }], + ], + }], + ['chromeos==1', { + 'dependencies': [ + '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests', + '../ui/arc/arc.gyp:ui_arc_unittests', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../sandbox/sandbox.gyp:sandbox_linux_unittests', + ], + }], + ['OS=="linux" and use_dbus==1', { + 'dependencies': [ + '../dbus/dbus.gyp:dbus_unittests', + ], + }], + ['OS=="mac"', { + 'dependencies': [ + '../ui/message_center/message_center.gyp:*', + ], + }], + ['test_isolation_mode != "noop"', { + 'dependencies': [ + 'chromium_swarm_tests', + ], + }], + ['OS!="android"', { + 'dependencies': [ + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + ], + }], + ['enable_basic_printing==1 or enable_print_preview==1', { + 'dependencies': [ + '../printing/printing.gyp:printing_unittests', + ], + }], + ['use_aura==1', { + 'dependencies': [ + '../ash/ash.gyp:ash_unittests', + '../ui/app_list/app_list.gyp:app_list_unittests', + '../ui/app_list/presenter/app_list_presenter.gyp:app_list_presenter_unittests', + '../ui/aura/aura.gyp:aura_unittests', + '../ui/compositor/compositor.gyp:compositor_unittests', + ], + }], + ['use_aura==1 and chromecast==0', { + 'dependencies': [ + '../ui/keyboard/keyboard.gyp:keyboard_unittests', + '../ui/views/views.gyp:views_unittests', + ], + }], + ['use_aura==1 or toolkit_views==1', { + 'dependencies': [ + '../ui/events/events_unittests.gyp:events_unittests', + ], + }], + ['disable_nacl==0', { + 'dependencies': [ + '../components/nacl.gyp:nacl_loader_unittests', + ], + }], + ['disable_nacl==0 and disable_nacl_untrusted==0 and enable_nacl_nonsfi_test==1', { + 'dependencies': [ + '../components/nacl.gyp:nacl_helper_nonsfi_unittests', + ], + }], + ], + }, # target_name: chromium_builder_tests + ], + 'conditions': [ + # TODO(GYP): make gn_migration.gypi work unconditionally. 
+ ['OS=="mac" or OS=="win" or (OS=="android" and chromecast==0) or (OS=="linux" and target_arch=="x64" and chromecast==0)', { + 'includes': [ + 'gn_migration.gypi', + ], + }], + ['OS!="ios"', { + 'targets': [ + { + 'target_name': 'blink_tests', + 'type': 'none', + 'dependencies': [ + '../third_party/WebKit/public/all.gyp:all_blink', + ], + 'conditions': [ + ['OS=="android"', { + 'dependencies': [ + '../content/content_shell_and_tests.gyp:content_shell_apk', + '../breakpad/breakpad.gyp:dump_syms#host', + '../breakpad/breakpad.gyp:minidump_stackwalk#host', + '../tools/imagediff/image_diff.gyp:image_diff#host', + ], + }, { # OS!="android" + 'dependencies': [ + '../content/content_shell_and_tests.gyp:content_shell', + '../tools/imagediff/image_diff.gyp:image_diff', + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../content/content_shell_and_tests.gyp:content_shell_crash_service', + ], + }], + ['OS!="win" and OS!="android"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + ], + }], + ['OS=="mac"', { + 'dependencies': [ + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../breakpad/breakpad.gyp:dump_syms#host', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:dump_syms#host', + ], + }], + ], + }, # target_name: blink_tests + ], + }], # OS!=ios + ['OS!="ios" and OS!="android" and chromecast==0', { + 'targets': [ + { + 'target_name': 'chromium_builder_nacl_win_integration', + 'type': 'none', + 'dependencies': [ + 'chromium_builder_tests', + ], + }, # target_name: chromium_builder_nacl_win_integration + { + 'target_name': 'chromium_builder_perf', + 'type': 'none', + 'dependencies': [ + '../cc/cc_tests.gyp:cc_perftests', + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:load_library_perf_tests', + '../chrome/chrome.gyp:performance_browser_tests', + '../gpu/gpu.gyp:gpu_perftests', + '../media/media.gyp:media_perftests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../third_party/catapult/telemetry/telemetry.gyp:*', + ], + 'conditions': [ + ['OS!="ios" and OS!="win"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../chrome/chrome.gyp:linux_symbols' + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../chrome/installer/mini_installer.gyp:mini_installer', + '../gpu/gpu.gyp:angle_perftests', + ], + }], + ], + }, # target_name: chromium_builder_perf + { + 'target_name': 'chromium_gpu_builder', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:performance_browser_tests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test', + '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test', + '../gpu/gpu.gyp:gl_tests', + '../gpu/gpu.gyp:angle_unittests', + '../gpu/gpu.gyp:gpu_unittests', + '../gpu/gpu.gyp:command_buffer_gles2_tests', + '../third_party/catapult/telemetry/telemetry.gyp:*', + ], + 'conditions': [ + ['OS!="ios" and OS!="win"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../chrome/chrome.gyp:linux_symbols' + ], + }], + ], + }, # target_name: chromium_gpu_builder + { + 'target_name': 'chromium_gpu_debug_builder', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chrome', + 
'../content/content_shell_and_tests.gyp:content_browsertests', + '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test', + '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test', + '../gpu/gpu.gyp:gl_tests', + '../gpu/gpu.gyp:angle_unittests', + '../gpu/gpu.gyp:gpu_unittests', + '../gpu/gpu.gyp:command_buffer_gles2_tests', + '../third_party/catapult/telemetry/telemetry.gyp:*', + ], + 'conditions': [ + ['OS!="ios" and OS!="win"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../chrome/chrome.gyp:linux_symbols' + ], + }], + ], + }, # target_name: chromium_gpu_debug_builder + { + # This target contains everything we need to run tests on the special + # device-equipped WebRTC bots. We have device-requiring tests in + # browser_tests and content_browsertests. + 'target_name': 'chromium_builder_webrtc', + 'type': 'none', + 'dependencies': [ + 'chromium_builder_perf', + '../chrome/chrome.gyp:browser_tests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../media/capture/capture.gyp:capture_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../third_party/webrtc/tools/tools.gyp:frame_analyzer', + '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter', + ], + 'conditions': [ + ['remoting==1', { + 'dependencies': [ + '../remoting/remoting.gyp:*', + ], + }], + ], + }, # target_name: chromium_builder_webrtc + { + 'target_name': 'chromium_builder_chromedriver', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chromedriver', + '../chrome/chrome.gyp:chromedriver_tests', + '../chrome/chrome.gyp:chromedriver_unittests', + ], + }, # target_name: chromium_builder_chromedriver + { + 'target_name': 'chromium_builder_asan', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chrome', + + # We refer to content_shell directly rather than blink_tests + # because we don't want the _unittests binaries. + '../content/content_shell_and_tests.gyp:content_shell', + + '../v8/src/d8.gyp:d8', + ], + 'conditions': [ + ['OS!="win"', { + 'dependencies': [ + '../net/net.gyp:hpack_fuzz_wrapper', + '../net/net.gyp:dns_fuzz_stub', + '../skia/skia.gyp:filter_fuzz_stub', + ], + }], + ['enable_ipc_fuzzer==1 and component!="shared_library" and ' + '(OS=="linux" or OS=="win" or OS=="mac")', { + 'dependencies': [ + '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*', + ], + }], + ['chromeos==0', { + 'dependencies': [ + '../v8/samples/samples.gyp:v8_shell#host', + '../third_party/pdfium/samples/samples.gyp:pdfium_test', + ], + }], + # TODO(thakis): Remove this block, nothing ever sets this. 
+            ['internal_filter_fuzzer==1', {
+              'dependencies': [
+                '../skia/tools/clusterfuzz-data/fuzzers/filter_fuzzer/filter_fuzzer.gyp:filter_fuzzer',
+              ],
+            }], # internal_filter_fuzzer
+            ['clang==1', {
+              'dependencies': [
+                'sanitizers/sanitizers.gyp:llvm-symbolizer',
+              ],
+            }],
+            ['OS=="win" and fastbuild==0 and target_arch=="ia32" and syzyasan==1', {
+              'dependencies': [
+                '../chrome/chrome_syzygy.gyp:chrome_dll_syzygy',
+                '../content/content_shell_and_tests.gyp:content_shell_syzyasan',
+              ],
+              'conditions': [
+                ['chrome_multiple_dll==1', {
+                  'dependencies': [
+                    '../chrome/chrome_syzygy.gyp:chrome_child_dll_syzygy',
+                  ],
+                }],
+              ],
+            }],
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_nacl_sdk',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:chrome_nacl_win64',
+              ]
+            }],
+          ],
+        }, #target_name: chromium_builder_nacl_sdk
+      ], # targets
+    }], #OS!=ios and OS!=android
+    ['OS=="android"', {
+      'targets': [
+        {
+          # The current list of tests for android. This is temporary
+          # until the full set is supported.
+          #
+          # WARNING:
+          # Do not add targets here without communicating the implications
+          # on tryserver triggers and load. Please discuss with
+          # chrome-infrastructure-team.
+          'target_name': 'android_builder_tests',
+          'type': 'none',
+          'dependencies': [
+            '../base/android/jni_generator/jni_generator.gyp:jni_generator_tests',
+            '../base/base.gyp:base_unittests',
+            '../breakpad/breakpad.gyp:breakpad_unittests_deps',
+            # Also compile the tools needed to deal with minidumps; they are
+            # needed to run minidump tests upstream.
+            '../breakpad/breakpad.gyp:dump_syms#host',
+            '../breakpad/breakpad.gyp:symupload#host',
+            '../breakpad/breakpad.gyp:minidump_dump#host',
+            '../breakpad/breakpad.gyp:minidump_stackwalk#host',
+            '../build/android/pylib/device/commands/commands.gyp:chromium_commands',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_perftests_apk',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_junit_tests',
+            '../content/content_shell_and_tests.gyp:chromium_linker_test_apk',
+            '../content/content_shell_and_tests.gyp:content_shell_test_apk',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:gpu_perftests_apk',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../media/capture/capture.gyp:capture_unittests',
+            '../media/media.gyp:media_perftests_apk',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests_apk',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests_deps',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../testing/android/junit/junit_test.gyp:junit_unit_tests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/WebKit/public/all.gyp:*',
+            '../tools/android/android_tools.gyp:android_tools',
+            '../tools/android/android_tools.gyp:memconsumer',
+            '../tools/android/android_tools.gyp:push_apps_to_background',
+            '../tools/android/findbugs_plugin/findbugs_plugin.gyp:findbugs_plugin_test',
+            '../tools/cygprofile/cygprofile.gyp:cygprofile_unittests',
+            '../ui/android/ui_android.gyp:ui_android_unittests',
'../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/events/events_unittests.gyp:events_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + # Unit test bundles packaged as an apk. + '../base/base.gyp:base_unittests_apk', + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests_apk', + '../cc/cc_tests.gyp:cc_unittests_apk', + '../components/components_tests.gyp:components_browsertests_apk', + '../components/components_tests.gyp:components_unittests_apk', + '../content/content_shell_and_tests.gyp:content_browsertests_apk', + '../content/content_shell_and_tests.gyp:content_unittests_apk', + '../gpu/gpu.gyp:command_buffer_gles2_tests_apk', + '../gpu/gpu.gyp:gl_tests_apk', + '../gpu/gpu.gyp:gpu_unittests_apk', + '../ipc/ipc.gyp:ipc_tests_apk', + '../media/media.gyp:media_unittests_apk', + '../media/media.gyp:video_decode_accelerator_unittest_apk', + '../media/midi/midi.gyp:midi_unittests_apk', + '../net/net.gyp:net_unittests_apk', + '../skia/skia_tests.gyp:skia_unittests_apk', + '../sql/sql.gyp:sql_unittests_apk', + '../ui/android/ui_android.gyp:ui_android_unittests_apk', + '../ui/android/ui_android.gyp:ui_junit_tests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests_apk', + '../ui/events/events_unittests.gyp:events_unittests_apk', + '../ui/gfx/gfx_tests.gyp:gfx_unittests_apk', + '../ui/gl/gl_tests.gyp:gl_unittests_apk', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests_apk', + ], + 'conditions': [ + ['chromecast==0', { + 'dependencies': [ + '../android_webview/android_webview.gyp:android_webview_unittests', + '../chrome/chrome.gyp:unit_tests', + # Unit test bundles packaged as an apk. + '../android_webview/android_webview.gyp:android_webview_test_apk', + '../android_webview/android_webview.gyp:android_webview_unittests_apk', + '../android_webview/android_webview_shell.gyp:system_webview_shell_layout_test_apk', + '../android_webview/android_webview_shell.gyp:system_webview_shell_page_cycler_apk', + '../chrome/android/chrome_apk.gyp:chrome_public_test_apk', + '../chrome/android/chrome_apk.gyp:chrome_sync_shell_test_apk', + '../chrome/chrome.gyp:chrome_junit_tests', + '../chrome/chrome.gyp:chromedriver_webview_shell_apk', + '../chrome/chrome.gyp:unit_tests_apk', + '../third_party/custom_tabs_client/custom_tabs_client.gyp:custom_tabs_client_example_apk', + ], + }], + ], + }, + { + # WebRTC Chromium tests to run on Android. + 'target_name': 'android_builder_chromium_webrtc', + 'type': 'none', + 'dependencies': [ + '../build/android/pylib/device/commands/commands.gyp:chromium_commands', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../tools/android/android_tools.gyp:android_tools', + '../tools/android/android_tools.gyp:memconsumer', + '../content/content_shell_and_tests.gyp:content_browsertests_apk', + ], + }, # target_name: android_builder_chromium_webrtc + ], # targets + }], # OS="android" + ['OS=="mac"', { + 'targets': [ + { + # Target to build everything plus the dmg. We don't put the dmg + # in the All target because developers really don't need it. + 'target_name': 'all_and_dmg', + 'type': 'none', + 'dependencies': [ + 'All', + '../chrome/chrome.gyp:build_app_dmg', + ], + }, + # These targets are here so the build bots can use them to build + # subsets of a full tree for faster cycle times. 
+ { + 'target_name': 'chromium_builder_dbg', + 'type': 'none', + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:unit_tests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../device/device_tests.gyp:device_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/capture/capture.gyp:capture_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../rlz/rlz.gyp:*', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../third_party/catapult/telemetry/telemetry.gyp:*', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chromium_builder_rel', + 'type': 'none', + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:performance_browser_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:unit_tests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../device/device_tests.gyp:device_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/capture/capture.gyp:capture_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../third_party/catapult/telemetry/telemetry.gyp:*', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chromium_builder_dbg_tsan_mac', + 'type': 'none', + 'dependencies': 
[ + '../base/base.gyp:base_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/capture/capture.gyp:capture_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../url/url.gyp:url_unittests', + ], + }, + ], # targets + }], # OS="mac" + ['OS=="win"', { + 'targets': [ + # These targets are here so the build bots can use them to build + # subsets of a full tree for faster cycle times. + { + 'target_name': 'chromium_builder', + 'type': 'none', + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:gcapi_test', + '../chrome/chrome.gyp:installer_util_unittests', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:performance_browser_tests', + '../chrome/chrome.gyp:setup_unittests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:unit_tests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + # ../chrome/test/mini_installer requires mini_installer. + '../chrome/installer/mini_installer.gyp:mini_installer', + '../courgette/courgette.gyp:courgette_unittests', + '../device/device_tests.gyp:device_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/capture/capture.gyp:capture_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../third_party/catapult/telemetry/telemetry.gyp:*', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/events/events_unittests.gyp:events_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../ui/views/views.gyp:views_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chromium_builder_dbg_tsan_win', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + 
'../media/capture/capture.gyp:capture_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../sql/sql.gyp:sql_unittests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chromium_builder_lkgr_drmemory_win', + 'type': 'none', + 'dependencies': [ + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../content/content_shell_and_tests.gyp:content_shell', + '../content/content_shell_and_tests.gyp:content_shell_crash_service', + ], + }, + { + 'target_name': 'chromium_builder_dbg_drmemory_win', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests', + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:chrome_app_unittests', + '../chrome/chrome.gyp:chromedriver_unittests', + '../chrome/chrome.gyp:installer_util_unittests', + '../chrome/chrome.gyp:setup_unittests', + '../chrome/chrome.gyp:unit_tests', + '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests', + '../components/components_tests.gyp:components_unittests', + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_shell', + '../content/content_shell_and_tests.gyp:content_shell_crash_service', + '../content/content_shell_and_tests.gyp:content_unittests', + '../courgette/courgette.gyp:courgette_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../device/device_tests.gyp:device_unittests', + '../extensions/extensions_tests.gyp:extensions_browsertests', + '../extensions/extensions_tests.gyp:extensions_unittests', + '../gin/gin.gyp:gin_shell', + '../gin/gin.gyp:gin_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../google_apis/google_apis.gyp:google_apis_unittests', + '../gpu/gpu.gyp:angle_unittests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/capture/capture.gyp:capture_unittests', + '../media/cast/cast.gyp:cast_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../mojo/mojo.gyp:mojo', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests', + '../ui/accessibility/accessibility.gyp:accessibility_unittests', + '../ui/aura/aura.gyp:aura_unittests', + 
'../ui/compositor/compositor.gyp:compositor_unittests', + '../ui/display/display.gyp:display_unittests', + '../ui/events/events_unittests.gyp:events_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/keyboard/keyboard.gyp:keyboard_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chrome_official_builder_no_unittests', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:gcapi_dll', + '../chrome/chrome.gyp:pack_policy_templates', + '../chrome/installer/mini_installer.gyp:mini_installer', + '../courgette/courgette.gyp:courgette', + '../courgette/courgette.gyp:courgette64', + '../remoting/remoting.gyp:remoting_webapp', + '../third_party/widevine/cdm/widevine_cdm.gyp:widevinecdmadapter', + ], + 'conditions': [ + ['component != "shared_library" and wix_exists == "True"', { + # GN uses target_cpu==x86 && is_chrome_branded instead, and + # so doesn't need the wix_exists check. + 'dependencies': [ + '../remoting/remoting.gyp:remoting_host_installation', + ], + }], # component != "shared_library" + ] + }, + { + 'target_name': 'chrome_official_builder', + 'type': 'none', + 'dependencies': [ + 'chrome_official_builder_no_unittests', + '../base/base.gyp:base_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../ipc/ipc.gyp:ipc_tests', + '../media/capture/capture.gyp:capture_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../sql/sql.gyp:sql_unittests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../ui/views/views.gyp:views_unittests', + '../url/url.gyp:url_unittests', + ], + }, + ], # targets + }], # OS="win" + ['chromeos==1', { + 'targets': [ + { + 'target_name': 'chromiumos_preflight', + 'type': 'none', + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:chromedriver', + '../media/media.gyp:media_unittests', + '../media/media.gyp:video_decode_accelerator_unittest', + '../media/media.gyp:video_encode_accelerator_unittest', + '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode', + '../sandbox/sandbox.gyp:chrome_sandbox', + '../sandbox/sandbox.gyp:sandbox_linux_unittests', + '../third_party/catapult/telemetry/telemetry.gyp:bitmaptools#host', + '../third_party/mesa/mesa.gyp:osmesa', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:clear_system_cache', + ], + 'conditions': [ + ['disable_nacl==0', { + 'dependencies': [ + '../components/nacl.gyp:nacl_helper', + '../native_client/src/trusted/service_runtime/linux/nacl_bootstrap.gyp:nacl_helper_bootstrap', + ], + }], + ], + }, + ], # targets + }], # "chromeos==1" + ['use_aura==1', { + 'targets': [ + { + 'target_name': 'aura_builder', + 'type': 'none', + 'dependencies': [ + '../ash/ash.gyp:ash_shell_with_content', + '../ash/ash.gyp:ash_unittests', + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + 
'../device/device_tests.gyp:device_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../ui/app_list/app_list.gyp:*', + '../ui/aura/aura.gyp:*', + '../ui/aura_extra/aura_extra.gyp:*', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/compositor/compositor.gyp:*', + '../ui/display/display.gyp:display_unittests', + '../ui/events/events.gyp:*', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/keyboard/keyboard.gyp:*', + '../ui/snapshot/snapshot.gyp:snapshot_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../ui/wm/wm.gyp:*', + 'blink_tests', + ], + 'conditions': [ + ['OS=="linux"', { + # Tests that currently only work on Linux. + 'dependencies': [ + '../base/base.gyp:base_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../sql/sql.gyp:sql_unittests', + ], + }], + ['chromeos==1', { + 'dependencies': [ + '../chromeos/chromeos.gyp:chromeos_unittests', + '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests', + ], + }], + ['use_ozone==1', { + 'dependencies': [ + '../ui/ozone/ozone.gyp:*', + '../ui/ozone/demo/ozone_demos.gyp:*', + ], + }], + ['chromecast==0', { + 'dependencies': [ + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:unit_tests', + '../ui/message_center/message_center.gyp:*', + '../ui/views/examples/examples.gyp:views_examples_with_content_exe', + '../ui/views/views.gyp:views', + '../ui/views/views.gyp:views_unittests', + ], + }], + ], + }, + ], # targets + }], # "use_aura==1" + ['test_isolation_mode != "noop"', { + 'targets': [ + { + 'target_name': 'chromium_swarm_tests', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests_run', + '../content/content_shell_and_tests.gyp:content_browsertests_run', + '../content/content_shell_and_tests.gyp:content_unittests_run', + '../net/net.gyp:net_unittests_run', + ], + 'conditions': [ + ['chromecast==0', { + 'dependencies': [ + '../chrome/chrome.gyp:browser_tests_run', + '../chrome/chrome.gyp:interactive_ui_tests_run', + '../chrome/chrome.gyp:sync_integration_tests_run', + '../chrome/chrome.gyp:unit_tests_run', + ], + }], + ], + }, # target_name: chromium_swarm_tests + ], + }], + ['archive_chromoting_tests==1', { + 'targets': [ + { + 'target_name': 'chromoting_swarm_tests', + 'type': 'none', + 'dependencies': [ + '../testing/chromoting/integration_tests.gyp:*', + ], + }, # target_name: chromoting_swarm_tests + ] + }], + ['archive_media_router_tests==1', { + 'targets': [ + { + 'target_name': 'media_router_swarming_tests', + 'type': 'none', + 'dependencies': [ + '../chrome/test/media_router/e2e_tests.gyp:media_router_e2e_tests_run', + ], + }, # target_name: media_router_swarming_tests + { + 'target_name': 'media_router_swarming_perf_tests', + 'type': 'none', + 'dependencies': [ + '../chrome/test/media_router/e2e_tests.gyp:media_router_perf_tests_run', + ], + }, # target_name: media_router_swarming_perf_tests + ] + }], + ['OS=="mac" and toolkit_views==1', { + 'targets': [ + { + 'target_name': 'macviews_builder', + 'type': 'none', + 'dependencies': [ + '../ui/views/examples/examples.gyp:views_examples_with_content_exe', + '../ui/views/views.gyp:views', + '../ui/views/views.gyp:views_unittests', + ], + }, # target_name: macviews_builder + ], # targets + }], # os=='mac' and toolkit_views==1 + ], # 
conditions
+}
diff --git a/build/android/AndroidManifest.xml b/build/android/AndroidManifest.xml
new file mode 100644
index 00000000000..143de62e8e9
--- /dev/null
+++ b/build/android/AndroidManifest.xml
@@ -0,0 +1,20 @@
+
+
+
+
+
+
+
+
+
diff --git a/build/android/BUILD.gn b/build/android/BUILD.gn
new file mode 100644
index 00000000000..d0486020ad9
--- /dev/null
+++ b/build/android/BUILD.gn
@@ -0,0 +1,157 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+if (enable_java_templates) {
+  import("//third_party/ijar/ijar.gni")
+
+  sun_tools_jar_path = "$root_gen_dir/sun_tools_jar/tools.jar"
+
+  # Create or update the API versions cache if necessary by running a
+  # functionally empty lint task. This prevents racy creation of the
+  # cache while linting java targets in android_lint.
+  android_lint("prepare_android_lint_cache") {
+    android_manifest = "//build/android/AndroidManifest.xml"
+    create_cache = true
+  }
+
+  action("find_sun_tools_jar") {
+    script = "//build/android/gyp/find_sun_tools_jar.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      sun_tools_jar_path,
+    ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--output",
+      rebase_path(sun_tools_jar_path, root_build_dir),
+    ]
+  }
+
+  java_prebuilt("sun_tools_java") {
+    jar_path = sun_tools_jar_path
+    jar_dep = ":find_sun_tools_jar"
+  }
+
+  generate_interface_jar("android_ijar") {
+    input_jar = android_sdk_jar
+    output_jar = "$root_out_dir/lib.java/android.interface.jar"
+  }
+
+  _rebased_android_sdk_root = rebase_path(android_sdk_root, root_build_dir)
+
+  # Record GN vars that are needed by generate_gradle.py.
+  # One statement per line to make GN's formatter leave it alone.
+  CR = "$0x0A"
+  _json = "{$CR"
+  _json += "  \"android_sdk_root\": \"$_rebased_android_sdk_root\",$CR"
+  _json += "  \"compile_sdk_version\": \"$android_sdk_version\",$CR"
+  _json += "  \"build_tools_version\": \"$android_sdk_build_tools_version\"$CR"
+  _json += "}$CR"
+  # The result is a small JSON file of the form:
+  #   { "android_sdk_root": ..., "compile_sdk_version": ...,
+  #     "build_tools_version": ... }
+  write_file("$root_build_dir/gradle/config.json", _json)
+}
+
+# Copy to the lib.unstripped directory so that gdb can easily find it.
+copy("cpplib_unstripped") {
+  _soname = "libc++_shared.so"
+  sources = [
+    "${android_libcpp_lib_dir}/${_soname}",
+  ]
+  outputs = [
+    "${root_out_dir}/lib.unstripped/${_soname}",
+  ]
+}
+
+action("cpplib_stripped") {
+  _strip_bin = "${android_tool_prefix}strip"
+  _soname = "libc++_shared.so"
+  _input_so = "${root_out_dir}/lib.unstripped/${_soname}"
+  _output_so = "${root_shlib_dir}/${_soname}"
+
+  deps = [
+    ":cpplib_unstripped",
+  ]
+
+  script = "//build/gn_run_binary.py"
+  inputs = [
+    _strip_bin,
+  ]
+  sources = [
+    _input_so,
+  ]
+  outputs = [
+    _output_so,
+  ]
+  data = [
+    _output_so,
+  ]
+
+  _rebased_strip_bin = rebase_path(_strip_bin, root_out_dir)
+  _rebased_input_so = rebase_path(_input_so, root_out_dir)
+  _rebased_output_so = rebase_path(_output_so, root_out_dir)
+  args = [
+    _rebased_strip_bin,
+    "--strip-unneeded",
+    "-o",
+    _rebased_output_so,
+    _rebased_input_so,
+  ]
+}
+
+group("test_runner_py") {
+  _py_files = read_file("test_runner.pydeps", "list lines")
+
+  # Filter out comments.
+  set_sources_assignment_filter([ "#*" ])
+  sources = _py_files
+
+  data = sources + [
+    "devil_chromium.json",
+    "pylib/gtest/filter/",
+    "test_wrapper/logdog_wrapper.py",
+    "//third_party/android_tools/sdk/build-tools/23.0.1/aapt",
+    "//third_party/android_tools/sdk/build-tools/23.0.1/dexdump",
+    "//third_party/android_tools/sdk/build-tools/23.0.1/lib/libc++.so",
+    "//third_party/android_tools/sdk/build-tools/23.0.1/split-select",
+    "//third_party/android_tools/sdk/platform-tools/adb",
+    "//third_party/catapult/third_party/gsutil/",
+    "//third_party/catapult/devil/devil/devil_dependencies.json",
+    "//third_party/proguard/lib/proguard.jar",
+  ]
+  data_deps = [
+    "//tools/swarming_client:isolate_py",
+  ]
+}
+
+# Create wrapper scripts in out/bin that take care of setting the
+# --output-directory.
+_scripts_to_wrap = [
+  # TODO(agrieve): Once GYP is no more, delete the checked-in adb_gdb_*
+  # scripts and generate a script for each android_apk() that has a native
+  # library.
+  "adb_gdb_android_webview_shell",
+  "adb_gdb_blimp_client",
+  "adb_gdb_chrome_public",
+  "adb_gdb_content_shell",
+  "adb_gdb_cronet_sample",
+  "adb_gdb_mojo_shell",
+  "asan_symbolize.py",
+  "tombstones.py",
+]
+
+_wrapper_targets = []
+foreach(script, _scripts_to_wrap) {
+  _target_name = get_path_info(script, "name") + "_wrapper"
+  _wrapper_targets += [ ":$_target_name" ]
+  wrapper_script(_target_name) {
+    target = script
+  }
+}
+
+group("wrapper_scripts") {
+  deps = _wrapper_targets
+}
diff --git a/build/android/CheckInstallApk-debug.apk b/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 00000000000..3dc31910a53
Binary files /dev/null and b/build/android/CheckInstallApk-debug.apk differ
diff --git a/build/android/OWNERS b/build/android/OWNERS
new file mode 100644
index 00000000000..13e19f570a6
--- /dev/null
+++ b/build/android/OWNERS
@@ -0,0 +1,5 @@
+jbudorick@chromium.org
+mikecase@chromium.org
+pasko@chromium.org
+perezju@chromium.org
+rnephew@chromium.org
diff --git a/build/android/PRESUBMIT.py b/build/android/PRESUBMIT.py
new file mode 100644
index 00000000000..5fb2cc75b31
--- /dev/null
+++ b/build/android/PRESUBMIT.py
@@ -0,0 +1,76 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for the android buildbot.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+""" + + +def CommonChecks(input_api, output_api): + output = [] + + build_android_dir = input_api.PresubmitLocalPath() + + def J(*dirs): + """Returns a path relative to presubmit directory.""" + return input_api.os_path.join(build_android_dir, *dirs) + + build_pys = [ + r'gyp/.*\.py$', + r'gn/.*\.py', + ] + output.extend(input_api.canned_checks.RunPylint( + input_api, + output_api, + pylintrc='pylintrc', + black_list=build_pys, + extra_paths_list=[ + J(), + J('gyp'), + J('buildbot'), + J('..', '..', 'third_party', 'catapult', 'devil') + ])) + output.extend(input_api.canned_checks.RunPylint( + input_api, + output_api, + white_list=build_pys, + extra_paths_list=[J('gyp'), J('gn')])) + + # Disabled due to http://crbug.com/410936 + #output.extend(input_api.canned_checks.RunUnitTestsInDirectory( + #input_api, output_api, J('buildbot', 'tests'))) + + pylib_test_env = dict(input_api.environ) + pylib_test_env.update({ + 'PYTHONPATH': build_android_dir, + 'PYTHONDONTWRITEBYTECODE': '1', + }) + output.extend(input_api.canned_checks.RunUnitTests( + input_api, + output_api, + unit_tests=[ + J('.', 'emma_coverage_stats_test.py'), + J('gyp', 'util', 'md5_check_test.py'), + J('play_services', 'update_test.py'), + J('pylib', 'base', 'test_dispatcher_unittest.py'), + J('pylib', 'gtest', 'gtest_test_instance_test.py'), + J('pylib', 'instrumentation', + 'instrumentation_test_instance_test.py'), + J('pylib', 'local', 'device', 'local_device_test_run_test.py'), + J('pylib', 'results', 'json_results_test.py'), + J('pylib', 'symbols', 'elf_symbolizer_unittest.py'), + ], + env=pylib_test_env)) + + return output + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/build/android/adb_android_webview_command_line b/build/android/adb_android_webview_command_line new file mode 100644 index 00000000000..9075918dc89 --- /dev/null +++ b/build/android/adb_android_webview_command_line @@ -0,0 +1,17 @@ +#!/bin/bash +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current content shell flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the content shell +# flags. For example: +# adb_android_webview_command_line --enable-webgl +# +# To remove all content shell flags, pass an empty string for the flags: +# adb_android_webview_command_line "" + +exec $(dirname $0)/adb_command_line.py --device-path \ + /data/local/tmp/android-webview-command-line "$@" diff --git a/build/android/adb_blimp_command_line b/build/android/adb_blimp_command_line new file mode 100644 index 00000000000..1ff376988c7 --- /dev/null +++ b/build/android/adb_blimp_command_line @@ -0,0 +1,17 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current Blimp flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the Blimp +# flags. 
For example: +# adb_blimp_command_line --enable-webgl +# +# To remove all Blimp flags, pass an empty string for the flags: +# adb_blimp_command_line "" + +exec $(dirname $0)/adb_command_line.py --device-path \ + /data/local/blimp-command-line "$@" diff --git a/build/android/adb_cast_shell_command_line b/build/android/adb_cast_shell_command_line new file mode 100644 index 00000000000..bcbcbeb2f9e --- /dev/null +++ b/build/android/adb_cast_shell_command_line @@ -0,0 +1,22 @@ +#!/bin/bash +# +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current cast shell flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the cast shell +# flags. For example: +# apk_command_line --enable-media-thread-for-media-playback +# +# If multiple devices are connected, use the --device argument to specify the +# device ID. You can use +# adb devices +# ... to find the device's ID. +# +# To remove all content shell flags, pass an empty string for the flags: +# apk_command_line "" + +exec $(dirname $0)/../../build/android/adb_command_line.py -e cast_shell \ + --device-path /data/local/tmp/castshell-command-line "$@" diff --git a/build/android/adb_chrome_public_command_line b/build/android/adb_chrome_public_command_line new file mode 100644 index 00000000000..ac379e8f785 --- /dev/null +++ b/build/android/adb_chrome_public_command_line @@ -0,0 +1,17 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current Chrome flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the Chrome +# flags. For example: +# adb_chrome_public_command_line --enable-webgl +# +# To remove all Chrome flags, pass an empty string for the flags: +# adb_chrome_public_command_line "" + +exec $(dirname $0)/adb_command_line.py --device-path \ + /data/local/chrome-command-line "$@" diff --git a/build/android/adb_command_line.py b/build/android/adb_command_line.py new file mode 100644 index 00000000000..948bc894449 --- /dev/null +++ b/build/android/adb_command_line.py @@ -0,0 +1,87 @@ +#!/usr/bin/python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility for reading / writing command-line flag files on device(s).""" + +import argparse +import os +import sys + +import devil_chromium + +from devil.android import device_utils +from devil.android import device_errors +from devil.utils import cmd_helper + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.usage = '''%(prog)s --device-path PATH [--device SERIAL] [flags...] + +No flags: Prints existing command-line file. +Empty string: Deletes command-line file. +Otherwise: Writes command-line file. 
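+
+Examples (the device paths and the --enable-webgl flag mirror the
+adb_*_command_line wrapper scripts elsewhere in this directory):
+  %(prog)s --device-path /data/local/tmp/content-shell-command-line
+  %(prog)s --device-path /data/local/chrome-command-line --enable-webgl
+  %(prog)s --device-path /data/local/chrome-command-line ""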
+ +''' + parser.add_argument('-d', '--device', dest='devices', action='append', + default=[], help='Target device serial (repeatable).') + parser.add_argument('--device-path', required=True, + help='Remote path to flags file.') + parser.add_argument('-e', '--executable', dest='executable', default='chrome', + help='Name of the executable.') + parser.add_argument('--adb-path', type=os.path.abspath, + help='Path to the adb binary.') + args, remote_args = parser.parse_known_args() + + devil_chromium.Initialize(adb_path=args.adb_path) + + as_root = not args.device_path.startswith('/data/local/tmp/') + + devices = device_utils.DeviceUtils.HealthyDevices(device_arg=args.devices, + default_retries=0) + all_devices = device_utils.DeviceUtils.parallel(devices) + + def print_args(): + def read_flags(device): + try: + return device.ReadFile(args.device_path, as_root=as_root).rstrip() + except device_errors.CommandFailedError: + return '' # File might not exist. + + descriptions = all_devices.pMap(lambda d: d.build_description).pGet(None) + flags = all_devices.pMap(read_flags).pGet(None) + for d, desc, flags in zip(devices, descriptions, flags): + print ' %s (%s): %r' % (d, desc, flags) + + # No args == print flags. + if not remote_args: + print 'Existing flags (in %s):' % args.device_path + print_args() + return 0 + + # Empty string arg == delete flags file. + if len(remote_args) == 1 and not remote_args[0]: + def delete_flags(device): + device.RunShellCommand(['rm', '-f', args.device_path], as_root=as_root) + all_devices.pMap(delete_flags).pGet(None) + print 'Deleted %s' % args.device_path + return 0 + + # Set flags. + quoted_args = ' '.join(cmd_helper.SingleQuote(x) for x in remote_args) + flags_str = ' '.join([args.executable, quoted_args]) + + def write_flags(device): + device.WriteFile(args.device_path, flags_str, as_root=as_root) + device.RunShellCommand(['chmod', '0664', args.device_path], as_root=as_root) + + all_devices.pMap(write_flags).pGet(None) + print 'Wrote flags to %s' % args.device_path + print_args() + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/adb_content_shell_command_line b/build/android/adb_content_shell_command_line new file mode 100644 index 00000000000..02ef8028d87 --- /dev/null +++ b/build/android/adb_content_shell_command_line @@ -0,0 +1,17 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current content shell flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the content shell +# flags. For example: +# adb_content_shell_command_line --enable-webgl +# +# To remove all content shell flags, pass an empty string for the flags: +# adb_content_shell_command_line "" + +exec $(dirname $0)/adb_command_line.py --device-path \ + /data/local/tmp/content-shell-command-line "$@" diff --git a/build/android/adb_device_functions.sh b/build/android/adb_device_functions.sh new file mode 100644 index 00000000000..66cc32fc4e3 --- /dev/null +++ b/build/android/adb_device_functions.sh @@ -0,0 +1,139 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# A collection of functions useful for maintaining android devices + + +# Run an adb command on all connected device in parallel. +# Usage: adb_all command line to eval. 
Quoting is optional. +# +# Examples: +# adb_all install Chrome.apk +# adb_all 'shell cat /path/to/file' +# +adb_all() { + if [[ $# == 0 ]]; then + echo "Usage: adb_all <adb command>. Quoting is optional." + echo "Example: adb_all install Chrome.apk" + return 1 + fi + local DEVICES=$(adb_get_devices -b) + local NUM_DEVICES=$(echo $DEVICES | wc -w) + if (( $NUM_DEVICES > 1 )); then + echo "Looping over $NUM_DEVICES devices" + fi + _adb_multi "$DEVICES" "$*" +} + + +# Run a command on each connected device. Quoting the command is suggested but +# not required. The script sets up the variable DEVICE to correspond to the +# current serial number. Intended for complex one-liners that don't work in +# adb_all. +# Usage: adb_device_loop 'command line to eval' +adb_device_loop() { + if [[ $# == 0 ]]; then + echo "Intended for more complex one-liners that cannot be done with" \ + "adb_all." + echo 'Usage: adb_device_loop "echo $DEVICE: $(adb root &&' \ + 'adb shell cat /data/local.prop)"' + return 1 + fi + local DEVICES=$(adb_get_devices) + if [[ -z $DEVICES ]]; then + return + fi + # Do not change DEVICE variable name - part of api + for DEVICE in $DEVICES; do + DEV_TYPE=$(adb -s $DEVICE shell getprop ro.product.device | sed 's/\r//') + echo "Running on $DEVICE ($DEV_TYPE)" + ANDROID_SERIAL=$DEVICE eval "$*" + done +} + +# Erases data from any devices visible on adb. To preserve a device, +# disconnect it or: +# 1) Reboot it into fastboot with 'adb reboot bootloader' +# 2) Run wipe_all_devices to wipe remaining devices +# 3) Restore the device with 'fastboot reboot' +# +# Usage: wipe_all_devices [-f] +# +wipe_all_devices() { + if [[ -z $(which adb) || -z $(which fastboot) ]]; then + echo "aborting: adb and fastboot not in path" + return 1 + elif ! $(groups | grep -q 'plugdev'); then + echo "If fastboot fails, run: 'sudo adduser $(whoami) plugdev'" + fi + + local DEVICES=$(adb_get_devices -b) + + if [[ $1 != '-f' ]]; then + echo "This will ERASE ALL DATA from $(echo $DEVICES | wc -w) device(s)." + read -p "Hit enter to continue" + fi + + _adb_multi "$DEVICES" "reboot bootloader" + # Subshell to isolate job list + ( + for DEVICE in $DEVICES; do + fastboot_erase $DEVICE & + done + wait + ) + + # Reboot devices together + for DEVICE in $DEVICES; do + fastboot -s $DEVICE reboot + done +} + +# Wipe a device in fastboot. +# Usage: fastboot_erase [serial] +fastboot_erase() { + if [[ -n $1 ]]; then + echo "Wiping $1" + local SERIAL="-s $1" + else + if [ -z $(fastboot devices) ]; then + echo "No devices in fastboot, aborting."
+ echo "Check out wipe_all_devices to see if sufficient" + echo "You can put a device in fastboot using adb reboot bootloader" + return 1 + fi + local SERIAL="" + fi + fastboot $SERIAL erase cache + fastboot $SERIAL erase userdata +} + +# Get list of devices connected via adb +# Args: -b block until adb detects a device +adb_get_devices() { + local DEVICES="$(adb devices | grep 'device$')" + if [[ -z $DEVICES && $1 == '-b' ]]; then + echo '- waiting for device -' >&2 + local DEVICES="$(adb wait-for-device devices | grep 'device$')" + fi + echo "$DEVICES" | awk -vORS=' ' '{print $1}' | sed 's/ $/\n/' +} + +################################################### +## HELPER FUNCTIONS +################################################### + +# Run an adb command in parallel over a device list +_adb_multi() { + local DEVICES=$1 + local ADB_ARGS=$2 + ( + for DEVICE in $DEVICES; do + adb -s $DEVICE $ADB_ARGS & + done + wait + ) +} diff --git a/build/android/adb_gdb b/build/android/adb_gdb new file mode 100644 index 00000000000..00c4f89cbdd --- /dev/null +++ b/build/android/adb_gdb @@ -0,0 +1,1040 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# + +# A generic script used to attach to a running Chromium process and +# debug it. Most users should not use this directly, but one of the +# wrapper scripts like adb_gdb_content_shell +# +# Use --help to print full usage instructions. +# + +PROGNAME=$(basename "$0") +PROGDIR=$(dirname "$0") + +# Force locale to C to allow recognizing output from subprocesses. +LC_ALL=C + +# Location of Chromium-top-level sources. +CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null) + +TMPDIR= +GDBSERVER_PIDFILE= +TARGET_GDBSERVER= +COMMAND_PREFIX= + +clean_exit () { + if [ "$TMPDIR" ]; then + GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null) + if [ "$GDBSERVER_PID" ]; then + log "Killing background gdbserver process: $GDBSERVER_PID" + kill -9 $GDBSERVER_PID >/dev/null 2>&1 + fi + if [ "$TARGET_GDBSERVER" ]; then + log "Removing target gdbserver binary: $TARGET_GDBSERVER." + "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" >/dev/null 2>&1 + fi + log "Cleaning up: $TMPDIR" + rm -rf "$TMPDIR" + fi + trap "" EXIT + exit $1 +} + +# Ensure clean exit on Ctrl-C or normal exit. +trap "clean_exit 1" INT HUP QUIT TERM +trap "clean_exit \$?" EXIT + +panic () { + echo "ERROR: $@" >&2 + exit 1 +} + +fail_panic () { + if [ $? != 0 ]; then panic "$@"; fi +} + +log () { + if [ "$VERBOSE" -gt 0 ]; then + echo "$@" + fi +} + +DEFAULT_PULL_LIBS_DIR=/tmp/$USER-adb-gdb-libs + +# NOTE: Allow wrapper scripts to set various default through ADB_GDB_XXX +# environment variables. This is only for cosmetic reasons, i.e. to +# display proper + +# Allow wrapper scripts to set the default activity through +# the ADB_GDB_ACTIVITY variable. Users are still able to change the +# final activity name through --activity= option. +# +# This is only for cosmetic reasons, i.e. to display the proper default +# in the --help output. 
+# +DEFAULT_ACTIVITY=${ADB_GDB_ACTIVITY:-".Main"} + +# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME +PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")} + +ACTIVITY=$DEFAULT_ACTIVITY +ADB= +ANNOTATE= +FORCE= +GDBEXEPOSTFIX=gdb +GDBINIT= +GDBSERVER= +HELP= +NDK_DIR= +NO_PULL_LIBS= +PACKAGE_NAME= +PID= +PORT= +PRIVILEGED= +PRIVILEGED_INDEX= +PROGRAM_NAME="activity" +PULL_LIBS= +PULL_LIBS_DIR= +SANDBOXED= +SANDBOXED_INDEX= +START= +START_URL= +ATTACH_DELAY=1 +SU_PREFIX= +SYMBOL_DIR= +TARGET_ARCH= +TOOLCHAIN= +VERBOSE=0 + +for opt; do + optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)') + case $opt in + --adb=*) + ADB=$optarg + ;; + --device=*) + export ANDROID_SERIAL=$optarg + ;; + --activity=*) + ACTIVITY=$optarg + ;; + --annotate=*) + ANNOTATE=$optarg + ;; + --force) + FORCE=true + ;; + --gdbserver=*) + GDBSERVER=$optarg + ;; + --gdb=*) + GDB=$optarg + ;; + --help|-h|-?) + HELP=true + ;; + --ndk-dir=*) + NDK_DIR=$optarg + ;; + --no-pull-libs) + NO_PULL_LIBS=true + ;; + --package-name=*) + PACKAGE_NAME=$optarg + ;; + --pid=*) + PID=$optarg + ;; + --port=*) + PORT=$optarg + ;; + --privileged) + PRIVILEGED=true + ;; + --privileged=*) + PRIVILEGED=true + PRIVILEGED_INDEX=$optarg + ;; + --program-name=*) + PROGRAM_NAME=$optarg + ;; + --pull-libs) + PULL_LIBS=true + ;; + --pull-libs-dir=*) + PULL_LIBS_DIR=$optarg + ;; + --sandboxed) + SANDBOXED=true + ;; + --sandboxed=*) + SANDBOXED=true + SANDBOXED_INDEX=$optarg + ;; + --script=*) + GDBINIT=$optarg + ;; + --start=*) + START_URL=$optarg + ;& # fallthrough + --start) + START=true + ;; + --attach-delay=*) + ATTACH_DELAY=$optarg + ;; + --su-prefix=*) + SU_PREFIX=$optarg + ;; + --symbol-dir=*) + SYMBOL_DIR=$optarg + ;; + --output-directory=*) + CHROMIUM_OUTPUT_DIR=$optarg + ;; + --target-arch=*) + TARGET_ARCH=$optarg + ;; + --toolchain=*) + TOOLCHAIN=$optarg + ;; + --ui) + GDBEXEPOSTFIX=gdbtui + ;; + --verbose) + VERBOSE=$(( $VERBOSE + 1 )) + ;; + -*) + panic "Unknown option $opt, see --help." >&2 + ;; + *) + if [ "$PACKAGE_NAME" ]; then + panic "You can only provide a single package name as argument!\ + See --help." + fi + PACKAGE_NAME=$opt + ;; + esac +done + +if [ "$HELP" ]; then + if [ "$ADB_GDB_PROGNAME" ]; then + # Assume wrapper scripts all provide a default package name. + cat <<EOF +Usage: $PROGNAME [options] [<package-name>] + +Attach gdb to a running Android $PROGRAM_NAME process. + +If provided, <package-name> must be the name of the Android application's +package name to be debugged. You can also use --package-name=<name> to +specify it. +EOF + fi + + cat <<EOF + +This script is used to debug a running $PROGRAM_NAME process, which may be +the main application process, a sandboxed (--sandboxed or +--sandboxed=<num> option) or a privileged (--privileged or +--privileged=<num>) service. + +This script needs several things to work properly. It will try to pick +them up automatically for you though: + + - target gdbserver binary + - host gdb client (e.g. arm-linux-androideabi-gdb) + - directory with symbolic version of $PROGRAM_NAME's shared libraries. + +You can also use --ndk-dir=<path> to specify an alternative NDK installation +directory. + +The script tries to find the most recent debug version of the shared +libraries under one of the following directories: + + \$CHROMIUM_SRC/<out>/lib/ (used by GYP builds) + \$CHROMIUM_SRC/<out>/lib.unstripped/ (used by GN builds) + +Where <out> is determined by CHROMIUM_OUTPUT_DIR or --output-directory. + +You can set the path manually via --symbol-dir. + +The script tries to extract the target architecture from your target device, +but if this fails, it will default to 'arm'. Use --target-arch=<name> to +force its value. + +Otherwise, the script will complain, but you can use the --gdbserver, +--gdb and --symbol-dir options to specify everything manually. + +An alternative to --gdb=<file> is to use --toolchain=<path> to specify +the path to the host target-specific cross-toolchain. + +You will also need the 'adb' tool in your path. Otherwise, use the --adb +option. The script will complain if there is more than one device connected +and a device is not specified (with either --device=<serial> or +ANDROID_SERIAL). + +The first time you use it on a device, the script will pull many system +libraries required by the process into a temporary directory. This +is done to greatly improve the debugging experience, like allowing +readable thread stacks and more. The libraries are copied to the following +directory by default: + + $DEFAULT_PULL_LIBS_DIR/ + +But you can use the --pull-libs-dir=<path> option to specify an +alternative. The script can detect when you change the connected device, +and will re-pull the libraries only in this case. You can however force it +with the --pull-libs option. + +Any local .gdbinit script will be ignored, but it is possible to pass a +gdb command script with the --script=<file> option. Note that its commands +will be passed to gdb after the remote connection and library symbol +loading have completed. + +Valid options: + --help|-h|-? Print this message. + --verbose Increase verbosity. + + --sandboxed Debug first sandboxed process we find. + --sandboxed=<num> Debug specific sandboxed process. + --symbol-dir=<path> Specify directory with symbol shared libraries. + --output-directory=<path> Specify the output directory (e.g. "out/Debug"). + --package-name=<name> Specify package name (alternative to 1st argument). + --privileged Debug first privileged process we find. + --privileged=<num> Debug specific privileged process. + --program-name=<name> Specify program name (cosmetic only). + --pid=<pid> Specify application process pid. + --force Kill any previous debugging session, if any. + --start[=<url>] Start package's activity on device. + --attach-delay=<num> Seconds to wait for gdbserver to attach to the + remote process before starting gdb. Default 1. + <num> may be a float if your sleep(1) supports it. + --ui Use gdbtui instead of gdb + --activity=<name> Activity name for --start [$DEFAULT_ACTIVITY]. + --annotate=<num> Enable gdb annotation. + --script=<file> Specify extra GDB init script. + + --gdbserver=<file> Specify target gdbserver binary. + --gdb=<file> Specify host gdb client binary. + --target-arch=<name> Specify NDK target arch. + --adb=<file> Specify host ADB binary. + --device=<serial> ADB device serial to use (-s flag). + --port=<port> Specify the tcp port to use. + + --su-prefix=<prefix> Prepend <prefix> to 'adb shell' commands that are + run by this script. This can be useful to use + the 'su' program on rooted production devices. + e.g. --su-prefix="su -c" + + --pull-libs Force system libraries extraction. + --no-pull-libs Do not extract any system library. + --pull-libs-dir=<path> Specify system libraries extraction directory. + +EOF + exit 0 +fi + +if [ -z "$PACKAGE_NAME" ]; then + panic "Please specify a package name on the command line. See --help." +fi + +if [[ -z "$SYMBOL_DIR" && -z "$CHROMIUM_OUTPUT_DIR" ]]; then + if [[ -e "build.ninja" ]]; then + CHROMIUM_OUTPUT_DIR=$PWD + else + panic "Please specify an output directory by using one of: + --output-directory=out/Debug + CHROMIUM_OUTPUT_DIR=out/Debug + Setting working directory to an output directory. + See --help." + fi +fi + +# Detect the build type and symbol directory.
This is done by finding +# the most recent sub-directory containing debug shared libraries under +# $CHROMIUM_OUTPUT_DIR. +# +# Out: nothing, but this sets SYMBOL_DIR +# +detect_symbol_dir () { + # GYP places unstripped libraries under out/lib + # GN places them under out/lib.unstripped + local PARENT_DIR="$CHROMIUM_OUTPUT_DIR" + if [[ ! -e "$PARENT_DIR" ]]; then + PARENT_DIR="$CHROMIUM_SRC/$PARENT_DIR" + fi + SYMBOL_DIR="$PARENT_DIR/lib.unstripped" + if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then + SYMBOL_DIR="$PARENT_DIR/lib" + if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then + panic "Could not find any symbols under \ +$PARENT_DIR/lib{.unstripped}. Please build the program first!" + fi + fi + log "Auto-config: --symbol-dir=$SYMBOL_DIR" +} + +if [ -z "$SYMBOL_DIR" ]; then + detect_symbol_dir +elif [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then + panic "Could not find any symbols under $SYMBOL_DIR" +fi + +if [ -z "$NDK_DIR" ]; then + ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \ +'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,') +else + if [ ! -d "$NDK_DIR" ]; then + panic "Invalid directory: $NDK_DIR" + fi + if [ ! -f "$NDK_DIR/ndk-build" ]; then + panic "Not a valid NDK directory: $NDK_DIR" + fi + ANDROID_NDK_ROOT=$NDK_DIR +fi + +if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then + panic "Unknown --script file: $GDBINIT" +fi + +# Check that ADB is in our path +if [ -z "$ADB" ]; then + ADB=$(which adb 2>/dev/null) + if [ -z "$ADB" ]; then + panic "Can't find 'adb' tool in your path. Install it or use \ +--adb=" + fi + log "Auto-config: --adb=$ADB" +fi + +# Check that it works minimally +ADB_VERSION=$($ADB version 2>/dev/null) +echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge" +if [ $? != 0 ]; then + panic "Your 'adb' tool seems invalid, use --adb= to specify a \ +different one: $ADB" +fi + +# If there are more than one device connected, and ANDROID_SERIAL is not +# defined, print an error message. +NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l) +if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then + echo "ERROR: There is more than one Android device connected to ADB." + echo "Please define ANDROID_SERIAL to specify which one to use." + exit 1 +fi + +# Run a command through adb shell, strip the extra \r from the output +# and return the correct status code to detect failures. This assumes +# that the adb shell command prints a final \n to stdout. +# $1+: command to run +# Out: command's stdout +# Return: command's status +# Note: the command's stderr is lost +adb_shell () { + local TMPOUT="$(mktemp)" + local LASTLINE RET + local ADB=${ADB:-adb} + + # The weird sed rule is to strip the final \r on each output line + # Since 'adb shell' never returns the command's proper exit/status code, + # we force it to print it as '%%' in the temporary output file, + # which we will later strip from it. + $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \ + sed -e 's![[:cntrl:]]!!g' > $TMPOUT + # Get last line in log, which contains the exit code from the command + LASTLINE=$(sed -e '$!d' $TMPOUT) + # Extract the status code from the end of the line, which must + # be '%%'. + RET=$(echo "$LASTLINE" | \ + awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }') + # Remove the status code from the last line. Note that this may result + # in an empty line. 
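+ # (Illustrative: a captured last line of 'OK%%0' yields status 0 and + # output line 'OK', while a bare '%%1' yields status 1 and an empty line.)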
+ LASTLINE=$(echo "$LASTLINE" | \ + awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }') + # The output itself: all lines except the status code. + sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE" + # Remove temp file. + rm -f $TMPOUT + # Exit with the appropriate status. + return $RET +} + +# Find the target architecture from a local shared library. +# This returns an NDK-compatible architecture name. +# out: NDK Architecture name, or empty string. +get_gyp_target_arch () { + local RANDOM_LIB=$(ls "$SYMBOL_DIR"/lib*.so | head -n1) + local SO_DESC=$(file $RANDOM_LIB) + case $ARCH in + *32-bit*ARM,*) echo "arm";; + *64-bit*ARM,*) echo "arm64";; + *32-bit*Intel,*) echo "x86";; + *x86-64,*) echo "x86_64";; + *32-bit*MIPS,*) echo "mips";; + *) echo ""; + esac +} + +if [ -z "$TARGET_ARCH" ]; then + TARGET_ARCH=$(get_gyp_target_arch) + if [ -z "$TARGET_ARCH" ]; then + TARGET_ARCH=arm + fi +else + # Nit: accept Chromium's 'ia32' as a valid target architecture. This + # script prefers the NDK 'x86' name instead because it uses it to find + # NDK-specific files (host gdb) with it. + if [ "$TARGET_ARCH" = "ia32" ]; then + TARGET_ARCH=x86 + log "Auto-config: --arch=$TARGET_ARCH (equivalent to ia32)" + fi +fi + +# Detect the NDK system name, i.e. the name used to identify the host. +# out: NDK system name (e.g. 'linux' or 'darwin') +get_ndk_host_system () { + local HOST_OS + if [ -z "$NDK_HOST_SYSTEM" ]; then + HOST_OS=$(uname -s) + case $HOST_OS in + Linux) NDK_HOST_SYSTEM=linux;; + Darwin) NDK_HOST_SYSTEM=darwin;; + *) panic "You can't run this script on this system: $HOST_OS";; + esac + fi + echo "$NDK_HOST_SYSTEM" +} + +# Detect the NDK host architecture name. +# out: NDK arch name (e.g. 'x86' or 'x86_64') +get_ndk_host_arch () { + local HOST_ARCH HOST_OS + if [ -z "$NDK_HOST_ARCH" ]; then + HOST_OS=$(get_ndk_host_system) + HOST_ARCH=$(uname -p) + case $HOST_ARCH in + i?86) NDK_HOST_ARCH=x86;; + x86_64|amd64) NDK_HOST_ARCH=x86_64;; + *) panic "You can't run this script on this host architecture: $HOST_ARCH";; + esac + # Darwin trick: "uname -p" always returns i386 on 64-bit installations. + if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then + # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts + # implementations of the tool. See http://b.android.com/53769 + HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64") + if [ "$HOST_64BITS" ]; then + NDK_HOST_ARCH=x86_64 + fi + fi + fi + echo "$NDK_HOST_ARCH" +} + +# Convert an NDK architecture name into a GNU configure triplet. +# $1: NDK architecture name (e.g. 'arm') +# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi') +get_arch_gnu_config () { + case $1 in + arm) + echo "arm-linux-androideabi" + ;; + arm64) + echo "aarch64-linux-android" + ;; + x86) + echo "i686-linux-android" + ;; + x86_64) + echo "x86_64-linux-android" + ;; + mips) + echo "mipsel-linux-android" + ;; + *) + echo "$ARCH-linux-android" + ;; + esac +} + +# Convert an NDK architecture name into a toolchain name prefix +# $1: NDK architecture name (e.g. 'arm') +# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi') +get_arch_toolchain_prefix () { + # Return the configure triplet, except for x86! + if [ "$1" = "x86" ]; then + echo "$1" + else + get_arch_gnu_config $1 + fi +} + +# Find a NDK toolchain prebuilt file or sub-directory. +# This will probe the various arch-specific toolchain directories +# in the NDK for the needed file. 
+# $1: NDK install path +# $2: NDK architecture name +# $3: prebuilt sub-path to look for. +# Out: file path, or empty if none is found. +get_ndk_toolchain_prebuilt () { + local NDK_DIR="${1%/}" + local ARCH="$2" + local SUBPATH="$3" + local NAME="$(get_arch_toolchain_prefix $ARCH)" + local FILE TARGET + FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH + if [ ! -f "$FILE" ]; then + FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH + if [ ! -f "$FILE" ]; then + FILE= + fi + fi + echo "$FILE" +} + +# Find the path to an NDK's toolchain full prefix for a given architecture +# $1: NDK install path +# $2: NDK target architecture name +# Out: install path + binary prefix (e.g. +# ".../path/to/bin/arm-linux-androideabi-") +get_ndk_toolchain_fullprefix () { + local NDK_DIR="$1" + local ARCH="$2" + local TARGET NAME HOST_OS HOST_ARCH GCC CONFIG + + # NOTE: This will need to be updated if the NDK changes the names or moves + # the location of its prebuilt toolchains. + # + GCC= + HOST_OS=$(get_ndk_host_system) + HOST_ARCH=$(get_ndk_host_arch) + CONFIG=$(get_arch_gnu_config $ARCH) + GCC=$(get_ndk_toolchain_prebuilt \ + "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-gcc") + if [ -z "$GCC" -a "$HOST_ARCH" = "x86_64" ]; then + GCC=$(get_ndk_toolchain_prebuilt \ + "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-gcc") + fi + if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then + # Special case, the x86 toolchain used to be incorrectly + # named i686-android-linux-gcc! + GCC=$(get_ndk_toolchain_prebuilt \ + "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-gcc") + fi + if [ -z "$GCC" ]; then + panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \ +Please verify your NDK installation!" + fi + echo "${GCC%%gcc}" +} + +# $1: NDK install path +# $2: target architecture. +get_ndk_gdbserver () { + local NDK_DIR="$1" + local ARCH=$2 + local BINARY + + # The location has moved after NDK r8 + BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver + if [ ! -f "$BINARY" ]; then + BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver) + fi + echo "$BINARY" +} + +# Check/probe the path to the Android toolchain installation. Always +# use the NDK versions of gdb and gdbserver. They must match to avoid +# issues when both binaries do not speak the same wire protocol. +# +if [ -z "$TOOLCHAIN" ]; then + ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \ + "$ANDROID_NDK_ROOT" "$TARGET_ARCH") + ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN") + log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN" +else + # Be flexible, allow one to specify either the install path or the bin + # sub-directory in --toolchain: + # + if [ -d "$TOOLCHAIN/bin" ]; then + TOOLCHAIN=$TOOLCHAIN/bin + fi + ANDROID_TOOLCHAIN=$TOOLCHAIN +fi + +# Cosmetic: Remove trailing directory separator. +ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/} + +# Find host GDB client binary +if [ -z "$GDB" ]; then + GDB=$(which $ANDROID_TOOLCHAIN/*-$GDBEXEPOSTFIX 2>/dev/null | head -1) + if [ -z "$GDB" ]; then + panic "Can't find Android gdb client in your path, check your \ +--toolchain or --gdb path." + fi + log "Host gdb client: $GDB" +fi + +# Find gdbserver binary, we will later push it to /data/local/tmp +# This ensures that both gdbserver and $GDB talk the same binary protocol, +# otherwise weird problems will appear. +# +if [ -z "$GDBSERVER" ]; then + GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH") + if [ -z "$GDBSERVER" ]; then + panic "Can't find NDK gdbserver binary. use --gdbserver to specify \ +valid one!" 
+ fi + log "Auto-config: --gdbserver=$GDBSERVER" +fi + +# A unique ID for this script's session. This needs to be the same in all +# sub-shell commands we're going to launch, so take the PID of the launcher +# process. +TMP_ID=$$ + +# Temporary directory, will get cleaned up on exit. +TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID +mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/* + +GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid + +# If --force is specified, try to kill any gdbserver process started by the +# same user on the device. Normally, these are killed automatically by the +# script on exit, but there are a few corner cases where this would still +# be needed. +if [ "$FORCE" ]; then + GDBSERVER_PIDS=$(adb_shell ps | awk '$9 ~ /gdbserver/ { print $2; }') + for GDB_PID in $GDBSERVER_PIDS; do + log "Killing previous gdbserver (PID=$GDB_PID)" + adb_shell kill -9 $GDB_PID + done +fi + +if [ "$START" ]; then + log "Starting $PROGRAM_NAME on device." + adb_shell am start -n $PACKAGE_NAME/$ACTIVITY ${START_URL:+-d "$START_URL"} + adb_shell ps | grep -q $PACKAGE_NAME + fail_panic "Could not start $PROGRAM_NAME on device. Are you sure the \ +package is installed?" +fi + +# Return the timestamp of a given file, as number of seconds since epoch. +# $1: file path +# Out: file timestamp +get_file_timestamp () { + stat -c %Y "$1" 2>/dev/null +} + +# Allow several concurrent debugging sessions +TARGET_GDBSERVER=/data/data/$PACKAGE_NAME/gdbserver-adb-gdb-$TMP_ID +TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID + +# Return the build fingerprint contained in a build.prop file. +# $1: path to build.prop file +get_build_fingerprint_from () { + cat "$1" | grep -e '^ro.build.fingerprint=' | cut -d= -f2 +} + + +ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR +PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR} + +HOST_FINGERPRINT= +DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint) +[[ "$DEVICE_FINGERPRINT" ]] || panic "Failed to get the device fingerprint" +log "Device build fingerprint: $DEVICE_FINGERPRINT" + +# If --pull-libs-dir is not specified, and this is a platform build, look +# if we can use the symbolic libraries under $ANDROID_PRODUCT_OUT/symbols/ +# directly, if the build fingerprint matches the device. +if [ -z "$ORG_PULL_LIBS_DIR" -a \ + "$ANDROID_PRODUCT_OUT" -a \ + -f "$ANDROID_PRODUCT_OUT/system/build.prop" ]; then + ANDROID_FINGERPRINT=$(get_build_fingerprint_from \ + "$ANDROID_PRODUCT_OUT"/system/build.prop) + log "Android build fingerprint: $ANDROID_FINGERPRINT" + if [ "$ANDROID_FINGERPRINT" = "$DEVICE_FINGERPRINT" ]; then + log "Perfect match!" + PULL_LIBS_DIR=$ANDROID_PRODUCT_OUT/symbols + HOST_FINGERPRINT=$ANDROID_FINGERPRINT + if [ "$PULL_LIBS" ]; then + log "Ignoring --pull-libs since the device and platform build \ +fingerprints match." + NO_PULL_LIBS=true + fi + fi +fi + +# If neither --pull-libs an --no-pull-libs were specified, check the build +# fingerprints of the device, and the cached system libraries on the host. +# +if [ -z "$NO_PULL_LIBS" -a -z "$PULL_LIBS" ]; then + if [ ! 
-f "$PULL_LIBS_DIR/build.fingerprint" ]; then + log "Auto-config: --pull-libs (no cached libraries)" + PULL_LIBS=true + else + HOST_FINGERPRINT=$(< "$PULL_LIBS_DIR/build.fingerprint") + log "Host build fingerprint: $HOST_FINGERPRINT" + if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then + log "Auto-config: --no-pull-libs (fingerprint match)" + NO_PULL_LIBS=true + else + log "Auto-config: --pull-libs (fingerprint mismatch)" + PULL_LIBS=true + fi + fi +fi + +# Extract the system libraries from the device if necessary. +if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then + echo "Extracting system libraries into: $PULL_LIBS_DIR" +fi + +mkdir -p "$PULL_LIBS_DIR" +fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR" + +# If requested, work for M-x gdb. The gdb indirections make it +# difficult to pass --annotate=3 to the gdb binary itself. +GDB_ARGS= +if [ "$ANNOTATE" ]; then + GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE" +fi + +# Get the PID from the first argument or else find the PID of the +# browser process. +if [ -z "$PID" ]; then + PROCESSNAME=$PACKAGE_NAME + if [ "$SANDBOXED_INDEX" ]; then + PROCESSNAME=$PROCESSNAME:sandboxed_process$SANDBOXED_INDEX + elif [ "$SANDBOXED" ]; then + PROCESSNAME=$PROCESSNAME:sandboxed_process + PID=$(adb_shell ps | \ + awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1) + elif [ "$PRIVILEGED_INDEX" ]; then + PROCESSNAME=$PROCESSNAME:privileged_process$PRIVILEGED_INDEX + elif [ "$PRIVILEGED" ]; then + PROCESSNAME=$PROCESSNAME:privileged_process + PID=$(adb_shell ps | \ + awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1) + fi + if [ -z "$PID" ]; then + PID=$(adb_shell ps | \ + awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1) + fi + if [ -z "$PID" ]; then + if [ "$START" ]; then + panic "Can't find application process PID, did it crash?" + else + panic "Can't find application process PID, are you sure it is \ +running? Try using --start." + fi + fi + log "Found process PID: $PID" +elif [ "$SANDBOXED" ]; then + echo "WARNING: --sandboxed option ignored due to use of --pid." +elif [ "$PRIVILEGED" ]; then + echo "WARNING: --privileged option ignored due to use of --pid." +fi + +# Determine if 'adb shell' runs as root or not. +# If so, we can launch gdbserver directly, otherwise, we have to +# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable. +# +if [ "$SU_PREFIX" ]; then + # Need to check that this works properly. + SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log + adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1 + if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then + echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:" + echo "$ adb shell $SU_PREFIX \"echo foo\"" + cat $SU_PREFIX_TEST_LOG + exit 1 + fi + COMMAND_PREFIX="$SU_PREFIX \"" + COMMAND_SUFFIX="\"" +else + SHELL_UID=$(adb shell cat /proc/self/status | \ + awk '$1 == "Uid:" { print $2; }') + log "Shell UID: $SHELL_UID" + if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then + COMMAND_PREFIX="run-as $PACKAGE_NAME" + COMMAND_SUFFIX= + else + COMMAND_PREFIX= + COMMAND_SUFFIX= + fi +fi +log "Command prefix: '$COMMAND_PREFIX'" +log "Command suffix: '$COMMAND_SUFFIX'" + +# Pull device's system libraries that are mapped by our process. +# Pulling all system libraries is too long, so determine which ones +# we need by looking at /proc/$PID/maps instead +if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then + echo "Extracting system libraries into: $PULL_LIBS_DIR" + MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX) + if [ $? 
!= 0 ]; then + echo "ERROR: Could not list process's memory mappings." + if [ "$SU_PREFIX" ]; then + panic "Are you sure your --su-prefix is correct?" + else + panic "Use --su-prefix if the application is not debuggable." + fi + fi + # Remove the fingerprint file in case pulling one of the libs fails. + rm -f "$PULL_LIBS_DIR/build.fingerprint" + SYSTEM_LIBS=$(echo "$MAPPINGS" | \ + awk '$6 ~ /\/system\/.*\.so$/ { print $6; }' | sort -u) + for SYSLIB in /system/bin/linker $SYSTEM_LIBS; do + echo "Pulling from device: $SYSLIB" + DST_FILE=$PULL_LIBS_DIR$SYSLIB + DST_DIR=$(dirname "$DST_FILE") + mkdir -p "$DST_DIR" && adb pull $SYSLIB "$DST_FILE" 2>/dev/null + fail_panic "Could not pull $SYSLIB from device !?" + done + echo "Writing the device fingerprint" + echo "$DEVICE_FINGERPRINT" > "$PULL_LIBS_DIR/build.fingerprint" +fi + +# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4 +# so we can add them to solib-search-path later. +SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \ + grep -v "^$" | tr '\n' ':') + +# This is a re-implementation of gdbclient, where we use compatible +# versions of gdbserver and $GDBNAME to ensure that everything works +# properly. +# + +# Push gdbserver to the device +log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER" +adb push $GDBSERVER $TMP_TARGET_GDBSERVER &>/dev/null +adb shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER +adb shell rm $TMP_TARGET_GDBSERVER +fail_panic "Could not copy gdbserver to the device!" + +if [ -z "$PORT" ]; then + PORT=5039 +fi +HOST_PORT=$PORT +TARGET_PORT=$PORT + +# Select correct app_process for architecture. +case $TARGET_ARCH in + arm|x86|mips) GDBEXEC=app_process32;; + arm64|x86_64) GDBEXEC=app_process64;; + *) fail_panic "Unknown app_process for architecture!";; +esac + +# Default to app_process if bit-width specific process isn't found. +adb_shell ls /system/bin/$GDBEXEC +if [ $? != 0 ]; then + GDBEXEC=app_process +fi + +# Detect AddressSanitizer setup on the device. In that case app_process is a +# script, and the real executable is app_process.real. +GDBEXEC_ASAN=app_process.real +adb_shell ls /system/bin/$GDBEXEC_ASAN +if [ $? == 0 ]; then + GDBEXEC=$GDBEXEC_ASAN +fi + +# Pull the app_process binary from the device. +log "Pulling $GDBEXEC from device" +adb pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null +fail_panic "Could not retrieve $GDBEXEC from the device!" + +# Setup network redirection +log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_PORT)" +adb forward tcp:$HOST_PORT tcp:$TARGET_PORT +fail_panic "Could not setup network redirection from \ +host:localhost:$HOST_PORT to device:localhost:$TARGET_PORT!" + +# Start gdbserver in the background +# Note that using run-as requires the package to be debuggable. +# +# If not, this will fail horribly. The alternative is to run the +# program as root, which requires of course root privileges. +# Maybe we should add a --root option to enable this? +# +log "Starting gdbserver in the background:" +GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log +log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \ + --attach $PID $COMMAND_SUFFIX" +"$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \ + --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1 & +GDBSERVER_PID=$! +echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE +log "background job pid: $GDBSERVER_PID" + +# Sleep to allow gdbserver to attach to the remote process and be +# ready to connect to. 
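+# (If the default of 1 second is not enough on a slow device or emulator, +# pass a larger --attach-delay=<num> value.)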
+log "Sleeping ${ATTACH_DELAY}s to allow gdbserver to attach." +sleep "$ATTACH_DELAY" +log "Job control: $(jobs -l)" +STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }') +if [ "$STATE" != "Running" ]; then + echo "ERROR: GDBServer either failed to run or attach to PID $PID!" + if [ $(adb_shell su -c getenforce) != "Permissive" ]; then + echo "Device mode is Enforcing. Changing Device mode to Permissive " + $(adb_shell su -c setenforce 0) + if [ $(adb_shell su -c getenforce) != "Permissive" ]; then + echo "ERROR: Failed to Change Device mode to Permissive" + echo "Failure log (use --verbose for more information):" + cat $GDBSERVER_LOG + exit 1 + fi + else + echo "Failure log (use --verbose for more information):" + cat $GDBSERVER_LOG + exit 1 + fi +fi + +# Generate a file containing useful GDB initialization commands +readonly COMMANDS=$TMPDIR/gdb.init +log "Generating GDB initialization commands file: $COMMANDS" +echo -n "" > $COMMANDS +echo "set print pretty 1" >> $COMMANDS +echo "python" >> $COMMANDS +echo "import sys" >> $COMMANDS +echo "sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')" >> $COMMANDS +echo "try:" >> $COMMANDS +echo " import gdb_chrome" >> $COMMANDS +echo "finally:" >> $COMMANDS +echo " sys.path.pop(0)" >> $COMMANDS +echo "end" >> $COMMANDS +echo "file $TMPDIR/$GDBEXEC" >> $COMMANDS +echo "directory $CHROMIUM_SRC" >> $COMMANDS +echo "set solib-absolute-prefix $PULL_LIBS_DIR" >> $COMMANDS +echo "set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR" \ + >> $COMMANDS +echo "echo Attaching and reading symbols, this may take a while.." \ + >> $COMMANDS +echo "target remote :$HOST_PORT" >> $COMMANDS + +if [ "$GDBINIT" ]; then + cat "$GDBINIT" >> $COMMANDS +fi + +if [ "$VERBOSE" -gt 0 ]; then + echo "### START $COMMANDS" + cat $COMMANDS + echo "### END $COMMANDS" +fi + +log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS" +$GDB $GDB_ARGS -x $COMMANDS && +rm -f "$GDBSERVER_PIDFILE" diff --git a/build/android/adb_gdb_android_webview_shell b/build/android/adb_gdb_android_webview_shell new file mode 100644 index 00000000000..f685fda77c5 --- /dev/null +++ b/build/android/adb_gdb_android_webview_shell @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ContentShell process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.AwShellActivity +"$PROGDIR"/adb_gdb \ + --program-name=AwShellApplication \ + --package-name=org.chromium.android_webview.shell \ + "$@" diff --git a/build/android/adb_gdb_blimp_client b/build/android/adb_gdb_blimp_client new file mode 100644 index 00000000000..3c2e21d6b90 --- /dev/null +++ b/build/android/adb_gdb_blimp_client @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a Blimp process and debug it. +# See --help for details. 
+# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=org.chromium.blimp.BlimpRendererActivity +"$PROGDIR"/adb_gdb \ + --program-name=Blimp \ + --package-name=org.chromium.blimp \ + "$@" diff --git a/build/android/adb_gdb_chrome_public b/build/android/adb_gdb_chrome_public new file mode 100644 index 00000000000..4366c838e78 --- /dev/null +++ b/build/android/adb_gdb_chrome_public @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ChromePublic process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=com.google.android.apps.chrome.Main +"$PROGDIR"/adb_gdb \ + --program-name=ChromePublic \ + --package-name=org.chromium.chrome \ + "$@" diff --git a/build/android/adb_gdb_content_shell b/build/android/adb_gdb_content_shell new file mode 100644 index 00000000000..18e1a61d893 --- /dev/null +++ b/build/android/adb_gdb_content_shell @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ContentShell process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.ContentShellActivity +"$PROGDIR"/adb_gdb \ + --program-name=ContentShell \ + --package-name=org.chromium.content_shell_apk \ + "$@" diff --git a/build/android/adb_gdb_cronet_sample b/build/android/adb_gdb_cronet_sample new file mode 100644 index 00000000000..8d0c864d133 --- /dev/null +++ b/build/android/adb_gdb_cronet_sample @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a CronetSample process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.CronetSampleActivity +"$PROGDIR"/adb_gdb \ + --program-name=CronetSample \ + --package-name=org.chromium.cronet_sample_apk \ + "$@" diff --git a/build/android/adb_gdb_mojo_shell b/build/android/adb_gdb_mojo_shell new file mode 100644 index 00000000000..ba91149cce9 --- /dev/null +++ b/build/android/adb_gdb_mojo_shell @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a MojoShell process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.MojoShellActivity +"$PROGDIR"/adb_gdb \ + --program-name=MojoShell \ + --package-name=org.chromium.mojo_shell_apk \ + "$@" diff --git a/build/android/adb_install_apk.py b/build/android/adb_install_apk.py new file mode 100644 index 00000000000..7904b41a531 --- /dev/null +++ b/build/android/adb_install_apk.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file.
+ +"""Utility script to install APKs from the command line quickly.""" + +import argparse +import glob +import logging +import os +import sys + +import devil_chromium +from devil.android import apk_helper +from devil.android import device_blacklist +from devil.android import device_errors +from devil.android import device_utils +from devil.utils import run_tests_helper +from pylib import constants + + +def main(): + parser = argparse.ArgumentParser() + + apk_group = parser.add_mutually_exclusive_group(required=True) + apk_group.add_argument('--apk', dest='apk_name', + help='DEPRECATED The name of the apk containing the' + ' application (with the .apk extension).') + apk_group.add_argument('apk_path', nargs='?', + help='The path to the APK to install.') + + # TODO(jbudorick): Remove once no clients pass --apk_package + parser.add_argument('--apk_package', help='DEPRECATED unused') + parser.add_argument('--split', + action='append', + dest='splits', + help='A glob matching the apk splits. ' + 'Can be specified multiple times.') + parser.add_argument('--keep_data', + action='store_true', + default=False, + help='Keep the package data when installing ' + 'the application.') + parser.add_argument('--debug', action='store_const', const='Debug', + dest='build_type', + default=os.environ.get('BUILDTYPE', 'Debug'), + help='If set, run test suites under out/Debug. ' + 'Default is env var BUILDTYPE or Debug') + parser.add_argument('--release', action='store_const', const='Release', + dest='build_type', + help='If set, run test suites under out/Release. ' + 'Default is env var BUILDTYPE or Debug.') + parser.add_argument('-d', '--device', dest='devices', action='append', + default=[], + help='Target device for apk to install on. Enter multiple' + ' times for multiple devices.') + parser.add_argument('--adb-path', type=os.path.abspath, + help='Absolute path to the adb binary to use.') + parser.add_argument('--blacklist-file', help='Device blacklist JSON file.') + parser.add_argument('-v', '--verbose', action='count', + help='Enable verbose logging.') + parser.add_argument('--downgrade', action='store_true', + help='If set, allows downgrading of apk.') + parser.add_argument('--timeout', type=int, + default=device_utils.DeviceUtils.INSTALL_DEFAULT_TIMEOUT, + help='Seconds to wait for APK installation. ' + '(default: %(default)s)') + + args = parser.parse_args() + + run_tests_helper.SetLogLevel(args.verbose) + constants.SetBuildType(args.build_type) + + devil_chromium.Initialize( + output_directory=constants.GetOutDirectory(), + adb_path=args.adb_path) + + apk = args.apk_path or args.apk_name + if not apk.endswith('.apk'): + apk += '.apk' + if not os.path.exists(apk): + apk = os.path.join(constants.GetOutDirectory(), 'apks', apk) + if not os.path.exists(apk): + parser.error('%s not found.' 
% apk) + + if args.splits: + splits = [] + base_apk_package = apk_helper.ApkHelper(apk).GetPackageName() + for split_glob in args.splits: + apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')] + if not apks: + logging.warning('No apks matched for %s.', split_glob) + for f in apks: + helper = apk_helper.ApkHelper(f) + if (helper.GetPackageName() == base_apk_package + and helper.GetSplitName()): + splits.append(f) + + blacklist = (device_blacklist.Blacklist(args.blacklist_file) + if args.blacklist_file + else None) + devices = device_utils.DeviceUtils.HealthyDevices(blacklist=blacklist, + device_arg=args.devices) + + def blacklisting_install(device): + try: + if args.splits: + device.InstallSplitApk(apk, splits, reinstall=args.keep_data, + allow_downgrade=args.downgrade) + else: + device.Install(apk, reinstall=args.keep_data, + allow_downgrade=args.downgrade, + timeout=args.timeout) + except device_errors.CommandFailedError: + logging.exception('Failed to install %s', args.apk_name) + if blacklist: + blacklist.Extend([str(device)], reason='install_failure') + logging.warning('Blacklisting %s', str(device)) + except device_errors.CommandTimeoutError: + logging.exception('Timed out while installing %s', args.apk_name) + if blacklist: + blacklist.Extend([str(device)], reason='install_timeout') + logging.warning('Blacklisting %s', str(device)) + + device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_install) + + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/adb_kill_android_webview_shell b/build/android/adb_kill_android_webview_shell new file mode 100644 index 00000000000..5f287f08266 --- /dev/null +++ b/build/android/adb_kill_android_webview_shell @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Kill a running android webview shell. +# +# Assumes you have sourced the build/android/envsetup.sh script. + +SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.android_webview.shell') +VAL=$(echo "$SHELL_PID_LINES" | wc -l) +if [ $VAL -lt 1 ] ; then + echo "Not running android webview shell." +else + SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}') + if [ "$SHELL_PID" != "" ] ; then + set -x + adb shell kill $SHELL_PID + set - + else + echo "Android webview shell does not appear to be running." + fi +fi diff --git a/build/android/adb_kill_blimp_client b/build/android/adb_kill_blimp_client new file mode 100644 index 00000000000..6221e45234a --- /dev/null +++ b/build/android/adb_kill_blimp_client @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Kill a running instance of Blimp. +# +# Assumes you have sourced the build/android/envsetup.sh script. + +SHELL_PID_LINES=$(adb shell ps | grep -w 'org.chromium.blimp') +VAL=$(echo "$SHELL_PID_LINES" | wc -l) +if [ $VAL -lt 1 ] ; then + echo "Not running Blimp." +else + SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}') + if [ "$SHELL_PID" != "" ] ; then + set -x + adb shell kill $SHELL_PID + set - + else + echo "Blimp does not appear to be running." 
+ fi +fi diff --git a/build/android/adb_kill_chrome_public b/build/android/adb_kill_chrome_public new file mode 100644 index 00000000000..5b539a043d4 --- /dev/null +++ b/build/android/adb_kill_chrome_public @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Kill a running instance of ChromePublic. +# +# Assumes you have sourced the build/android/envsetup.sh script. + +SHELL_PID_LINES=$(adb shell ps | grep -w 'org.chromium.chrome') +VAL=$(echo "$SHELL_PID_LINES" | wc -l) +if [ $VAL -lt 1 ] ; then + echo "Not running ChromePublic." +else + SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}') + if [ "$SHELL_PID" != "" ] ; then + set -x + adb shell kill $SHELL_PID + set - + else + echo "ChromePublic does not appear to be running." + fi +fi diff --git a/build/android/adb_kill_content_shell b/build/android/adb_kill_content_shell new file mode 100644 index 00000000000..e379dd47149 --- /dev/null +++ b/build/android/adb_kill_content_shell @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Kill a running content shell. +# +# Assumes you have sourced the build/android/envsetup.sh script. + +SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.content_shell_apk') +VAL=$(echo "$SHELL_PID_LINES" | wc -l) +if [ $VAL -lt 1 ] ; then + echo "Not running Content shell." +else + SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}') + if [ "$SHELL_PID" != "" ] ; then + set -x + adb shell kill $SHELL_PID + set - + else + echo "Content shell does not appear to be running." + fi +fi diff --git a/build/android/adb_logcat_monitor.py b/build/android/adb_logcat_monitor.py new file mode 100644 index 00000000000..d3cc67dbcc8 --- /dev/null +++ b/build/android/adb_logcat_monitor.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Saves logcats from all connected devices. + +Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>] + +This script will repeatedly poll adb for new devices and save logcats +inside the <base_dir> directory, which it attempts to create. The +script will run until killed by an external signal. To test, run the +script in a shell and <Ctrl>-C it after a while. It should be +resilient across phone disconnects and reconnects and start the logcat +early enough to not miss anything.
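+ +Example (paths are illustrative): + adb_logcat_monitor.py /tmp/logcats ~/android-sdk/platform-tools/adb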
+""" + +import logging +import os +import re +import shutil +import signal +import subprocess +import sys +import time + +# Map from device_id -> (process, logcat_num) +devices = {} + + +class TimeoutException(Exception): + """Exception used to signal a timeout.""" + pass + + +class SigtermError(Exception): + """Exception used to catch a sigterm.""" + pass + + +def StartLogcatIfNecessary(device_id, adb_cmd, base_dir): + """Spawns a adb logcat process if one is not currently running.""" + process, logcat_num = devices[device_id] + if process: + if process.poll() is None: + # Logcat process is still happily running + return + else: + logging.info('Logcat for device %s has died', device_id) + error_filter = re.compile('- waiting for device -') + for line in process.stderr: + if not error_filter.match(line): + logging.error(device_id + ': ' + line) + + logging.info('Starting logcat %d for device %s', logcat_num, + device_id) + logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num) + logcat_file = open(os.path.join(base_dir, logcat_filename), 'w') + process = subprocess.Popen([adb_cmd, '-s', device_id, + 'logcat', '-v', 'threadtime'], + stdout=logcat_file, + stderr=subprocess.PIPE) + devices[device_id] = (process, logcat_num + 1) + + +def GetAttachedDevices(adb_cmd): + """Gets the device list from adb. + + We use an alarm in this function to avoid deadlocking from an external + dependency. + + Args: + adb_cmd: binary to run adb + + Returns: + list of devices or an empty list on timeout + """ + signal.alarm(2) + try: + out, err = subprocess.Popen([adb_cmd, 'devices'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE).communicate() + if err: + logging.warning('adb device error %s', err.strip()) + return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE) + except TimeoutException: + logging.warning('"adb devices" command timed out') + return [] + except (IOError, OSError): + logging.exception('Exception from "adb devices"') + return [] + finally: + signal.alarm(0) + + +def main(base_dir, adb_cmd='adb'): + """Monitor adb forever. Expects a SIGINT (Ctrl-C) to kill.""" + # We create the directory to ensure 'run once' semantics + if os.path.exists(base_dir): + print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir + shutil.rmtree(base_dir, ignore_errors=True) + + os.makedirs(base_dir) + logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'), + level=logging.INFO, + format='%(asctime)-2s %(levelname)-8s %(message)s') + + # Set up the alarm for calling 'adb devices'. 
This is to ensure + # our script doesn't get stuck waiting for a process response. + def TimeoutHandler(_signum, _unused_frame): + raise TimeoutException() + signal.signal(signal.SIGALRM, TimeoutHandler) + + # Handle SIGTERMs to ensure clean shutdown + def SigtermHandler(_signum, _unused_frame): + raise SigtermError() + signal.signal(signal.SIGTERM, SigtermHandler) + + logging.info('Started with pid %d', os.getpid()) + pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID') + + try: + with open(pid_file_path, 'w') as f: + f.write(str(os.getpid())) + while True: + for device_id in GetAttachedDevices(adb_cmd): + if device_id not in devices: + subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c']) + devices[device_id] = (None, 0) + + for device in devices: + # This will spawn logcat watchers for any device ever detected + StartLogcatIfNecessary(device, adb_cmd, base_dir) + + time.sleep(5) + except SigtermError: + logging.info('Received SIGTERM, shutting down') + except: # pylint: disable=bare-except + logging.exception('Unexpected exception in main.') + finally: + for process, _ in devices.itervalues(): + if process: + try: + process.terminate() + except OSError: + pass + os.remove(pid_file_path) + + +if __name__ == '__main__': + if 2 <= len(sys.argv) <= 3: + print 'adb_logcat_monitor: Initializing' + sys.exit(main(*sys.argv[1:3])) + + print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0] diff --git a/build/android/adb_logcat_printer.py b/build/android/adb_logcat_printer.py new file mode 100644 index 00000000000..a715170759d --- /dev/null +++ b/build/android/adb_logcat_printer.py @@ -0,0 +1,222 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Shutdown adb_logcat_monitor and print accumulated logs. + +To test, call './adb_logcat_printer.py <base_dir>' where +<base_dir> contains 'adb logcat -v threadtime' files named as +logcat_<device_id>_<sequence_num> + +The script will print the files to stdout, and will combine multiple +logcats from a single device if there is overlap. + +Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script +will attempt to terminate the contained PID by sending a SIGINT and +monitoring for the deletion of the aforementioned file. +""" +# pylint: disable=W0702 + +import cStringIO +import logging +import optparse +import os +import re +import signal +import sys +import time + + +# Set this to debug for more verbose output +LOG_LEVEL = logging.INFO + + +def CombineLogFiles(list_of_lists, logger): + """Splices together multiple logcats from the same device. + + Args: + list_of_lists: list of pairs (filename, list of timestamped lines) + logger: handler to log events + + Returns: + list of lines with duplicates removed + """ + cur_device_log = [''] + for cur_file, cur_file_lines in list_of_lists: + # Ignore files with just the logcat header + if len(cur_file_lines) < 2: + continue + common_index = 0 + # Skip this step if list just has empty string + if len(cur_device_log) > 1: + try: + line = cur_device_log[-1] + # Used to make sure we only splice on a timestamped line + if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line): + common_index = cur_file_lines.index(line) + else: + logger.warning('splice error - no timestamp in "%s"?', line.strip()) + except ValueError: + # The last line was valid but wasn't found in the next file + cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****'] + logger.info('Unable to splice %s. 
+
+    cur_device_log += ['*' * 30 + ' %s' % cur_file]
+    cur_device_log.extend(cur_file_lines[common_index:])
+
+  return cur_device_log
+
+
+def FindLogFiles(base_dir):
+  """Search a directory for logcat files.
+
+  Args:
+    base_dir: directory to search
+
+  Returns:
+    Mapping of device_id to a sorted list of file paths for a given device
+  """
+  logcat_filter = re.compile(r'^logcat_(\S+)_(\d+)$')
+  # list of tuples (<device_id>, <seq_num>, <full_path>)
+  filtered_list = []
+  for cur_file in os.listdir(base_dir):
+    matcher = logcat_filter.match(cur_file)
+    if matcher:
+      filtered_list += [(matcher.group(1), int(matcher.group(2)),
+                         os.path.join(base_dir, cur_file))]
+  filtered_list.sort()
+  file_map = {}
+  for device_id, _, cur_file in filtered_list:
+    if device_id not in file_map:
+      file_map[device_id] = []
+
+    file_map[device_id] += [cur_file]
+  return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+  """Read log files, combine and format.
+
+  Args:
+    log_filenames: mapping of device_id to sorted list of file paths
+    logger: logger handle for logging events
+
+  Returns:
+    list of formatted device logs, one for each device.
+  """
+  device_logs = []
+
+  for device, device_files in log_filenames.iteritems():
+    logger.debug('%s: %s', device, str(device_files))
+    device_file_lines = []
+    for cur_file in device_files:
+      with open(cur_file) as f:
+        device_file_lines += [(cur_file, f.read().splitlines())]
+    combined_lines = CombineLogFiles(device_file_lines, logger)
+    # Prepend each line with a short unique ID so it's easy to see
+    # when the device changes. We don't use the start of the device
+    # ID because it can be the same among devices. Example lines:
+    #   AB324: foo
+    #   AB324: blah
+    device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)]
+  return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+  try:
+    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+    with open(monitor_pid_path) as f:
+      monitor_pid = int(f.readline())
+
+    logger.info('Sending SIGTERM to %d', monitor_pid)
+    os.kill(monitor_pid, signal.SIGTERM)
+    i = 0
+    while True:
+      time.sleep(.2)
+      if not os.path.exists(monitor_pid_path):
+        return
+      if not os.path.exists('/proc/%d' % monitor_pid):
+        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+        return
+      logger.info('Waiting for logcat process to terminate.')
+      i += 1
+      if i >= 10:
+        logger.warning('Monitor pid did not terminate. Continuing anyway.')
+        return
+
+  except (ValueError, IOError, OSError):
+    logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] <base_dir>')
+  parser.add_option('--output-path',
+                    help='Output file path (if unspecified, prints to stdout)')
+  options, args = parser.parse_args(argv)
+  if len(args) != 1:
+    parser.error('Wrong number of unparsed args')
+  base_dir = args[0]
+
+  log_stringio = cStringIO.StringIO()
+  logger = logging.getLogger('LogcatPrinter')
+  logger.setLevel(LOG_LEVEL)
+  sh = logging.StreamHandler(log_stringio)
+  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+                                    ' %(message)s'))
+  logger.addHandler(sh)
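+
+  # The printer's own event messages accumulate in the StringIO buffer behind
+  # 'sh' above, and are appended to the output as a 'Logcat Printer Event Log'
+  # section at the end of main().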
+
+  if options.output_path:
+    if not os.path.exists(os.path.dirname(options.output_path)):
+      logger.warning('Output dir %s doesn\'t exist. Creating it.',
+                     os.path.dirname(options.output_path))
+      os.makedirs(os.path.dirname(options.output_path))
+    output_file = open(options.output_path, 'w')
+    logger.info('Dumping logcat to local file %s. If running in a build, '
+                'this file will likely be uploaded to google storage '
+                'in a later step. It can be downloaded from there.',
+                options.output_path)
+  else:
+    output_file = sys.stdout
+
+  try:
+    # Wait at least 5 seconds after base_dir is created before printing.
+    #
+    # The idea is that 'adb logcat > file' output consists of 2 phases:
+    #  1 Dump all the saved logs to the file
+    #  2 Stream log messages as they are generated
+    #
+    # We want to give enough time for phase 1 to complete. There's no
+    # good method to tell how long to wait, but it usually only takes a
+    # second. On most bots, this code path won't occur at all, since
+    # adb_logcat_monitor.py will have been spawned more than 5 seconds
+    # prior to this script being called.
+    try:
+      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+    except OSError:
+      sleep_time = 5
+    if sleep_time > 0:
+      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+      time.sleep(sleep_time)
+
+    assert os.path.exists(base_dir), '%s does not exist' % base_dir
+    ShutdownLogcatMonitor(base_dir, logger)
+    separator = '\n' + '*' * 80 + '\n\n'
+    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+      output_file.write(log)
+      output_file.write(separator)
+    with open(os.path.join(base_dir, 'eventlog')) as f:
+      output_file.write('\nLogcat Monitor Event Log\n')
+      output_file.write(f.read())
+  except:
+    logger.exception('Unexpected exception')
+
+  logger.info('Done.')
+  sh.flush()
+  output_file.write('\nLogcat Printer Event Log\n')
+  output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/adb_profile_chrome b/build/android/adb_profile_chrome
new file mode 100644
index 00000000000..d3244ffdf60
--- /dev/null
+++ b/build/android/adb_profile_chrome
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome "$@"
diff --git a/build/android/adb_profile_chrome_startup b/build/android/adb_profile_chrome_startup
new file mode 100644
index 00000000000..d5836cdf702
--- /dev/null
+++ b/build/android/adb_profile_chrome_startup
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling for chrome startup.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome_startup "$@"
diff --git a/build/android/adb_reverse_forwarder.py b/build/android/adb_reverse_forwarder.py
new file mode 100644
index 00000000000..b0a8dc357fb
--- /dev/null
+++ b/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+ +Allows an Android device to connect to services running on the host machine, +i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder| +to be built. +""" + +import optparse +import sys +import time + +import devil_chromium + +from devil.android import device_blacklist +from devil.android import device_utils +from devil.android import forwarder +from devil.utils import run_tests_helper + +from pylib import constants + + +def main(argv): + parser = optparse.OptionParser(usage='Usage: %prog [options] device_port ' + 'host_port [device_port_2 host_port_2] ...', + description=__doc__) + parser.add_option('-v', + '--verbose', + dest='verbose_count', + default=0, + action='count', + help='Verbose level (multiple times for more)') + parser.add_option('--device', + help='Serial number of device we should use.') + parser.add_option('--blacklist-file', help='Device blacklist JSON file.') + parser.add_option('--debug', action='store_const', const='Debug', + dest='build_type', default='Release', + help='Use Debug build of host tools instead of Release.') + + options, args = parser.parse_args(argv) + run_tests_helper.SetLogLevel(options.verbose_count) + + devil_chromium.Initialize() + + if len(args) < 2 or not len(args) % 2: + parser.error('Need even number of port pairs') + sys.exit(1) + + try: + port_pairs = [int(a) for a in args[1:]] + port_pairs = zip(port_pairs[::2], port_pairs[1::2]) + except ValueError: + parser.error('Bad port number') + sys.exit(1) + + blacklist = (device_blacklist.Blacklist(options.blacklist_file) + if options.blacklist_file + else None) + device = device_utils.DeviceUtils.HealthyDevices( + blacklist=blacklist, device_arg=options.device)[0] + constants.SetBuildType(options.build_type) + try: + forwarder.Forwarder.Map(port_pairs, device) + while True: + time.sleep(60) + except KeyboardInterrupt: + sys.exit(0) + finally: + forwarder.Forwarder.UnmapAllDevicePorts(device) + +if __name__ == '__main__': + main(sys.argv) diff --git a/build/android/adb_run_android_webview_shell b/build/android/adb_run_android_webview_shell new file mode 100644 index 00000000000..1014a731f47 --- /dev/null +++ b/build/android/adb_run_android_webview_shell @@ -0,0 +1,12 @@ +#!/bin/bash +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +optional_url=$1 + +adb shell am start \ + -a android.intent.action.VIEW \ + -n org.chromium.android_webview.shell/.AwShellActivity \ + ${optional_url:+-d "$optional_url"} diff --git a/build/android/adb_run_blimp_client b/build/android/adb_run_blimp_client new file mode 100644 index 00000000000..4b3b4a888b0 --- /dev/null +++ b/build/android/adb_run_blimp_client @@ -0,0 +1,12 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +optional_url=$1 + +adb shell am start \ + -a android.intent.action.VIEW \ + -n org.chromium.blimp/org.chromium.blimp.BlimpRendererActivity \ + ${optional_url:+-d "$optional_url"} diff --git a/build/android/adb_run_chrome_public b/build/android/adb_run_chrome_public new file mode 100644 index 00000000000..bf150711442 --- /dev/null +++ b/build/android/adb_run_chrome_public @@ -0,0 +1,12 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
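+#
+# Example (assumes a Chrome APK, e.g. ChromePublic.apk, is already installed
+# on the attached device; the URL argument is optional):
+#   build/android/adb_run_chrome_public https://www.chromium.org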
+
+optional_url=$1
+
+adb shell am start \
+    -a android.intent.action.VIEW \
+    -n org.chromium.chrome/com.google.android.apps.chrome.Main \
+    ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_content_shell b/build/android/adb_run_content_shell
new file mode 100644
index 00000000000..3f01f3bf02f
--- /dev/null
+++ b/build/android/adb_run_content_shell
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
+adb shell am start \
+    -a android.intent.action.VIEW \
+    -n org.chromium.content_shell_apk/.ContentShellActivity \
+    ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_mojo_shell b/build/android/adb_run_mojo_shell
new file mode 100644
index 00000000000..b585e4a71f6
--- /dev/null
+++ b/build/android/adb_run_mojo_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+parameters=$2
+
+adb logcat -c
+adb shell am start -S \
+  -a android.intent.action.VIEW \
+  -n org.chromium.mojo_shell_apk/.MojoShellActivity \
+  ${parameters:+--esa parameters "$parameters"} \
+  ${optional_url:+-d "$optional_url"}
+adb logcat -s MojoShellApplication MojoShellActivity chromium
diff --git a/build/android/adb_run_system_webview_shell b/build/android/adb_run_system_webview_shell
new file mode 100644
index 00000000000..5d0c0e48e8f
--- /dev/null
+++ b/build/android/adb_run_system_webview_shell
@@ -0,0 +1,15 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs a 'mini-browser' using System WebView with an optional url as parameter.
+# SystemWebViewShell.apk should be installed for this to work.
+
+optional_url=$1
+
+adb shell am start \
+    -a android.intent.action.VIEW \
+    -n org.chromium.webview_shell/.WebViewBrowserActivity \
+    ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_system_webview_command_line b/build/android/adb_system_webview_command_line
new file mode 100644
index 00000000000..376b0b3d233
--- /dev/null
+++ b/build/android/adb_system_webview_command_line
@@ -0,0 +1,17 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current System WebView flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the System
+# WebView flags. For example:
+#   adb_system_webview_command_line --enable-webgl
+#
+# To remove all System WebView flags, pass an empty string for the flags:
+#   adb_system_webview_command_line ""
+
+exec $(dirname $0)/adb_command_line.py --device-path \
+    /data/local/tmp/webview-command-line "$@"
diff --git a/build/android/android.isolate b/build/android/android.isolate
new file mode 100644
index 00000000000..dfedc6f9383
--- /dev/null
+++ b/build/android/android.isolate
@@ -0,0 +1,29 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
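+
+# A sketch of how a .isolate like this one is typically consumed; the exact
+# command and flags are assumptions based on the swarming client, not defined
+# by this file:
+#   python tools/swarming_client/isolate.py check \
+#       --isolate build/android/android.isolate \
+#       --isolated out/Release/android.isolated \
+#       --path-variable PRODUCT_DIR out/Release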
+{ + 'variables': { + 'files': [ + '../../build/util/lib/common/', + '../../third_party/android_tools/sdk/build-tools/', + '../../third_party/android_tools/sdk/platform-tools/', + '../../third_party/appurify-python/', + '../../third_party/catapult/', + '../../third_party/requests/', + '../../tools/swarming_client/', + '<(PRODUCT_DIR)/icudtl.dat', + '<(PRODUCT_DIR)/lib.java/chromium_commands.dex.jar', + '<(PRODUCT_DIR)/host_forwarder', + '<(PRODUCT_DIR)/forwarder_dist/', + '<(PRODUCT_DIR)/md5sum_bin_host', + '<(PRODUCT_DIR)/md5sum_dist/', + 'devil_chromium.json', + 'devil_chromium.py', + 'gyp/util/', + 'incremental_install/', + 'lighttpd_server.py', + 'pylib/', + 'test_runner.py', + ] + } +} diff --git a/build/android/android_lint_cache.gyp b/build/android/android_lint_cache.gyp new file mode 100644 index 00000000000..72b9e9e0099 --- /dev/null +++ b/build/android/android_lint_cache.gyp @@ -0,0 +1,51 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'targets': [ + { + # This target runs a functionally empty lint to create or update the + # API versions cache if necessary. This prevents racy creation of the + # cache while linting java targets in lint_action.gypi. + 'target_name': 'android_lint_cache', + 'type': 'none', + 'actions': [ + { + 'action_name': 'prepare_android_lint_cache', + 'message': 'Preparing Android lint cache', + 'variables': { + 'android_lint_cache_stamp': '<(PRODUCT_DIR)/android_lint_cache/android_lint_cache.stamp', + 'android_manifest_path': '<(DEPTH)/build/android/AndroidManifest.xml', + 'result_path': '<(PRODUCT_DIR)/android_lint_cache/result.xml', + 'platform_xml_path': '<(android_sdk_root)/platform-tools/api/api-versions.xml', + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/lint.py', + '<(android_manifest_path)', + '<(platform_xml_path)', + ], + 'outputs': [ + '<(android_lint_cache_stamp)', + '<(result_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/lint.py', + '--lint-path', '<(android_sdk_root)/tools/lint', + '--cache-dir', '<(PRODUCT_DIR)/android_lint_cache', + '--android-sdk-version=<(android_sdk_version)', + '--platform-xml-path', '<(platform_xml_path)', + '--manifest-path', '<(android_manifest_path)', + '--product-dir', '<(PRODUCT_DIR)', + '--result-path', '<(result_path)', + '--stamp', '<(android_lint_cache_stamp)', + '--create-cache', + '--silent', + '--enable' + ], + }, + ], + }, + ], +} diff --git a/build/android/android_no_jni_exports.lst b/build/android/android_no_jni_exports.lst new file mode 100644 index 00000000000..ffc6cf7028c --- /dev/null +++ b/build/android/android_no_jni_exports.lst @@ -0,0 +1,17 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This script makes all JNI exported symbols local, to prevent the JVM from +# being able to find them, enforcing use of manual JNI function registration. +# This is used for all Android binaries by default, unless they explicitly state +# that they want JNI exported symbols to remain visible, as we need to ensure +# the manual registration path is correct to maintain compatibility with the +# crazy linker. 
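+#
+# To spot-check the effect (illustrative; any built .so works, libchrome.so is
+# just an example name), dump the dynamic symbol table and confirm no Java_*
+# symbols remain exported:
+#   nm --dynamic libchrome.so | grep Java_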
+# Check ld version script manual:
+# https://sourceware.org/binutils/docs-2.24/ld/VERSION.html#VERSION
+
+{
+  local:
+    Java_*;
+};
diff --git a/build/android/ant/apk-package.xml b/build/android/ant/apk-package.xml
new file mode 100644
index 00000000000..cb795609181
--- /dev/null
+++ b/build/android/ant/apk-package.xml
@@ -0,0 +1,125 @@
diff --git a/build/android/ant/chromium-debug.keystore b/build/android/ant/chromium-debug.keystore
new file mode 100644
index 00000000000..67eb0aa34c5
Binary files /dev/null and b/build/android/ant/chromium-debug.keystore differ
diff --git a/build/android/ant/empty/res/.keep b/build/android/ant/empty/res/.keep
new file mode 100644
index 00000000000..1fd038b8cf3
--- /dev/null
+++ b/build/android/ant/empty/res/.keep
@@ -0,0 +1,2 @@
+# This empty res folder can be passed to aapt while building Java libraries or
+# APKs that don't have any resources.
diff --git a/build/android/apkbuilder_action.gypi b/build/android/apkbuilder_action.gypi
new file mode 100644
index 00000000000..e073e9bdbec
--- /dev/null
+++ b/build/android/apkbuilder_action.gypi
@@ -0,0 +1,84 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is a helper to java_apk.gypi. It should be used to create an
+# action that runs ApkBuilder via ANT.
+#
+# Required variables:
+#   apk_name - File name (minus path & extension) of the output apk.
+#   apk_path - Path to output apk.
+#   package_input_paths - Late-evaluated list of resource zips.
+#   native_libs_dir - Path to lib/ directory to use. Set to an empty directory
+#     if no native libs are needed.
+# Optional variables:
+#   has_code - Whether to include classes.dex in the apk.
+#   dex_path - Path to classes.dex. Used only when has_code=1.
+#   extra_inputs - List of extra action inputs.
+{
+  'variables': {
+    'variables': {
+      'has_code%': 1,
+    },
+    'conditions': [
+      ['has_code == 0', {
+        'has_code_str': 'false',
+      }, {
+        'has_code_str': 'true',
+      }],
+    ],
+    'has_code%': '<(has_code)',
+    'extra_inputs%': [],
+    # Write the inputs list to a file, so that its mtime is updated when
+    # the list of inputs changes.
+ 'inputs_list_file': '>|(apk_package.<(_target_name).<(apk_name).gypcmd >@(package_input_paths))', + 'resource_packaged_apk_name': '<(apk_name)-resources.ap_', + 'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)', + }, + 'action_name': 'apkbuilder_<(apk_name)', + 'message': 'Packaging <(apk_name)', + 'inputs': [ + '<(DEPTH)/build/android/ant/apk-package.xml', + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/ant.py', + '<(resource_packaged_apk_path)', + '<@(extra_inputs)', + '>@(package_input_paths)', + '>(inputs_list_file)', + ], + 'outputs': [ + '<(apk_path)', + ], + 'conditions': [ + ['has_code == 1', { + 'inputs': ['<(dex_path)'], + 'action': [ + '-DDEX_FILE_PATH=<(dex_path)', + ] + }], + ['enable_multidex == 1', { + 'action': [ + '-DMULTIDEX_ENABLED=1', + ] + }] + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/ant.py', + '--', + '-quiet', + '-DHAS_CODE=<(has_code_str)', + '-DANDROID_SDK_ROOT=<(android_sdk_root)', + '-DANDROID_SDK_TOOLS=<(android_sdk_tools)', + '-DRESOURCE_PACKAGED_APK_NAME=<(resource_packaged_apk_name)', + '-DNATIVE_LIBS_DIR=<(native_libs_dir)', + '-DAPK_NAME=<(apk_name)', + '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)', + '-DOUT_DIR=<(intermediate_dir)', + '-DUNSIGNED_APK_PATH=<(apk_path)', + '-DEMMA_INSTRUMENT=<(emma_instrument)', + '-DEMMA_DEVICE_JAR=<(emma_device_jar)', + '-Dbasedir=.', + '-buildfile', + '<(DEPTH)/build/android/ant/apk-package.xml', + ] +} diff --git a/build/android/asan_symbolize.py b/build/android/asan_symbolize.py new file mode 100644 index 00000000000..d709f7e2217 --- /dev/null +++ b/build/android/asan_symbolize.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import collections +import optparse +import os +import re +import sys + +from pylib import constants +from pylib.constants import host_paths + +# Uses symbol.py from third_party/android_platform, not python's. 
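+# For reference, the _RE_ASAN pattern below matches ASan stack frames from
+# logcat. A hypothetical matching line (made up for illustration):
+#   I/chromium( 1234): #0 0x7f3a2b1c  (/system/lib/libchrome.so+0x123abc)
+# The groups capture the log prefix, frame number (#0), frame address, library
+# path, and the library-relative offset that is then symbolized.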
+with host_paths.SysPath( + host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH, + position=0): + import symbol + + +_RE_ASAN = re.compile(r'(.*?)(#\S*?)\s+(\S*?)\s+\((.*?)\+(.*?)\)') + +def _ParseAsanLogLine(line): + m = re.match(_RE_ASAN, line) + if not m: + return None + return { + 'prefix': m.group(1), + 'library': m.group(4), + 'pos': m.group(2), + 'rel_address': '%08x' % int(m.group(5), 16), + } + + +def _FindASanLibraries(): + asan_lib_dir = os.path.join(host_paths.DIR_SOURCE_ROOT, + 'third_party', 'llvm-build', + 'Release+Asserts', 'lib') + asan_libs = [] + for src_dir, _, files in os.walk(asan_lib_dir): + asan_libs += [os.path.relpath(os.path.join(src_dir, f)) + for f in files + if f.endswith('.so')] + return asan_libs + + +def _TranslateLibPath(library, asan_libs): + for asan_lib in asan_libs: + if os.path.basename(library) == os.path.basename(asan_lib): + return '/' + asan_lib + # pylint: disable=no-member + return symbol.TranslateLibPath(library) + + +def _Symbolize(asan_input): + asan_libs = _FindASanLibraries() + libraries = collections.defaultdict(list) + asan_lines = [] + for asan_log_line in [a.rstrip() for a in asan_input]: + m = _ParseAsanLogLine(asan_log_line) + if m: + libraries[m['library']].append(m) + asan_lines.append({'raw_log': asan_log_line, 'parsed': m}) + + all_symbols = collections.defaultdict(dict) + for library, items in libraries.iteritems(): + libname = _TranslateLibPath(library, asan_libs) + lib_relative_addrs = set([i['rel_address'] for i in items]) + # pylint: disable=no-member + info_dict = symbol.SymbolInformationForSet(libname, + lib_relative_addrs, + True) + if info_dict: + all_symbols[library]['symbols'] = info_dict + + for asan_log_line in asan_lines: + m = asan_log_line['parsed'] + if not m: + print asan_log_line['raw_log'] + continue + if (m['library'] in all_symbols and + m['rel_address'] in all_symbols[m['library']]['symbols']): + s = all_symbols[m['library']]['symbols'][m['rel_address']][0] + print '%s%s %s %s' % (m['prefix'], m['pos'], s[0], s[1]) + else: + print asan_log_line['raw_log'] + + +def main(): + parser = optparse.OptionParser() + parser.add_option('-l', '--logcat', + help='File containing adb logcat output with ASan stacks. ' + 'Use stdin if not specified.') + parser.add_option('--output-directory', + help='Path to the root build directory.') + options, _ = parser.parse_args() + + if options.output_directory: + constants.SetOutputDirectory(options.output_directory) + # Do an up-front test that the output directory is known. + constants.CheckOutputDirectory() + + if options.logcat: + asan_input = file(options.logcat, 'r') + else: + asan_input = sys.stdin + _Symbolize(asan_input.readlines()) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/build/android/avd.py b/build/android/avd.py new file mode 100644 index 00000000000..788ceaf053a --- /dev/null +++ b/build/android/avd.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Launches Android Virtual Devices with a set configuration for testing Chrome. + +The script will launch a specified number of Android Virtual Devices (AVD's). 
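+
+Example invocations (illustrative; the subcommands and flags are defined in
+main() below):
+  avd.py run --abi x86 -n 2   # launch two temporary x86 AVDs
+  avd.py wait -n 2            # block until the emulators finish booting
+  avd.py kill                 # shut down all running emulators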
+""" + +import argparse +import logging +import os +import re +import sys + +import devil_chromium +import install_emulator_deps + +from devil.utils import cmd_helper +from pylib import constants +from pylib.utils import emulator + +def main(argv): + # ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch + # the emulator to find the system images upon launch. + emulator_sdk = constants.ANDROID_SDK_ROOT + os.environ['ANDROID_SDK_ROOT'] = emulator_sdk + + arg_parser = argparse.ArgumentParser(description='AVD script.') + sub_parsers = arg_parser.add_subparsers(title='subparser', dest='command') + sub_parsers.add_parser( + 'kill', help='Shutdown all existing emulators') + sub_parsers.add_parser( + 'delete', help='Deleting all the avd files') + wait_parser = sub_parsers.add_parser( + 'wait', help='Wait for emulators to finish booting') + wait_parser.add_argument('-n', '--num', dest='wait_num', + help='Number of emulators to wait for', type=int, + default=1) + run_parser = sub_parsers.add_parser('run', help='Run emulators') + run_parser.add_argument('--name', help='Optinaly, name of existing AVD to ' + 'launch. If not specified, AVD\'s will be created') + run_parser.add_argument('-n', '--num', dest='emulator_count', + help='Number of emulators to launch (default is 1).', + type=int, default='1') + run_parser.add_argument('--abi', default='x86', + help='Platform of emulators to launch (x86 default)') + run_parser.add_argument('--api-level', dest='api_level', + help='API level for the image', + type=int, default=constants.ANDROID_SDK_VERSION) + run_parser.add_argument('--sdcard-size', dest='sdcard_size', + default=emulator.DEFAULT_SDCARD_SIZE, + help='Set sdcard size of the emulators' + ' e.g. --sdcard-size=512M') + run_parser.add_argument('--partition-size', dest='partition_size', + default=emulator.DEFAULT_STORAGE_SIZE, + help='Default internal storage size' + ' e.g. --partition-size=1024M') + run_parser.add_argument('--launch-without-kill', action='store_false', + dest='kill_and_launch', default=True, + help='Kill all emulators at launch') + run_parser.add_argument('--enable-kvm', action='store_true', + dest='enable_kvm', default=False, + help='Enable kvm for faster x86 emulator run') + run_parser.add_argument('--headless', action='store_true', + dest='headless', default=False, + help='Launch an emulator with no UI.') + + arguments = arg_parser.parse_args(argv[1:]) + + logging.root.setLevel(logging.INFO) + + devil_chromium.Initialize() + + if arguments.command == 'kill': + logging.info('Killing all existing emulator and existing the program') + emulator.KillAllEmulators() + elif arguments.command == 'delete': + emulator.DeleteAllTempAVDs() + elif arguments.command == 'wait': + emulator.WaitForEmulatorLaunch(arguments.wait_num) + else: + # Check if SDK exist in ANDROID_SDK_ROOT + if not install_emulator_deps.CheckSDK(): + raise Exception('Emulator SDK not installed in %s' + % constants.ANDROID_SDK_ROOT) + + # Check if KVM is enabled for x86 AVD + if arguments.abi == 'x86': + if not install_emulator_deps.CheckKVM(): + logging.warning('KVM is not installed or enabled') + arguments.enable_kvm = False + + # Check if targeted system image exist + if not install_emulator_deps.CheckSystemImage(arguments.abi, + arguments.api_level): + logging.critical('ERROR: System image for %s AVD not installed. Run ' + 'install_emulator_deps.py', arguments.abi) + return 1 + + # If AVD is specified, check that the SDK has the required target. 
+    # If not, check that the SDK has the desired target for the temporary
+    # AVDs.
+    api_level = arguments.api_level
+    if arguments.name:
+      android = os.path.join(constants.ANDROID_SDK_ROOT, 'tools',
+                             'android')
+      avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd'])
+      names = re.findall(r'Name: (\w+)', avds_output)
+      api_levels = re.findall(r'API level (\d+)', avds_output)
+      try:
+        avd_index = names.index(arguments.name)
+      except ValueError:
+        logging.critical('ERROR: Specified AVD %s does not exist.',
+                         arguments.name)
+        return 1
+      api_level = int(api_levels[avd_index])
+
+    if not install_emulator_deps.CheckSDKPlatform(api_level):
+      logging.critical('ERROR: Emulator SDK missing required target for '
+                       'API %d. Run install_emulator_deps.py.', api_level)
+      return 1
+
+    if arguments.name:
+      emulator.LaunchEmulator(
+          arguments.name,
+          arguments.abi,
+          enable_kvm=arguments.enable_kvm,
+          kill_and_launch=arguments.kill_and_launch,
+          sdcard_size=arguments.sdcard_size,
+          storage_size=arguments.partition_size,
+          headless=arguments.headless
+      )
+    else:
+      emulator.LaunchTempEmulators(
+          arguments.emulator_count,
+          arguments.abi,
+          arguments.api_level,
+          enable_kvm=arguments.enable_kvm,
+          kill_and_launch=arguments.kill_and_launch,
+          sdcard_size=arguments.sdcard_size,
+          storage_size=arguments.partition_size,
+          wait_for_boot=True,
+          headless=arguments.headless
+      )
+    logging.info('Emulator launch completed')
+    return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/copy_ex.gypi b/build/android/copy_ex.gypi
new file mode 100644
index 00000000000..8c49d247c34
--- /dev/null
+++ b/build/android/copy_ex.gypi
@@ -0,0 +1,79 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Copy files to a directory with the option to clear directory first.
+#
+# Variables:
+#   dest_path - directory to copy files to.
+#   src_files - optional, a list of files to copy without changing name.
+#   clear - optional, if set, clear directory before copying files.
+#   renaming_sources - optional, a list of files to copy and rename.
+#   renaming_destinations - optional, a list of new file names corresponding to
+#                           renaming_sources.
+#
+# Example
+#  {
+#    'target_name': 'copy_assets',
+#    'type': 'none',
+#    'variables': {
+#      'dest_path': 'apk/assets/path',
+#      'src_files': ['path1/fr.pak'],
+#      'clear': 1,
+#      # path2/old1 and path3/old2 will be copied to apk/assets/path and
+#      # renamed to new1, new2 respectively.
+#      'renaming_sources': ['path2/old1', 'path3/old2'],
+#      'renaming_destinations': ['new1', 'new2'],
+#    },
+#    'includes': [ '../build/android/copy_ex.gypi' ],
+#  },
+#
+{
+  'variables': {
+    'clear%': 0,
+    'src_files%': [],
+    'renaming_sources%': [],
+    'renaming_destinations%': [],
+  },
+  'actions': [{
+    'action_name': '<(_target_name)_copy_ex',
+    'variables': {
+      'additional_args': [],
+      'local_inputs': [],
+      'dest_files': [],
+      'conditions': [
+        ['clear == 1', {
+          'additional_args': ['--clear'],
+        }],
+        ['src_files != []', {
+          'additional_args': ['--files', '<(src_files)'],
+          'local_inputs': ['<@(src_files)'],
+          # src_files will be used to generate the destination file paths for
+          # outputs.
+ 'dest_files': ['<@(src_files)'], + }], + ['renaming_sources != []', { + 'additional_args': [ + '--renaming-sources', '<(renaming_sources)', + '--renaming-destinations', '<(renaming_destinations)' + ], + 'local_inputs': ['<@(renaming_sources)'], + 'dest_files': ['<@(renaming_destinations)'], + }], + ], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/copy_ex.py', + '<(DEPTH)/build/android/gyp/generate_copy_ex_outputs.py', + '<@(local_inputs)', + ], + 'outputs': [ + '@(inputs)', + ], + 'outputs': [ + '<(output_apk_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/create_standalone_apk.py', + '--libraries-top-dir=<(libraries_top_dir)', + '--input-apk-path=<(input_apk_path)', + '--output-apk-path=<(output_apk_path)', + ], +} diff --git a/build/android/developer_recommended_flags.gypi b/build/android/developer_recommended_flags.gypi new file mode 100644 index 00000000000..79c201deccb --- /dev/null +++ b/build/android/developer_recommended_flags.gypi @@ -0,0 +1,61 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This is the set of recommended gyp variable settings for Chrome for Android development. +# +# These can be used by copying this file to $CHROME_SRC/chrome/supplement.gypi. +# +# Even better, create chrome/supplement.gypi containing the following: +# { +# 'includes': [ '../build/android/developer_recommended_flags.gypi' ] +# } +# and you'll get new settings automatically. +# When using this method, you can override individual settings by setting them unconditionally (with +# no %) in chrome/supplement.gypi. +# I.e. to disable gyp_managed_install but use everything else: +# { +# 'variables': { +# 'gyp_managed_install': 0, +# }, +# 'includes': [ '../build/android/developer_recommended_flags.gypi' ] +# } + +{ + 'variables': { + 'variables': { + # Set component to 'shared_library' to enable the component build. This builds native code as + # many small shared libraries instead of one monolithic library. This slightly reduces the time + # required for incremental builds. + 'component%': 'shared_library', + }, + 'component%': '<(component)', + + # When gyp_managed_install is set to 1, building an APK will install that APK on the connected + # device(/emulator). To install on multiple devices (or onto a new device), build the APK once + # with each device attached. This greatly reduces the time required for incremental builds. + # + # This comes with some caveats: + # Only works with a single device connected (it will print a warning if + # zero or multiple devices are attached). + # Device must be flashed with a user-debug unsigned Android build. + # Some actions are always run (i.e. ninja will never say "no work to do"). + 'gyp_managed_install%': 1, + + # With gyp_managed_install, we do not necessarily need a standalone APK. + # When create_standalone_apk is set to 1, we will build a standalone APK + # anyway. For even faster builds, you can set create_standalone_apk to 0. + 'create_standalone_apk%': 1, + + # Set clang to 1 to use the clang compiler. Clang has much (much, much) better warning/error + # messages than gcc. + # TODO(cjhopman): Enable this when http://crbug.com/156420 is addressed. Until then, users can + # set clang to 1, but Android stack traces will sometimes be incomplete. + #'clang%': 1, + + # Set fastbuild to 1 to build with less debugging information. This can greatly decrease linking + # time. 
The downside is that stack traces will be missing useful information (like line + # numbers). + #'fastbuild%': 1, + }, +} diff --git a/build/android/devil_chromium.json b/build/android/devil_chromium.json new file mode 100644 index 00000000000..c1157fae9ef --- /dev/null +++ b/build/android/devil_chromium.json @@ -0,0 +1,69 @@ +{ + "config_type": "BaseConfig", + "dependencies": { + "aapt": { + "file_info": { + "linux2_x86_64": { + "local_paths": [ + "../../third_party/android_tools/sdk/build-tools/23.0.1/aapt" + ] + } + } + }, + "adb": { + "file_info": { + "linux2_x86_64": { + "local_paths": [ + "../../third_party/android_tools/sdk/platform-tools/adb" + ] + } + } + }, + "android_sdk": { + "file_info": { + "linux2_x86_64": { + "local_paths": [ + "../../third_party/android_tools/sdk" + ] + } + } + }, + "dexdump": { + "file_info": { + "linux2_x86_64": { + "local_paths": [ + "../../third_party/android_tools/sdk/build-tools/23.0.1/dexdump" + ] + } + } + }, + "split-select": { + "file_info": { + "linux2_x86_64": { + "local_paths": [ + "../../third_party/android_tools/sdk/build-tools/23.0.1/split-select" + ] + } + } + }, + "pymock": { + "file_info": { + "darwin_x86_64": { + "local_paths": [ + "../../third_party/pymock" + ] + }, + "linux2_x86_64": { + "local_paths": [ + "../../third_party/pymock" + ] + }, + "win32_AMD64": { + "local_paths": [ + "../../third_party/pymock" + ] + } + } + } + } +} diff --git a/build/android/devil_chromium.py b/build/android/devil_chromium.py new file mode 100644 index 00000000000..0e085d8abdb --- /dev/null +++ b/build/android/devil_chromium.py @@ -0,0 +1,163 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Configures devil for use in chromium.""" + +import os +import sys + +from pylib.constants import host_paths + +if host_paths.DEVIL_PATH not in sys.path: + sys.path.append(host_paths.DEVIL_PATH) + +from devil import devil_env + +_DEVIL_CONFIG = os.path.abspath( + os.path.join(os.path.dirname(__file__), 'devil_chromium.json')) + +_DEVIL_BUILD_PRODUCT_DEPS = { + 'forwarder_device': [ + { + 'platform': 'android', + 'arch': 'armeabi-v7a', + 'name': 'forwarder_dist', + }, + { + 'platform': 'android', + 'arch': 'arm64-v8a', + 'name': 'forwarder_dist', + }, + { + 'platform': 'android', + 'arch': 'mips', + 'name': 'forwarder_dist', + }, + { + 'platform': 'android', + 'arch': 'mips64', + 'name': 'forwarder_dist', + }, + { + 'platform': 'android', + 'arch': 'x86', + 'name': 'forwarder_dist', + }, + { + 'platform': 'android', + 'arch': 'x86_64', + 'name': 'forwarder_dist', + }, + ], + 'forwarder_host': [ + { + 'platform': 'linux2', + 'arch': 'x86_64', + 'name': 'host_forwarder', + }, + ], + 'md5sum_device': [ + { + 'platform': 'android', + 'arch': 'armeabi-v7a', + 'name': 'md5sum_dist', + }, + { + 'platform': 'android', + 'arch': 'arm64-v8a', + 'name': 'md5sum_dist', + }, + { + 'platform': 'android', + 'arch': 'mips', + 'name': 'md5sum_dist', + }, + { + 'platform': 'android', + 'arch': 'mips64', + 'name': 'md5sum_dist', + }, + { + 'platform': 'android', + 'arch': 'x86', + 'name': 'md5sum_dist', + }, + { + 'platform': 'android', + 'arch': 'x86_64', + 'name': 'md5sum_dist', + }, + ], + 'md5sum_host': [ + { + 'platform': 'linux2', + 'arch': 'x86_64', + 'name': 'md5sum_bin_host', + }, + ], +} + + +def Initialize(output_directory=None, custom_deps=None, adb_path=None): + """Initializes devil with chromium's binaries and third-party libraries. 
+ + This includes: + - Libraries: + - the android SDK ("android_sdk") + - pymock ("pymock") + - Build products: + - host & device forwarder binaries + ("forwarder_device" and "forwarder_host") + - host & device md5sum binaries ("md5sum_device" and "md5sum_host") + + Args: + output_directory: An optional path to the output directory. If not set, + no built dependencies are configured. + custom_deps: An optional dictionary specifying custom dependencies. + This should be of the form: + + { + 'dependency_name': { + 'platform': 'path', + ... + }, + ... + } + """ + + devil_dynamic_config = { + 'config_type': 'BaseConfig', + 'dependencies': {}, + } + if output_directory: + output_directory = os.path.abspath(output_directory) + devil_dynamic_config['dependencies'] = { + dep_name: { + 'file_info': { + '%s_%s' % (dep_config['platform'], dep_config['arch']): { + 'local_paths': [ + os.path.join(output_directory, dep_config['name']), + ], + } + for dep_config in dep_configs + } + } + for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.iteritems() + } + if custom_deps: + devil_dynamic_config['dependencies'].update(custom_deps) + if adb_path: + devil_dynamic_config['dependencies'].update({ + 'adb': { + 'file_info': { + devil_env.GetPlatform(): { + 'local_paths': [adb_path] + } + } + } + }) + + devil_env.config.Initialize( + configs=[devil_dynamic_config], config_files=[_DEVIL_CONFIG]) + diff --git a/build/android/dex_action.gypi b/build/android/dex_action.gypi new file mode 100644 index 00000000000..7d9638e035c --- /dev/null +++ b/build/android/dex_action.gypi @@ -0,0 +1,63 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide a rule that dexes +# compiled java files. If proguard_enabled == "true" and CONFIGURATION_NAME == +# "Release", then it will dex the proguard_enabled_input_path instead of the +# normal dex_input_paths/dex_generated_input_paths. +# +# To use this, create a gyp target with the following form: +# { +# 'action_name': 'some name for the action' +# 'actions': [ +# 'variables': { +# 'dex_input_paths': [ 'files to dex (when proguard is not used) and add to input paths' ], +# 'dex_generated_input_dirs': [ 'dirs that contain generated files to dex' ], +# +# # For targets that use proguard: +# 'proguard_enabled': 'true', +# 'proguard_enabled_input_path': 'path to dex when using proguard', +# }, +# 'includes': [ 'relative/path/to/dex_action.gypi' ], +# ], +# }, +# + +{ + 'message': 'Creating dex file: <(output_path)', + 'variables': { + 'dex_input_paths': [], + 'dex_generated_input_dirs': [], + 'proguard_enabled%': 'false', + # TODO(jbudorick): remove this once multidex is done. 
+    'debug_build_proguard_enabled%': 'false',
+    'proguard_enabled_input_path%': '',
+    'dex_no_locals%': 0,
+    'dex_additional_options': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/util/md5_check.py',
+    '<(DEPTH)/build/android/gyp/dex.py',
+    '>@(dex_input_paths)',
+  ],
+  'outputs': [
+    '<(output_path)',
+    '<(output_path).inputs',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/dex.py',
+    '--dex-path=<(output_path)',
+    '--android-sdk-tools=<(android_sdk_tools)',
+    '--output-directory=<(PRODUCT_DIR)',
+    '--configuration-name=<(CONFIGURATION_NAME)',
+    '--proguard-enabled=>(proguard_enabled)',
+    '--debug-build-proguard-enabled=>(debug_build_proguard_enabled)',
+    '--proguard-enabled-input-path=<(proguard_enabled_input_path)',
+    '--no-locals=>(dex_no_locals)',
+    '>@(dex_additional_options)',
+    '>@(dex_input_paths)',
+    '>@(dex_generated_input_dirs)',
+  ]
+}
diff --git a/build/android/disable_gcc_lto.gypi b/build/android/disable_gcc_lto.gypi
new file mode 100644
index 00000000000..a733c7aa0fc
--- /dev/null
+++ b/build/android/disable_gcc_lto.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to disable GCC LTO on a target.
+
+{
+  'target_conditions': [
+    ['_toolset=="target"', {
+      'conditions': [
+        ['OS=="android" and clang==0 and (use_lto==1 or use_lto_o2==1)', {
+          'cflags!': [
+            '-flto',
+            '-ffat-lto-objects',
+          ],
+        }],
+      ],
+    }],
+  ],
+}
diff --git a/build/android/docs/coverage.md b/build/android/docs/coverage.md
new file mode 100644
index 00000000000..0a179876c65
--- /dev/null
+++ b/build/android/docs/coverage.md
@@ -0,0 +1,32 @@
+# Android code coverage instructions
+
+These are instructions for collecting code coverage for Android
+instrumentation and JUnit tests.
+
+[TOC]
+
+## How EMMA coverage works
+
+In order to use EMMA code coverage, we need to create a build-time **.em**
+file and runtime **.ec** files. Then we need to process them using the
+build/android/generate_emma_html.py script.
+
+## How to collect EMMA coverage data
+
+1. Build your APK with the GN arg emma_coverage=true.
+   ```
+   gn args out-gn/Debug
+   > target_os = "android"
+   > emma_coverage = true
+   ```
+   By doing so, **.em** files will be created in out-gn/Debug.
+2. Run tests with the option `--coverage-dir <directory>` to specify where to
+   save the .ec file. For example, you can run chrome junit tests:
+   `out-gn/Debug/bin/run_chrome_junit_tests --coverage-dir /tmp/coverage`.
+3. Now we have both .em and .ec files. We can merge them and create an html
+   file using generate_emma_html.py. For example, generate_emma_html.py can
+   be called this way:
+   `build/android/generate_emma_html.py --coverage-dir /tmp/coverage/
+   --metadata-dir out-gn/Debug/ --output example.html`.
+   Then an example.html containing coverage info will be created:
+   `EMMA: writing [html] report to
+   [/example.html] …`
diff --git a/build/android/docs/lint.md b/build/android/docs/lint.md
new file mode 100644
index 00000000000..37f35502e5f
--- /dev/null
+++ b/build/android/docs/lint.md
@@ -0,0 +1,91 @@
+# Lint
+
+Android's [**lint**](http://developer.android.com/tools/help/lint.html) is a
+static analysis tool that Chromium uses to catch possible issues in Java code.
+
+[TOC]
+
+## How Chromium uses lint
+
+Chromium runs lint on a per-target basis for all targets using any of the
+following templates if they are marked as Chromium code (i.e.,
+`chromium_code = true`):
+
+ - `android_apk`
+ - `android_library`
+ - `instrumentation_test_apk`
+ - `unittest_apk`
+
+Chromium also runs lint on a per-target basis for all targets using any of the
+following templates if they are marked as Chromium code and they support
+Android (i.e., `supports_android = true`):
+
+ - `java_library`
+
+This is implemented in the
+[`android_lint`](https://code.google.com/p/chromium/codesearch#chromium/src/build/config/android/internal_rules.gni&q=android_lint%20file:internal_rules%5C.gni)
+gn template.
+
+## My code has a lint error
+
+If lint reports an issue in your code, there are several possible remedies.
+In descending order of preference:
+
+### Fix it
+
+While this isn't always the right response, fixing the lint error or warning
+should be the default.
+
+### Suppress it in code
+
+Android provides an annotation,
+[`@SuppressLint`](http://developer.android.com/reference/android/annotation/SuppressLint.html),
+that tells lint to ignore the annotated element. It can be used on classes,
+constructors, methods, parameters, fields, or local variables, though usage
+in Chromium is typically limited to the first three.
+
+Like many suppression annotations, `@SuppressLint` takes a value that tells
+**lint** what to ignore. It can be a single `String`:
+
+```java
+@SuppressLint("NewApi")
+public void foo() {
+    a.methodThatRequiresHighSdkLevel();
+}
+```
+
+It can also be a list of `String`s:
+
+```java
+@SuppressLint({
+        "NewApi",
+        "UseSparseArrays"
+        })
+public Map<Integer, Object> bar() {
+    Map<Integer, Object> shouldBeASparseArray = new HashMap<Integer, Object>();
+    another.methodThatRequiresHighSdkLevel(shouldBeASparseArray);
+    return shouldBeASparseArray;
+}
+```
+
+This is the preferred way of suppressing warnings in a limited scope.
+
+### Suppress it in the suppressions XML file
+
+**lint** can be given an XML configuration containing warnings or errors that
+should be ignored. Chromium's lint suppression XML file can be found in
+[`build/android/lint/suppressions.xml`](https://chromium.googlesource.com/chromium/src/+/master/build/android/lint/suppressions.xml).
+It can be updated to suppress current warnings by running:
+
+```bash
+$ python build/android/lint/suppress.py <result.xml path>
+```
+
+e.g., to suppress lint errors found in `media_java`:
+
+```bash
+$ python build/android/lint/suppress.py out/Debug/gen/media/base/android/media_java__lint/result.xml
+```
+
+**This mechanism should only be used for disabling warnings across the entire
+code base; class-specific lint warnings should be disabled inline.**
+
diff --git a/build/android/download_doclava.py b/build/android/download_doclava.py
new file mode 100644
index 00000000000..f9f9ea2f4c4
--- /dev/null
+++ b/build/android/download_doclava.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Minimal tool to download doclava from Google storage when building for
+Android."""
+
+import os
+import subprocess
+import sys
+
+
+# Its existence signifies an Android checkout.
+ANDROID_ONLY_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+                                os.pardir, os.pardir,
+                                'third_party', 'android_tools')
+
+
+def main():
+  # Some Windows bots inadvertently have third_party/android_tools installed,
+  # but are unable to run download_from_google_storage because depot_tools
+  # is not in their path, so avoid failure and bail.
+  if sys.platform == 'win32':
+    return 0
+  if not os.path.exists(ANDROID_ONLY_DIR):
+    return 0
+  subprocess.check_call([
+      'download_from_google_storage',
+      '--no_resume',
+      '--no_auth',
+      '--bucket', 'chromium-doclava',
+      '--extract',
+      '-s',
+      os.path.join('src', 'buildtools', 'android', 'doclava.tar.gz.sha1')])
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/emma_coverage_stats.py b/build/android/emma_coverage_stats.py
new file mode 100644
index 00000000000..20ec8eae468
--- /dev/null
+++ b/build/android/emma_coverage_stats.py
@@ -0,0 +1,479 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates incremental code coverage reports for Java code in Chromium.
+
+Usage:
+
+  build/android/emma_coverage_stats.py -v --out <output file path>
+    --emma-dir <EMMA HTML report directory>
+    --lines-for-coverage-file <path to file listing lines to cover>
+
+  Creates a JSON representation of the overall and file coverage stats and
+  saves this information to the specified output file.
+"""
+
+import argparse
+import collections
+import json
+import logging
+import os
+import re
+import sys
+from xml.etree import ElementTree
+
+import devil_chromium
+from devil.utils import run_tests_helper
+
+NOT_EXECUTABLE = -1
+NOT_COVERED = 0
+COVERED = 1
+PARTIALLY_COVERED = 2
+
+# Coverage information about a single line of code.
+LineCoverage = collections.namedtuple(
+    'LineCoverage',
+    ['lineno', 'source', 'covered_status', 'fractional_line_coverage'])
+
+
+class _EmmaHtmlParser(object):
+  """Encapsulates HTML file parsing operations.
+
+  This class contains all operations related to parsing HTML files that were
+  produced using the EMMA code coverage tool.
+
+  Example HTML:
+
+  Package links:
+    <A HREF="_files/1.html">org.chromium.chrome</A>
+    This is returned by the selector |XPATH_SELECT_PACKAGE_ELEMENTS|.
+
+  Class links:
+    <A HREF="1e.html">DoActivity.java</A>
+    This is returned by the selector |XPATH_SELECT_CLASS_ELEMENTS|.
+
+  Line coverage data:
+    <TR CLASS="p">
+      <TD CLASS="l">108</TD>
+      <TD TITLE="78% line coverage (7 out of 9)">
+        if (index < 0 || index >= mSelectors.size()) index = 0;</TD>
+    </TR>
+    <TR>
+      <TD CLASS="l">109</TD>
+      <TD></TD>
+    </TR>
+    <TR CLASS="c">
+      <TD CLASS="l">110</TD>
+      <TD>        if (mSelectors.get(index) != null) {</TD>
+    </TR>
+    <TR CLASS="z">
+      <TD CLASS="l">111</TD>
+      <TD>            for (int i = 0; i < mSelectors.size(); i++) {</TD>
+    </TR>
+    Each <TR> element is returned by the selector |XPATH_SELECT_LOC|.
+
+  We can parse this to get:
+    1. Line number
+    2. Line of source code
+    3. Coverage status (c, z, or p)
+    4. Fractional coverage value (% out of 100 if PARTIALLY_COVERED)
+  """
+  # Selector to match all <A> elements within the rows that are in the table
+  # that displays all of the different packages.
+  _XPATH_SELECT_PACKAGE_ELEMENTS = './/BODY/TABLE[4]/TR/TD/A'
+
+  # Selector to match all <A> elements within the rows that are in the table
+  # that displays all of the different classes within a package.
+  _XPATH_SELECT_CLASS_ELEMENTS = './/BODY/TABLE[3]/TR/TD/A'
+
+  # Selector to match all <TR> elements within the table containing Java source
+  # code in an EMMA HTML file.
+  _XPATH_SELECT_LOC = './/BODY/TABLE[4]/TR'
+
+  # Children of HTML elements are represented as a list in ElementTree. These
+  # constants represent list indices corresponding to relevant child elements.
+
+  # Child 1 contains percentage covered for a line.
+ _ELEMENT_PERCENT_COVERED = 1 + + # Child 1 contains the original line of source code. + _ELEMENT_CONTAINING_SOURCE_CODE = 1 + + # Child 0 contains the line number. + _ELEMENT_CONTAINING_LINENO = 0 + + # Maps CSS class names to corresponding coverage constants. + _CSS_TO_STATUS = {'c': COVERED, 'p': PARTIALLY_COVERED, 'z': NOT_COVERED} + + # UTF-8 no break space. + _NO_BREAK_SPACE = '\xc2\xa0' + + def __init__(self, emma_file_base_dir): + """Initializes _EmmaHtmlParser. + + Args: + emma_file_base_dir: Path to the location where EMMA report files are + stored. Should be where index.html is stored. + """ + self._base_dir = emma_file_base_dir + self._emma_files_path = os.path.join(self._base_dir, '_files') + self._index_path = os.path.join(self._base_dir, 'index.html') + + def GetLineCoverage(self, emma_file_path): + """Returns a list of LineCoverage objects for the given EMMA HTML file. + + Args: + emma_file_path: String representing the path to the EMMA HTML file. + + Returns: + A list of LineCoverage objects. + """ + line_tr_elements = self._FindElements( + emma_file_path, self._XPATH_SELECT_LOC) + line_coverage = [] + for tr in line_tr_elements: + # Get the coverage status. + coverage_status = self._CSS_TO_STATUS.get(tr.get('CLASS'), NOT_EXECUTABLE) + # Get the fractional coverage value. + if coverage_status == PARTIALLY_COVERED: + title_attribute = (tr[self._ELEMENT_PERCENT_COVERED].get('TITLE')) + # Parse string that contains percent covered: "83% line coverage ...". + percent_covered = title_attribute.split('%')[0] + fractional_coverage = int(percent_covered) / 100.0 + else: + fractional_coverage = 1.0 + + # Get the line number. + lineno_element = tr[self._ELEMENT_CONTAINING_LINENO] + # Handles oddly formatted HTML (where there is an extra tag). + lineno = int(lineno_element.text or + lineno_element[self._ELEMENT_CONTAINING_LINENO].text) + # Get the original line of Java source code. + raw_source = tr[self._ELEMENT_CONTAINING_SOURCE_CODE].text + utf8_source = raw_source.encode('UTF-8') + source = utf8_source.replace(self._NO_BREAK_SPACE, ' ') + + line = LineCoverage(lineno, source, coverage_status, fractional_coverage) + line_coverage.append(line) + + return line_coverage + + def GetPackageNameToEmmaFileDict(self): + """Returns a dict mapping Java packages to EMMA HTML coverage files. + + Parses the EMMA index.html file to get a list of packages, then parses each + package HTML file to get a list of classes for that package, and creates + a dict with this info. + + Returns: + A dict mapping string representation of Java packages (with class + names appended) to the corresponding file paths of EMMA HTML files. + """ + # These elements contain each package name and the path of the file + # where all classes within said package are listed. + package_link_elements = self._FindElements( + self._index_path, self._XPATH_SELECT_PACKAGE_ELEMENTS) + # Maps file path of package directory (EMMA generated) to package name. + # Example: emma_dir/f.html: org.chromium.chrome. + package_links = { + os.path.join(self._base_dir, link.attrib['HREF']): link.text + for link in package_link_elements if 'HREF' in link.attrib + } + + package_to_emma = {} + for package_emma_file_path, package_name in package_links.iteritems(): + # These elements contain each class name in the current package and + # the path of the file where the coverage info is stored for each class. 
+ coverage_file_link_elements = self._FindElements( + package_emma_file_path, self._XPATH_SELECT_CLASS_ELEMENTS) + + for class_name_element in coverage_file_link_elements: + emma_coverage_file_path = os.path.join( + self._emma_files_path, class_name_element.attrib['HREF']) + full_package_name = '%s.%s' % (package_name, class_name_element.text) + package_to_emma[full_package_name] = emma_coverage_file_path + + return package_to_emma + + # pylint: disable=no-self-use + def _FindElements(self, file_path, xpath_selector): + """Reads a HTML file and performs an XPath match. + + Args: + file_path: String representing the path to the HTML file. + xpath_selector: String representing xpath search pattern. + + Returns: + A list of ElementTree.Elements matching the given XPath selector. + Returns an empty list if there is no match. + """ + with open(file_path) as f: + file_contents = f.read().decode('ISO-8859-1').encode('UTF-8') + root = ElementTree.fromstring(file_contents) + return root.findall(xpath_selector) + + +class _EmmaCoverageStats(object): + """Computes code coverage stats for Java code using the coverage tool EMMA. + + This class provides an API that allows users to capture absolute code coverage + and code coverage on a subset of lines for each Java source file. Coverage + reports are generated in JSON format. + """ + # Regular expression to get package name from Java package statement. + RE_PACKAGE_MATCH_GROUP = 'package' + RE_PACKAGE = re.compile(r'package (?P<%s>[\w.]*);' % RE_PACKAGE_MATCH_GROUP) + + def __init__(self, emma_file_base_dir, files_for_coverage): + """Initialize _EmmaCoverageStats. + + Args: + emma_file_base_dir: String representing the path to the base directory + where EMMA HTML coverage files are stored, i.e. parent of index.html. + files_for_coverage: A list of Java source code file paths to get EMMA + coverage for. + """ + self._emma_parser = _EmmaHtmlParser(emma_file_base_dir) + self._source_to_emma = self._GetSourceFileToEmmaFileDict(files_for_coverage) + + def GetCoverageDict(self, lines_for_coverage): + """Returns a dict containing detailed coverage information. + + Gets detailed coverage stats for each file specified in the + |lines_for_coverage| dict and the total incremental number of lines covered + and executable for all files in |lines_for_coverage|. + + Args: + lines_for_coverage: A dict mapping Java source file paths to lists of line + numbers. + + Returns: + A dict containing coverage stats for the given dict of files and lines. + Contains absolute coverage stats for each file, coverage stats for each + file's lines specified in |lines_for_coverage|, line by line coverage + for each file, and overall coverage stats for the lines specified in + |lines_for_coverage|. 
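+
+      Illustrative shape of the returned dict (values made up; the keys match
+      what GetCoverageDictForFile() and this method build below):
+        {
+          'files': {
+            'path/to/Foo.java': {
+              'absolute': {'covered': 40, 'total': 60},
+              'incremental': {'covered': 3, 'total': 5},
+              'source': [...],
+            },
+          },
+          'patch': {'incremental': {'covered': 3, 'total': 5}},
+        }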
+ """ + file_coverage = {} + for file_path, line_numbers in lines_for_coverage.iteritems(): + file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers) + if file_coverage_dict: + file_coverage[file_path] = file_coverage_dict + else: + logging.warning( + 'No code coverage data for %s, skipping.', file_path) + + covered_statuses = [s['incremental'] for s in file_coverage.itervalues()] + num_covered_lines = sum(s['covered'] for s in covered_statuses) + num_total_lines = sum(s['total'] for s in covered_statuses) + return { + 'files': file_coverage, + 'patch': { + 'incremental': { + 'covered': num_covered_lines, + 'total': num_total_lines + } + } + } + + def GetCoverageDictForFile(self, file_path, line_numbers): + """Returns a dict containing detailed coverage info for the given file. + + Args: + file_path: The path to the Java source file that we want to create the + coverage dict for. + line_numbers: A list of integer line numbers to retrieve additional stats + for. + + Returns: + A dict containing absolute, incremental, and line by line coverage for + a file. + """ + if file_path not in self._source_to_emma: + return None + emma_file = self._source_to_emma[file_path] + total_line_coverage = self._emma_parser.GetLineCoverage(emma_file) + incremental_line_coverage = [line for line in total_line_coverage + if line.lineno in line_numbers] + line_by_line_coverage = [ + { + 'line': line.source, + 'coverage': line.covered_status, + 'changed': line.lineno in line_numbers, + 'fractional_coverage': line.fractional_line_coverage, + } + for line in total_line_coverage + ] + total_covered_lines, total_lines = ( + self.GetSummaryStatsForLines(total_line_coverage)) + incremental_covered_lines, incremental_total_lines = ( + self.GetSummaryStatsForLines(incremental_line_coverage)) + + file_coverage_stats = { + 'absolute': { + 'covered': total_covered_lines, + 'total': total_lines + }, + 'incremental': { + 'covered': incremental_covered_lines, + 'total': incremental_total_lines + }, + 'source': line_by_line_coverage, + } + return file_coverage_stats + + # pylint: disable=no-self-use + def GetSummaryStatsForLines(self, line_coverage): + """Gets summary stats for a given list of LineCoverage objects. + + Args: + line_coverage: A list of LineCoverage objects. + + Returns: + A tuple containing the number of lines that are covered and the total + number of lines that are executable, respectively + """ + partially_covered_sum = 0 + covered_status_totals = {COVERED: 0, NOT_COVERED: 0, PARTIALLY_COVERED: 0} + for line in line_coverage: + status = line.covered_status + if status == NOT_EXECUTABLE: + continue + covered_status_totals[status] += 1 + if status == PARTIALLY_COVERED: + partially_covered_sum += line.fractional_line_coverage + + total_covered = covered_status_totals[COVERED] + partially_covered_sum + total_lines = sum(covered_status_totals.values()) + return total_covered, total_lines + + def _GetSourceFileToEmmaFileDict(self, files): + """Gets a dict used to correlate Java source files with EMMA HTML files. + + This method gathers the information needed to correlate EMMA HTML + files with Java source files. EMMA XML and plain text reports do not provide + line by line coverage data, so HTML reports must be used instead. + Unfortunately, the HTML files that are created are given garbage names + (i.e 1.html) so we need to manually correlate EMMA HTML files + with the original Java source files. + + Args: + files: A list of file names for which coverage information is desired. 
+ + Returns: + A dict mapping Java source file paths to EMMA HTML file paths. + """ + # Maps Java source file paths to package names. + # Example: /usr/code/file.java -> org.chromium.file.java. + source_to_package = {} + for file_path in files: + package = self.GetPackageNameFromFile(file_path) + if package: + source_to_package[file_path] = package + else: + logging.warning("Skipping %s because it doesn\'t have a package " + "statement.", file_path) + + # Maps package names to EMMA report HTML files. + # Example: org.chromium.file.java -> out/coverage/1a.html. + package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict() + # Finally, we have a dict mapping Java file paths to EMMA report files. + # Example: /usr/code/file.java -> out/coverage/1a.html. + source_to_emma = {source: package_to_emma[package] + for source, package in source_to_package.iteritems() + if package in package_to_emma} + return source_to_emma + + @staticmethod + def NeedsCoverage(file_path): + """Checks to see if the file needs to be analyzed for code coverage. + + Args: + file_path: A string representing path to the file. + + Returns: + True for Java files that exist, False for all others. + """ + if os.path.splitext(file_path)[1] == '.java' and os.path.exists(file_path): + return True + else: + logging.info('Skipping file %s, cannot compute code coverage.', file_path) + return False + + @staticmethod + def GetPackageNameFromFile(file_path): + """Gets the full package name including the file name for a given file path. + + Args: + file_path: String representing the path to the Java source file. + + Returns: + A string representing the full package name with file name appended or + None if there is no package statement in the file. + """ + with open(file_path) as f: + file_content = f.read() + package_match = re.search(_EmmaCoverageStats.RE_PACKAGE, file_content) + if package_match: + package = package_match.group(_EmmaCoverageStats.RE_PACKAGE_MATCH_GROUP) + file_name = os.path.basename(file_path) + return '%s.%s' % (package, file_name) + else: + return None + + +def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir): + """Generates a coverage report for a given set of lines. + + Writes the results of the coverage analysis to the file specified by + |out_file_path|. + + Args: + line_coverage_file: The path to a file which contains a dict mapping file + names to lists of line numbers. Example: {file1: [1, 2, 3], ...} means + that we should compute coverage information on lines 1 - 3 for file1. + out_file_path: A string representing the location to write the JSON report. + coverage_dir: A string representing the file path where the EMMA + HTML coverage files are located (i.e. folder where index.html is located). 
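A hypothetical invocation, with invented paths (the flags correspond to the argument parser defined in main() below):

    python emma_coverage_stats.py \
        --emma-dir out/coverage/html \
        --lines-for-coverage-file changed_lines.json \
        --out coverage_report.json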
+ """ + with open(line_coverage_file) as f: + potential_files_for_coverage = json.load(f) + + files_for_coverage = {f: lines + for f, lines in potential_files_for_coverage.iteritems() + if _EmmaCoverageStats.NeedsCoverage(f)} + + coverage_results = {} + if files_for_coverage: + code_coverage = _EmmaCoverageStats(coverage_dir, files_for_coverage.keys()) + coverage_results = code_coverage.GetCoverageDict(files_for_coverage) + else: + logging.info('No Java files requiring coverage were included in %s.', + line_coverage_file) + + with open(out_file_path, 'w+') as out_status_file: + json.dump(coverage_results, out_status_file) + + +def main(): + argparser = argparse.ArgumentParser() + argparser.add_argument('--out', required=True, type=str, + help='Report output file path.') + argparser.add_argument('--emma-dir', required=True, type=str, + help='EMMA HTML report directory.') + argparser.add_argument('--lines-for-coverage-file', required=True, type=str, + help='File containing a JSON object. Should contain a ' + 'dict mapping file names to lists of line numbers of ' + 'code for which coverage information is desired.') + argparser.add_argument('-v', '--verbose', action='count', + help='Print verbose log information.') + args = argparser.parse_args() + run_tests_helper.SetLogLevel(args.verbose) + devil_chromium.Initialize() + GenerateCoverageReport(args.lines_for_coverage_file, args.out, args.emma_dir) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/emma_coverage_stats_test.py b/build/android/emma_coverage_stats_test.py new file mode 100644 index 00000000000..30b409e2567 --- /dev/null +++ b/build/android/emma_coverage_stats_test.py @@ -0,0 +1,563 @@ +#!/usr/bin/python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=protected-access + +import unittest +from xml.etree import ElementTree + +import emma_coverage_stats +from pylib.constants import host_paths + +with host_paths.SysPath(host_paths.PYMOCK_PATH): + import mock # pylint: disable=import-error + +EMPTY_COVERAGE_STATS_DICT = { + 'files': {}, + 'patch': { + 'incremental': { + 'covered': 0, 'total': 0 + } + } +} + + +class _EmmaHtmlParserTest(unittest.TestCase): + """Tests for _EmmaHtmlParser. + + Uses modified EMMA report HTML that contains only the subset of tags needed + for test verification. + """ + + def setUp(self): + self.emma_dir = 'fake/dir/' + self.parser = emma_coverage_stats._EmmaHtmlParser(self.emma_dir) + self.simple_html = 'Test HTML' + self.index_html = ( + '' + '' + '' + '
' + '' + '
' + '' + '
' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '
nameclass, %method, %block, %line, %
org.chromium.chrome.browser0% (0/3)
org.chromium.chrome.browser.tabmodel0% (0/8)
' + '' + '
' + '' + '' + ) + self.package_1_class_list_html = ( + '' + '' + '' + '
' + '' + '
' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '
nameclass, %method, %block, %line, %
IntentHelper.java0% (0/3)0% (0/9)0% (0/97)0% (0/26)
' + '' + '
' + '' + '' + ) + self.package_2_class_list_html = ( + '' + '' + '' + '
' + '' + '
' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '
nameclass, %method, %block, %line, %
ContentSetting.java0% (0/1)
DevToolsServer.java
FileProviderHelper.java
ContextualMenuBar.java
AccessibilityUtil.java
NavigationPopup.java
' + '' + '
' + '' + '' + )
+    self.partially_covered_tr_html = (
+      '<TR CLASS="p">'
+        '<TD CLASS="l">108</TD>'
+        '<TD TITLE="78% line coverage (7 out of 9 instructions)">'
+          'if (index < 0 || index = mSelectors.size()) index = 0;</TD>'
+      '</TR>'
+    )
+    self.covered_tr_html = (
+      '<TR CLASS="c">'
+        '<TD CLASS="l">110</TD>'
+        '<TD> if (mSelectors.get(index) != null) {</TD>'
+      '</TR>'
+    )
+    self.not_executable_tr_html = (
+      '<TR>'
+        '<TD CLASS="l">109</TD>'
+        '<TD> </TD>'
+      '</TR>'
+    )
+    self.tr_with_extra_a_tag = (
+      '<TR CLASS="z">'
+        '<TD CLASS="l">'
+          '<A name="54">54</A>'
+        '</TD>'
+        '<TD> }</TD>'
+      '</TR>'
+    )
+
+  def testInit(self):
+    emma_dir = self.emma_dir
+    parser = emma_coverage_stats._EmmaHtmlParser(emma_dir)
+    self.assertEqual(parser._base_dir, emma_dir)
+    self.assertEqual(parser._emma_files_path, 'fake/dir/_files')
+    self.assertEqual(parser._index_path, 'fake/dir/index.html')
+
+  def testFindElements_basic(self):
+    read_values = [self.simple_html]
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TD')
+    self.assertIs(type(found), list)
+    self.assertIs(type(found[0]), ElementTree.Element)
+    self.assertEqual(found[0].text, 'Test HTML')
+
+  def testFindElements_multipleElements(self):
+    multiple_trs = self.not_executable_tr_html + self.covered_tr_html
+    read_values = ['<div>' + multiple_trs + '</div>
'] + found, _ = MockOpenForFunction(self.parser._FindElements, read_values, + file_path='fake', xpath_selector='.//TR') + self.assertEquals(2, len(found)) + + def testFindElements_noMatch(self): + read_values = [self.simple_html] + found, _ = MockOpenForFunction(self.parser._FindElements, read_values, + file_path='fake', xpath_selector='.//TR') + self.assertEqual(found, []) + + def testFindElements_badFilePath(self): + with self.assertRaises(IOError): + with mock.patch('os.path.exists', return_value=False): + self.parser._FindElements('fake', xpath_selector='//tr') + + def testGetPackageNameToEmmaFileDict_basic(self): + expected_dict = { + 'org.chromium.chrome.browser.AccessibilityUtil.java': + 'fake/dir/_files/23.html', + 'org.chromium.chrome.browser.ContextualMenuBar.java': + 'fake/dir/_files/22.html', + 'org.chromium.chrome.browser.tabmodel.IntentHelper.java': + 'fake/dir/_files/1e.html', + 'org.chromium.chrome.browser.ContentSetting.java': + 'fake/dir/_files/1f.html', + 'org.chromium.chrome.browser.DevToolsServer.java': + 'fake/dir/_files/20.html', + 'org.chromium.chrome.browser.NavigationPopup.java': + 'fake/dir/_files/24.html', + 'org.chromium.chrome.browser.FileProviderHelper.java': + 'fake/dir/_files/21.html'} + + read_values = [self.index_html, self.package_1_class_list_html, + self.package_2_class_list_html] + return_dict, mock_open = MockOpenForFunction( + self.parser.GetPackageNameToEmmaFileDict, read_values) + + self.assertDictEqual(return_dict, expected_dict) + self.assertEqual(mock_open.call_count, 3) + calls = [mock.call('fake/dir/index.html'), + mock.call('fake/dir/_files/1.html'), + mock.call('fake/dir/_files/0.html')] + mock_open.assert_has_calls(calls) + + def testGetPackageNameToEmmaFileDict_noPackageElements(self): + self.parser._FindElements = mock.Mock(return_value=[]) + return_dict = self.parser.GetPackageNameToEmmaFileDict() + self.assertDictEqual({}, return_dict) + + def testGetLineCoverage_status_basic(self): + line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html]) + self.assertEqual(line_coverage[0].covered_status, + emma_coverage_stats.COVERED) + + def testGetLineCoverage_status_statusMissing(self): + line_coverage = self.GetLineCoverageWithFakeElements( + [self.not_executable_tr_html]) + self.assertEqual(line_coverage[0].covered_status, + emma_coverage_stats.NOT_EXECUTABLE) + + def testGetLineCoverage_fractionalCoverage_basic(self): + line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html]) + self.assertEqual(line_coverage[0].fractional_line_coverage, 1.0) + + def testGetLineCoverage_fractionalCoverage_partial(self): + line_coverage = self.GetLineCoverageWithFakeElements( + [self.partially_covered_tr_html]) + self.assertEqual(line_coverage[0].fractional_line_coverage, 0.78) + + def testGetLineCoverage_lineno_basic(self): + line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html]) + self.assertEqual(line_coverage[0].lineno, 110) + + def testGetLineCoverage_lineno_withAlternativeHtml(self): + line_coverage = self.GetLineCoverageWithFakeElements( + [self.tr_with_extra_a_tag]) + self.assertEqual(line_coverage[0].lineno, 54) + + def testGetLineCoverage_source(self): + self.parser._FindElements = mock.Mock( + return_value=[ElementTree.fromstring(self.covered_tr_html)]) + line_coverage = self.parser.GetLineCoverage('fake_path') + self.assertEqual(line_coverage[0].source, + ' if (mSelectors.get(index) != null) {') + + def testGetLineCoverage_multipleElements(self): + line_coverage = 
self.GetLineCoverageWithFakeElements( + [self.covered_tr_html, self.partially_covered_tr_html, + self.tr_with_extra_a_tag]) + self.assertEqual(len(line_coverage), 3) + + def GetLineCoverageWithFakeElements(self, html_elements): + """Wraps GetLineCoverage so mock HTML can easily be used. + + Args: + html_elements: List of strings each representing an HTML element. + + Returns: + A list of LineCoverage objects. + """ + elements = [ElementTree.fromstring(string) for string in html_elements] + with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements', + return_value=elements): + return self.parser.GetLineCoverage('fake_path') + + +class _EmmaCoverageStatsTest(unittest.TestCase): + """Tests for _EmmaCoverageStats.""" + + def setUp(self): + self.good_source_to_emma = { + '/path/to/1/File1.java': '/emma/1.html', + '/path/2/File2.java': '/emma/2.html', + '/path/2/File3.java': '/emma/3.html' + } + self.line_coverage = [ + emma_coverage_stats.LineCoverage( + 1, '', emma_coverage_stats.COVERED, 1.0), + emma_coverage_stats.LineCoverage( + 2, '', emma_coverage_stats.COVERED, 1.0), + emma_coverage_stats.LineCoverage( + 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0), + emma_coverage_stats.LineCoverage( + 4, '', emma_coverage_stats.NOT_COVERED, 1.0), + emma_coverage_stats.LineCoverage( + 5, '', emma_coverage_stats.PARTIALLY_COVERED, 0.85), + emma_coverage_stats.LineCoverage( + 6, '', emma_coverage_stats.PARTIALLY_COVERED, 0.20) + ] + self.lines_for_coverage = [1, 3, 5, 6] + with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements', + return_value=[]): + self.simple_coverage = emma_coverage_stats._EmmaCoverageStats( + 'fake_dir', {}) + + def testInit(self): + coverage_stats = self.simple_coverage + self.assertIsInstance(coverage_stats._emma_parser, + emma_coverage_stats._EmmaHtmlParser) + self.assertIsInstance(coverage_stats._source_to_emma, dict) + + def testNeedsCoverage_withExistingJavaFile(self): + test_file = '/path/to/file/File.java' + with mock.patch('os.path.exists', return_value=True): + self.assertTrue( + emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file)) + + def testNeedsCoverage_withNonJavaFile(self): + test_file = '/path/to/file/File.c' + with mock.patch('os.path.exists', return_value=True): + self.assertFalse( + emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file)) + + def testNeedsCoverage_fileDoesNotExist(self): + test_file = '/path/to/file/File.java' + with mock.patch('os.path.exists', return_value=False): + self.assertFalse( + emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file)) + + def testGetPackageNameFromFile_basic(self): + test_file_text = """// Test Copyright + package org.chromium.chrome.browser; + import android.graphics.RectF;""" + result_package, _ = MockOpenForFunction( + emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile, + [test_file_text], file_path='/path/to/file/File.java') + self.assertEqual(result_package, 'org.chromium.chrome.browser.File.java') + + def testGetPackageNameFromFile_noPackageStatement(self): + result_package, _ = MockOpenForFunction( + emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile, + ['not a package statement'], file_path='/path/to/file/File.java') + self.assertIsNone(result_package) + + def testGetSummaryStatsForLines_basic(self): + covered, total = self.simple_coverage.GetSummaryStatsForLines( + self.line_coverage) + self.assertEqual(covered, 3.05) + self.assertEqual(total, 5) + + def testGetSourceFileToEmmaFileDict(self): + package_names = { + '/path/to/1/File1.java': 
'org.fake.one.File1.java', + '/path/2/File2.java': 'org.fake.File2.java', + '/path/2/File3.java': 'org.fake.File3.java' + } + package_to_emma = { + 'org.fake.one.File1.java': '/emma/1.html', + 'org.fake.File2.java': '/emma/2.html', + 'org.fake.File3.java': '/emma/3.html' + } + with mock.patch('os.path.exists', return_value=True): + coverage_stats = self.simple_coverage + coverage_stats._emma_parser.GetPackageNameToEmmaFileDict = mock.MagicMock( + return_value=package_to_emma) + coverage_stats.GetPackageNameFromFile = lambda x: package_names[x] + result_dict = coverage_stats._GetSourceFileToEmmaFileDict( + package_names.keys()) + self.assertDictEqual(result_dict, self.good_source_to_emma) + + def testGetCoverageDictForFile(self): + line_coverage = self.line_coverage + self.simple_coverage._emma_parser.GetLineCoverage = lambda x: line_coverage + self.simple_coverage._source_to_emma = {'/fake/src': 'fake/emma'} + lines = self.lines_for_coverage + expected_dict = { + 'absolute': { + 'covered': 3.05, + 'total': 5 + }, + 'incremental': { + 'covered': 2.05, + 'total': 3 + }, + 'source': [ + { + 'line': line_coverage[0].source, + 'coverage': line_coverage[0].covered_status, + 'changed': True, + 'fractional_coverage': line_coverage[0].fractional_line_coverage, + }, + { + 'line': line_coverage[1].source, + 'coverage': line_coverage[1].covered_status, + 'changed': False, + 'fractional_coverage': line_coverage[1].fractional_line_coverage, + }, + { + 'line': line_coverage[2].source, + 'coverage': line_coverage[2].covered_status, + 'changed': True, + 'fractional_coverage': line_coverage[2].fractional_line_coverage, + }, + { + 'line': line_coverage[3].source, + 'coverage': line_coverage[3].covered_status, + 'changed': False, + 'fractional_coverage': line_coverage[3].fractional_line_coverage, + }, + { + 'line': line_coverage[4].source, + 'coverage': line_coverage[4].covered_status, + 'changed': True, + 'fractional_coverage': line_coverage[4].fractional_line_coverage, + }, + { + 'line': line_coverage[5].source, + 'coverage': line_coverage[5].covered_status, + 'changed': True, + 'fractional_coverage': line_coverage[5].fractional_line_coverage, + } + ] + } + result_dict = self.simple_coverage.GetCoverageDictForFile( + '/fake/src', lines) + self.assertDictEqual(result_dict, expected_dict) + + def testGetCoverageDictForFile_emptyCoverage(self): + expected_dict = { + 'absolute': {'covered': 0, 'total': 0}, + 'incremental': {'covered': 0, 'total': 0}, + 'source': [] + } + self.simple_coverage._emma_parser.GetLineCoverage = lambda x: [] + self.simple_coverage._source_to_emma = {'fake_dir': 'fake/emma'} + result_dict = self.simple_coverage.GetCoverageDictForFile('fake_dir', {}) + self.assertDictEqual(result_dict, expected_dict) + + def testGetCoverageDictForFile_missingCoverage(self): + self.simple_coverage._source_to_emma = {} + result_dict = self.simple_coverage.GetCoverageDictForFile('fake_file', {}) + self.assertIsNone(result_dict) + + def testGetCoverageDict_basic(self): + files_for_coverage = { + '/path/to/1/File1.java': [1, 3, 4], + '/path/2/File2.java': [1, 2] + } + self.simple_coverage._source_to_emma = { + '/path/to/1/File1.java': 'emma_1', + '/path/2/File2.java': 'emma_2' + } + coverage_info = { + 'emma_1': [ + emma_coverage_stats.LineCoverage( + 1, '', emma_coverage_stats.COVERED, 1.0), + emma_coverage_stats.LineCoverage( + 2, '', emma_coverage_stats.PARTIALLY_COVERED, 0.5), + emma_coverage_stats.LineCoverage( + 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0), + emma_coverage_stats.LineCoverage( + 4, 
'', emma_coverage_stats.COVERED, 1.0) + ], + 'emma_2': [ + emma_coverage_stats.LineCoverage( + 1, '', emma_coverage_stats.NOT_COVERED, 1.0), + emma_coverage_stats.LineCoverage( + 2, '', emma_coverage_stats.COVERED, 1.0) + ] + } + expected_dict = { + 'files': { + '/path/2/File2.java': { + 'absolute': {'covered': 1, 'total': 2}, + 'incremental': {'covered': 1, 'total': 2}, + 'source': [{'changed': True, 'coverage': 0, + 'line': '', 'fractional_coverage': 1.0}, + {'changed': True, 'coverage': 1, + 'line': '', 'fractional_coverage': 1.0}] + }, + '/path/to/1/File1.java': { + 'absolute': {'covered': 2.5, 'total': 3}, + 'incremental': {'covered': 2, 'total': 2}, + 'source': [{'changed': True, 'coverage': 1, + 'line': '', 'fractional_coverage': 1.0}, + {'changed': False, 'coverage': 2, + 'line': '', 'fractional_coverage': 0.5}, + {'changed': True, 'coverage': -1, + 'line': '', 'fractional_coverage': 1.0}, + {'changed': True, 'coverage': 1, + 'line': '', 'fractional_coverage': 1.0}] + } + }, + 'patch': {'incremental': {'covered': 3, 'total': 4}} + } + # Return the relevant coverage info for each file. + self.simple_coverage._emma_parser.GetLineCoverage = ( + lambda x: coverage_info[x]) + result_dict = self.simple_coverage.GetCoverageDict(files_for_coverage) + self.assertDictEqual(result_dict, expected_dict) + + def testGetCoverageDict_noCoverage(self): + result_dict = self.simple_coverage.GetCoverageDict({}) + self.assertDictEqual(result_dict, EMPTY_COVERAGE_STATS_DICT) + + +class EmmaCoverageStatsGenerateCoverageReport(unittest.TestCase): + """Tests for GenerateCoverageReport.""" + + def testGenerateCoverageReport_missingJsonFile(self): + with self.assertRaises(IOError): + with mock.patch('os.path.exists', return_value=False): + emma_coverage_stats.GenerateCoverageReport('', '', '') + + def testGenerateCoverageReport_invalidJsonFile(self): + with self.assertRaises(ValueError): + with mock.patch('os.path.exists', return_value=True): + MockOpenForFunction(emma_coverage_stats.GenerateCoverageReport, [''], + line_coverage_file='', out_file_path='', + coverage_dir='') + + +def MockOpenForFunction(func, side_effects, **kwargs): + """Allows easy mock open and read for callables that open multiple files. + + Will mock the python open function in a way such that each time read() is + called on an open file, the next element in |side_effects| is returned. This + makes it easier to test functions that call open() multiple times. + + Args: + func: The callable to invoke once mock files are setup. + side_effects: A list of return values for each file to return once read. + Length of list should be equal to the number calls to open in |func|. + **kwargs: Keyword arguments to be passed to |func|. + + Returns: + A tuple containing the return value of |func| and the MagicMock object used + to mock all calls to open respectively. + """ + mock_open = mock.mock_open() + mock_open.side_effect = [mock.mock_open(read_data=side_effect).return_value + for side_effect in side_effects] + with mock.patch('__builtin__.open', mock_open): + return func(**kwargs), mock_open + + +if __name__ == '__main__': + # Suppress logging messages. + unittest.main(buffer=True) diff --git a/build/android/emma_instr_action.gypi b/build/android/emma_instr_action.gypi new file mode 100644 index 00000000000..0505eab21a6 --- /dev/null +++ b/build/android/emma_instr_action.gypi @@ -0,0 +1,46 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide a rule that +# instruments either java class files, or jars. + +{ + 'variables': { + 'input_path%': '', + 'output_path%': '', + 'stamp_path%': '', + 'extra_instr_args': [ + '--coverage-file=<(coverage_file)', + '--sources-list-file=<(sources_list_file)', + ], + 'emma_jar': '<(android_sdk_root)/tools/lib/emma.jar', + 'conditions': [ + ['emma_instrument != 0', { + 'extra_instr_args': [ + '--source-dirs=<(java_in_dir)/src >(additional_src_dirs) >(generated_src_dirs)', + '--src-root=<(DEPTH)', + '--emma-jar=<(emma_jar)', + '--filter-string=<(emma_filter)', + ], + 'instr_action': 'instrument_jar', + }, { + 'instr_action': 'copy', + 'extra_instr_args': [], + }] + ] + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/emma_instr.py', + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/pylib/utils/command_option_parser.py', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/emma_instr.py', + '<(instr_action)', + '--input-path=<(input_path)', + '--output-path=<(output_path)', + '--stamp=<(stamp_path)', + '<@(extra_instr_args)', + ] +} diff --git a/build/android/empty/src/.keep b/build/android/empty/src/.keep new file mode 100644 index 00000000000..0f710b673dd --- /dev/null +++ b/build/android/empty/src/.keep @@ -0,0 +1,6 @@ +This is a file that needs to live here until http://crbug.com/158155 has +been fixed. + +The ant build system requires that a src folder is always present, and for +some of our targets that is not the case. Giving it an empty src-folder works +nicely though. diff --git a/build/android/empty_proguard.flags b/build/android/empty_proguard.flags new file mode 100644 index 00000000000..53484fe8154 --- /dev/null +++ b/build/android/empty_proguard.flags @@ -0,0 +1 @@ +# Used for apk targets that do not need proguard. See build/java_apk.gypi. diff --git a/build/android/enable_asserts.py b/build/android/enable_asserts.py new file mode 100644 index 00000000000..b303edad9a5 --- /dev/null +++ b/build/android/enable_asserts.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Enables dalvik vm asserts in the android device.""" + +import argparse +import sys + +import devil_chromium +from devil.android import device_blacklist +from devil.android import device_utils + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument('--blacklist-file', help='Device blacklist JSON file.') + + set_asserts_group = parser.add_mutually_exclusive_group(required=True) + set_asserts_group.add_argument( + '--enable_asserts', dest='set_asserts', action='store_true', + help='Sets the dalvik.vm.enableassertions property to "all"') + set_asserts_group.add_argument( + '--disable_asserts', dest='set_asserts', action='store_false', + help='Removes the dalvik.vm.enableassertions property') + + args = parser.parse_args() + + devil_chromium.Initialize() + + blacklist = (device_blacklist.Blacklist(args.blacklist_file) + if args.blacklist_file + else None) + + # TODO(jbudorick): Accept optional serial number and run only for the + # specified device when present. 
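The parallel wrapper below fans the per-device work out to every healthy device; a serial equivalent using the same devil API would look roughly like this sketch:

    for device in device_utils.DeviceUtils.HealthyDevices(blacklist):
      if device.SetJavaAsserts(args.set_asserts):
        # Restart the Android runtime so the property change takes effect.
        device.RunShellCommand('stop')
        device.RunShellCommand('start')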
+  devices = device_utils.DeviceUtils.parallel(
+      device_utils.DeviceUtils.HealthyDevices(blacklist))
+
+  def set_java_asserts_and_restart(device):
+    if device.SetJavaAsserts(args.set_asserts):
+      device.RunShellCommand('stop')
+      device.RunShellCommand('start')
+
+  devices.pMap(set_java_asserts_and_restart)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/envsetup.sh b/build/android/envsetup.sh
new file mode 100644
index 00000000000..0545330bb2b
--- /dev/null
+++ b/build/android/envsetup.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Sets up the environment for building Chromium on Android.
+
+# Make sure we're being sourced (possibly by another script). Check for bash
+# since zsh sets $0 when sourcing.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+
+  local CURRENT_DIR="$(readlink -f "${SCRIPT_DIR}/../../")"
+  if [[ -z "${CHROME_SRC}" ]]; then
+    # If $CHROME_SRC was not set, assume the current directory is CHROME_SRC.
+    local CHROME_SRC="${CURRENT_DIR}"
+  fi
+
+  if [[ "${CURRENT_DIR/"${CHROME_SRC}"/}" == "${CURRENT_DIR}" ]]; then
+    # If the current directory is not inside $CHROME_SRC, $CHROME_SRC may
+    # point at a different source tree. If $CHROME_SRC was set correctly and
+    # we are in the right directory, "${CURRENT_DIR/"${CHROME_SRC}"/}" will
+    # be "". Otherwise, it will equal "${CURRENT_DIR}".
+    echo "Warning: Current directory is outside of CHROME_SRC; it may not be \
+the one you want."
+    echo "${CHROME_SRC}"
+  fi
+
+  # Allow the caller to override a few environment variables. If any of them is
+  # unset, we default to a sane value that's known to work. This allows for
+  # experimentation with a custom SDK.
+  if [[ -z "${ANDROID_SDK_ROOT}" || ! -d "${ANDROID_SDK_ROOT}" ]]; then
+    local ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_tools/sdk/"
+  fi
+
+  # Add Android SDK tools to the system path.
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools
+
+  # Add Android utility tools to the system path.
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/tools/
+
+  # Add Chromium Android development scripts to the system path.
+  # Must come after CHROME_SRC is set.
+  export PATH=$PATH:${CHROME_SRC}/build/android
+
+  export ENVSETUP_GYP_CHROME_SRC=${CHROME_SRC}  # TODO(thakis): Remove.
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
+
+android_gyp() {
+  echo "Please call build/gyp_chromium instead. android_gyp is going away."
+  "${ENVSETUP_GYP_CHROME_SRC}/build/gyp_chromium" --depth="${ENVSETUP_GYP_CHROME_SRC}" --check "$@"
+}
diff --git a/build/android/finalize_apk_action.gypi b/build/android/finalize_apk_action.gypi
new file mode 100644
index 00000000000..644f9e8ecc5
--- /dev/null
+++ b/build/android/finalize_apk_action.gypi
@@ -0,0 +1,49 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# signs and zipaligns an APK.
+# +# To use this, create a gyp action with the following form: +# { +# 'action_name': 'some descriptive action name', +# 'variables': { +# 'input_apk_path': 'relative/path/to/input.apk', +# 'output_apk_path': 'relative/path/to/output.apk', +# }, +# 'includes': [ '../../build/android/finalize_apk_action.gypi' ], +# }, +# + +{ + 'message': 'Signing/aligning <(_target_name) APK: <(input_apk_path)', + 'variables': { + 'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore', + 'keystore_name%': 'chromiumdebugkey', + 'keystore_password%': 'chromium', + 'zipalign_path%': '<(android_sdk_tools)/zipalign', + 'rezip_apk_jar_path%': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar', + 'load_library_from_zip%': 0, + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/finalize_apk.py', + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(keystore_path)', + '<(input_apk_path)', + ], + 'outputs': [ + '<(output_apk_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/finalize_apk.py', + '--zipalign-path=<(zipalign_path)', + '--unsigned-apk-path=<(input_apk_path)', + '--final-apk-path=<(output_apk_path)', + '--key-path=<(keystore_path)', + '--key-name=<(keystore_name)', + '--key-passwd=<(keystore_password)', + '--load-library-from-zip=<(load_library_from_zip)', + '--rezip-apk-jar-path=<(rezip_apk_jar_path)', + ], +} diff --git a/build/android/finalize_splits_action.gypi b/build/android/finalize_splits_action.gypi new file mode 100644 index 00000000000..daa7f834ebb --- /dev/null +++ b/build/android/finalize_splits_action.gypi @@ -0,0 +1,76 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide an action that +# signs and zipaligns split APKs. +# +# Required variables: +# apk_name - Base name of the apk. 
+# Optional variables:
+#  density_splits - Whether to process density splits.
+#  language_splits - Whether to process language splits.
+
+{
+  'variables': {
+    'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
+    'keystore_name%': 'chromiumdebugkey',
+    'keystore_password%': 'chromium',
+    'zipalign_path%': '<(android_sdk_tools)/zipalign',
+    'density_splits%': 0,
+    'language_splits%': [],
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+    'base_output_path': '<(PRODUCT_DIR)/apks/<(apk_name)',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/finalize_splits.py',
+    '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(keystore_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/finalize_splits.py',
+    '--resource-packaged-apk-path=<(resource_packaged_apk_path)',
+    '--base-output-path=<(base_output_path)',
+    '--zipalign-path=<(zipalign_path)',
+    '--key-path=<(keystore_path)',
+    '--key-name=<(keystore_name)',
+    '--key-passwd=<(keystore_password)',
+  ],
+  'conditions': [
+    ['density_splits == 1', {
+      'message': 'Signing/aligning <(_target_name) density splits',
+      'inputs': [
+        '<(resource_packaged_apk_path)_hdpi',
+        '<(resource_packaged_apk_path)_xhdpi',
+        '<(resource_packaged_apk_path)_xxhdpi',
+        '<(resource_packaged_apk_path)_xxxhdpi',
+        '<(resource_packaged_apk_path)_tvdpi',
+      ],
+      'outputs': [
+        '<(base_output_path)-density-hdpi.apk',
+        '<(base_output_path)-density-xhdpi.apk',
+        '<(base_output_path)-density-xxhdpi.apk',
+        '<(base_output_path)-density-xxxhdpi.apk',
+        '<(base_output_path)-density-tvdpi.apk',
+      ],
+      'action': [
+        '--densities=hdpi,xhdpi,xxhdpi,xxxhdpi,tvdpi',
+      ],
+    }],
+    ['language_splits != []', {
+      'message': 'Signing/aligning <(_target_name) language splits',
+      'inputs': [
+        "
+
diff --git a/build/android/generate_emma_html.py b/build/android/generate_emma_html.py
new file mode 100644
index 00000000000..9d1d7330b36
--- /dev/null
+++ b/build/android/generate_emma_html.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates EMMA coverage files to produce html output."""
+
+import fnmatch
+import json
+import optparse
+import os
+import sys
+
+import devil_chromium
+from devil.utils import cmd_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+
+def _GetFilesWithExt(root_dir, ext):
+  """Gets all files with a given extension.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    ext: Extension to look for (without the leading dot).
+
+  Returns:
+    A list of absolute paths to files that match.
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*.'
+ ext) + files.extend([os.path.join(root, basename) + for basename in basenames]) + + return files + + +def main(): + option_parser = optparse.OptionParser() + option_parser.add_option('--output', help='HTML output filename.') + option_parser.add_option('--coverage-dir', default=None, + help=('Root of the directory in which to search for ' + 'coverage data (.ec) files.')) + option_parser.add_option('--metadata-dir', default=None, + help=('Root of the directory in which to search for ' + 'coverage metadata (.em) files.')) + option_parser.add_option('--cleanup', action='store_true', + help=('If set, removes coverage files generated at ' + 'runtime.')) + options, _ = option_parser.parse_args() + + devil_chromium.Initialize() + + if not (options.coverage_dir and options.metadata_dir and options.output): + option_parser.error('One or more mandatory options are missing.') + + coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec') + metadata_files = _GetFilesWithExt(options.metadata_dir, 'em') + # Filter out zero-length files. These are created by emma_instr.py when a + # target has no classes matching the coverage filter. + metadata_files = [f for f in metadata_files if os.path.getsize(f)] + print 'Found coverage files: %s' % str(coverage_files) + print 'Found metadata files: %s' % str(metadata_files) + + sources = [] + for f in metadata_files: + sources_file = os.path.splitext(f)[0] + '_sources.txt' + with open(sources_file, 'r') as sf: + sources.extend(json.load(sf)) + sources = [os.path.join(host_paths.DIR_SOURCE_ROOT, s) for s in sources] + print 'Sources: %s' % sources + + input_args = [] + for f in coverage_files + metadata_files: + input_args.append('-in') + input_args.append(f) + + output_args = ['-Dreport.html.out.file', options.output] + source_args = ['-sp', ','.join(sources)] + + exit_code = cmd_helper.RunCmd( + ['java', '-cp', + os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'lib', 'emma.jar'), + 'emma', 'report', '-r', 'html'] + + input_args + output_args + source_args) + + if options.cleanup: + for f in coverage_files: + os.remove(f) + + # Command tends to exit with status 0 when it actually failed. + if not exit_code and not os.path.exists(options.output): + exit_code = 1 + + return exit_code + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gn/generate_isolate.py b/build/android/gn/generate_isolate.py new file mode 100644 index 00000000000..1ac75b15053 --- /dev/null +++ b/build/android/gn/generate_isolate.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates an .isolate given a list of files. + +""" + +import argparse +import os +import pprint +import re +import sys + + +_UNIVERSAL_BLACKLIST = ( + r'.*OWNERS', # Should never be included. +) + +_ANDROID_BLACKLIST = ( + r'.*\.crx', # Chrome extension zip files. + r'.*external_extensions\.json', # Chrome external extensions config file. + r'.*\.so', # Libraries packed into .apk. + r'.*\.mojom\.js', # Some test_support targets include python deps. + r'.*Mojo.*manifest\.json', # Some source_set()s pull these in. + r'.*jni_generator_tests', # Exists just to test the compile, not to be run. +) + +_DEVICE_BLACKLIST = ( + r'.*\.py', # Some test_support targets include python deps. + + # v8's blobs get packaged into APKs. 
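    # (Illustrative: the two patterns below match v8 snapshot files such as
    # natives_blob.bin or snapshot_blob_64.bin; exact names vary by platform.)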
+ r'.*natives_blob.*\.bin', + r'.*snapshot_blob.*\.bin', +) + +_ASSERT_WHITELIST = ( + r'.*\.pak', + r'.*/', # Assume directories are always included on purpose. +) + + +def _IsExecutable(path): + return os.path.isfile(path) and os.access(path, os.X_OK) + + +def _MatchesAny(path, patterns): + return any(re.match(p, path) for p in patterns) + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('--command', + help='The command to put in the .isolate (optional)') + parser.add_argument('--runtime-deps-file', required=True, + help='Input .runtime_deps file.') + parser.add_argument('--output-directory', required=True, + help='Location of the ninja output directory') + parser.add_argument('--out-file', help='Write to file rather than stdout.') + parser.add_argument('--apply-android-filters', action='store_true', + help='Filter files not required for Android.') + parser.add_argument('--apply-device-filters', action='store_true', + help='Filter files not required in *.device.isolate.') + parser.add_argument('--assert-no-odd-data', action='store_true', + help='Fail if any data deps exist (after filtering) ' + 'that are not a part of the _ASSERT_WHITELIST. Use ' + 'this to prevent unexpected runtime_deps from ' + 'creeping in') + options = parser.parse_args() + + deps = [] + with open(options.runtime_deps_file) as deps_file: + for path in deps_file: + if path.startswith('./'): + path = path[2:] + deps.append(path.rstrip()) + + deps = (d for d in deps if not _MatchesAny(d, _UNIVERSAL_BLACKLIST)) + + if options.apply_android_filters: + deps = (d for d in deps if not _MatchesAny(d, _ANDROID_BLACKLIST)) + + if options.apply_device_filters: + deps = (d for d in deps if not _MatchesAny(d, _DEVICE_BLACKLIST)) + # Breakpad tests have a helper exe, which is packaged in the _dist. + deps = (d for d in deps if not _IsExecutable(d)) + + # Make them relative to out-file. + if options.out_file: + subdir = os.path.relpath(options.output_directory, + os.path.dirname(options.out_file)) + deps = (os.path.join(subdir, d) for d in deps) + + deps = sorted(deps) + + if options.assert_no_odd_data: + odd_files = [d for d in deps if not _MatchesAny(d, _ASSERT_WHITELIST)] + assert not odd_files, ('Found possibly undesired file in runtime_deps: %s' % + odd_files) + + isolate_dict = { + 'variables': { + 'files': deps, + } + } + if options.command: + isolate_dict['variables']['command'] = [options.command] + + isolate_data = pprint.pformat(isolate_dict) + if options.out_file: + with open(options.out_file, 'w') as f: + f.write(isolate_data + '\n') + else: + print isolate_data + + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/gn/zip.py b/build/android/gn/zip.py new file mode 100644 index 00000000000..b80e0a1e4c5 --- /dev/null +++ b/build/android/gn/zip.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Archives a set of files. 
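A hypothetical invocation with invented paths; note that --inputs takes a Python-style list literal, which the script parses with ast.literal_eval:

    python zip.py --inputs="['a.txt', 'sub/b.txt']" --output=archive.zip --base-dir=.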
+""" + +import ast +import optparse +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp')) +from util import build_utils + +def main(): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--inputs', help='List of files to archive.') + parser.add_option('--output', help='Path to output archive.') + parser.add_option('--base-dir', + help='If provided, the paths in the archive will be ' + 'relative to this directory', default='.') + + options, _ = parser.parse_args() + + inputs = ast.literal_eval(options.inputs) + output = options.output + base_dir = options.base_dir + + build_utils.DoZip(inputs, output, base_dir) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gradle/build.gradle.jinja b/build/android/gradle/build.gradle.jinja new file mode 100644 index 00000000000..406e7303041 --- /dev/null +++ b/build/android/gradle/build.gradle.jinja @@ -0,0 +1,98 @@ +{# Copyright 2016 The Chromium Authors. All rights reserved. #} +{# Use of this source code is governed by a BSD-style license that can be #} +{# found in the LICENSE file. #} +// Generated by //build/android/generate_gradle.py +{% if template_type == 'root' %} + +buildscript { + repositories { + jcenter() + } + dependencies { + classpath "com.android.tools.build:gradle:2.1.2" + } +} + +{% elif template_type == 'java_library' %} + +apply plugin: "java" + +sourceSets { + main { + java.srcDirs = {{ java_dirs }} + } +} + +sourceCompatibility = JavaVersion.VERSION_1_7 +targetCompatibility = JavaVersion.VERSION_1_7 + +{% else %} + +{% if template_type == 'android_library' %} +apply plugin: "com.android.library" +{% elif template_type == 'android_apk' %} +apply plugin: "com.android.application" +{% endif %} + +android { + compileSdkVersion {{ compile_sdk_version }} + buildToolsVersion "{{ build_tools_version }}" + publishNonDefault true + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_7 + targetCompatibility JavaVersion.VERSION_1_7 + } + + sourceSets { + main { + manifest.srcFile "{{ android_manifest }}" + java.srcDirs = [ +{% for path in java_dirs %} + "{{ path }}", +{% endfor %} + ] + resources.srcDirs = [] + aidl.srcDirs = [] + renderscript.srcDirs = [] + res.srcDirs = [] + assets.srcDirs = [] + } + } +} +{% endif %} +{% if template_type != 'root' %} + +dependencies { +{% for path in prebuilts %} + compile files("{{ path }}") +{% endfor %} +{% for proj in java_project_deps %} + compile project(":{{ proj }}") +{% endfor %} +{% for proj in android_project_deps %} + debugCompile project(path: ":{{ proj }}", configuration: "debug") + releaseCompile project(path: ":{{ proj }}", configuration: "release") +{% endfor %} +} + +afterEvaluate { + def tasksToDisable = tasks.findAll { + return (it.name.equals('generateDebugSources') // causes unwanted AndroidManifest.java + || it.name.equals('generateReleaseSources') + || it.name.endsWith('Assets') + || it.name.endsWith('BuildConfig') // causes unwanted BuildConfig.java +{% if not use_gradle_process_resources %} + || it.name.endsWith('Resources') + || it.name.endsWith('ResValues') +{% endif %} + || it.name.endsWith('Aidl') + || it.name.endsWith('Renderscript') + || it.name.endsWith('Shaders')) + } + tasksToDisable.each { Task task -> + task.enabled = false + } +} + +{% endif %} diff --git a/build/android/gradle/generate_gradle.py 
b/build/android/gradle/generate_gradle.py
new file mode 100644
index 00000000000..fdd7dcb8ab1
--- /dev/null
+++ b/build/android/gradle/generate_gradle.py
@@ -0,0 +1,385 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates an Android Studio project from a GN target."""
+
+import argparse
+import codecs
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+import devil_chromium
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
+import jinja_template
+from util import build_utils
+
+
+_DEFAULT_ANDROID_MANIFEST_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'AndroidManifest.xml')
+_JINJA_TEMPLATE_PATH = os.path.join(
+    os.path.dirname(__file__), 'build.gradle.jinja')
+
+_JAVA_SUBDIR = 'symlinked-java'
+_SRCJARS_SUBDIR = 'extracted-srcjars'
+
+
+def _RebasePath(path_or_list, new_cwd=None, old_cwd=None):
+  """Makes the given path(s) relative to new_cwd, or absolute if not specified.
+
+  If new_cwd is not specified, absolute paths are returned.
+  If old_cwd is not specified, constants.GetOutDirectory() is assumed.
+  """
+  if not isinstance(path_or_list, basestring):
+    return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list]
+  if old_cwd is None:
+    old_cwd = constants.GetOutDirectory()
+  old_cwd = os.path.abspath(old_cwd)
+  if new_cwd:
+    new_cwd = os.path.abspath(new_cwd)
+    return os.path.relpath(os.path.join(old_cwd, path_or_list), new_cwd)
+  return os.path.abspath(os.path.join(old_cwd, path_or_list))
+
+
+def _IsSubpathOf(child, parent):
+  """Returns whether |child| is a subpath of |parent|."""
+  return not os.path.relpath(child, parent).startswith(os.pardir)
+
+
+def _WriteFile(path, data):
+  """Writes |data| to |path|, constructing parent directories if necessary."""
+  logging.info('Writing %s', path)
+  dirname = os.path.dirname(path)
+  if not os.path.exists(dirname):
+    os.makedirs(dirname)
+  with codecs.open(path, 'w', 'utf-8') as output_file:
+    output_file.write(data)
+
+
+def _RunNinja(output_dir, ninja_targets):
+  cmd = ['ninja', '-C', output_dir, '-j50']
+  cmd.extend(ninja_targets)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+class _ProjectEntry(object):
+  """Helper class for various path transformations."""
+  def __init__(self, gn_target):
+    assert gn_target.startswith('//'), gn_target
+    if ':' not in gn_target:
+      gn_target = '%s:%s' % (gn_target, os.path.basename(gn_target))
+    self._gn_target = gn_target
+    self._build_config = None
+
+  @classmethod
+  def FromBuildConfigPath(cls, path):
+    prefix = 'gen/'
+    suffix = '.build_config'
+    assert path.startswith(prefix) and path.endswith(suffix), path
+    subdir = path[len(prefix):-len(suffix)]
+    return cls('//%s:%s' % (os.path.split(subdir)))
+
+  def __hash__(self):
+    return hash(self._gn_target)
+
+  def __eq__(self, other):
+    return self._gn_target == other.GnTarget()
+
+  def GnTarget(self):
+    return self._gn_target
+
+  def NinjaTarget(self):
+    return self._gn_target[2:]
+
+  def GnBuildConfigTarget(self):
+    return '%s__build_config' % self._gn_target
+
+  def NinjaBuildConfigTarget(self):
+    return '%s__build_config' % self.NinjaTarget()
+
+  def GradleSubdir(self):
+    """Returns the output subdirectory."""
+    return self.NinjaTarget().replace(':', os.path.sep)
+
+  def ProjectName(self):
+    """Returns the Gradle project name."""
+    return self.GradleSubdir().replace(os.path.sep, '\\$')
+
+  def BuildConfig(self):
+    """Reads and returns the project's .build_config JSON."""
+    if not self._build_config:
+      path = os.path.join('gen', self.GradleSubdir() + '.build_config')
+      self._build_config = build_utils.ReadJson(_RebasePath(path))
+    return self._build_config
+
+
+def _ComputeJavaSourceDirs(java_files):
+  """Returns the list of source directories for the given files."""
+  found_roots = set()
+  for path in java_files:
+    path_root = path
+    # Recognize these tokens as top-level.
+    while os.path.basename(path_root) not in ('javax', 'org', 'com', 'src'):
+      assert path_root, 'Failed to find source dir for ' + path
+      path_root = os.path.dirname(path_root)
+    # Assume that if we've hit "src", then we're at the root.
+    if os.path.basename(path_root) != 'src':
+      path_root = os.path.dirname(path_root)
+    found_roots.add(path_root)
+  return list(found_roots)
+
+
+def _CreateSymlinkTree(entry_output_dir, symlink_dir, desired_files,
+                       parent_dirs):
+  """Creates a directory tree of symlinks to the given files.
+
+  The idea here is to replicate a directory tree while leaving out files within
+  it not listed by |desired_files|.
+  """
+  assert _IsSubpathOf(symlink_dir, entry_output_dir)
+  if os.path.exists(symlink_dir):
+    shutil.rmtree(symlink_dir)
+
+  for target_path in desired_files:
+    prefix = next(d for d in parent_dirs if target_path.startswith(d))
+    subpath = os.path.relpath(target_path, prefix)
+    symlinked_path = os.path.join(symlink_dir, subpath)
+    symlinked_dir = os.path.dirname(symlinked_path)
+    if not os.path.exists(symlinked_dir):
+      os.makedirs(symlinked_dir)
+    relpath = os.path.relpath(target_path, symlinked_dir)
+    logging.debug('Creating symlink %s -> %s', symlinked_path, relpath)
+    os.symlink(relpath, symlinked_path)
+
+
+def _CreateJavaSourceDir(entry_output_dir, java_sources_file):
+  """Computes, and constructs when necessary, the list of java source dirs.
+
+  1. Computes the root java source directories from the list of files.
+  2. Determines whether there are any .java files in them that are not included
+     in |java_sources_file|.
+  3. If not, returns the list of java source directories. If so, constructs a
+     tree of symlinks within |entry_output_dir| of all files in
+     |java_sources_file|.
+ """ + java_dirs = [] + if java_sources_file: + java_files = _RebasePath(build_utils.ReadSourcesList(java_sources_file)) + java_dirs = _ComputeJavaSourceDirs(java_files) + + found_java_files = build_utils.FindInDirectories(java_dirs, '*.java') + unwanted_java_files = set(found_java_files) - set(java_files) + missing_java_files = set(java_files) - set(found_java_files) + if unwanted_java_files: + logging.debug('Target requires .java symlinks: %s', entry_output_dir) + symlink_dir = os.path.join(entry_output_dir, _JAVA_SUBDIR) + _CreateSymlinkTree(entry_output_dir, symlink_dir, java_files, java_dirs) + java_dirs = [symlink_dir] + if missing_java_files: + logging.warning('Some java files were not found: %s', missing_java_files) + + return java_dirs + + +def _GenerateLocalProperties(sdk_dir): + """Returns the data for project.properties as a string.""" + return '\n'.join([ + '# Generated by //build/android/gradle/generate_gradle.py', + 'sdk.dir=%s' % sdk_dir, + '']) + + +def _GenerateGradleFile(build_config, config_json, java_dirs, relativize, + use_gradle_process_resources): + """Returns the data for a project's build.gradle.""" + deps_info = build_config['deps_info'] + gradle = build_config['gradle'] + + if deps_info['type'] == 'android_apk': + target_type = 'android_apk' + elif deps_info['type'] == 'java_library' and not deps_info['is_prebuilt']: + if deps_info['requires_android']: + target_type = 'android_library' + else: + target_type = 'java_library' + else: + return None + + variables = {} + variables['template_type'] = target_type + variables['use_gradle_process_resources'] = use_gradle_process_resources + variables['build_tools_version'] = config_json['build_tools_version'] + variables['compile_sdk_version'] = config_json['compile_sdk_version'] + android_manifest = gradle.get('android_manifest', + _DEFAULT_ANDROID_MANIFEST_PATH) + variables['android_manifest'] = relativize(android_manifest) + variables['java_dirs'] = relativize(java_dirs) + variables['prebuilts'] = relativize(gradle['dependent_prebuilt_jars']) + deps = [_ProjectEntry.FromBuildConfigPath(p) + for p in gradle['dependent_android_projects']] + + variables['android_project_deps'] = [d.ProjectName() for d in deps] + deps = [_ProjectEntry.FromBuildConfigPath(p) + for p in gradle['dependent_java_projects']] + variables['java_project_deps'] = [d.ProjectName() for d in deps] + + processor = jinja_template.JinjaProcessor(host_paths.DIR_SOURCE_ROOT) + return processor.Render(_JINJA_TEMPLATE_PATH, variables) + + +def _GenerateRootGradle(): + """Returns the data for the root project's build.gradle.""" + variables = {'template_type': 'root'} + processor = jinja_template.JinjaProcessor(host_paths.DIR_SOURCE_ROOT) + return processor.Render(_JINJA_TEMPLATE_PATH, variables) + + +def _GenerateSettingsGradle(project_entries): + """Returns the data for settings.gradle.""" + project_name = os.path.basename(os.path.dirname(host_paths.DIR_SOURCE_ROOT)) + lines = [] + lines.append('// Generated by //build/android/gradle/generate_gradle.py') + lines.append('rootProject.name = "%s"' % project_name) + lines.append('rootProject.projectDir = settingsDir') + lines.append('') + + for entry in project_entries: + # Example target: android_webview:android_webview_java__build_config + lines.append('include ":%s"' % entry.ProjectName()) + lines.append('project(":%s").projectDir = new File(settingsDir, "%s")' % + (entry.ProjectName(), entry.GradleSubdir())) + return '\n'.join(lines) + + +def _ExtractSrcjars(entry_output_dir, srcjar_tuples): + """Extracts 
all srcjars to the directory given by the tuples.""" + extracted_paths = set(s[1] for s in srcjar_tuples) + for extracted_path in extracted_paths: + assert _IsSubpathOf(extracted_path, entry_output_dir) + if os.path.exists(extracted_path): + shutil.rmtree(extracted_path) + + for srcjar_path, extracted_path in srcjar_tuples: + logging.info('Extracting %s to %s', srcjar_path, extracted_path) + with zipfile.ZipFile(srcjar_path) as z: + z.extractall(extracted_path) + + +def _FindAllProjectEntries(main_entry): + """Returns the list of all _ProjectEntry instances given the root project.""" + found = set() + to_scan = [main_entry] + while to_scan: + cur_entry = to_scan.pop() + if cur_entry in found: + continue + found.add(cur_entry) + build_config = cur_entry.BuildConfig() + sub_config_paths = build_config['deps_info']['deps_configs'] + to_scan.extend( + _ProjectEntry.FromBuildConfigPath(p) for p in sub_config_paths) + return list(found) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--output-directory', + help='Path to the root build directory.') + parser.add_argument('-v', + '--verbose', + dest='verbose_count', + default=0, + action='count', + help='Verbose level') + parser.add_argument('--target', + help='GN target to generate project for.', + default='//chrome/android:chrome_public_apk') + parser.add_argument('--project-dir', + help='Root of the output project.', + default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle')) + parser.add_argument('--use-gradle-process-resources', + action='store_true', + help='Have gradle generate R.java rather than ninja') + args = parser.parse_args() + if args.output_directory: + constants.SetOutputDirectory(args.output_directory) + constants.CheckOutputDirectory() + output_dir = constants.GetOutDirectory() + devil_chromium.Initialize(output_directory=output_dir) + run_tests_helper.SetLogLevel(args.verbose_count) + + gradle_output_dir = os.path.abspath( + args.project_dir.replace('$CHROMIUM_OUTPUT_DIR', output_dir)) + logging.warning('Creating project at: %s', gradle_output_dir) + + main_entry = _ProjectEntry(args.target) + logging.warning('Building .build_config files...') + _RunNinja(output_dir, [main_entry.NinjaBuildConfigTarget()]) + + all_entries = _FindAllProjectEntries(main_entry) + logging.info('Found %d dependent build_config targets.', len(all_entries)) + + config_json = build_utils.ReadJson( + os.path.join(output_dir, 'gradle', 'config.json')) + project_entries = [] + srcjar_tuples = [] + for entry in all_entries: + build_config = entry.BuildConfig() + if build_config['deps_info']['type'] not in ('android_apk', 'java_library'): + continue + + entry_output_dir = os.path.join(gradle_output_dir, entry.GradleSubdir()) + relativize = lambda x, d=entry_output_dir: _RebasePath(x, d) + + srcjars = _RebasePath(build_config['gradle'].get('bundled_srcjars', [])) + if not args.use_gradle_process_resources: + srcjars += _RebasePath(build_config['javac']['srcjars']) + + java_sources_file = build_config['gradle'].get('java_sources_file') + if java_sources_file: + java_sources_file = _RebasePath(java_sources_file) + + java_dirs = _CreateJavaSourceDir(entry_output_dir, java_sources_file) + if srcjars: + java_dirs.append(os.path.join(entry_output_dir, _SRCJARS_SUBDIR)) + + data = _GenerateGradleFile(build_config, config_json, java_dirs, relativize, + args.use_gradle_process_resources) + if data: + project_entries.append(entry) + srcjar_tuples.extend( + (s, os.path.join(entry_output_dir, _SRCJARS_SUBDIR)) for s in srcjars) + 
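As a worked sketch of the _ProjectEntry path transformations used in this loop (the GN label is chosen purely for illustration):

    entry = _ProjectEntry('//base:base')
    entry.NinjaTarget()   # 'base:base'
    entry.GradleSubdir()  # 'base/base'
    entry.ProjectName()   # 'base\$base'

so the generated build.gradle for that target would land under <project-dir>/base/base/.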
_WriteFile(os.path.join(entry_output_dir, 'build.gradle'), data) + + _WriteFile(os.path.join(gradle_output_dir, 'build.gradle'), + _GenerateRootGradle()) + + _WriteFile(os.path.join(gradle_output_dir, 'settings.gradle'), + _GenerateSettingsGradle(project_entries)) + + sdk_path = _RebasePath(config_json['android_sdk_root']) + _WriteFile(os.path.join(gradle_output_dir, 'local.properties'), + _GenerateLocalProperties(sdk_path)) + + if srcjar_tuples: + logging.warning('Building all .srcjar files...') + targets = _RebasePath([s[0] for s in srcjar_tuples], output_dir) + _RunNinja(output_dir, targets) + _ExtractSrcjars(gradle_output_dir, srcjar_tuples) + logging.warning('Project created successfully!') + + +if __name__ == '__main__': + main() diff --git a/build/android/gyp/aar.py b/build/android/gyp/aar.py new file mode 100644 index 00000000000..503f9e56a15 --- /dev/null +++ b/build/android/gyp/aar.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Processes an Android AAR file.""" + +import argparse +import os +import shutil +import sys +import zipfile + +from util import build_utils + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + os.pardir, os.pardir))) +import gn_helpers + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('--input-file', + help='Path to the AAR file.', + required=True, + metavar='FILE') + parser.add_argument('--extract', + help='Extract the files to output directory.', + action='store_true') + parser.add_argument('--list', + help='List all the resource and jar files.', + action='store_true') + parser.add_argument('--output-dir', + help='Output directory for the extracted files. Must ' + 'be set if --extract is set.', + metavar='DIR') + + args = parser.parse_args() + if not args.extract and not args.list: + parser.error('Either --extract or --list has to be specified.') + + aar_file = args.input_file + output_dir = args.output_dir + + if args.extract: + # Clear previously extracted versions of the AAR. + shutil.rmtree(output_dir, True) + build_utils.ExtractAll(aar_file, path=output_dir) + + if args.list: + data = {} + data['resources'] = [] + data['jars'] = [] + with zipfile.ZipFile(aar_file) as z: + for name in z.namelist(): + if name.startswith('res/') and not name.endswith('/'): + data['resources'].append(name) + if name.endswith('.jar'): + data['jars'].append(name) + print gn_helpers.ToGNString(data) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/aidl.py b/build/android/gyp/aidl.py new file mode 100644 index 00000000000..66030b2c8f7 --- /dev/null +++ b/build/android/gyp/aidl.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
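+
+# A sketch of the intended use, for orientation (paths and flag values are
+# illustrative, not taken from the build):
+#   aidl.py --aidl-path=$SDK/build-tools/aidl \
+#       --imports='["frameworks.aidl"]' --srcjar=gen/foo.srcjar Foo.aidl
+# Each .aidl input is compiled to a .java file, and the results are zipped
+# into the --srcjar output.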
+ +"""Invokes Android's aidl +""" + +import optparse +import os +import re +import sys +import zipfile + +from util import build_utils + + +def main(argv): + option_parser = optparse.OptionParser() + build_utils.AddDepfileOption(option_parser) + option_parser.add_option('--aidl-path', help='Path to the aidl binary.') + option_parser.add_option('--imports', help='Files to import.') + option_parser.add_option('--includes', + help='Directories to add as import search paths.') + option_parser.add_option('--srcjar', help='Path for srcjar output.') + options, args = option_parser.parse_args(argv[1:]) + + with build_utils.TempDir() as temp_dir: + for f in args: + classname = os.path.splitext(os.path.basename(f))[0] + output = os.path.join(temp_dir, classname + '.java') + aidl_cmd = [options.aidl_path] + aidl_cmd += [ + '-p' + s for s in build_utils.ParseGnList(options.imports) + ] + if options.includes is not None: + aidl_cmd += [ + '-I' + s for s in build_utils.ParseGnList(options.includes) + ] + aidl_cmd += [ + f, + output + ] + build_utils.CheckOutput(aidl_cmd) + + with zipfile.ZipFile(options.srcjar, 'w') as srcjar: + for path in build_utils.FindInDirectory(temp_dir, '*.java'): + with open(path) as fileobj: + data = fileobj.read() + pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1) + arcname = '%s/%s' % (pkg_name.replace('.', '/'), os.path.basename(path)) + srcjar.writestr(arcname, data) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/gyp/ant.py b/build/android/gyp/ant.py new file mode 100644 index 00000000000..5394b9ec7d5 --- /dev/null +++ b/build/android/gyp/ant.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""An Ant wrapper that suppresses useless Ant output. + +Ant build scripts output "BUILD SUCCESSFUL" and build timing at the end of +every build. In the Android build, this just adds a lot of useless noise to the +build output. This script forwards its arguments to ant, and prints Ant's +output up until the BUILD SUCCESSFUL line. + +Also, when a command fails, this script will re-run that ant command with the +'-verbose' argument so that the failure is easier to debug. +""" + +import optparse +import sys +import traceback + +from util import build_utils + + +def main(argv): + option_parser = optparse.OptionParser() + build_utils.AddDepfileOption(option_parser) + options, args = option_parser.parse_args(argv[1:]) + + try: + stdout = build_utils.CheckOutput(['ant'] + args) + except build_utils.CalledProcessError: + # It is very difficult to diagnose ant failures without the '-verbose' + # argument. So, when an ant command fails, re-run it with '-verbose' so that + # the cause of the failure is easier to identify. + verbose_args = ['-verbose'] + [a for a in args if a != '-quiet'] + try: + stdout = build_utils.CheckOutput(['ant'] + verbose_args) + except build_utils.CalledProcessError: + traceback.print_exc() + sys.exit(1) + + # If this did sys.exit(1), building again would succeed (which would be + # awkward). Instead, just print a big warning. + build_utils.PrintBigWarning( + 'This is unexpected. `ant ' + ' '.join(args) + '` failed.' 
+      + ' But running `ant ' + ' '.join(verbose_args) + '` passed.')
+
+  stdout = stdout.strip().split('\n')
+  for line in stdout:
+    if line.strip() == 'BUILD SUCCESSFUL':
+      break
+    print line
+
+  if options.depfile:
+    assert '-buildfile' in args
+    ant_buildfile = args[args.index('-buildfile') + 1]
+
+    build_utils.WriteDepfile(
+        options.depfile,
+        [ant_buildfile] + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/apk_install.py b/build/android/gyp/apk_install.py
new file mode 100644
index 00000000000..f43b0a17737
--- /dev/null
+++ b/build/android/gyp/apk_install.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs an APK.
+
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
+BUILD_ANDROID_DIR = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(BUILD_ANDROID_DIR)
+
+import devil_chromium
+from devil.android import apk_helper
+from pylib import constants
+
+
+def HasInstallMetadataChanged(device, apk_package, metadata_path):
+  """Checks if the metadata on the device for apk_package has changed."""
+  if not os.path.exists(metadata_path):
+    return True
+
+  try:
+    expected_metadata = build_utils.ReadJson(metadata_path)
+  except ValueError:  # File is not json encoded.
+    return True
+
+  return expected_metadata != device.GetInstallMetadata(apk_package)
+
+
+def RecordInstallMetadata(device, apk_package, metadata_path):
+  """Records the metadata from the device for apk_package."""
+  metadata = device.GetInstallMetadata(apk_package, refresh=True)
+  if not metadata:
+    raise Exception('APK install failed unexpectedly.')
+
+  build_utils.WriteJson(metadata, metadata_path)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--apk-path',
+      help='Path to .apk to install.')
+  parser.add_option('--split-apk-path',
+      help='Path to .apk splits (can specify multiple times; causes '
+           '--install-multiple to be used).',
+      action='append')
+  parser.add_option('--android-sdk-tools',
+      help='Path to the Android SDK build tools folder. ' +
+           'Required when using --split-apk-path.')
+  parser.add_option('--install-record',
+      help='Path to install record (touched only when APK is installed).')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--stamp',
+      help='Path to touch on success.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  parser.add_option('--output-directory',
+      help='The output directory.')
+  options, _ = parser.parse_args()
+
+  constants.SetBuildType(options.configuration_name)
+
+  devil_chromium.Initialize(
+      output_directory=os.path.abspath(options.output_directory))
+
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  serial_number = device.GetSerialNumber()
+  apk_package = apk_helper.GetPackageName(options.apk_path)
+
+  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)
+
+  # If the APK on the device does not match the one that was last installed by
+  # the build, then the APK has to be installed (regardless of the md5 record).
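+  # For reference, the staleness contract relied on below (a sketch, using
+  # names from this file): md5_check.CallAndRecordIfStale(Install,
+  # record_path=..., input_paths=[options.apk_path], force=force_install)
+  # re-runs Install() only when the recorded md5 of the inputs differs or
+  # |force| is True.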
+  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)
+
+
+  def Install():
+    if options.split_apk_path:
+      device.InstallSplitApk(options.apk_path, options.split_apk_path)
+    else:
+      device.Install(options.apk_path, reinstall=True)
+
+    RecordInstallMetadata(device, apk_package, metadata_path)
+    build_utils.Touch(options.install_record)
+
+
+  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
+  md5_check.CallAndRecordIfStale(
+      Install,
+      record_path=record_path,
+      input_paths=[options.apk_path],
+      force=force_install)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/apk_obfuscate.py b/build/android/gyp/apk_obfuscate.py
new file mode 100644
index 00000000000..04a04b3dd86
--- /dev/null
+++ b/build/android/gyp/apk_obfuscate.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates the obfuscated jar and test jar for an apk.
+
+If proguard is not enabled or 'Release' is not in the configuration name,
+obfuscation will be a no-op.
+"""
+
+import json
+import optparse
+import os
+import sys
+import tempfile
+
+from util import build_utils
+from util import proguard_util
+
+
+_PROGUARD_KEEP_CLASS = '''-keep class %s {
+  *;
+}
+'''
+
+
+def ParseArgs(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--android-sdk-tools',
+                    help='path to the Android SDK build tools folder')
+  parser.add_option('--android-sdk-jar',
+                    help='path to Android SDK\'s android.jar')
+  parser.add_option('--proguard-jar-path',
+                    help='Path to proguard.jar in the sdk')
+  parser.add_option('--input-jars-paths',
+                    help='Path to jars to include in obfuscated jar')
+
+  parser.add_option('--proguard-configs',
+                    help='Paths to proguard config files')
+
+  parser.add_option('--configuration-name',
+                    help='Gyp configuration name (e.g. Debug, Release)')
+
+  parser.add_option('--debug-build-proguard-enabled', action='store_true',
+                    help='--proguard-enabled only takes effect on release '
+                         'builds; this flag enables proguard on debug '
+                         'builds as well.')
+  parser.add_option('--proguard-enabled', action='store_true',
+                    help='Set if proguard is enabled for this target.')
+
+  parser.add_option('--obfuscated-jar-path',
+                    help='Output path for obfuscated jar.')
+
+  parser.add_option('--testapp', action='store_true',
+                    help='Set this if building an instrumentation test apk')
+  parser.add_option('--tested-apk-obfuscated-jar-path',
+                    help='Path to the obfuscated jar of the tested apk')
+  parser.add_option('--test-jar-path',
+                    help='Output path for jar containing all the test apk\'s '
+                         'code.')
+
+  parser.add_option('--stamp', help='File to touch on success')
+
+  parser.add_option('--main-dex-list-path',
+                    help='The list of classes to retain in the main dex. '
+                         'These will not be obfuscated.')
+  parser.add_option('--multidex-configuration-path',
+                    help='A JSON file containing multidex build configuration.')
+  parser.add_option('--verbose', '-v', action='store_true',
+                    help='Print all proguard output')
+
+  (options, args) = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given. ' + str(args))
+
+  # Check that required options have been provided.
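+  # build_utils.CheckOptions() (called below) reports a parser error for any
+  # name in |required| that was not provided, e.g. (illustrative):
+  #   build_utils.CheckOptions(options, parser, required=('android_sdk',))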
+ required_options = ( + 'android_sdk', + 'android_sdk_tools', + 'android_sdk_jar', + 'proguard_jar_path', + 'input_jars_paths', + 'configuration_name', + 'obfuscated_jar_path', + ) + + if options.testapp: + required_options += ( + 'test_jar_path', + ) + + build_utils.CheckOptions(options, parser, required=required_options) + return options, args + + +def DoProguard(options): + proguard = proguard_util.ProguardCmdBuilder(options.proguard_jar_path) + proguard.outjar(options.obfuscated_jar_path) + + input_jars = build_utils.ParseGnList(options.input_jars_paths) + + exclude_paths = [] + configs = build_utils.ParseGnList(options.proguard_configs) + if options.tested_apk_obfuscated_jar_path: + # configs should only contain the process_resources.py generated config. + assert len(configs) == 1, ( + 'test apks should not have custom proguard configs: ' + str(configs)) + proguard.tested_apk_info(options.tested_apk_obfuscated_jar_path + '.info') + + proguard.libraryjars([options.android_sdk_jar]) + proguard_injars = [p for p in input_jars if p not in exclude_paths] + proguard.injars(proguard_injars) + + multidex_config = _PossibleMultidexConfig(options) + if multidex_config: + configs.append(multidex_config) + + proguard.configs(configs) + proguard.verbose(options.verbose) + proguard.CheckOutput() + + +def _PossibleMultidexConfig(options): + if not options.multidex_configuration_path: + return None + + with open(options.multidex_configuration_path) as multidex_config_file: + multidex_config = json.loads(multidex_config_file.read()) + + if not (multidex_config.get('enabled') and options.main_dex_list_path): + return None + + main_dex_list_config = '' + with open(options.main_dex_list_path) as main_dex_list: + for clazz in (l.strip() for l in main_dex_list): + if clazz.endswith('.class'): + clazz = clazz[:-len('.class')] + clazz = clazz.replace('/', '.') + main_dex_list_config += (_PROGUARD_KEEP_CLASS % clazz) + with tempfile.NamedTemporaryFile( + delete=False, + dir=os.path.dirname(options.main_dex_list_path), + prefix='main_dex_list_proguard', + suffix='.flags') as main_dex_config_file: + main_dex_config_file.write(main_dex_list_config) + return main_dex_config_file.name + + +def main(argv): + options, _ = ParseArgs(argv) + + input_jars = build_utils.ParseGnList(options.input_jars_paths) + + if options.testapp: + dependency_class_filters = [ + '*R.class', '*R$*.class', '*Manifest.class', '*BuildConfig.class'] + build_utils.MergeZips( + options.test_jar_path, input_jars, dependency_class_filters) + + if ((options.configuration_name == 'Release' and options.proguard_enabled) or + (options.configuration_name == 'Debug' and + options.debug_build_proguard_enabled)): + DoProguard(options) + else: + output_files = [ + options.obfuscated_jar_path, + options.obfuscated_jar_path + '.info', + options.obfuscated_jar_path + '.dump', + options.obfuscated_jar_path + '.seeds', + options.obfuscated_jar_path + '.usage', + options.obfuscated_jar_path + '.mapping'] + for f in output_files: + if os.path.exists(f): + os.remove(f) + build_utils.Touch(f) + + if options.stamp: + build_utils.Touch(options.stamp) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/apkbuilder.py b/build/android/gyp/apkbuilder.py new file mode 100644 index 00000000000..82ac496ed97 --- /dev/null +++ b/build/android/gyp/apkbuilder.py @@ -0,0 +1,311 @@ +#!/usr/bin/env python +# +# Copyright (c) 2015 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds the code parts to a resource APK."""
+
+import argparse
+import itertools
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
+# Taken from aapt's Package.cpp:
+_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
+                           '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
+                           '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
+                           '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
+                           '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
+
+
+def _ParseArgs(args):
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--assets',
+                      help='GYP-list of files to add as assets in the form '
+                           '"srcPath:zipPath", where ":zipPath" is optional.',
+                      default='[]')
+  parser.add_argument('--write-asset-list',
+                      action='store_true',
+                      help='Whether to create an assets/assets_list file.')
+  parser.add_argument('--uncompressed-assets',
+                      help='Same as --assets, except disables compression.',
+                      default='[]')
+  parser.add_argument('--resource-apk',
+                      help='An .ap_ file built using aapt',
+                      required=True)
+  parser.add_argument('--output-apk',
+                      help='Path to the output file',
+                      required=True)
+  parser.add_argument('--dex-file',
+                      help='Path to the classes.dex to use')
+  parser.add_argument('--native-libs',
+                      action='append',
+                      help='GYP-list of native libraries to include. '
+                           'Can be specified multiple times.',
+                      default=[])
+  parser.add_argument('--secondary-native-libs',
+                      action='append',
+                      help='GYP-list of native libraries for secondary '
+                           'android-abi. Can be specified multiple times.',
+                      default=[])
+  parser.add_argument('--android-abi',
+                      help='Android architecture to use for native libraries')
+  parser.add_argument('--secondary-android-abi',
+                      help='The secondary Android architecture to use for '
+                           'secondary native libraries')
+  parser.add_argument('--native-lib-placeholders',
+                      help='GYP-list of native library placeholders to add.',
+                      default='[]')
+  parser.add_argument('--emma-device-jar',
+                      help='Path to emma_device.jar to include.')
+  parser.add_argument('--uncompress-shared-libraries',
+                      action='store_true',
+                      help='Uncompress shared libraries')
+  options = parser.parse_args(args)
+  options.assets = build_utils.ParseGnList(options.assets)
+  options.uncompressed_assets = build_utils.ParseGnList(
+      options.uncompressed_assets)
+  options.native_lib_placeholders = build_utils.ParseGnList(
+      options.native_lib_placeholders)
+  all_libs = []
+  for gyp_list in options.native_libs:
+    all_libs.extend(build_utils.ParseGnList(gyp_list))
+  options.native_libs = all_libs
+  secondary_libs = []
+  for gyp_list in options.secondary_native_libs:
+    secondary_libs.extend(build_utils.ParseGnList(gyp_list))
+  options.secondary_native_libs = secondary_libs
+
+
+  if not options.android_abi and (options.native_libs or
+                                  options.native_lib_placeholders):
+    raise Exception('Must specify --android-abi with --native-libs')
+  if not options.secondary_android_abi and options.secondary_native_libs:
+    raise Exception('Must specify --secondary-android-abi with'
+                    ' --secondary-native-libs')
+  return options
+
+
+def _SplitAssetPath(path):
+  """Returns (src, dest) given an asset path in the form src[:dest]."""
+  path_parts = path.split(':')
+  src_path = path_parts[0]
+  if len(path_parts) > 1:
+    dest_path = path_parts[1]
+  else:
+    dest_path = os.path.basename(src_path)
+  return src_path, dest_path
+
+
+def _ExpandPaths(paths):
"""Converts src:dst into tuples and enumerates files within directories. + + Args: + paths: Paths in the form "src_path:dest_path" + + Returns: + A list of (src_path, dest_path) tuples sorted by dest_path (for stable + ordering within output .apk). + """ + ret = [] + for path in paths: + src_path, dest_path = _SplitAssetPath(path) + if os.path.isdir(src_path): + for f in build_utils.FindInDirectory(src_path, '*'): + ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:]))) + else: + ret.append((src_path, dest_path)) + ret.sort(key=lambda t:t[1]) + return ret + + +def _AddAssets(apk, path_tuples, disable_compression=False): + """Adds the given paths to the apk. + + Args: + apk: ZipFile to write to. + paths: List of paths (with optional :zipPath suffix) to add. + disable_compression: Whether to disable compression. + """ + # Group all uncompressed assets together in the hope that it will increase + # locality of mmap'ed files. + for target_compress in (False, True): + for src_path, dest_path in path_tuples: + + compress = not disable_compression and ( + os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS) + if target_compress == compress: + apk_path = 'assets/' + dest_path + try: + apk.getinfo(apk_path) + # Should never happen since write_build_config.py handles merging. + raise Exception('Multiple targets specified the asset path: %s' % + apk_path) + except KeyError: + build_utils.AddToZipHermetic(apk, apk_path, src_path=src_path, + compress=compress) + + +def _CreateAssetsList(path_tuples): + """Returns a newline-separated list of asset paths for the given paths.""" + dests = sorted(t[1] for t in path_tuples) + return '\n'.join(dests) + '\n' + + +def _AddNativeLibraries(out_apk, native_libs, android_abi, uncompress): + """Add native libraries to APK.""" + for path in native_libs: + basename = os.path.basename(path) + apk_path = 'lib/%s/%s' % (android_abi, basename) + + compress = None + if (uncompress and os.path.splitext(basename)[1] == '.so'): + compress = False + + build_utils.AddToZipHermetic(out_apk, + apk_path, + src_path=path, + compress=compress) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + options = _ParseArgs(args) + + native_libs = sorted(options.native_libs) + + input_paths = [options.resource_apk, __file__] + native_libs + # Include native libs in the depfile_deps since GN doesn't know about the + # dependencies when is_component_build=true. + depfile_deps = list(native_libs) + + secondary_native_libs = [] + if options.secondary_native_libs: + secondary_native_libs = sorted(options.secondary_native_libs) + input_paths += secondary_native_libs + depfile_deps += secondary_native_libs + + if options.dex_file: + input_paths.append(options.dex_file) + + if options.emma_device_jar: + input_paths.append(options.emma_device_jar) + + input_strings = [options.android_abi, + options.native_lib_placeholders, + options.uncompress_shared_libraries] + + if options.secondary_android_abi: + input_strings.append(options.secondary_android_abi) + + _assets = _ExpandPaths(options.assets) + _uncompressed_assets = _ExpandPaths(options.uncompressed_assets) + + for src_path, dest_path in itertools.chain(_assets, _uncompressed_assets): + input_paths.append(src_path) + input_strings.append(dest_path) + + def on_stale_md5(): + tmp_apk = options.output_apk + '.tmp' + try: + # TODO(agrieve): It would be more efficient to combine this step + # with finalize_apk(), which sometimes aligns and uncompresses the + # native libraries. 
+      with zipfile.ZipFile(options.resource_apk) as resource_apk, \
+           zipfile.ZipFile(tmp_apk, 'w', zipfile.ZIP_DEFLATED) as out_apk:
+        def copy_resource(zipinfo):
+          compress = zipinfo.compress_type != zipfile.ZIP_STORED
+          build_utils.AddToZipHermetic(out_apk, zipinfo.filename,
+                                       data=resource_apk.read(zipinfo.filename),
+                                       compress=compress)
+
+        # Make assets come before resources in order to maintain the same file
+        # ordering as GYP / aapt. http://crbug.com/561862
+        resource_infos = resource_apk.infolist()
+
+        # 1. AndroidManifest.xml
+        assert resource_infos[0].filename == 'AndroidManifest.xml'
+        copy_resource(resource_infos[0])
+
+        # 2. Assets
+        if options.write_asset_list:
+          data = _CreateAssetsList(
+              itertools.chain(_assets, _uncompressed_assets))
+          build_utils.AddToZipHermetic(out_apk, 'assets/assets_list', data=data)
+
+        _AddAssets(out_apk, _assets, disable_compression=False)
+        _AddAssets(out_apk, _uncompressed_assets, disable_compression=True)
+
+        # 3. Dex files
+        if options.dex_file and options.dex_file.endswith('.zip'):
+          with zipfile.ZipFile(options.dex_file, 'r') as dex_zip:
+            for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
+              build_utils.AddToZipHermetic(out_apk, dex, data=dex_zip.read(dex))
+        elif options.dex_file:
+          build_utils.AddToZipHermetic(out_apk, 'classes.dex',
+                                       src_path=options.dex_file)
+
+        # 4. Native libraries.
+        _AddNativeLibraries(out_apk,
+                            native_libs,
+                            options.android_abi,
+                            options.uncompress_shared_libraries)
+
+        if options.secondary_android_abi:
+          _AddNativeLibraries(out_apk,
+                              secondary_native_libs,
+                              options.secondary_android_abi,
+                              options.uncompress_shared_libraries)
+
+        for name in sorted(options.native_lib_placeholders):
+          # Empty lib files are ignored by md5check, but rezip requires them
+          # to be empty in order to identify them as placeholders.
+          apk_path = 'lib/%s/%s' % (options.android_abi, name)
+          build_utils.AddToZipHermetic(out_apk, apk_path, data='')
+
+        # 5. Resources
+        for info in resource_infos[1:]:
+          copy_resource(info)
+
+        # 6. Java resources. Used only when coverage is enabled, so order
+        # doesn't matter.
+        if options.emma_device_jar:
+          # Add EMMA Java resources to APK.
+          with zipfile.ZipFile(options.emma_device_jar, 'r') as emma_device_jar:
+            for apk_path in emma_device_jar.namelist():
+              apk_path_lower = apk_path.lower()
+              if apk_path_lower.startswith('meta-inf/'):
+                continue
+
+              if apk_path_lower.endswith('/'):
+                continue
+
+              if apk_path_lower.endswith('.class'):
+                continue
+
+              build_utils.AddToZipHermetic(out_apk, apk_path,
+                                           data=emma_device_jar.read(apk_path))
+
+      shutil.move(tmp_apk, options.output_apk)
+    finally:
+      if os.path.exists(tmp_apk):
+        os.unlink(tmp_apk)
+
+  build_utils.CallAndWriteDepfileIfStale(
+      on_stale_md5,
+      options,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=[options.output_apk],
+      depfile_deps=depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/build/android/gyp/configure_multidex.py b/build/android/gyp/configure_multidex.py
new file mode 100644
index 00000000000..63c74f07be7
--- /dev/null
+++ b/build/android/gyp/configure_multidex.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
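+#
+# The emitted JSON configuration is consumed by dex.py and apk_obfuscate.py
+# and currently carries a single flag, e.g. (illustrative): {"enabled": true}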
+ + +import argparse +import json +import os +import sys + +from util import build_utils + + +_GCC_PREPROCESS_PATH = os.path.join( + os.path.dirname(__file__), 'gcc_preprocess.py') + + +def ParseArgs(): + parser = argparse.ArgumentParser() + parser.add_argument('--configuration-name', required=True, + help='The build CONFIGURATION_NAME.') + parser.add_argument('--enable-multidex', action='store_true', default=False, + help='If passed, multidex may be enabled.') + parser.add_argument('--enabled-configurations', default=[], + help='The configuration(s) for which multidex should be ' + 'enabled. If not specified and --enable-multidex is ' + 'passed, multidex will be enabled for all ' + 'configurations.') + parser.add_argument('--multidex-configuration-path', required=True, + help='The path to which the multidex configuration JSON ' + 'should be saved.') + parser.add_argument('--multidex-config-java-file', required=True) + parser.add_argument('--multidex-config-java-stamp', required=True) + parser.add_argument('--multidex-config-java-template', required=True) + + args = parser.parse_args() + + if args.enabled_configurations: + args.enabled_configurations = build_utils.ParseGnList( + args.enabled_configurations) + + return args + + +def _WriteConfigJson(multidex_enabled, multidex_configuration_path): + config = { + 'enabled': multidex_enabled, + } + + with open(multidex_configuration_path, 'w') as f: + f.write(json.dumps(config)) + + +def _GenerateMultidexConfigJava(multidex_enabled, args): + gcc_preprocess_cmd = [ + sys.executable, _GCC_PREPROCESS_PATH, + '--include-path=', + '--template', args.multidex_config_java_template, + '--stamp', args.multidex_config_java_stamp, + '--output', args.multidex_config_java_file, + ] + if multidex_enabled: + gcc_preprocess_cmd += [ + '--defines', 'ENABLE_MULTIDEX', + ] + + build_utils.CheckOutput(gcc_preprocess_cmd) + + +def main(): + args = ParseArgs() + + multidex_enabled = ( + args.enable_multidex + and (not args.enabled_configurations + or args.configuration_name in args.enabled_configurations)) + + _WriteConfigJson(multidex_enabled, args.multidex_configuration_path) + _GenerateMultidexConfigJava(multidex_enabled, args) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py new file mode 100644 index 00000000000..8103a93b1ba --- /dev/null +++ b/build/android/gyp/copy_ex.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Copies files to a directory.""" + +import itertools +import optparse +import os +import shutil +import sys + +from util import build_utils + + +def _get_all_files(base): + """Returns a list of all the files in |base|. 
Each entry is relative to the
+  last path entry of |base|."""
+  result = []
+  dirname = os.path.dirname(base)
+  for root, _, files in os.walk(base):
+    result.extend([os.path.join(root[len(dirname):], f) for f in files])
+  return result

+def CopyFile(f, dest, deps):
+  """Copy file or directory and update deps."""
+  if os.path.isdir(f):
+    shutil.copytree(f, os.path.join(dest, os.path.basename(f)))
+    deps.extend(_get_all_files(f))
+  else:
+    shutil.copy(f, dest)
+    deps.append(f)

+def DoCopy(options, deps):
+  """Copy files or directories given in options.files and update deps."""
+  files = list(itertools.chain.from_iterable(build_utils.ParseGnList(f)
+                                             for f in options.files))
+
+  for f in files:
+    if os.path.isdir(f) and not options.clear:
+      print ('To avoid stale files you must use --clear when copying '
+             'directories')
+      sys.exit(-1)
+    CopyFile(f, options.dest, deps)

+def DoRenaming(options, deps):
+  """Copy and rename files given in options.renaming_sources and update deps."""
+  src_files = list(itertools.chain.from_iterable(
+      build_utils.ParseGnList(f)
+      for f in options.renaming_sources))
+
+  dest_files = list(itertools.chain.from_iterable(
+      build_utils.ParseGnList(f)
+      for f in options.renaming_destinations))
+
+  if len(src_files) != len(dest_files):
+    print('The number of renaming sources and destinations must match.')
+    sys.exit(-1)
+
+  for src, dest in itertools.izip(src_files, dest_files):
+    if os.path.isdir(src):
+      print('Renaming a directory is not supported.')
+      sys.exit(-1)
+    else:
+      CopyFile(src, os.path.join(options.dest, dest), deps)

+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--dest', help='Directory to copy files to.')
+  parser.add_option('--files', action='append',
+                    help='List of files to copy.')
+  parser.add_option('--clear', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                         'before copying files to it. This is highly recommended to '
+                         'ensure that no stale files are left in the directory.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--renaming-sources',
+                    action='append',
+                    help='List of files to be renamed while being '
+                         'copied to the dest directory.')
+  parser.add_option('--renaming-destinations',
+                    action='append',
+                    help='List of destination file names without paths; the '
+                         'number of elements must match --renaming-sources.')
+
+  options, _ = parser.parse_args(args)
+
+  if options.clear:
+    build_utils.DeleteDirectory(options.dest)
+    build_utils.MakeDirectory(options.dest)
+
+  deps = []
+
+  if options.files:
+    DoCopy(options, deps)
+
+  if options.renaming_sources:
+    DoRenaming(options, deps)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        deps + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
diff --git a/build/android/gyp/create_device_library_links.py b/build/android/gyp/create_device_library_links.py
new file mode 100644
index 00000000000..542030678ef
--- /dev/null
+++ b/build/android/gyp/create_device_library_links.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates symlinks to native libraries for an APK.
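+
+For example (illustrative): with --target-dir=/data/local/tmp/chrome/lib, a
+pushed libfoo.so ends up linked from /data/data/<apk package>/lib/libfoo.so.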
+ +The native libraries should have previously been pushed to the device (in +options.target_dir). This script then creates links in an apk's lib/ folder to +those native libraries. +""" + +import optparse +import os +import sys + +from util import build_device +from util import build_utils + +BUILD_ANDROID_DIR = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..')) +sys.path.append(BUILD_ANDROID_DIR) + +import devil_chromium +from devil.android import apk_helper +from pylib import constants + +def RunShellCommand(device, cmd): + output = device.RunShellCommand(cmd, check_return=True) + + if output: + raise Exception( + 'Unexpected output running command: ' + cmd + '\n' + + '\n'.join(output)) + + +def CreateSymlinkScript(options): + libraries = build_utils.ParseGnList(options.libraries) + + link_cmd = ( + 'rm $APK_LIBRARIES_DIR/%(lib_basename)s > /dev/null 2>&1 \n' + 'ln -s $STRIPPED_LIBRARIES_DIR/%(lib_basename)s ' + '$APK_LIBRARIES_DIR/%(lib_basename)s \n' + ) + + script = '#!/bin/sh \n' + + for lib in libraries: + script += link_cmd % { 'lib_basename': lib } + + with open(options.script_host_path, 'w') as scriptfile: + scriptfile.write(script) + + +def TriggerSymlinkScript(options): + device = build_device.GetBuildDeviceFromPath( + options.build_device_configuration) + if not device: + return + + apk_package = apk_helper.GetPackageName(options.apk) + apk_libraries_dir = '/data/data/%s/lib' % apk_package + + device_dir = os.path.dirname(options.script_device_path) + mkdir_cmd = ('if [ ! -e %(dir)s ]; then mkdir -p %(dir)s; fi ' % + { 'dir': device_dir }) + RunShellCommand(device, mkdir_cmd) + device.PushChangedFiles([(os.path.abspath(options.script_host_path), + options.script_device_path)]) + + trigger_cmd = ( + 'APK_LIBRARIES_DIR=%(apk_libraries_dir)s; ' + 'STRIPPED_LIBRARIES_DIR=%(target_dir)s; ' + '. 
%(script_device_path)s' + ) % { + 'apk_libraries_dir': apk_libraries_dir, + 'target_dir': options.target_dir, + 'script_device_path': options.script_device_path + } + RunShellCommand(device, trigger_cmd) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = optparse.OptionParser() + parser.add_option('--apk', help='Path to the apk.') + parser.add_option('--script-host-path', + help='Path on the host for the symlink script.') + parser.add_option('--script-device-path', + help='Path on the device to push the created symlink script.') + parser.add_option('--libraries', + help='List of native libraries.') + parser.add_option('--target-dir', + help='Device directory that contains the target libraries for symlinks.') + parser.add_option('--stamp', help='Path to touch on success.') + parser.add_option('--build-device-configuration', + help='Path to build device configuration.') + parser.add_option('--configuration-name', + help='The build CONFIGURATION_NAME') + parser.add_option('--output-directory', + help='The output directory') + options, _ = parser.parse_args(args) + + required_options = ['apk', 'libraries', 'script_host_path', + 'script_device_path', 'target_dir', 'configuration_name'] + build_utils.CheckOptions(options, parser, required=required_options) + constants.SetBuildType(options.configuration_name) + + devil_chromium.Initialize( + output_directory=os.path.abspath(options.output_directory)) + + CreateSymlinkScript(options) + TriggerSymlinkScript(options) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/create_dist_jar.py b/build/android/gyp/create_dist_jar.py new file mode 100644 index 00000000000..582434f787e --- /dev/null +++ b/build/android/gyp/create_dist_jar.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Merges a list of jars into a single jar.""" + +import optparse +import sys + +from util import build_utils + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--output', help='Path to output jar.') + parser.add_option('--inputs', action='append', help='List of jar inputs.') + options, _ = parser.parse_args(args) + build_utils.CheckOptions(options, parser, ['output', 'inputs']) + + input_jars = [] + for inputs_arg in options.inputs: + input_jars.extend(build_utils.ParseGnList(inputs_arg)) + + build_utils.MergeZips(options.output, input_jars) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + input_jars + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/create_java_binary_script.py b/build/android/gyp/create_java_binary_script.py new file mode 100644 index 00000000000..487bff41f90 --- /dev/null +++ b/build/android/gyp/create_java_binary_script.py @@ -0,0 +1,119 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates a simple script to run a java "binary". + +This creates a script that sets up the java command line for running a java +jar. This includes correctly setting the classpath and the main class. 
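+
+A generated wrapper might be invoked as, for example (illustrative):
+  out/Debug/bin/foo_tool --jvm-args="-Xmx1G" input.txt
+where the --jvm-args value is passed to the JVM and remaining arguments are
+forwarded to the jar's main class.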
+""" + +import optparse +import os +import sys + +from util import build_utils + +# The java command must be executed in the current directory because there may +# be user-supplied paths in the args. The script receives the classpath relative +# to the directory that the script is written in and then, when run, must +# recalculate the paths relative to the current directory. +script_template = """\ +#!/usr/bin/env python +# +# This file was generated by build/android/gyp/create_java_binary_script.py + +import argparse +import os +import sys + +self_dir = os.path.dirname(__file__) +classpath = [{classpath}] +bootclasspath = [{bootclasspath}] +extra_program_args = {extra_program_args} +if os.getcwd() != self_dir: + offset = os.path.relpath(self_dir, os.getcwd()) + classpath = [os.path.join(offset, p) for p in classpath] + bootclasspath = [os.path.join(offset, p) for p in bootclasspath] +java_cmd = ["java"] +# This is a simple argparser for jvm and jar arguments. +parser = argparse.ArgumentParser() +parser.add_argument('--jar-args') +parser.add_argument('--jvm-args') + +known_args, unknown_args = parser.parse_known_args(sys.argv[1:]) +if known_args.jvm_args: + jvm_arguments = known_args.jvm_args.strip('"').split() + java_cmd.extend(jvm_arguments) +if known_args.jar_args: + jar_arguments = known_args.jar_args.strip('"').split() + if unknown_args: + raise Exception('There are unknown arguments') +else: + jar_arguments = unknown_args + +{noverify_flag} +if bootclasspath: + java_cmd.append("-Xbootclasspath/p:" + ":".join(bootclasspath)) +java_cmd.extend( + ["-classpath", ":".join(classpath), "-enableassertions", \"{main_class}\"]) +java_cmd.extend(extra_program_args) +java_cmd.extend(jar_arguments) +os.execvp("java", java_cmd) +""" + +def main(argv): + argv = build_utils.ExpandFileArgs(argv) + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--output', help='Output path for executable script.') + parser.add_option('--jar-path', help='Path to the main jar.') + parser.add_option('--main-class', + help='Name of the java class with the "main" entry point.') + parser.add_option('--classpath', action='append', default=[], + help='Classpath for running the jar.') + parser.add_option('--bootclasspath', action='append', default=[], + help='zip/jar files to add to bootclasspath for java cmd.') + parser.add_option('--noverify', action='store_true', + help='JVM flag: noverify.') + + options, extra_program_args = parser.parse_args(argv) + + if (options.noverify): + noverify_flag = 'java_cmd.append("-noverify")' + else: + noverify_flag = '' + + classpath = [options.jar_path] + for cp_arg in options.classpath: + classpath += build_utils.ParseGnList(cp_arg) + + bootclasspath = [] + for bootcp_arg in options.bootclasspath: + bootclasspath += build_utils.ParseGnList(bootcp_arg) + + run_dir = os.path.dirname(options.output) + bootclasspath = [os.path.relpath(p, run_dir) for p in bootclasspath] + classpath = [os.path.relpath(p, run_dir) for p in classpath] + + with open(options.output, 'w') as script: + script.write(script_template.format( + classpath=('"%s"' % '", "'.join(classpath)), + bootclasspath=('"%s"' % '", "'.join(bootclasspath) + if bootclasspath else ''), + main_class=options.main_class, + extra_program_args=repr(extra_program_args), + noverify_flag=noverify_flag)) + + os.chmod(options.output, 0750) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + 
sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/create_placeholder_files.py b/build/android/gyp/create_placeholder_files.py new file mode 100644 index 00000000000..103e1df7f2d --- /dev/null +++ b/build/android/gyp/create_placeholder_files.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Create placeholder files. +""" + +import optparse +import os +import sys + +from util import build_utils + +def main(): + parser = optparse.OptionParser() + parser.add_option( + '--dest-lib-dir', + help='Destination directory to have placeholder files.') + parser.add_option( + '--stamp', + help='Path to touch on success') + + options, args = parser.parse_args() + + for name in args: + target_path = os.path.join(options.dest_lib_dir, name) + build_utils.Touch(target_path) + + if options.stamp: + build_utils.Touch(options.stamp) + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/gyp/create_standalone_apk.py b/build/android/gyp/create_standalone_apk.py new file mode 100644 index 00000000000..c5605992860 --- /dev/null +++ b/build/android/gyp/create_standalone_apk.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Combines stripped libraries and incomplete APK into single standalone APK. + +""" + +import optparse +import os +import shutil +import sys +import tempfile + +from util import build_utils +from util import md5_check + +def CreateStandaloneApk(options): + def DoZip(): + with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file: + intermediate_path = intermediate_file.name + shutil.copy(options.input_apk_path, intermediate_path) + apk_path_abs = os.path.abspath(intermediate_path) + build_utils.CheckOutput( + ['zip', '-r', '-1', apk_path_abs, 'lib'], + cwd=options.libraries_top_dir) + shutil.copy(intermediate_path, options.output_apk_path) + + input_paths = [options.input_apk_path, options.libraries_top_dir] + record_path = '%s.standalone.stamp' % options.input_apk_path + md5_check.CallAndRecordIfStale( + DoZip, + record_path=record_path, + input_paths=input_paths) + + +def main(): + parser = optparse.OptionParser() + parser.add_option('--libraries-top-dir', + help='Top directory that contains libraries ' + '(i.e. library paths are like ' + 'libraries_top_dir/lib/android_app_abi/foo.so).') + parser.add_option('--input-apk-path', help='Path to incomplete APK.') + parser.add_option('--output-apk-path', help='Path for standalone APK.') + parser.add_option('--stamp', help='Path to touch on success.') + options, _ = parser.parse_args() + + required_options = ['libraries_top_dir', 'input_apk_path', 'output_apk_path'] + build_utils.CheckOptions(options, parser, required=required_options) + + CreateStandaloneApk(options) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/create_test_runner_script.py b/build/android/gyp/create_test_runner_script.py new file mode 100644 index 00000000000..d4301871520 --- /dev/null +++ b/build/android/gyp/create_test_runner_script.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python +# +# Copyright 2015 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates a script to run an android test using build/android/test_runner.py. +""" + +import argparse +import os +import sys + +from util import build_utils + +SCRIPT_TEMPLATE = """\ +#!/usr/bin/env python +# +# This file was generated by build/android/gyp/create_test_runner_script.py + +import os +import subprocess +import sys + +def main(): + script_directory = os.path.dirname(__file__) + + def ResolvePath(path): + \"\"\"Returns an absolute filepath given a path relative to this script. + \"\"\" + return os.path.abspath(os.path.join(script_directory, path)) + + test_runner_path = ResolvePath('{test_runner_path}') + test_runner_args = {test_runner_args} + test_runner_path_args = {test_runner_path_args} + for arg, path in test_runner_path_args: + test_runner_args.extend([arg, ResolvePath(path)]) + + test_runner_cmd = [test_runner_path] + test_runner_args + sys.argv[1:] + return subprocess.call(test_runner_cmd) + +if __name__ == '__main__': + sys.exit(main()) +""" + +def main(args): + parser = argparse.ArgumentParser() + parser.add_argument('--script-output-path', + help='Output path for executable script.') + parser.add_argument('--depfile', + help='Path to the depfile. This must be specified as ' + "the action's first output.") + parser.add_argument('--test-runner-path', + help='Path to test_runner.py (optional).') + # We need to intercept any test runner path arguments and make all + # of the paths relative to the output script directory. + group = parser.add_argument_group('Test runner path arguments.') + group.add_argument('--additional-apk', action='append', + dest='additional_apks', default=[]) + group.add_argument('--additional-apk-list') + group.add_argument('--apk-under-test') + group.add_argument('--apk-under-test-incremental-install-script') + group.add_argument('--executable-dist-dir') + group.add_argument('--isolate-file-path') + group.add_argument('--output-directory') + group.add_argument('--test-apk') + group.add_argument('--test-apk-incremental-install-script') + group.add_argument('--coverage-dir') + args, test_runner_args = parser.parse_known_args( + build_utils.ExpandFileArgs(args)) + + def RelativizePathToScript(path): + """Returns the path relative to the output script directory.""" + return os.path.relpath(path, os.path.dirname(args.script_output_path)) + + test_runner_path = args.test_runner_path or os.path.join( + os.path.dirname(__file__), os.path.pardir, 'test_runner.py') + test_runner_path = RelativizePathToScript(test_runner_path) + + test_runner_path_args = [] + if args.additional_apk_list: + args.additional_apks.extend( + build_utils.ParseGnList(args.additional_apk_list)) + if args.additional_apks: + test_runner_path_args.extend( + ('--additional-apk', RelativizePathToScript(a)) + for a in args.additional_apks) + if args.apk_under_test: + test_runner_path_args.append( + ('--apk-under-test', RelativizePathToScript(args.apk_under_test))) + if args.apk_under_test_incremental_install_script: + test_runner_path_args.append( + ('--apk-under-test-incremental-install-script', + RelativizePathToScript( + args.apk_under_test_incremental_install_script))) + if args.executable_dist_dir: + test_runner_path_args.append( + ('--executable-dist-dir', + RelativizePathToScript(args.executable_dist_dir))) + if args.isolate_file_path: + test_runner_path_args.append( + ('--isolate-file-path', RelativizePathToScript(args.isolate_file_path))) + if args.output_directory: + 
test_runner_path_args.append(
+        ('--output-directory', RelativizePathToScript(args.output_directory)))
+  if args.test_apk:
+    test_runner_path_args.append(
+        ('--test-apk', RelativizePathToScript(args.test_apk)))
+  if args.test_apk_incremental_install_script:
+    test_runner_path_args.append(
+        ('--test-apk-incremental-install-script',
+         RelativizePathToScript(args.test_apk_incremental_install_script)))
+  if args.coverage_dir:
+    test_runner_path_args.append(
+        ('--coverage-dir', RelativizePathToScript(args.coverage_dir)))
+
+  with open(args.script_output_path, 'w') as script:
+    script.write(SCRIPT_TEMPLATE.format(
+        test_runner_path=str(test_runner_path),
+        test_runner_args=str(test_runner_args),
+        test_runner_path_args=str(test_runner_path_args)))
+
+  os.chmod(args.script_output_path, 0750)
+
+  if args.depfile:
+    build_utils.WriteDepfile(
+        args.depfile,
+        build_utils.GetPythonDependencies())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_tool_wrapper.py b/build/android/gyp/create_tool_wrapper.py
new file mode 100644
index 00000000000..44330045410
--- /dev/null
+++ b/build/android/gyp/create_tool_wrapper.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple wrapper script that passes the correct --output-directory.
+"""
+
+import argparse
+import os
+
+_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by //build/android/gyp/create_tool_wrapper.py
+
+import os
+import sys
+
+cmd = '{cmd}'
+args = [os.path.basename(cmd), '{flag_name}={output_directory}'] + sys.argv[1:]
+os.execv(cmd, args)
+"""
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--output', help='Output path for executable script.')
+  parser.add_argument('--target', help='Path to script being wrapped.')
+  parser.add_argument('--output-directory', help='Value for --output-directory')
+  parser.add_argument('--flag-name',
+                      help='Flag name to use instead of --output-directory',
+                      default='--output-directory')
+  args = parser.parse_args()
+
+  with open(args.output, 'w') as script:
+    script.write(_TEMPLATE.format(
+        cmd=os.path.abspath(args.target),
+        flag_name=args.flag_name,
+        output_directory=os.path.abspath(args.output_directory)))
+
+  os.chmod(args.output, 0750)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/dex.py b/build/android/gyp/dex.py
new file mode 100644
index 00000000000..87e9b06bcd8
--- /dev/null
+++ b/build/android/gyp/dex.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import optparse
+import os
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+
+def _CheckFilePathEndsWithJar(parser, file_path):
+  if not file_path.endswith(".jar"):
+    # dx ignores non .jar files.
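+    # e.g. (illustrative) an input like 'out/foo/classes.zip' fails fast here
+    # rather than being silently skipped by dx later.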
+ parser.error("%s does not end in .jar" % file_path) + + +def _CheckFilePathsEndWithJar(parser, file_paths): + for file_path in file_paths: + _CheckFilePathEndsWithJar(parser, file_path) + + +def _RemoveUnwantedFilesFromZip(dex_path): + iz = zipfile.ZipFile(dex_path, 'r') + tmp_dex_path = '%s.tmp.zip' % dex_path + oz = zipfile.ZipFile(tmp_dex_path, 'w', zipfile.ZIP_DEFLATED) + for i in iz.namelist(): + if i.endswith('.dex'): + oz.writestr(i, iz.read(i)) + os.remove(dex_path) + os.rename(tmp_dex_path, dex_path) + + +def _ParseArgs(args): + args = build_utils.ExpandFileArgs(args) + + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--android-sdk-tools', + help='Android sdk build tools directory.') + parser.add_option('--output-directory', + default=os.getcwd(), + help='Path to the output build directory.') + parser.add_option('--dex-path', help='Dex output path.') + parser.add_option('--configuration-name', + help='The build CONFIGURATION_NAME.') + parser.add_option('--proguard-enabled', + help='"true" if proguard is enabled.') + parser.add_option('--debug-build-proguard-enabled', + help='"true" if proguard is enabled for debug build.') + parser.add_option('--proguard-enabled-input-path', + help=('Path to dex in Release mode when proguard ' + 'is enabled.')) + parser.add_option('--no-locals', default='0', + help='Exclude locals list from the dex file.') + parser.add_option('--incremental', + action='store_true', + help='Enable incremental builds when possible.') + parser.add_option('--inputs', help='A list of additional input paths.') + parser.add_option('--excluded-paths', + help='A list of paths to exclude from the dex file.') + parser.add_option('--main-dex-list-path', + help='A file containing a list of the classes to ' + 'include in the main dex.') + parser.add_option('--multidex-configuration-path', + help='A JSON file containing multidex build configuration.') + parser.add_option('--multi-dex', default=False, action='store_true', + help='Generate multiple dex files.') + + options, paths = parser.parse_args(args) + + required_options = ('android_sdk_tools',) + build_utils.CheckOptions(options, parser, required=required_options) + + if options.multidex_configuration_path: + with open(options.multidex_configuration_path) as multidex_config_file: + multidex_config = json.loads(multidex_config_file.read()) + options.multi_dex = multidex_config.get('enabled', False) + + if options.multi_dex and not options.main_dex_list_path: + logging.warning('multidex cannot be enabled without --main-dex-list-path') + options.multi_dex = False + elif options.main_dex_list_path and not options.multi_dex: + logging.warning('--main-dex-list-path is unused if multidex is not enabled') + + if options.inputs: + options.inputs = build_utils.ParseGnList(options.inputs) + _CheckFilePathsEndWithJar(parser, options.inputs) + if options.excluded_paths: + options.excluded_paths = build_utils.ParseGnList(options.excluded_paths) + + if options.proguard_enabled_input_path: + _CheckFilePathEndsWithJar(parser, options.proguard_enabled_input_path) + _CheckFilePathsEndWithJar(parser, paths) + + return options, paths + + +def _AllSubpathsAreClassFiles(paths, changes): + for path in paths: + if any(not p.endswith('.class') for p in changes.IterChangedSubpaths(path)): + return False + return True + + +def _DexWasEmpty(paths, changes): + for path in paths: + if any(p.endswith('.class') + for p in changes.old_metadata.IterSubpaths(path)): + return False + return True + + +def 
_IterAllClassFiles(changes):
+  for path in changes.IterAllPaths():
+    for subpath in changes.IterAllSubpaths(path):
+      if subpath.endswith('.class'):
+        # Yield the .class subpath (not the containing zip path) so that
+        # callers can compare entries against IterChangedSubpaths().
+        yield subpath
+
+
+def _MightHitDxBug(changes):
+  # We've seen dx --incremental fail for small libraries. It's unlikely a
+  # speed-up anyways in this case.
+  num_classes = sum(1 for x in _IterAllClassFiles(changes))
+  if num_classes < 10:
+    return True
+
+  # We've also been able to consistently produce a failure by adding an empty
+  # line to the top of the first .java file of a library.
+  # https://crbug.com/617935
+  first_file = next(_IterAllClassFiles(changes))
+  for path in changes.IterChangedPaths():
+    for subpath in changes.IterChangedSubpaths(path):
+      if first_file == subpath:
+        return True
+  return False
+
+
+def _RunDx(changes, options, dex_cmd, paths):
+  with build_utils.TempDir() as classes_temp_dir:
+    # --multi-dex is incompatible with --incremental.
+    if options.multi_dex:
+      dex_cmd.append('--main-dex-list=%s' % options.main_dex_list_path)
+    else:
+      # --incremental tells dx to merge all newly dex'ed .class files with
+      # those that already exist in the output dex file (existing classes are
+      # replaced).
+      # Use --incremental when .class files are added or modified, but not when
+      # any are removed (since it won't know to remove them).
+      if (options.incremental
+          and not _MightHitDxBug(changes)
+          and changes.AddedOrModifiedOnly()):
+        changed_inputs = set(changes.IterChangedPaths())
+        changed_paths = [p for p in paths if p in changed_inputs]
+        if not changed_paths:
+          return
+        # When merging in other dex files, there's no easy way to know if
+        # classes were removed from them.
+        if (_AllSubpathsAreClassFiles(changed_paths, changes)
+            and not _DexWasEmpty(changed_paths, changes)):
+          dex_cmd.append('--incremental')
+          for path in changed_paths:
+            changed_subpaths = set(changes.IterChangedSubpaths(path))
+            # Note: |changed_subpaths| may be empty if nothing changed.
+            if changed_subpaths:
+              build_utils.ExtractAll(path, path=classes_temp_dir,
+                                     predicate=lambda p: p in changed_subpaths)
+          paths = [classes_temp_dir]
+
+    dex_cmd += paths
+    build_utils.CheckOutput(dex_cmd, print_stderr=False)
+
+  if options.dex_path.endswith('.zip'):
+    _RemoveUnwantedFilesFromZip(options.dex_path)
+
+
+def _OnStaleMd5(changes, options, dex_cmd, paths):
+  _RunDx(changes, options, dex_cmd, paths)
+  build_utils.WriteJson(
+      [os.path.relpath(p, options.output_directory) for p in paths],
+      options.dex_path + '.inputs')
+
+
+def main(args):
+  options, paths = _ParseArgs(args)
+  if ((options.proguard_enabled == 'true'
+       and options.configuration_name == 'Release')
+      or (options.debug_build_proguard_enabled == 'true'
+          and options.configuration_name == 'Debug')):
+    paths = [options.proguard_enabled_input_path]
+
+  if options.inputs:
+    paths += options.inputs
+
+  if options.excluded_paths:
+    # Excluded paths are relative to the output directory.
+    exclude_paths = options.excluded_paths
+    paths = [p for p in paths if not
+             os.path.relpath(p, options.output_directory) in exclude_paths]
+
+  input_paths = list(paths)
+
+  dx_binary = os.path.join(options.android_sdk_tools, 'dx')
+  # See http://crbug.com/272064 for context on --force-jumbo.
+  # See https://github.com/android/platform_dalvik/commit/dd140a22d for
+  # --num-threads.
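+  # The assembled command resembles, e.g. (illustrative paths):
+  #   dx --num-threads=8 --dex --force-jumbo --output out/foo.dex.zip in.jar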
+ dex_cmd = [dx_binary, '--num-threads=8', '--dex', '--force-jumbo', + '--output', options.dex_path] + if options.no_locals != '0': + dex_cmd.append('--no-locals') + + if options.multi_dex: + input_paths.append(options.main_dex_list_path) + dex_cmd += [ + '--multi-dex', + '--minimal-main-dex', + ] + + output_paths = [ + options.dex_path, + options.dex_path + '.inputs', + ] + + # An escape hatch to be able to check if incremental dexing is causing + # problems. + force = int(os.environ.get('DISABLE_INCREMENTAL_DX', 0)) + + build_utils.CallAndWriteDepfileIfStale( + lambda changes: _OnStaleMd5(changes, options, dex_cmd, paths), + options, + input_paths=input_paths, + input_strings=dex_cmd, + output_paths=output_paths, + force=force, + pass_changes=True) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/emma_instr.py b/build/android/gyp/emma_instr.py new file mode 100644 index 00000000000..57be36be493 --- /dev/null +++ b/build/android/gyp/emma_instr.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Instruments classes and jar files. + +This script corresponds to the 'emma_instr' action in the java build process. +Depending on whether emma_instrument is set, the 'emma_instr' action will either +call the instrument command or the copy command. + +Possible commands are: +- instrument_jar: Accepts a jar and instruments it using emma.jar. +- copy: Called when EMMA coverage is not enabled. This allows us to make + this a required step without necessarily instrumenting on every build. + Also removes any stale coverage files. +""" + +import collections +import json +import os +import shutil +import sys +import tempfile + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir)) +from pylib.utils import command_option_parser + +from util import build_utils + + +def _AddCommonOptions(option_parser): + """Adds common options to |option_parser|.""" + build_utils.AddDepfileOption(option_parser) + option_parser.add_option('--input-path', + help=('Path to input file(s). Either the classes ' + 'directory, or the path to a jar.')) + option_parser.add_option('--output-path', + help=('Path to output final file(s) to. Either the ' + 'final classes directory, or the directory in ' + 'which to place the instrumented/copied jar.')) + option_parser.add_option('--stamp', help='Path to touch when done.') + option_parser.add_option('--coverage-file', + help='File to create with coverage metadata.') + option_parser.add_option('--sources-list-file', + help='File to create with the list of sources.') + + +def _AddInstrumentOptions(option_parser): + """Adds options related to instrumentation to |option_parser|.""" + _AddCommonOptions(option_parser) + option_parser.add_option('--source-dirs', + help='Space separated list of source directories. ' + 'source-files should not be specified if ' + 'source-dirs is specified') + option_parser.add_option('--source-files', + help='Space separated list of source files. 
' + 'source-dirs should not be specified if ' + 'source-files is specified') + option_parser.add_option('--java-sources-file', + help='File containing newline-separated .java paths') + option_parser.add_option('--src-root', + help='Root of the src repository.') + option_parser.add_option('--emma-jar', + help='Path to emma.jar.') + option_parser.add_option( + '--filter-string', default='', + help=('Filter string consisting of a list of inclusion/exclusion ' + 'patterns separated with whitespace and/or comma.')) + + +def _RunCopyCommand(_command, options, _, option_parser): + """Copies the jar from input to output locations. + + Also removes any old coverage/sources file. + + Args: + command: String indicating the command that was received to trigger + this function. + options: optparse options dictionary. + args: List of extra args from optparse. + option_parser: optparse.OptionParser object. + + Returns: + An exit code. + """ + if not (options.input_path and options.output_path and + options.coverage_file and options.sources_list_file): + option_parser.error('All arguments are required.') + + if os.path.exists(options.coverage_file): + os.remove(options.coverage_file) + if os.path.exists(options.sources_list_file): + os.remove(options.sources_list_file) + + shutil.copy(options.input_path, options.output_path) + + if options.stamp: + build_utils.Touch(options.stamp) + + if options.depfile: + build_utils.WriteDepfile(options.depfile, + build_utils.GetPythonDependencies()) + + +def _GetSourceDirsFromSourceFiles(source_files): + """Returns list of directories for the files in |source_files|. + + Args: + source_files: List of source files. + + Returns: + List of source directories. + """ + return list(set(os.path.dirname(source_file) for source_file in source_files)) + + +def _CreateSourcesListFile(source_dirs, sources_list_file, src_root): + """Adds all normalized source directories to |sources_list_file|. + + Args: + source_dirs: List of source directories. + sources_list_file: File into which to write the JSON list of sources. + src_root: Root which sources added to the file should be relative to. + + Returns: + An exit code. + """ + src_root = os.path.abspath(src_root) + relative_sources = [] + for s in source_dirs: + abs_source = os.path.abspath(s) + if abs_source[:len(src_root)] != src_root: + print ('Error: found source directory not under repository root: %s %s' + % (abs_source, src_root)) + return 1 + rel_source = os.path.relpath(abs_source, src_root) + + relative_sources.append(rel_source) + + with open(sources_list_file, 'w') as f: + json.dump(relative_sources, f) + + +def _RunInstrumentCommand(_command, options, _, option_parser): + """Instruments jar files using EMMA. + + Args: + command: String indicating the command that was received to trigger + this function. + options: optparse options dictionary. + args: List of extra args from optparse. + option_parser: optparse.OptionParser object. + + Returns: + An exit code. 
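+
+  The underlying EMMA call is roughly the following (paths illustrative):
+    java -cp emma.jar emma instr -ip input.jar -ix <filter> \
+        -d <temp_dir> -out coverage.em -m fullcopy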
+ """ + if not (options.input_path and options.output_path and + options.coverage_file and options.sources_list_file and + (options.source_files or options.source_dirs or + options.java_sources_file) and + options.src_root and options.emma_jar): + option_parser.error('All arguments are required.') + + if os.path.exists(options.coverage_file): + os.remove(options.coverage_file) + temp_dir = tempfile.mkdtemp() + try: + cmd = ['java', '-cp', options.emma_jar, + 'emma', 'instr', + '-ip', options.input_path, + '-ix', options.filter_string, + '-d', temp_dir, + '-out', options.coverage_file, + '-m', 'fullcopy'] + build_utils.CheckOutput(cmd) + + # File is not generated when filter_string doesn't match any files. + if not os.path.exists(options.coverage_file): + build_utils.Touch(options.coverage_file) + + temp_jar_dir = os.path.join(temp_dir, 'lib') + jars = os.listdir(temp_jar_dir) + if len(jars) != 1: + print('Error: multiple output files in: %s' % (temp_jar_dir)) + return 1 + + # Delete output_path first to avoid modifying input_path in the case where + # input_path is a hardlink to output_path. http://crbug.com/571642 + if os.path.exists(options.output_path): + os.unlink(options.output_path) + shutil.move(os.path.join(temp_jar_dir, jars[0]), options.output_path) + finally: + shutil.rmtree(temp_dir) + + if options.source_dirs: + source_dirs = build_utils.ParseGnList(options.source_dirs) + else: + source_files = [] + if options.source_files: + source_files += build_utils.ParseGnList(options.source_files) + if options.java_sources_file: + source_files.extend( + build_utils.ReadSourcesList(options.java_sources_file)) + source_dirs = _GetSourceDirsFromSourceFiles(source_files) + + # TODO(GYP): In GN, we are passed the list of sources, detecting source + # directories, then walking them to re-establish the list of sources. + # This can obviously be simplified! + _CreateSourcesListFile(source_dirs, options.sources_list_file, + options.src_root) + + if options.stamp: + build_utils.Touch(options.stamp) + + if options.depfile: + build_utils.WriteDepfile(options.depfile, + build_utils.GetPythonDependencies()) + + return 0 + + +CommandFunctionTuple = collections.namedtuple( + 'CommandFunctionTuple', ['add_options_func', 'run_command_func']) +VALID_COMMANDS = { + 'copy': CommandFunctionTuple(_AddCommonOptions, + _RunCopyCommand), + 'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions, + _RunInstrumentCommand), +} + + +def main(): + option_parser = command_option_parser.CommandOptionParser( + commands_dict=VALID_COMMANDS) + command_option_parser.ParseAndExecute(option_parser) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/finalize_apk.py b/build/android/gyp/finalize_apk.py new file mode 100644 index 00000000000..532d001f723 --- /dev/null +++ b/build/android/gyp/finalize_apk.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Signs and zipaligns APK. + +""" + +import optparse +import os +import shutil +import sys +import tempfile +import zipfile + +# resource_sizes modifies zipfile for zip64 compatibility. See +# https://bugs.python.org/issue14315. 
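+#
+# A minimal sketch of that import-for-side-effect pattern (module and class
+# names here are hypothetical, not the actual patch resource_sizes applies):
+#
+#   # zip64_patch.py
+#   import zipfile
+#   zipfile.ZipFile = PatchedZip64ZipFile  # hypothetical replacement
+#
+#   # consumer.py
+#   import zip64_patch  # pylint: disable=unused-import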
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+import resource_sizes  # pylint: disable=unused-import
+
+from util import build_utils
+
+def RenameInflateAndAddPageAlignment(
+    rezip_apk_jar_path, in_zip_file, out_zip_file):
+  rezip_apk_cmd = [
+      'java',
+      '-classpath',
+      rezip_apk_jar_path,
+      'RezipApk',
+      'renamealign',
+      in_zip_file,
+      out_zip_file,
+    ]
+  build_utils.CheckOutput(rezip_apk_cmd)
+
+
+def ReorderAndAlignApk(rezip_apk_jar_path, in_zip_file, out_zip_file):
+  rezip_apk_cmd = [
+      'java',
+      '-classpath',
+      rezip_apk_jar_path,
+      'RezipApk',
+      'reorder',
+      in_zip_file,
+      out_zip_file,
+    ]
+  build_utils.CheckOutput(rezip_apk_cmd)
+
+
+def JarSigner(key_path, key_name, key_passwd, unsigned_path, signed_path):
+  shutil.copy(unsigned_path, signed_path)
+  sign_cmd = [
+      'jarsigner',
+      '-sigalg', 'MD5withRSA',
+      '-digestalg', 'SHA1',
+      '-keystore', key_path,
+      '-storepass', key_passwd,
+      signed_path,
+      key_name,
+    ]
+  build_utils.CheckOutput(sign_cmd)
+
+
+def AlignApk(zipalign_path, package_align, unaligned_path, final_path):
+  align_cmd = [
+      zipalign_path,
+      '-f'
+      ]
+
+  if package_align:
+    align_cmd += ['-p']
+
+  align_cmd += [
+      '4',  # 4 bytes
+      unaligned_path,
+      final_path,
+      ]
+  build_utils.CheckOutput(align_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--rezip-apk-jar-path',
+                    help='Path to the RezipApk jar file.')
+  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
+  parser.add_option('--page-align-shared-libraries',
+                    action='store_true',
+                    help='Page align shared libraries.')
+  parser.add_option('--unsigned-apk-path', help='Path to input unsigned APK.')
+  parser.add_option('--final-apk-path',
+                    help='Path to output signed and aligned APK.')
+  parser.add_option('--key-path', help='Path to keystore for signing.')
+  parser.add_option('--key-passwd', help='Keystore password')
+  parser.add_option('--key-name', help='Keystore name')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--load-library-from-zip', type='int',
+      help='If non-zero, build the APK such that the library can be loaded ' +
+           'directly from the zip file using the crazy linker. The library ' +
+           'will be renamed, uncompressed and page aligned.')
+
+  options, _ = parser.parse_args(args)
+
+  input_paths = [
+    options.unsigned_apk_path,
+    options.key_path,
+  ]
+
+  if options.load_library_from_zip:
+    input_paths.append(options.rezip_apk_jar_path)
+
+  input_strings = [
+    options.load_library_from_zip,
+    options.key_name,
+    options.key_passwd,
+    options.page_align_shared_libraries,
+  ]
+
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda: FinalizeApk(options),
+      options,
+      record_path=options.unsigned_apk_path + '.finalize.md5.stamp',
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=[options.final_apk_path])
+
+
+def FinalizeApk(options):
+  with tempfile.NamedTemporaryFile() as signed_apk_path_tmp, \
+      tempfile.NamedTemporaryFile() as apk_to_sign_tmp:
+
+    if options.load_library_from_zip:
+      # We alter the name of the library so that the Android Package Manager
+      # does not extract it into a separate file. This must be done before
+      # signing, as the filename is part of the signed manifest. At the same
+      # time we uncompress the library, which is necessary so that it can be
+      # loaded directly from the APK.
+      # Move the library to a page boundary by adding a page alignment file.
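+      # For example (library name illustrative): lib/armeabi-v7a/libfoo.so
+      # may come back as an uncompressed, renamed entry such as
+      # lib/armeabi-v7a/crazy.libfoo.so that the crazy linker can mmap
+      # straight out of the APK.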
+      apk_to_sign = apk_to_sign_tmp.name
+      RenameInflateAndAddPageAlignment(
+          options.rezip_apk_jar_path, options.unsigned_apk_path, apk_to_sign)
+    else:
+      apk_to_sign = options.unsigned_apk_path
+
+    signed_apk_path = signed_apk_path_tmp.name
+    JarSigner(options.key_path, options.key_name, options.key_passwd,
+              apk_to_sign, signed_apk_path)
+
+    # Make the signing files hermetic.
+    with tempfile.NamedTemporaryFile(suffix='.zip') as hermetic_signed_apk:
+      with zipfile.ZipFile(signed_apk_path, 'r') as zi:
+        with zipfile.ZipFile(hermetic_signed_apk, 'w') as zo:
+          for info in zi.infolist():
+            # Ignore 'extended local file headers'. Python doesn't write them
+            # properly (see https://bugs.python.org/issue1742205) which causes
+            # zipalign to miscalculate alignment. Since we don't use them except
+            # for alignment anyway, we write a stripped file here and let
+            # zipalign add them properly later. eLFHs are controlled by 'general
+            # purpose bit flag 03' (0x08) so we mask that out.
+            info.flag_bits = info.flag_bits & 0xF7
+
+            info.date_time = build_utils.HERMETIC_TIMESTAMP
+            zo.writestr(info, zi.read(info.filename))
+
+      shutil.copy(hermetic_signed_apk.name, signed_apk_path)
+
+    if options.load_library_from_zip:
+      # Reorder the contents of the APK. This re-establishes the canonical
+      # order which means the library will be back at its page aligned location.
+      # This step also aligns uncompressed items to 4 bytes.
+      ReorderAndAlignApk(
+          options.rezip_apk_jar_path, signed_apk_path, options.final_apk_path)
+    else:
+      # Align uncompressed items to 4 bytes
+      AlignApk(options.zipalign_path,
+               options.page_align_shared_libraries,
+               signed_apk_path,
+               options.final_apk_path)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/finalize_splits.py b/build/android/gyp/finalize_splits.py
new file mode 100644
index 00000000000..cb80d037457
--- /dev/null
+++ b/build/android/gyp/finalize_splits.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and zipaligns split APKs.
+
+This script is required only by GYP (not GN).
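+
+Example invocation (the flags are the script's own; the values shown are
+illustrative only):
+  finalize_splits.py --zipalign-path=zipalign \
+      --resource-packaged-apk-path=Foo.ap_ --base-output-path=Foo \
+      --key-path=debug.keystore --key-name=debugkey --key-passwd=pw \
+      --densities=hdpi,xhdpi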
+""" + +import optparse +import sys + +import finalize_apk +from util import build_utils + +def main(): + parser = optparse.OptionParser() + parser.add_option('--zipalign-path', help='Path to the zipalign tool.') + parser.add_option('--resource-packaged-apk-path', + help='Base path to input .ap_s.') + parser.add_option('--base-output-path', + help='Path to output .apk, minus extension.') + parser.add_option('--key-path', help='Path to keystore for signing.') + parser.add_option('--key-passwd', help='Keystore password') + parser.add_option('--key-name', help='Keystore name') + parser.add_option('--densities', + help='Comma separated list of densities finalize.') + parser.add_option('--languages', + help='GYP list of language splits to finalize.') + + options, _ = parser.parse_args() + options.load_library_from_zip = 0 + + if options.densities: + for density in options.densities.split(','): + options.unsigned_apk_path = ("%s_%s" % + (options.resource_packaged_apk_path, density)) + options.final_apk_path = ("%s-density-%s.apk" % + (options.base_output_path, density)) + finalize_apk.FinalizeApk(options) + + if options.languages: + for lang in build_utils.ParseGnList(options.languages): + options.unsigned_apk_path = ("%s_%s" % + (options.resource_packaged_apk_path, lang)) + options.final_apk_path = ("%s-lang-%s.apk" % + (options.base_output_path, lang)) + finalize_apk.FinalizeApk(options) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/find.py b/build/android/gyp/find.py new file mode 100644 index 00000000000..a9f1d498556 --- /dev/null +++ b/build/android/gyp/find.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Finds files in directories. +""" + +import fnmatch +import optparse +import os +import sys + + +def main(argv): + parser = optparse.OptionParser() + parser.add_option('--pattern', default='*', help='File pattern to match.') + options, directories = parser.parse_args(argv) + + for d in directories: + if not os.path.exists(d): + print >> sys.stderr, '%s does not exist' % d + return 1 + for root, _, filenames in os.walk(d): + for f in fnmatch.filter(filenames, options.pattern): + print os.path.join(root, f) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/find_sun_tools_jar.py b/build/android/gyp/find_sun_tools_jar.py new file mode 100644 index 00000000000..2f15a154abd --- /dev/null +++ b/build/android/gyp/find_sun_tools_jar.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""This finds the java distribution's tools.jar and copies it somewhere. +""" + +import argparse +import os +import re +import shutil +import sys + +from util import build_utils + +RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]') + +def main(): + parser = argparse.ArgumentParser(description='Find Sun Tools Jar') + parser.add_argument('--depfile', + help='Path to depfile. 
This must be specified as the ' + 'action\'s first output.') + parser.add_argument('--output', required=True) + args = parser.parse_args() + + sun_tools_jar_path = FindSunToolsJarPath() + + if sun_tools_jar_path is None: + raise Exception("Couldn\'t find tools.jar") + + # Using copyfile instead of copy() because copy() calls copymode() + # We don't want the locked mode because we may copy over this file again + shutil.copyfile(sun_tools_jar_path, args.output) + + if args.depfile: + build_utils.WriteDepfile( + args.depfile, + [sun_tools_jar_path] + build_utils.GetPythonDependencies()) + + +def FindSunToolsJarPath(): + # This works with at least openjdk 1.6, 1.7 and sun java 1.6, 1.7 + stdout = build_utils.CheckOutput( + ["java", "-verbose", "-version"], print_stderr=False) + for ln in stdout.splitlines(): + match = RT_JAR_FINDER.match(ln) + if match: + return os.path.join(match.group(1), 'lib', 'tools.jar') + + return None + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/gcc_preprocess.py b/build/android/gyp/gcc_preprocess.py new file mode 100644 index 00000000000..03becf918fe --- /dev/null +++ b/build/android/gyp/gcc_preprocess.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import optparse +import os +import sys + +from util import build_utils + +def DoGcc(options): + build_utils.MakeDirectory(os.path.dirname(options.output)) + + gcc_cmd = [ 'gcc' ] # invoke host gcc. + if options.defines: + gcc_cmd.extend(sum(map(lambda w: ['-D', w], options.defines), [])) + gcc_cmd.extend([ + '-E', # stop after preprocessing. + '-D', 'ANDROID', # Specify ANDROID define for pre-processor. + '-x', 'c-header', # treat sources as C header files + '-P', # disable line markers, i.e. '#line 309' + '-I', options.include_path, + '-o', options.output, + options.template + ]) + + build_utils.CheckOutput(gcc_cmd) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--include-path', help='Include path for gcc.') + parser.add_option('--template', help='Path to template.') + parser.add_option('--output', help='Path for generated file.') + parser.add_option('--stamp', help='Path to touch on success.') + parser.add_option('--defines', help='Pre-defines macros', action='append') + + options, _ = parser.parse_args(args) + + DoGcc(options) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + build_utils.GetPythonDependencies()) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/generate_copy_ex_outputs.py b/build/android/gyp/generate_copy_ex_outputs.py new file mode 100644 index 00000000000..e425b4a6afe --- /dev/null +++ b/build/android/gyp/generate_copy_ex_outputs.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# +# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+#
+# Generate outputs according to source files and destination path for
+# copy_ex.gypi
+
+import argparse
+import os
+import sys
+
+def DoMain(argv):
+  parser = argparse.ArgumentParser(prog='generate_copy_ex_outputs')
+  parser.add_argument('--src-files',
+                      nargs = '+',
+                      help = 'a list of files to copy')
+  parser.add_argument('--dest-path',
+                      required = True,
+                      help = 'the directory to copy file to')
+  options = parser.parse_args(argv)
+  # Quote each element so filename spaces don't mess up gyp's attempt to parse
+  # it into a list.
+  return ' '.join('"%s"' % os.path.join(options.dest_path,
+                                        os.path.basename(src))
+                  for src in options.src_files)
+
+if __name__ == '__main__':
+  results = DoMain(sys.argv[1:])
+  if results:
+    print results
+
diff --git a/build/android/gyp/generate_resource_rewriter.py b/build/android/gyp/generate_resource_rewriter.py
new file mode 100644
index 00000000000..82ddc21dc15
--- /dev/null
+++ b/build/android/gyp/generate_resource_rewriter.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate ResourceRewriter.java which overwrites the given package's
+   resource id.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             '..',
+                                             '..',
+                                             '..',
+                                             'third_party')))
+import jinja2
+
+
+RESOURCE_REWRITER_JAVA="ResourceRewriter.java"
+
+RESOURCE_REWRITER="""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
+
+package {{ package }};
+/**
+ * Helper class used to fix up resource ids.
+ */
+class ResourceRewriter {
+    /**
+     * Rewrite the R 'constants' for the WebView.
+     */
+    public static void rewriteRValues(final int packageId) {
+        {% for res_package in res_packages %}
+        {{ res_package }}.R.onResourcesLoaded(packageId);
+        {% endfor %}
+    }
+}
+"""
+
+def ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    A Namespace from argparse.parse_args()
+  """
+  parser = argparse.ArgumentParser(prog='generate_resource_rewriter')
+
+  parser.add_argument('--package-name',
+                      required=True,
+                      help='The package name of ResourceRewriter.')
+  parser.add_argument('--dep-packages',
+                      required=True,
+                      help='A list of packages whose resource id will be '
+                           'overwritten in ResourceRewriter.')
+  parser.add_argument('--output-dir',
+                      help='An output directory of generated'
+                           ' ResourceRewriter.java')
+  parser.add_argument('--srcjar',
+                      help='The path of generated srcjar which has'
+                           ' ResourceRewriter.java')
+
+  return parser.parse_args(args)
+
+
+def CreateResourceRewriter(package, res_packages, output_dir):
+  build_utils.MakeDirectory(output_dir)
+  java_path = os.path.join(output_dir, RESOURCE_REWRITER_JAVA)
+  template = jinja2.Template(RESOURCE_REWRITER,
+                             trim_blocks=True,
+                             lstrip_blocks=True)
+  output = template.render(package=package, res_packages=res_packages)
+  with open(java_path, 'w') as f:
+    f.write(output)
+
+def CreateResourceRewriterSrcjar(package, res_packages, srcjar_path):
+  with build_utils.TempDir() as temp_dir:
+    output_dir = os.path.join(temp_dir, *package.split('.'))
+    CreateResourceRewriter(package, res_packages, output_dir)
+    build_utils.DoZip([os.path.join(output_dir, RESOURCE_REWRITER_JAVA)],
+                      srcjar_path,
+                      temp_dir)
+
+
+def main():
+  options = ParseArgs(build_utils.ExpandFileArgs(sys.argv[1:]))
+  package = options.package_name
+  if options.output_dir:
+    output_dir = os.path.join(options.output_dir, *package.split('.'))
+    CreateResourceRewriter(
+        package,
+        build_utils.ParseGnList(options.dep_packages),
+        output_dir)
+  else:
+    CreateResourceRewriterSrcjar(
+        package,
+        build_utils.ParseGnList(options.dep_packages),
+        options.srcjar)
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/generate_split_manifest.py b/build/android/gyp/generate_split_manifest.py
new file mode 100644
index 00000000000..9cb3bca4b55
--- /dev/null
+++ b/build/android/gyp/generate_split_manifest.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an AndroidManifest.xml for an APK split.
+
+Given the manifest file for the main APK, generates an AndroidManifest.xml with
+the value required for a Split APK (package, versionCode, etc).
+"""
+
+import optparse
+import xml.etree.ElementTree
+
+from util import build_utils
+
+MANIFEST_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
+<manifest
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    package="%(package)s"
+    split="%(split)s">
+  <application android:hasCode="%(has_code)s"/>
+</manifest>
+"""
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionsParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--main-manifest', help='The main manifest of the app')
+  parser.add_option('--out-manifest', help='The output manifest')
+  parser.add_option('--split', help='The name of the split')
+  parser.add_option(
+      '--has-code',
+      action='store_true',
+      default=False,
+      help='Whether the split will contain a .dex file')
+
+  (options, args) = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('main_manifest', 'out_manifest', 'split')
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  return options
+
+
+def Build(main_manifest, split, has_code):
+  """Builds a split manifest based on the manifest of the main APK.
+
+  Args:
+    main_manifest: the XML manifest of the main APK as a string
+    split: the name of the split as a string
+    has_code: whether this split APK will contain .dex files
+
+  Returns:
+    The XML split manifest as a string
+  """
+
+  doc = xml.etree.ElementTree.fromstring(main_manifest)
+  package = doc.get('package')
+
+  return MANIFEST_TEMPLATE % {
+      'package': package,
+      'split': split.replace('-', '_'),
+      'has_code': str(has_code).lower()
+  }
+
+
+def main():
+  options = ParseArgs()
+  main_manifest = file(options.main_manifest).read()
+  split_manifest = Build(
+      main_manifest,
+      options.split,
+      options.has_code)
+
+  with file(options.out_manifest, 'w') as f:
+    f.write(split_manifest)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        [options.main_manifest] + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/generate_v14_compatible_resources.py b/build/android/gyp/generate_v14_compatible_resources.py
new file mode 100644
index 00000000000..fc7abbaf0c6
--- /dev/null
+++ b/build/android/gyp/generate_v14_compatible_resources.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert Android xml resources to API 14 compatible.
+
+There are two reasons that we cannot just use API 17 attributes,
+so we are generating another set of resources by this script.
+
+1. paddingStart attribute can cause a crash on Galaxy Tab 2.
+2. There is a bug that paddingStart does not override paddingLeft on
+   JB-MR1. This is fixed on JB-MR2. b/8654490
+
+Therefore, this resource generation script can be removed when
+we drop the support for JB-MR1.
+
+Please refer to http://crbug.com/235118 for the details.
+"""
+
+import codecs
+import optparse
+import os
+import re
+import shutil
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+
+# Note that we are assuming 'android:' is an alias of
+# the namespace 'http://schemas.android.com/apk/res/android'.
+
+GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
+
+# Almost all attributes that have "Start" or "End" in
+# their names should be mapped.
+ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
+                     'drawableStart' : 'drawableLeft',
+                     'layout_alignStart' : 'layout_alignLeft',
+                     'layout_marginStart' : 'layout_marginLeft',
+                     'layout_alignParentStart' : 'layout_alignParentLeft',
+                     'layout_toStartOf' : 'layout_toLeftOf',
+                     'paddingEnd' : 'paddingRight',
+                     'drawableEnd' : 'drawableRight',
+                     'layout_alignEnd' : 'layout_alignRight',
+                     'layout_marginEnd' : 'layout_marginRight',
+                     'layout_alignParentEnd' : 'layout_alignParentRight',
+                     'layout_toEndOf' : 'layout_toRightOf'}
+
+ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
+                         in ATTRIBUTES_TO_MAP.iteritems())
+
+ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
+                                  in ATTRIBUTES_TO_MAP.iteritems())
+
+
+def IterateXmlElements(node):
+  """minidom helper function that iterates all the element nodes.
+ Iteration order is pre-order depth-first.""" + if node.nodeType == node.ELEMENT_NODE: + yield node + for child_node in node.childNodes: + for child_node_element in IterateXmlElements(child_node): + yield child_node_element + + +def ParseAndReportErrors(filename): + try: + return minidom.parse(filename) + except Exception: # pylint: disable=broad-except + import traceback + traceback.print_exc() + sys.stderr.write('Failed to parse XML file: %s\n' % filename) + sys.exit(1) + + +def AssertNotDeprecatedAttribute(name, value, filename): + """Raises an exception if the given attribute is deprecated.""" + msg = None + if name in ATTRIBUTES_TO_MAP_REVERSED: + msg = '{0} should use {1} instead of {2}'.format(filename, + ATTRIBUTES_TO_MAP_REVERSED[name], name) + elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value): + msg = '{0} should use start/end instead of left/right for {1}'.format( + filename, name) + + if msg: + msg += ('\nFor background, see: http://android-developers.blogspot.com/' + '2013/03/native-rtl-support-in-android-42.html\n' + 'If you have a legitimate need for this attribute, discuss with ' + 'kkimlabs@chromium.org or newt@chromium.org') + raise Exception(msg) + + +def WriteDomToFile(dom, filename): + """Write the given dom to filename.""" + build_utils.MakeDirectory(os.path.dirname(filename)) + with codecs.open(filename, 'w', 'utf-8') as f: + dom.writexml(f, '', ' ', '\n', encoding='utf-8') + + +def HasStyleResource(dom): + """Return True if the dom is a style resource, False otherwise.""" + root_node = IterateXmlElements(dom).next() + return bool(root_node.nodeName == 'resources' and + list(root_node.getElementsByTagName('style'))) + + +def ErrorIfStyleResourceExistsInDir(input_dir): + """If a style resource is in input_dir, raises an exception.""" + for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'): + dom = ParseAndReportErrors(input_filename) + if HasStyleResource(dom): + # Allow style file in third_party to exist in non-v17 directories so long + # as they do not contain deprecated attributes. + if not 'third_party' in input_dir or ( + GenerateV14StyleResourceDom(dom, input_filename)): + raise Exception('error: style file ' + input_filename + + ' should be under ' + input_dir + + '-v17 directory. Please refer to ' + 'http://crbug.com/243952 for the details.') + + +def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True): + """Convert layout resource to API 14 compatible layout resource. + + Args: + dom: Parsed minidom object to be modified. + filename: Filename that the DOM was parsed from. + assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will + cause an exception to be thrown. + + Returns: + True if dom is modified, False otherwise. + """ + is_modified = False + + # Iterate all the elements' attributes to find attributes to convert. + for element in IterateXmlElements(dom): + for name, value in list(element.attributes.items()): + # Convert any API 17 Start/End attributes to Left/Right attributes. + # For example, from paddingStart="10dp" to paddingLeft="10dp" + # Note: gravity attributes are not necessary to convert because + # start/end values are backward-compatible. 
Explained at + # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom + if name in ATTRIBUTES_TO_MAP: + element.setAttribute(ATTRIBUTES_TO_MAP[name], value) + del element.attributes[name] + is_modified = True + elif assert_not_deprecated: + AssertNotDeprecatedAttribute(name, value, filename) + + return is_modified + + +def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True): + """Convert style resource to API 14 compatible style resource. + + Args: + dom: Parsed minidom object to be modified. + filename: Filename that the DOM was parsed from. + assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will + cause an exception to be thrown. + + Returns: + True if dom is modified, False otherwise. + """ + is_modified = False + + for style_element in dom.getElementsByTagName('style'): + for item_element in style_element.getElementsByTagName('item'): + name = item_element.attributes['name'].value + value = item_element.childNodes[0].nodeValue + if name in ATTRIBUTES_TO_MAP: + item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name] + is_modified = True + elif assert_not_deprecated: + AssertNotDeprecatedAttribute(name, value, filename) + + return is_modified + + +def GenerateV14LayoutResource(input_filename, output_v14_filename, + output_v17_filename): + """Convert API 17 layout resource to API 14 compatible layout resource. + + It's mostly a simple replacement, s/Start/Left s/End/Right, + on the attribute names. + If the generated resource is identical to the original resource, + don't do anything. If not, write the generated resource to + output_v14_filename, and copy the original resource to output_v17_filename. + """ + dom = ParseAndReportErrors(input_filename) + is_modified = GenerateV14LayoutResourceDom(dom, input_filename) + + if is_modified: + # Write the generated resource. + WriteDomToFile(dom, output_v14_filename) + + # Copy the original resource. + build_utils.MakeDirectory(os.path.dirname(output_v17_filename)) + shutil.copy2(input_filename, output_v17_filename) + + +def GenerateV14StyleResource(input_filename, output_v14_filename): + """Convert API 17 style resources to API 14 compatible style resource. + + Write the generated style resource to output_v14_filename. + It's mostly a simple replacement, s/Start/Left s/End/Right, + on the attribute names. + """ + dom = ParseAndReportErrors(input_filename) + GenerateV14StyleResourceDom(dom, input_filename) + + # Write the generated resource. + WriteDomToFile(dom, output_v14_filename) + + +def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir): + """Convert layout resources to API 14 compatible resources in input_dir.""" + for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'): + rel_filename = os.path.relpath(input_filename, input_dir) + output_v14_filename = os.path.join(output_v14_dir, rel_filename) + output_v17_filename = os.path.join(output_v17_dir, rel_filename) + GenerateV14LayoutResource(input_filename, output_v14_filename, + output_v17_filename) + + +def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir): + """Convert style resources to API 14 compatible resources in input_dir.""" + for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'): + rel_filename = os.path.relpath(input_filename, input_dir) + output_v14_filename = os.path.join(output_v14_dir, rel_filename) + GenerateV14StyleResource(input_filename, output_v14_filename) + + +def ParseArgs(): + """Parses command line options. 
+ + Returns: + An options object as from optparse.OptionsParser.parse_args() + """ + parser = optparse.OptionParser() + parser.add_option('--res-dir', + help='directory containing resources ' + 'used to generate v14 compatible resources') + parser.add_option('--res-v14-compatibility-dir', + help='output directory into which ' + 'v14 compatible resources will be generated') + parser.add_option('--stamp', help='File to touch on success') + + options, args = parser.parse_args() + + if args: + parser.error('No positional arguments should be given.') + + # Check that required options have been provided. + required_options = ('res_dir', 'res_v14_compatibility_dir') + build_utils.CheckOptions(options, parser, required=required_options) + return options + +def GenerateV14Resources(res_dir, res_v14_dir): + for name in os.listdir(res_dir): + if not os.path.isdir(os.path.join(res_dir, name)): + continue + + dir_pieces = name.split('-') + resource_type = dir_pieces[0] + qualifiers = dir_pieces[1:] + + api_level_qualifier_index = -1 + api_level_qualifier = '' + for index, qualifier in enumerate(qualifiers): + if re.match('v[0-9]+$', qualifier): + api_level_qualifier_index = index + api_level_qualifier = qualifier + break + + # Android pre-v17 API doesn't support RTL. Skip. + if 'ldrtl' in qualifiers: + continue + + input_dir = os.path.abspath(os.path.join(res_dir, name)) + + # We also need to copy the original v17 resource to *-v17 directory + # because the generated v14 resource will hide the original resource. + output_v14_dir = os.path.join(res_v14_dir, name) + output_v17_dir = os.path.join(res_v14_dir, name + '-v17') + + # We only convert layout resources under layout*/, xml*/, + # and style resources under values*/. + if resource_type in ('layout', 'xml'): + if not api_level_qualifier: + GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, + output_v17_dir) + elif resource_type == 'values': + if api_level_qualifier == 'v17': + output_qualifiers = qualifiers[:] + del output_qualifiers[api_level_qualifier_index] + output_v14_dir = os.path.join(res_v14_dir, + '-'.join([resource_type] + + output_qualifiers)) + GenerateV14StyleResourcesInDir(input_dir, output_v14_dir) + elif not api_level_qualifier: + ErrorIfStyleResourceExistsInDir(input_dir) + +def main(): + options = ParseArgs() + + res_v14_dir = options.res_v14_compatibility_dir + + build_utils.DeleteDirectory(res_v14_dir) + build_utils.MakeDirectory(res_v14_dir) + + GenerateV14Resources(options.res_dir, res_v14_dir) + + if options.stamp: + build_utils.Touch(options.stamp) + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/gyp/get_device_configuration.py b/build/android/gyp/get_device_configuration.py new file mode 100644 index 00000000000..0ec08ef95d6 --- /dev/null +++ b/build/android/gyp/get_device_configuration.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Gets and writes the configurations of the attached devices. + +This configuration is used by later build steps to determine which devices to +install to and what needs to be installed to those devices. 
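+
+Typical invocation (values illustrative):
+  get_device_configuration.py --output-directory out/Debug \
+      --output out/Debug/device_configs.json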
+""" + +import optparse +import os +import sys + +from util import build_device +from util import build_utils + +BUILD_ANDROID_DIR = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..')) +sys.path.append(BUILD_ANDROID_DIR) + +import devil_chromium + + +def main(argv): + parser = optparse.OptionParser() + parser.add_option('--stamp', action='store') + parser.add_option('--output', action='store') + parser.add_option('--output-directory', action='store') + options, _ = parser.parse_args(argv) + + devil_chromium.Initialize( + output_directory=os.path.abspath(options.output_directory)) + + devices = build_device.GetAttachedDevices() + + device_configurations = [] + for d in devices: + configuration, is_online, has_root = ( + build_device.GetConfigurationForDevice(d)) + + if not is_online: + build_utils.PrintBigWarning( + '%s is not online. Skipping managed install for this device. ' + 'Try rebooting the device to fix this warning.' % d) + continue + + if not has_root: + build_utils.PrintBigWarning( + '"adb root" failed on device: %s\n' + 'Skipping managed install for this device.' + % configuration['description']) + continue + + device_configurations.append(configuration) + + if len(device_configurations) == 0: + build_utils.PrintBigWarning( + 'No valid devices attached. Skipping managed install steps.') + elif len(devices) > 1: + # Note that this checks len(devices) and not len(device_configurations). + # This way, any time there are multiple devices attached it is + # explicitly stated which device we will install things to even if all but + # one device were rejected for other reasons (e.g. two devices attached, + # one w/o root). + build_utils.PrintBigWarning( + 'Multiple devices attached. ' + 'Installing to the preferred device: ' + '%(id)s (%(description)s)' % (device_configurations[0])) + + + build_device.WriteConfigurations(device_configurations, options.output) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/gyp/jar.py b/build/android/gyp/jar.py new file mode 100644 index 00000000000..990b05b6c80 --- /dev/null +++ b/build/android/gyp/jar.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import optparse +import os +import shutil +import sys + +from util import build_utils + + +_RESOURCE_CLASSES = [ + "R.class", + "R##*.class", + "Manifest.class", + "Manifest##*.class", +] + + +def Jar(class_files, classes_dir, jar_path, manifest_file=None, + provider_configurations=None, additional_files=None): + jar_path = os.path.abspath(jar_path) + + # The paths of the files in the jar will be the same as they are passed in to + # the command. Because of this, the command should be run in + # options.classes_dir so the .class file paths in the jar are correct. 
+ jar_cwd = classes_dir + class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files] + jar_cmd = ['jar', 'cf0', jar_path] + if manifest_file: + jar_cmd[1] += 'm' + jar_cmd.append(os.path.abspath(manifest_file)) + jar_cmd.extend(class_files_rel) + + for filepath, jar_filepath in additional_files or []: + full_jar_filepath = os.path.join(jar_cwd, jar_filepath) + jar_dir = os.path.dirname(full_jar_filepath) + if not os.path.exists(jar_dir): + os.makedirs(jar_dir) + shutil.copy(filepath, full_jar_filepath) + jar_cmd.append(jar_filepath) + + if provider_configurations: + service_dir = os.path.join(jar_cwd, 'META-INF', 'services') + if not os.path.exists(service_dir): + os.makedirs(service_dir) + for config in provider_configurations: + config_jar_path = os.path.join(service_dir, os.path.basename(config)) + shutil.copy(config, config_jar_path) + jar_cmd.append(os.path.relpath(config_jar_path, jar_cwd)) + + if not class_files_rel: + empty_file = os.path.join(classes_dir, '.empty') + build_utils.Touch(empty_file) + jar_cmd.append(os.path.relpath(empty_file, jar_cwd)) + build_utils.CheckOutput(jar_cmd, cwd=jar_cwd) + build_utils.Touch(jar_path, fail_if_missing=True) + + +def JarDirectory(classes_dir, jar_path, manifest_file=None, predicate=None, + provider_configurations=None, additional_files=None): + class_files = build_utils.FindInDirectory(classes_dir, '*.class') + if predicate: + class_files = [f for f in class_files if predicate(f)] + + Jar(class_files, classes_dir, jar_path, manifest_file=manifest_file, + provider_configurations=provider_configurations, + additional_files=additional_files) + + +def main(): + parser = optparse.OptionParser() + parser.add_option('--classes-dir', help='Directory containing .class files.') + parser.add_option('--input-jar', help='Jar to include .class files from') + parser.add_option('--jar-path', help='Jar output path.') + parser.add_option('--excluded-classes', + help='GN list of .class file patterns to exclude from the jar.') + parser.add_option('--strip-resource-classes-for', + help='GN list of java package names exclude R.class files in.') + parser.add_option('--stamp', help='Path to touch on success.') + + args = build_utils.ExpandFileArgs(sys.argv[1:]) + options, _ = parser.parse_args(args) + # Current implementation supports just one or the other of these: + assert not options.classes_dir or not options.input_jar + + excluded_classes = [] + if options.excluded_classes: + excluded_classes = build_utils.ParseGnList(options.excluded_classes) + + if options.strip_resource_classes_for: + packages = build_utils.ParseGnList(options.strip_resource_classes_for) + excluded_classes.extend(p.replace('.', '/') + '/' + f + for p in packages for f in _RESOURCE_CLASSES) + + predicate = None + if excluded_classes: + predicate = lambda f: not build_utils.MatchesGlob(f, excluded_classes) + + with build_utils.TempDir() as temp_dir: + classes_dir = options.classes_dir + if options.input_jar: + build_utils.ExtractAll(options.input_jar, temp_dir) + classes_dir = temp_dir + JarDirectory(classes_dir, options.jar_path, predicate=predicate) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/gyp/jar_toc.py b/build/android/gyp/jar_toc.py new file mode 100644 index 00000000000..b8309561fbc --- /dev/null +++ b/build/android/gyp/jar_toc.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates a TOC file from a Java jar. + +The TOC file contains the non-package API of the jar. This includes all +public/protected/package classes/functions/members and the values of static +final variables (members with package access are kept because in some cases we +have multiple libraries with the same package, particularly test+non-test). Some +other information (major/minor javac version) is also included. + +This TOC file then can be used to determine if a dependent library should be +rebuilt when this jar changes. I.e. any change to the jar that would require a +rebuild, will have a corresponding change in the TOC file. +""" + +import optparse +import os +import re +import sys +import zipfile + +from util import build_utils +from util import md5_check + + +def GetClassesInZipFile(zip_file): + classes = [] + files = zip_file.namelist() + for f in files: + if f.endswith('.class'): + # f is of the form org/chromium/base/Class$Inner.class + classes.append(f.replace('/', '.')[:-6]) + return classes + + +def CallJavap(classpath, classes): + javap_cmd = [ + 'javap', + '-package', # Show public/protected/package. + # -verbose is required to get constant values (which can be inlined in + # dependents). + '-verbose', + '-J-XX:NewSize=4m', + '-classpath', classpath + ] + classes + return build_utils.CheckOutput(javap_cmd) + + +def ExtractToc(disassembled_classes): + # javap output is structured by indent (2-space) levels. + good_patterns = [ + '^[^ ]', # This includes all class signatures. + '^ SourceFile:', + '^ minor version:', + '^ major version:', + '^ Constant value:', + '^ public ', + '^ protected ', + ] + bad_patterns = [ + '^const #', # Matches the constant pool (i.e. literals used in the class). 
+  ]
+
+  def JavapFilter(line):
+    return (re.match('|'.join(good_patterns), line) and
+            not re.match('|'.join(bad_patterns), line))
+  toc = filter(JavapFilter, disassembled_classes.split('\n'))
+
+  return '\n'.join(toc)
+
+
+def UpdateToc(jar_path, toc_path):
+  classes = GetClassesInZipFile(zipfile.ZipFile(jar_path))
+  toc = ''
+  if len(classes) != 0:
+    javap_output = CallJavap(classpath=jar_path, classes=classes)
+    toc = ExtractToc(javap_output)
+
+  with open(toc_path, 'w') as tocfile:
+    tocfile.write(toc)
+
+
+def DoJarToc(options):
+  jar_path = options.jar_path
+  toc_path = options.toc_path
+  record_path = '%s.md5.stamp' % toc_path
+  md5_check.CallAndRecordIfStale(
+      lambda: UpdateToc(jar_path, toc_path),
+      record_path=record_path,
+      input_paths=[jar_path],
+      force=not os.path.exists(toc_path),
+      )
+  build_utils.Touch(toc_path, fail_if_missing=True)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--jar-path', help='Input .jar path.')
+  parser.add_option('--toc-path', help='Output .jar.TOC path.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  DoJarToc(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/java_cpp_enum.py b/build/android/gyp/java_cpp_enum.py
new file mode 100644
index 00000000000..ffab05c9155
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,418 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+from datetime import date
+import re
+import optparse
+import os
+from string import Template
+import sys
+import textwrap
+import zipfile
+
+from util import build_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files; however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char',
+  'short', 'unsigned short',
+  'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t']
+
+class EnumDefinition(object):
+  def __init__(self, original_enum_name=None, class_name_override=None,
+               enum_package=None, entries=None, comments=None, fixed_type=None):
+    self.original_enum_name = original_enum_name
+    self.class_name_override = class_name_override
+    self.enum_package = enum_package
+    self.entries = collections.OrderedDict(entries or [])
+    self.comments = collections.OrderedDict(comments or [])
+    self.prefix_to_strip = None
+    self.fixed_type = fixed_type
+
+  def AppendEntry(self, key, value):
+    if key in self.entries:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.entries[key] = value
+
+  def AppendEntryComment(self, key, value):
+    if key in self.comments:
+      raise Exception('Multiple definitions of key %s found.' 
% key) + self.comments[key] = value + + @property + def class_name(self): + return self.class_name_override or self.original_enum_name + + def Finalize(self): + self._Validate() + self._AssignEntryIndices() + self._StripPrefix() + + def _Validate(self): + assert self.class_name + assert self.enum_package + assert self.entries + if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST: + raise Exception('Fixed type %s for enum %s not whitelisted.' % + (self.fixed_type, self.class_name)) + + def _AssignEntryIndices(self): + # Enums, if given no value, are given the value of the previous enum + 1. + if not all(self.entries.values()): + prev_enum_value = -1 + for key, value in self.entries.iteritems(): + if not value: + self.entries[key] = prev_enum_value + 1 + elif value in self.entries: + self.entries[key] = self.entries[value] + else: + try: + self.entries[key] = int(value) + except ValueError: + raise Exception('Could not interpret integer from enum value "%s" ' + 'for key %s.' % (value, key)) + prev_enum_value = self.entries[key] + + + def _StripPrefix(self): + prefix_to_strip = self.prefix_to_strip + if not prefix_to_strip: + prefix_to_strip = self.original_enum_name + prefix_to_strip = re.sub('(?!^)([A-Z]+)', r'_\1', prefix_to_strip).upper() + prefix_to_strip += '_' + if not all([w.startswith(prefix_to_strip) for w in self.entries.keys()]): + prefix_to_strip = '' + + entries = collections.OrderedDict() + for (k, v) in self.entries.iteritems(): + stripped_key = k.replace(prefix_to_strip, '', 1) + if isinstance(v, basestring): + stripped_value = v.replace(prefix_to_strip, '', 1) + else: + stripped_value = v + entries[stripped_key] = stripped_value + + self.entries = entries + +class DirectiveSet(object): + class_name_override_key = 'CLASS_NAME_OVERRIDE' + enum_package_key = 'ENUM_PACKAGE' + prefix_to_strip_key = 'PREFIX_TO_STRIP' + + known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key] + + def __init__(self): + self._directives = {} + + def Update(self, key, value): + if key not in DirectiveSet.known_keys: + raise Exception("Unknown directive: " + key) + self._directives[key] = value + + @property + def empty(self): + return len(self._directives) == 0 + + def UpdateDefinition(self, definition): + definition.class_name_override = self._directives.get( + DirectiveSet.class_name_override_key, '') + definition.enum_package = self._directives.get( + DirectiveSet.enum_package_key) + definition.prefix_to_strip = self._directives.get( + DirectiveSet.prefix_to_strip_key) + + +class HeaderParser(object): + single_line_comment_re = re.compile(r'\s*//\s*([^\n]+)') + multi_line_comment_start_re = re.compile(r'\s*/\*') + enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?') + enum_end_re = re.compile(r'^\s*}\s*;\.*$') + generator_directive_re = re.compile( + r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$') + multi_line_generator_directive_start_re = re.compile( + r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$') + multi_line_directive_continuation_re = re.compile( + r'^\s*//\s+([\.\w]+)$') + multi_line_directive_end_re = re.compile( + r'^\s*//\s+([\.\w]*)\)$') + + optional_class_or_struct_re = r'(class|struct)?' + enum_name_re = r'(\w+)' + optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?' 
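+  # Composed below, enum_start_re matches declaration lines such as
+  # (illustrative): 'enum Foo {', 'enum class Foo {', or
+  # 'enum class Foo : int32_t {'.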
+ enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' + + optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' + + optional_fixed_type_re + '\s*{\s*$') + + def __init__(self, lines, path=None): + self._lines = lines + self._path = path + self._enum_definitions = [] + self._in_enum = False + self._current_definition = None + self._current_comments = [] + self._generator_directives = DirectiveSet() + self._multi_line_generator_directive = None + + def _ApplyGeneratorDirectives(self): + self._generator_directives.UpdateDefinition(self._current_definition) + self._generator_directives = DirectiveSet() + + def ParseDefinitions(self): + for line in self._lines: + self._ParseLine(line) + return self._enum_definitions + + def _ParseLine(self, line): + if self._multi_line_generator_directive: + self._ParseMultiLineDirectiveLine(line) + elif not self._in_enum: + self._ParseRegularLine(line) + else: + self._ParseEnumLine(line) + + def _ParseEnumLine(self, line): + enum_comment = HeaderParser.single_line_comment_re.match(line) + if enum_comment: + self._current_comments.append(enum_comment.groups()[0]) + return + if HeaderParser.multi_line_comment_start_re.match(line): + raise Exception('Multi-line comments in enums are not supported in ' + + self._path) + enum_end = HeaderParser.enum_end_re.match(line) + enum_entry = HeaderParser.enum_line_re.match(line) + if enum_end: + self._ApplyGeneratorDirectives() + self._current_definition.Finalize() + self._enum_definitions.append(self._current_definition) + self._in_enum = False + elif enum_entry: + enum_key = enum_entry.groups()[0] + enum_value = enum_entry.groups()[2] + self._current_definition.AppendEntry(enum_key, enum_value) + if self._current_comments: + self._current_definition.AppendEntryComment( + enum_key, ' '.join(self._current_comments)) + self._current_comments = [] + + def _ParseMultiLineDirectiveLine(self, line): + multi_line_directive_continuation = ( + HeaderParser.multi_line_directive_continuation_re.match(line)) + multi_line_directive_end = ( + HeaderParser.multi_line_directive_end_re.match(line)) + + if multi_line_directive_continuation: + value_cont = multi_line_directive_continuation.groups()[0] + self._multi_line_generator_directive[1].append(value_cont) + elif multi_line_directive_end: + directive_name = self._multi_line_generator_directive[0] + directive_value = "".join(self._multi_line_generator_directive[1]) + directive_value += multi_line_directive_end.groups()[0] + self._multi_line_generator_directive = None + self._generator_directives.Update(directive_name, directive_value) + else: + raise Exception('Malformed multi-line directive declaration in ' + + self._path) + + def _ParseRegularLine(self, line): + enum_start = HeaderParser.enum_start_re.match(line) + generator_directive = HeaderParser.generator_directive_re.match(line) + multi_line_generator_directive_start = ( + HeaderParser.multi_line_generator_directive_start_re.match(line)) + + if generator_directive: + directive_name = generator_directive.groups()[0] + directive_value = generator_directive.groups()[1] + self._generator_directives.Update(directive_name, directive_value) + elif multi_line_generator_directive_start: + directive_name = multi_line_generator_directive_start.groups()[0] + directive_value = multi_line_generator_directive_start.groups()[1] + self._multi_line_generator_directive = (directive_name, [directive_value]) + elif enum_start: + if self._generator_directives.empty: + return + self._current_definition = EnumDefinition( + 
original_enum_name=enum_start.groups()[1], + fixed_type=enum_start.groups()[3]) + self._in_enum = True + +def GetScriptName(): + return os.path.basename(os.path.abspath(sys.argv[0])) + +def DoGenerate(source_paths): + for source_path in source_paths: + enum_definitions = DoParseHeaderFile(source_path) + if not enum_definitions: + raise Exception('No enums found in %s\n' + 'Did you forget prefixing enums with ' + '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' % + source_path) + for enum_definition in enum_definitions: + package_path = enum_definition.enum_package.replace('.', os.path.sep) + file_name = enum_definition.class_name + '.java' + output_path = os.path.join(package_path, file_name) + output = GenerateOutput(source_path, enum_definition) + yield output_path, output + + +def DoParseHeaderFile(path): + with open(path) as f: + return HeaderParser(f.readlines(), path).ParseDefinitions() + + +def GenerateOutput(source_path, enum_definition): + template = Template(""" +// Copyright ${YEAR} The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// ${SCRIPT_NAME} +// From +// ${SOURCE_PATH} + +package ${PACKAGE}; + +import android.support.annotation.IntDef; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +public class ${CLASS_NAME} { + @IntDef({ +${INT_DEF} + }) + @Retention(RetentionPolicy.SOURCE) + public @interface ${ANNOTATION} {} +${ENUM_ENTRIES} +} +""") + + enum_template = Template(' public static final int ${NAME} = ${VALUE};') + enum_entries_string = [] + enum_names = [] + for enum_name, enum_value in enum_definition.entries.iteritems(): + values = { + 'NAME': enum_name, + 'VALUE': enum_value, + } + enum_comments = enum_definition.comments.get(enum_name) + if enum_comments: + enum_comments_indent = ' * ' + comments_line_wrapper = textwrap.TextWrapper( + initial_indent=enum_comments_indent, + subsequent_indent=enum_comments_indent, + width=100) + enum_entries_string.append(' /**') + enum_entries_string.append( + '\n'.join(comments_line_wrapper.wrap(enum_comments))) + enum_entries_string.append(' */') + enum_entries_string.append(enum_template.substitute(values)) + enum_names.append(enum_name) + enum_entries_string = '\n'.join(enum_entries_string) + + enum_names_indent = ' ' * 6 + wrapper = textwrap.TextWrapper(initial_indent = enum_names_indent, + subsequent_indent = enum_names_indent, + width = 100) + enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names))) + + annotation_template = Template('${NAME}Enum') + annotation_values = { 'NAME': enum_definition.class_name, } + annotation_name = annotation_template.substitute(annotation_values) + + values = { + 'CLASS_NAME': enum_definition.class_name, + 'ENUM_ENTRIES': enum_entries_string, + 'PACKAGE': enum_definition.enum_package, + 'INT_DEF': enum_names_string, + 'ANNOTATION': annotation_name, + 'SCRIPT_NAME': GetScriptName(), + 'SOURCE_PATH': source_path, + 'YEAR': str(date.today().year) + } + return template.substitute(values) + + +def AssertFilesList(output_paths, assert_files_list): + actual = set(output_paths) + expected = set(assert_files_list) + if not actual == expected: + need_to_add = list(actual - expected) + need_to_remove = list(expected - actual) + raise Exception('Output files list does not match expectations. Please ' + 'add %s and remove %s.' 
% (need_to_add, need_to_remove))
+
+def DoMain(argv):
+  usage = 'usage: %prog [options] [output_dir] input_file(s)...'
+  parser = optparse.OptionParser(usage=usage)
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--assert_file', action="append", default=[],
+                    dest="assert_files_list", help='Assert that the given '
+                    'file is an output. There can be multiple occurrences of '
+                    'this flag.')
+  parser.add_option('--srcjar',
+                    help='When specified, a .srcjar at the given path is '
+                    'created instead of individual .java files.')
+  parser.add_option('--print_output_only', help='Only print output paths.',
+                    action='store_true')
+  parser.add_option('--verbose', help='Print more information.',
+                    action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  if options.srcjar:
+    if not args:
+      parser.error('Need to specify at least one input file')
+    input_paths = args
+  else:
+    if len(args) < 2:
+      parser.error(
+          'Need to specify output directory and at least one input file')
+    output_dir = args[0]
+    input_paths = args[1:]
+
+  if options.depfile:
+    python_deps = build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, input_paths + python_deps)
+
+  if options.srcjar:
+    if options.print_output_only:
+      parser.error('--print_output_only does not work with --srcjar')
+    if options.assert_files_list:
+      parser.error('--assert_file does not work with --srcjar')
+
+    with zipfile.ZipFile(options.srcjar, 'w', zipfile.ZIP_STORED) as srcjar:
+      for output_path, data in DoGenerate(input_paths):
+        build_utils.AddToZipHermetic(srcjar, output_path, data=data)
+  else:
+    # TODO(agrieve): Delete this non-srcjar branch once GYP is gone.
+    output_paths = []
+    for output_path, data in DoGenerate(input_paths):
+      full_path = os.path.join(output_dir, output_path)
+      output_paths.append(full_path)
+      if not options.print_output_only:
+        build_utils.MakeDirectory(os.path.dirname(full_path))
+        with open(full_path, 'w') as out_file:
+          out_file.write(data)
+
+    if options.assert_files_list:
+      AssertFilesList(output_paths, options.assert_files_list)
+
+  if options.verbose:
+    print 'Output paths:'
+    print '\n'.join(output_paths)
+
+  # Used by GYP.
+  return ' '.join(output_paths)
+
+
+if __name__ == '__main__':
+  DoMain(sys.argv[1:])
diff --git a/build/android/gyp/java_cpp_enum_tests.py b/build/android/gyp/java_cpp_enum_tests.py
new file mode 100644
index 00000000000..643a410d510
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,470 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_cpp_enum.py.
+
+This test suite contains various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+from datetime import date
+import optparse
+import os
+import sys
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput, GetScriptName
+from java_cpp_enum import HeaderParser
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+class TestPreprocess(unittest.TestCase):
+  def testOutput(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='some.package',
+                                entries=[('E1', 1), ('E2', '2 << 2')],
+                                comments=[('E2', 'This is a comment.'),
+                                          ('E1', 'This is a multiple line '
+                                                 'comment that is really long. 
' + 'This is a multiple line ' + 'comment that is really ' + 'really long.')]) + output = GenerateOutput('path/to/file', definition) + expected = """ +// Copyright %d The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// %s +// From +// path/to/file + +package some.package; + +import android.support.annotation.IntDef; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +public class ClassName { + @IntDef({ + E1, E2 + }) + @Retention(RetentionPolicy.SOURCE) + public @interface ClassNameEnum {} + /** + * %s + * really really long. + */ + public static final int E1 = 1; + /** + * This is a comment. + */ + public static final int E2 = 2 << 2; +} +""" + long_comment = ('This is a multiple line comment that is really long. ' + 'This is a multiple line comment that is') + self.assertEqual( + expected % (date.today().year, GetScriptName(), long_comment), + output) + + def testParseSimpleEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + VALUE_ZERO, + VALUE_ONE, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumName', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0), + ('VALUE_ONE', 1)]), + definition.entries) + + def testParseBitShifts(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + VALUE_ZERO = 1 << 0, + VALUE_ONE = 1 << 1, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumName', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'), + ('VALUE_ONE', '1 << 1')]), + definition.entries) + + def testParseClassNameOverride(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName + enum EnumName { + FOO + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride + enum PrefixTest { + PREFIX_TEST_A, + PREFIX_TEST_B, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('OverrideName', definition.class_name) + + definition = definitions[1] + self.assertEqual('OtherOverride', definition.class_name) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParseTwoEnums(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumOne { + ENUM_ONE_A = 1, + // Comment there + ENUM_ONE_B = A, + }; + + enum EnumIgnore { + C, D, E + }; + + // GENERATED_JAVA_ENUM_PACKAGE: other.package + // GENERATED_JAVA_PREFIX_TO_STRIP: P_ + enum EnumTwo { + P_A, + // This comment spans + // two lines. 
+ P_B + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumOne', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', 'A')]), + definition.entries) + self.assertEqual(collections.OrderedDict([('ENUM_ONE_B', 'Comment there')]), + definition.comments) + definition = definitions[1] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict( + [('P_B', 'This comment spans two lines.')]), definition.comments) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParseThrowsOnUnknownDirective(self): + test_data = """ + // GENERATED_JAVA_UNKNOWN: Value + enum EnumName { + VALUE_ONE, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseReturnsEmptyListWithoutDirectives(self): + test_data = """ + enum EnumName { + VALUE_ONE, + }; + """.split('\n') + self.assertEqual([], HeaderParser(test_data).ParseDefinitions()) + + def testParseEnumClass(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseEnumStruct(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum struct Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseFixedTypeEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum Foo : int { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual('int', definition.fixed_type) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseFixedTypeEnumClass(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo: unsigned short { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual('unsigned short', definition.fixed_type) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseUnknownFixedTypeRaises(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo: foo_type { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def 
testParseSimpleMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.namespace) + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + self.assertEqual('Bar', definitions[0].class_name) + + def testParseMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: (te + // st.name + // space) + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + + def testParseMultiLineDirectiveWithOtherDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.namespace) + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: ( + // Ba + // r + // ) + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + self.assertEqual('Bar', definitions[0].class_name) + + def testParseMalformedMultiLineDirectiveWithOtherDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.name + // space + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.name + // space + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirectiveShort(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testEnumValueAssignmentNoneDefined(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', None) + definition.AppendEntry('C', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2)]), + definition.entries) + + def testEnumValueAssignmentAllDefined(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', '1') + definition.AppendEntry('B', '2') + definition.AppendEntry('C', '3') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', '2'), + ('C', '3')]), + definition.entries) + + def testEnumValueAssignmentReferences(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'A') + definition.AppendEntry('C', None) + definition.AppendEntry('D', 'C') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 0), + ('C', 1), + ('D', 1)]), + definition.entries) + + def testEnumValueAssignmentSet(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', '2') + definition.AppendEntry('C', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 2), + ('C', 3)]), + definition.entries) + + def testEnumValueAssignmentSetReferences(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + 
definition.AppendEntry('B', 'A') + definition.AppendEntry('C', 'B') + definition.AppendEntry('D', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 0), + ('C', 0), + ('D', 1)]), + definition.entries) + + def testEnumValueAssignmentRaises(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'foo') + definition.AppendEntry('C', None) + with self.assertRaises(Exception): + definition.Finalize() + + def testExplicitPrefixStripping(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('P_A', None) + definition.AppendEntry('B', None) + definition.AppendEntry('P_C', None) + definition.AppendEntry('P_LAST', 'P_C') + definition.prefix_to_strip = 'P_' + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2), + ('LAST', 2)]), + definition.entries) + + def testImplicitPrefixStripping(self): + definition = EnumDefinition(original_enum_name='ClassName', + enum_package='p') + definition.AppendEntry('CLASS_NAME_A', None) + definition.AppendEntry('CLASS_NAME_B', None) + definition.AppendEntry('CLASS_NAME_C', None) + definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2), + ('LAST', 2)]), + definition.entries) + + def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self): + definition = EnumDefinition(original_enum_name='Name', + enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', None) + definition.AppendEntry('NAME_LAST', None) + definition.Finalize() + self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys()) + + def testGenerateThrowsOnEmptyInput(self): + with self.assertRaises(Exception): + original_do_parse = java_cpp_enum.DoParseHeaderFile + try: + java_cpp_enum.DoParseHeaderFile = lambda _: [] + for _ in java_cpp_enum.DoGenerate(['file']): + pass + finally: + java_cpp_enum.DoParseHeaderFile = original_do_parse + +def main(argv): + parser = optparse.OptionParser() + parser.add_option("--stamp", help="File to touch on success.") + options, _ = parser.parse_args(argv) + + suite = unittest.TestLoader().loadTestsFromTestCase(TestPreprocess) + unittest.TextTestRunner(verbosity=0).run(suite) + + if options.stamp: + build_utils.Touch(options.stamp) + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/build/android/gyp/java_google_api_keys.py b/build/android/gyp/java_google_api_keys.py new file mode 100644 index 00000000000..95cb416cd5c --- /dev/null +++ b/build/android/gyp/java_google_api_keys.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Generates a Java file with API keys. 
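+#
+# For illustration, a hypothetical definitions dict such as
+#   {'GOOGLE_API_KEY': 'abc'}
+# is rendered by GenerateOutput() below into roughly:
+#
+#   package org.chromium.chrome;
+#
+#   public class GoogleAPIKeys {
+#     public static final String GOOGLE_API_KEY = "abc";
+#   }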
+ +import argparse +import os +import string +import sys +import zipfile + +from util import build_utils + +sys.path.append( + os.path.abspath(os.path.join(sys.path[0], '../../../google_apis'))) +import google_api_keys + +sys.path.append(os.path.abspath(os.path.join( + os.path.dirname(__file__), os.pardir))) +from pylib.constants import host_paths + + +PACKAGE = 'org.chromium.chrome' +CLASSNAME = 'GoogleAPIKeys' + + +def GetScriptName(): + return os.path.relpath(__file__, host_paths.DIR_SOURCE_ROOT) + + +def GenerateOutput(constant_definitions): + template = string.Template(""" +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// ${SCRIPT_NAME} +// From +// ${SOURCE_PATH} + +package ${PACKAGE}; + +public class ${CLASS_NAME} { +${CONSTANT_ENTRIES} +} +""") + + constant_template = string.Template( + ' public static final String ${NAME} = "${VALUE}";') + constant_entries_list = [] + for constant_name, constant_value in constant_definitions.iteritems(): + values = { + 'NAME': constant_name, + 'VALUE': constant_value, + } + constant_entries_list.append(constant_template.substitute(values)) + constant_entries_string = '\n'.join(constant_entries_list) + + values = { + 'CLASS_NAME': CLASSNAME, + 'CONSTANT_ENTRIES': constant_entries_string, + 'PACKAGE': PACKAGE, + 'SCRIPT_NAME': GetScriptName(), + 'SOURCE_PATH': 'google_api_keys/google_api_keys.h', + } + return template.substitute(values) + + +def _DoWriteJavaOutput(output_path, constant_definition): + folder = os.path.dirname(output_path) + if folder and not os.path.exists(folder): + os.makedirs(folder) + with open(output_path, 'w') as out_file: + out_file.write(GenerateOutput(constant_definition)) + + +def _DoWriteJarOutput(output_path, constant_definition): + folder = os.path.dirname(output_path) + if folder and not os.path.exists(folder): + os.makedirs(folder) + with zipfile.ZipFile(output_path, 'w') as srcjar: + path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java') + data = GenerateOutput(constant_definition) + build_utils.AddToZipHermetic(srcjar, path, data=data) + + +def _DoMain(argv): + parser = argparse.ArgumentParser() + parser.add_argument("--out", help="Path for java output.") + parser.add_argument("--srcjar", help="Path for srcjar output.") + options = parser.parse_args(argv) + if not options.out and not options.srcjar: + parser.print_help() + sys.exit(-1) + + values = {} + values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey() + values['GOOGLE_API_KEY_REMOTING'] = google_api_keys.GetAPIKeyRemoting() + values['GOOGLE_API_KEY_PHYSICAL_WEB_TEST'] = (google_api_keys. + GetAPIKeyPhysicalWebTest()) + values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN') + values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN') + values['GOOGLE_CLIENT_ID_CLOUD_PRINT'] = google_api_keys.GetClientID( + 'CLOUD_PRINT') + values['GOOGLE_CLIENT_SECRET_CLOUD_PRINT'] = google_api_keys.GetClientSecret( + 'CLOUD_PRINT') + values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING') + values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret( + 'REMOTING') + values['GOOGLE_CLIENT_ID_REMOTING_HOST'] = google_api_keys.GetClientID( + 'REMOTING_HOST') + values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (google_api_keys. + GetClientSecret('REMOTING_HOST')) + values['GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API'] = (google_api_keys. 
+ GetClientID('REMOTING_IDENTITY_API')) + + if options.out: + _DoWriteJavaOutput(options.out, values) + if options.srcjar: + _DoWriteJarOutput(options.srcjar, values) + + +if __name__ == '__main__': + _DoMain(sys.argv[1:]) + diff --git a/build/android/gyp/java_google_api_keys_tests.py b/build/android/gyp/java_google_api_keys_tests.py new file mode 100644 index 00000000000..eb24ea4aa5d --- /dev/null +++ b/build/android/gyp/java_google_api_keys_tests.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for java_google_api_keys.py. + +This test suite contains various tests for the C++ -> Java Google API Keys +generator. +""" + +import collections +import argparse +import os +import sys +import unittest + +import java_google_api_keys + +sys.path.append(os.path.join(os.path.dirname(__file__), "gyp")) +from util import build_utils + + +class TestJavaGoogleAPIKeys(unittest.TestCase): + def testOutput(self): + definition = {'E1': 'abc', 'E2': 'defgh'} + output = java_google_api_keys.GenerateOutput(definition) + expected = """ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// %s +// From +// google_api_keys/google_api_keys.h + +package org.chromium.chrome; + +public class GoogleAPIKeys { + public static final String E1 = "abc"; + public static final String E2 = "defgh"; +} +""" + self.assertEqual(expected % java_google_api_keys.GetScriptName(), output) + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument("--stamp", help="File to touch on success.") + options = parser.parse_args(argv) + + suite = unittest.TestLoader().loadTestsFromTestCase(TestJavaGoogleAPIKeys) + unittest.TextTestRunner(verbosity=0).run(suite) + + if options.stamp: + build_utils.Touch(options.stamp) + +if __name__ == '__main__': + main(sys.argv[1:]) + diff --git a/build/android/gyp/javac.py b/build/android/gyp/javac.py new file mode 100644 index 00000000000..7b346cfffac --- /dev/null +++ b/build/android/gyp/javac.py @@ -0,0 +1,448 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
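+
+# A sketch of a typical invocation (all paths are illustrative, not real
+# targets):
+#   javac.py --jar-path=out/foo.jar --classpath=out/deps.jar \
+#       --java-srcjars=out/gen.srcjar path/to/Foo.java
+# compiles the listed .java files (plus any extracted from the srcjars)
+# against the classpath and zips the resulting .class files into foo.jar.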
+
+import optparse
+import os
+import shutil
+import re
+import sys
+import textwrap
+
+from util import build_utils
+from util import md5_check
+
+import jar
+
+sys.path.append(build_utils.COLORAMA_ROOT)
+import colorama
+
+
+def ColorJavacOutput(output):
+  fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)'
+  warning_re = re.compile(
+      fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
+  error_re = re.compile(
+      fileline_prefix + r'(?P<full_message> (?P<message>.*))$')
+  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+  marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+  def Colorize(line, regex, color):
+    match = regex.match(line)
+    start = match.start(color[0])
+    end = match.end(color[0])
+    return (line[:start]
+            + color[1] + line[start:end]
+            + colorama.Fore.RESET + colorama.Style.RESET_ALL
+            + line[end:])
+
+  def ApplyColor(line):
+    if warning_re.match(line):
+      line = Colorize(line, warning_re, warning_color)
+    elif error_re.match(line):
+      line = Colorize(line, error_re, error_color)
+    elif marker_re.match(line):
+      line = Colorize(line, marker_re, marker_color)
+    return line
+
+  return '\n'.join(map(ApplyColor, output.split('\n')))
+
+
+ERRORPRONE_OPTIONS = [
+  # These crash on lots of targets.
+  '-Xep:ParameterPackage:OFF',
+  '-Xep:OverridesGuiceInjectableMethod:OFF',
+  '-Xep:OverridesJavaxInjectableMethod:OFF',
+]
+
+
+def _FilterJavaFiles(paths, filters):
+  return [f for f in paths
+          if not filters or build_utils.MatchesGlob(f, filters)]
+
+
+_MAX_MANIFEST_LINE_LEN = 72
+
+
+def _ExtractClassFiles(jar_path, dest_dir, java_files):
+  """Extracts all .class files not corresponding to |java_files|."""
+  # Two challenges exist here:
+  # 1. |java_files| have prefixes that are not represented in the jar paths.
+  # 2. A single .java file results in multiple .class files when it contains
+  #    nested classes.
+  # Here's an example:
+  #   source path: ../../base/android/java/src/org/chromium/Foo.java
+  #   jar paths: org/chromium/Foo.class, org/chromium/Foo$Inner.class
+  # To extract only .class files not related to the given .java files, we strip
+  # off ".class" and "$*.class" and use a substring match against java_files.
+  def extract_predicate(path):
+    if not path.endswith('.class'):
+      return False
+    path_without_suffix = re.sub(r'(?:\$|\.)[^/]*class$', '', path)
+    partial_java_path = path_without_suffix + '.java'
+    return not any(p.endswith(partial_java_path) for p in java_files)
+
+  build_utils.ExtractAll(jar_path, path=dest_dir, predicate=extract_predicate)
+  for path in build_utils.FindInDirectory(dest_dir, '*.class'):
+    shutil.copystat(jar_path, path)
+
+
+def _ConvertToJMakeArgs(javac_cmd, pdb_path):
+  new_args = ['bin/jmake', '-pdb', pdb_path]
+  if javac_cmd[0] != 'javac':
+    new_args.extend(('-jcexec', new_args[0]))
+  if md5_check.PRINT_EXPLANATIONS:
+    new_args.append('-Xtiming')
+
+  do_not_prefix = ('-classpath', '-bootclasspath')
+  skip_next = False
+  for arg in javac_cmd[1:]:
+    if not skip_next and arg not in do_not_prefix:
+      arg = '-C' + arg
+    new_args.append(arg)
+    skip_next = arg in do_not_prefix
+
+  return new_args
+
+
+def _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir):
+  # The .pdb records absolute paths. Fix up paths within /tmp (srcjars).
+  if os.path.exists(pdb_path):
+    # Although it's a binary file, search/replace still seems to work fine.
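+    # For example (hypothetical temp names), a recorded path such as
+    #   /tmp/tmpA1B2C3/java/org/chromium/Foo.java
+    # becomes <temp_dir>/java/org/chromium/Foo.java, so the metadata keeps
+    # pointing at sources that actually exist for this run.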
+    with open(pdb_path) as fileobj:
+      pdb_data = fileobj.read()
+    with open(pdb_path, 'w') as fileobj:
+      fileobj.write(re.sub(r'/tmp/[^/]*', temp_dir, pdb_data))
+
+
+def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs):
+  with build_utils.TempDir() as temp_dir:
+    srcjars = options.java_srcjars
+    # The .excluded.jar contains .class files excluded from the main jar.
+    # It is used for incremental compiles.
+    excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')
+
+    classes_dir = os.path.join(temp_dir, 'classes')
+    os.makedirs(classes_dir)
+
+    changed_paths = None
+    # jmake can handle deleted files, but it's a rare case and it would
+    # complicate this script's logic.
+    if options.incremental and changes.AddedOrModifiedOnly():
+      changed_paths = set(changes.IterChangedPaths())
+      # Do a full compile if classpath has changed.
+      # jmake doesn't seem to do this on its own... Might be that ijars mess up
+      # its change-detection logic.
+      if any(p in changed_paths for p in classpath_inputs):
+        changed_paths = None
+
+    if options.incremental:
+      # jmake is a compiler wrapper that figures out the minimal set of .java
+      # files that need to be rebuilt given a set of .java files that have
+      # changed.
+      # jmake determines what files are stale based on timestamps between .java
+      # and .class files. Since we use .jars, .srcjars, and md5 checks,
+      # timestamp info isn't accurate for this purpose. Rather than use jmake's
+      # programmatic interface (like we eventually should), we ensure that all
+      # .class files are newer than their .java files, and convey to jmake
+      # which sources are stale by having their .class files be missing
+      # entirely (by not extracting them).
+      pdb_path = options.jar_path + '.pdb'
+      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
+      if srcjars:
+        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)
+
+    if srcjars:
+      java_dir = os.path.join(temp_dir, 'java')
+      os.makedirs(java_dir)
+      for srcjar in options.java_srcjars:
+        if changed_paths:
+          changed_paths.update(os.path.join(java_dir, f)
+                               for f in changes.IterChangedSubpaths(srcjar))
+        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
+      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
+      jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes)
+      java_files.extend(jar_srcs)
+      if changed_paths:
+        # Set the mtime of all sources to 0 since we use the absence of .class
+        # files to tell jmake which files are stale.
+        for path in jar_srcs:
+          os.utime(path, (0, 0))
+
+    if java_files:
+      if changed_paths:
+        changed_java_files = [p for p in java_files if p in changed_paths]
+        if os.path.exists(options.jar_path):
+          _ExtractClassFiles(options.jar_path, classes_dir, changed_java_files)
+        if os.path.exists(excluded_jar_path):
+          _ExtractClassFiles(excluded_jar_path, classes_dir, changed_java_files)
+        # Add the extracted files to the classpath. This is required because
+        # when compiling only a subset of files, classes that haven't changed
+        # need to be findable.
+        classpath_idx = javac_cmd.index('-classpath')
+        javac_cmd[classpath_idx + 1] += ':' + classes_dir
+
+      # Can happen when a target goes from having no sources, to having
+      # sources. It's created by the call to build_utils.Touch() below.
+      if options.incremental:
+        if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
+          os.unlink(pdb_path)
+
+      # Don't include the output directory in the initial set of args since it
+      # being in a temp dir makes it unstable (breaks md5 stamping).
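+      # The assembled command ends up looking roughly like this (values are
+      # illustrative):
+      #   javac -g -encoding UTF-8 -classpath ... -sourcepath '' \
+      #       -d <temp_dir>/classes org/chromium/Foo.java ...
+      # (or the jmake-wrapped equivalent when --incremental is set).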
+      cmd = javac_cmd + ['-d', classes_dir] + java_files
+
+      # JMake prints out some diagnostic logs that we want to ignore.
+      # This assumes that all compiler output goes through stderr.
+      stdout_filter = lambda s: ''
+      if md5_check.PRINT_EXPLANATIONS:
+        stdout_filter = None
+
+      attempt_build = lambda: build_utils.CheckOutput(
+          cmd,
+          print_stdout=options.chromium_code,
+          stdout_filter=stdout_filter,
+          stderr_filter=ColorJavacOutput)
+      try:
+        attempt_build()
+      except build_utils.CalledProcessError as e:
+        # Work-around for a bug in jmake (http://crbug.com/551449).
+        if 'project database corrupted' not in e.output:
+          raise
+        print ('Applying work-around for jmake project database corrupted '
+               '(http://crbug.com/551449).')
+        os.unlink(pdb_path)
+        attempt_build()
+    elif options.incremental:
+      # Make sure output exists.
+      build_utils.Touch(pdb_path)
+
+    glob = options.jar_excluded_classes
+    inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
+    exclusion_predicate = lambda f: not inclusion_predicate(f)
+
+    jar.JarDirectory(classes_dir,
+                     options.jar_path,
+                     predicate=inclusion_predicate,
+                     provider_configurations=options.provider_configurations,
+                     additional_files=options.additional_jar_files)
+    jar.JarDirectory(classes_dir,
+                     excluded_jar_path,
+                     predicate=exclusion_predicate,
+                     provider_configurations=options.provider_configurations,
+                     additional_files=options.additional_jar_files)
+
+
+def _ParseOptions(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option(
+      '--src-gendirs',
+      help='Directories containing generated java files.')
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If this is specified multiple times, '
+           'they will all be appended to construct the classpath.')
+  parser.add_option(
+      '--java-version',
+      help='Java language version to use in -source and -target args to javac.')
+  parser.add_option(
+      '--classpath',
+      action='append',
+      help='Classpath for javac. If this is specified multiple times, they '
+           'will all be appended to construct the classpath.')
+  parser.add_option(
+      '--incremental',
+      action='store_true',
+      help='Whether to re-use .class files rather than recompiling them '
+           '(when possible).')
+  parser.add_option(
+      '--javac-includes',
+      default='',
+      help='A list of file patterns. If provided, only java files that match '
+           'one of the patterns will be compiled.')
+  parser.add_option(
+      '--jar-excluded-classes',
+      default='',
+      help='List of .class file patterns to exclude from the jar.')
+  parser.add_option(
+      '--processor',
+      dest='processors',
+      action='append',
+      help='Annotation processor to use.')
+  parser.add_option(
+      '--processor-arg',
+      dest='processor_args',
+      action='append',
+      help='key=value arguments for the annotation processors.')
+  parser.add_option(
+      '--provider-configuration',
+      dest='provider_configurations',
+      action='append',
+      help='File to specify a service provider. Will be included '
+           'in the jar under META-INF/services.')
+  parser.add_option(
+      '--additional-jar-file',
+      dest='additional_jar_files',
+      action='append',
+      help='Additional files to package into jar. By default, only Java .class '
+           'files are packaged into the jar. Files should be specified in '
+           'format <filename>:<path to be placed in jar>.')
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+           'warnings for chromium code.')
+  parser.add_option(
+      '--use-errorprone-path',
+      help='Use the Errorprone compiler at this path.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, args = parser.parse_args(argv)
+  build_utils.CheckOptions(options, parser, required=('jar_path',))
+
+  bootclasspath = []
+  for arg in options.bootclasspath:
+    bootclasspath += build_utils.ParseGnList(arg)
+  options.bootclasspath = bootclasspath
+
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGnList(arg)
+  options.classpath = classpath
+
+  java_srcjars = []
+  for arg in options.java_srcjars:
+    java_srcjars += build_utils.ParseGnList(arg)
+  options.java_srcjars = java_srcjars
+
+  additional_jar_files = []
+  for arg in options.additional_jar_files or []:
+    filepath, jar_filepath = arg.split(':')
+    additional_jar_files.append((filepath, jar_filepath))
+  options.additional_jar_files = additional_jar_files
+
+  if options.src_gendirs:
+    options.src_gendirs = build_utils.ParseGnList(options.src_gendirs)
+
+  options.javac_includes = build_utils.ParseGnList(options.javac_includes)
+  options.jar_excluded_classes = (
+      build_utils.ParseGnList(options.jar_excluded_classes))
+
+  java_files = []
+  for arg in args:
+    # Interpret a path prefixed with @ as a file containing a list of sources.
+    if arg.startswith('@'):
+      java_files.extend(build_utils.ReadSourcesList(arg[1:]))
+    else:
+      java_files.append(arg)
+
+  return options, java_files
+
+
+def main(argv):
+  colorama.init()
+
+  argv = build_utils.ExpandFileArgs(argv)
+  options, java_files = _ParseOptions(argv)
+
+  if options.src_gendirs:
+    java_files += build_utils.FindInDirectories(options.src_gendirs, '*.java')
+
+  java_files = _FilterJavaFiles(java_files, options.javac_includes)
+
+  javac_cmd = ['javac']
+  if options.use_errorprone_path:
+    javac_cmd = [options.use_errorprone_path] + ERRORPRONE_OPTIONS
+
+  javac_cmd.extend((
+      '-g',
+      # Chromium only allows UTF8 source files. Being explicit avoids
+      # javac pulling a default encoding from the user's environment.
+      '-encoding', 'UTF-8',
+      '-classpath', ':'.join(options.classpath),
+      # Prevent compiler from compiling .java files not listed as inputs.
+      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
+      '-sourcepath', ''
+  ))
+
+  if options.bootclasspath:
+    javac_cmd.extend([
+        '-bootclasspath', ':'.join(options.bootclasspath)
+    ])
+
+  if options.java_version:
+    javac_cmd.extend([
+        '-source', options.java_version,
+        '-target', options.java_version,
+    ])
+
+  if options.chromium_code:
+    javac_cmd.extend(['-Xlint:unchecked', '-Xlint:deprecation'])
+  else:
+    # XDignore.symbol.file makes javac compile against rt.jar instead of
+    # ct.sym. This means that using a java internal package/class will not
+    # trigger a compile warning or error.
+    javac_cmd.extend(['-XDignore.symbol.file'])
+
+  if options.processors:
+    javac_cmd.extend(['-processor', ','.join(options.processors)])
+  if options.processor_args:
+    for arg in options.processor_args:
+      javac_cmd.extend(['-A%s' % arg])
+
+  classpath_inputs = options.bootclasspath
+  if options.classpath:
+    if options.classpath[0].endswith('.interface.jar'):
+      classpath_inputs.extend(options.classpath)
+    else:
+      # TODO(agrieve): Remove this .TOC heuristic once GYP is no more.
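+      # e.g. an entry out/lib.jar is swapped for out/lib.jar.TOC when that
+      # file exists (illustrative path), so staleness is keyed off the
+      # interface summary rather than the full jar contents.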
+ for path in options.classpath: + if os.path.exists(path + '.TOC'): + classpath_inputs.append(path + '.TOC') + else: + classpath_inputs.append(path) + + # Compute the list of paths that when changed, we need to rebuild. + input_paths = classpath_inputs + options.java_srcjars + java_files + + output_paths = [ + options.jar_path, + options.jar_path.replace('.jar', '.excluded.jar'), + ] + if options.incremental: + output_paths.append(options.jar_path + '.pdb') + + # An escape hatch to be able to check if incremental compiles are causing + # problems. + force = int(os.environ.get('DISABLE_INCREMENTAL_JAVAC', 0)) + + # List python deps in input_strings rather than input_paths since the contents + # of them does not change what gets written to the depsfile. + build_utils.CallAndWriteDepfileIfStale( + lambda changes: _OnStaleMd5(changes, options, javac_cmd, java_files, + classpath_inputs), + options, + input_paths=input_paths, + input_strings=javac_cmd, + output_paths=output_paths, + force=force, + pass_changes=True) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/jinja_template.py b/build/android/gyp/jinja_template.py new file mode 100644 index 00000000000..c361f24f138 --- /dev/null +++ b/build/android/gyp/jinja_template.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Renders one or more template files using the Jinja template engine.""" + +import codecs +import argparse +import os +import sys + +from util import build_utils + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir)) +from pylib.constants import host_paths + +# Import jinja2 from third_party/jinja2 +sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party')) +import jinja2 # pylint: disable=F0401 + + +class _RecordingFileSystemLoader(jinja2.FileSystemLoader): + def __init__(self, searchpath): + jinja2.FileSystemLoader.__init__(self, searchpath) + self.loaded_templates = set() + + def get_source(self, environment, template): + contents, filename, uptodate = jinja2.FileSystemLoader.get_source( + self, environment, template) + self.loaded_templates.add(os.path.relpath(filename)) + return contents, filename, uptodate + + +class JinjaProcessor(object): + """Allows easy rendering of jinja templates with input file tracking.""" + def __init__(self, loader_base_dir, variables=None): + self.loader_base_dir = loader_base_dir + self.variables = variables + self.loader = _RecordingFileSystemLoader(loader_base_dir) + self.env = jinja2.Environment(loader=self.loader) + self.env.undefined = jinja2.StrictUndefined + self.env.line_comment_prefix = '##' + self.env.trim_blocks = True + self.env.lstrip_blocks = True + + def Render(self, input_filename, variables=None): + input_rel_path = os.path.relpath(input_filename, self.loader_base_dir) + template = self.env.get_template(input_rel_path) + return template.render(variables or self.variables) + + def GetLoadedTemplates(self): + return list(self.loader.loaded_templates) + + +def _ProcessFile(processor, input_filename, output_filename): + output = processor.Render(input_filename) + with codecs.open(output_filename, 'w', 'utf-8') as output_file: + output_file.write(output) + + +def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip): + with build_utils.TempDir() as temp_dir: + for input_filename in input_filenames: + relpath = 
os.path.relpath(os.path.abspath(input_filename), + os.path.abspath(inputs_base_dir)) + if relpath.startswith(os.pardir): + raise Exception('input file %s is not contained in inputs base dir %s' + % (input_filename, inputs_base_dir)) + + output_filename = os.path.join(temp_dir, relpath) + parent_dir = os.path.dirname(output_filename) + build_utils.MakeDirectory(parent_dir) + _ProcessFile(processor, input_filename, output_filename) + + build_utils.ZipDir(outputs_zip, temp_dir) + + +def _ParseVariables(variables_arg, error_func): + variables = {} + for v in build_utils.ParseGnList(variables_arg): + if '=' not in v: + error_func('--variables argument must contain "=": ' + v) + name, _, value = v.partition('=') + variables[name] = value + return variables + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--inputs', required=True, + help='The template files to process.') + parser.add_argument('--output', help='The output file to generate. Valid ' + 'only if there is a single input.') + parser.add_argument('--outputs-zip', help='A zip file for the processed ' + 'templates. Required if there are multiple inputs.') + parser.add_argument('--inputs-base-dir', help='A common ancestor directory ' + 'of the inputs. Each output\'s path in the output zip ' + 'will match the relative path from INPUTS_BASE_DIR to ' + 'the input. Required if --output-zip is given.') + parser.add_argument('--loader-base-dir', help='Base path used by the ' + 'template loader. Must be a common ancestor directory of ' + 'the inputs. Defaults to DIR_SOURCE_ROOT.', + default=host_paths.DIR_SOURCE_ROOT) + parser.add_argument('--variables', help='Variables to be made available in ' + 'the template processing environment, as a GYP list ' + '(e.g. --variables "channel=beta mstone=39")', default='') + build_utils.AddDepfileOption(parser) + options = parser.parse_args() + + inputs = build_utils.ParseGnList(options.inputs) + + if (options.output is None) == (options.outputs_zip is None): + parser.error('Exactly one of --output and --output-zip must be given') + if options.output and len(inputs) != 1: + parser.error('--output cannot be used with multiple inputs') + if options.outputs_zip and not options.inputs_base_dir: + parser.error('--inputs-base-dir must be given when --output-zip is used') + + variables = _ParseVariables(options.variables, parser.error) + processor = JinjaProcessor(options.loader_base_dir, variables=variables) + + if options.output: + _ProcessFile(processor, inputs[0], options.output) + else: + _ProcessFiles(processor, inputs, options.inputs_base_dir, + options.outputs_zip) + + if options.depfile: + deps = processor.GetLoadedTemplates() + build_utils.GetPythonDependencies() + build_utils.WriteDepfile(options.depfile, deps) + + +if __name__ == '__main__': + main() diff --git a/build/android/gyp/lint.py b/build/android/gyp/lint.py new file mode 100644 index 00000000000..a13a675341a --- /dev/null +++ b/build/android/gyp/lint.py @@ -0,0 +1,363 @@ +#!/usr/bin/env python +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
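+
+# The underlying lint command assembled below looks roughly like this
+# (paths are illustrative):
+#   <sdk>/tools/lint -Werror --exitcode --showall --xml lint-result.xml \
+#       --config suppressions.xml --sources <tmp>/SRC_ROOT1 <project dir>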
+ +"""Runs Android's lint tool.""" + + +import argparse +import os +import re +import sys +import traceback +from xml.dom import minidom + +from util import build_utils + +_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long +_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), + '..', '..', '..')) + + +def _OnStaleMd5(lint_path, config_path, processed_config_path, + manifest_path, result_path, product_dir, sources, jar_path, + cache_dir, android_sdk_version, resource_sources, + classpath=None, can_fail_build=False, silent=False): + def _RebasePath(path): + """Returns relative path to top-level src dir. + + Args: + path: A path relative to cwd. + """ + ret = os.path.relpath(os.path.abspath(path), _SRC_ROOT) + # If it's outside of src/, just use abspath. + if ret.startswith('..'): + ret = os.path.abspath(path) + return ret + + def _ProcessConfigFile(): + if not config_path or not processed_config_path: + return + if not build_utils.IsTimeStale(processed_config_path, [config_path]): + return + + with open(config_path, 'rb') as f: + content = f.read().replace( + 'PRODUCT_DIR', _RebasePath(product_dir)) + + with open(processed_config_path, 'wb') as f: + f.write(content) + + def _ProcessResultFile(): + with open(result_path, 'rb') as f: + content = f.read().replace( + _RebasePath(product_dir), 'PRODUCT_DIR') + + with open(result_path, 'wb') as f: + f.write(content) + + def _ParseAndShowResultFile(): + dom = minidom.parse(result_path) + issues = dom.getElementsByTagName('issue') + if not silent: + print >> sys.stderr + for issue in issues: + issue_id = issue.attributes['id'].value + message = issue.attributes['message'].value + location_elem = issue.getElementsByTagName('location')[0] + path = location_elem.attributes['file'].value + line = location_elem.getAttribute('line') + if line: + error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id) + else: + # Issues in class files don't have a line number. + error = '%s %s: %s [warning]' % (path, message, issue_id) + print >> sys.stderr, error.encode('utf-8') + for attr in ['errorLine1', 'errorLine2']: + error_line = issue.getAttribute(attr) + if error_line: + print >> sys.stderr, error_line.encode('utf-8') + return len(issues) + + with build_utils.TempDir() as temp_dir: + _ProcessConfigFile() + + cmd = [ + _RebasePath(lint_path), '-Werror', '--exitcode', '--showall', + '--xml', _RebasePath(result_path), + ] + if jar_path: + # --classpath is just for .class files for this one target. + cmd.extend(['--classpath', _RebasePath(jar_path)]) + if processed_config_path: + cmd.extend(['--config', _RebasePath(processed_config_path)]) + + tmp_dir_counter = [0] + def _NewTempSubdir(prefix, append_digit=True): + # Helper function to create a new sub directory based on the number of + # subdirs created earlier. + if append_digit: + tmp_dir_counter[0] += 1 + prefix += str(tmp_dir_counter[0]) + new_dir = os.path.join(temp_dir, prefix) + os.makedirs(new_dir) + return new_dir + + resource_dirs = [] + for resource_source in resource_sources: + if os.path.isdir(resource_source): + resource_dirs.append(resource_source) + else: + # This is a zip file with generated resources (e. g. strings from GRD). + # Extract it to temporary folder. 
+        resource_dir = _NewTempSubdir(_RebasePath(resource_source),
+                                      append_digit=False)
+        resource_dirs.append(resource_dir)
+        build_utils.ExtractAll(resource_source, path=resource_dir)
+
+    for resource_dir in resource_dirs:
+      cmd.extend(['--resources', _RebasePath(resource_dir)])
+
+    if classpath:
+      # --libraries is the classpath (excluding active target).
+      cp = ':'.join(_RebasePath(p) for p in classpath)
+      cmd.extend(['--libraries', cp])
+
+    # There may be multiple source files with the same basename (but in
+    # different directories). It is difficult to determine what part of the path
+    # corresponds to the java package, and so instead just link the source files
+    # into temporary directories (creating a new one whenever there is a name
+    # conflict).
+    def PathInDir(d, src):
+      subpath = os.path.join(d, _RebasePath(src))
+      subdir = os.path.dirname(subpath)
+      if not os.path.exists(subdir):
+        os.makedirs(subdir)
+      return subpath
+
+    src_dirs = []
+    for src in sources:
+      src_dir = None
+      for d in src_dirs:
+        if not os.path.exists(PathInDir(d, src)):
+          src_dir = d
+          break
+      if not src_dir:
+        src_dir = _NewTempSubdir('SRC_ROOT')
+        src_dirs.append(src_dir)
+        cmd.extend(['--sources', _RebasePath(src_dir)])
+      os.symlink(os.path.abspath(src), PathInDir(src_dir, src))
+
+    project_dir = _NewTempSubdir('SRC_ROOT')
+    if android_sdk_version:
+      # Create a dummy project.properties file in a temporary "project"
+      # directory. It is the only way to add the Android SDK to lint's
+      # classpath. A proper classpath is necessary for most source-level
+      # checks.
+      with open(os.path.join(project_dir, 'project.properties'), 'w') \
+          as propfile:
+        print >> propfile, 'target=android-{}'.format(android_sdk_version)
+
+    # Put the manifest in a temporary directory in order to avoid lint
+    # detecting sibling res/ and src/ directories (which should be passed
+    # explicitly if they are to be included).
+    if manifest_path:
+      os.symlink(os.path.abspath(manifest_path),
+                 os.path.join(project_dir, 'AndroidManifest.xml'))
+    cmd.append(project_dir)
+
+    if os.path.exists(result_path):
+      os.remove(result_path)
+
+    env = {}
+    stderr_filter = None
+    if cache_dir:
+      env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir)
+      # When _JAVA_OPTIONS is set, java prints to stderr:
+      # Picked up _JAVA_OPTIONS: ...
+      #
+      # We drop all lines that contain _JAVA_OPTIONS from the output
+      stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
+
+    try:
+      build_utils.CheckOutput(cmd, cwd=_SRC_ROOT, env=env or None,
+                              stderr_filter=stderr_filter)
+    except build_utils.CalledProcessError:
+      # There is a problem with lint usage
+      if not os.path.exists(result_path):
+        raise
+
+      # Sometimes produces empty (almost) files:
+      if os.path.getsize(result_path) < 10:
+        if can_fail_build:
+          raise
+        elif not silent:
+          traceback.print_exc()
+        return
+
+      # There are actual lint issues
+      try:
+        num_issues = _ParseAndShowResultFile()
+      except Exception:  # pylint: disable=broad-except
+        if not silent:
+          print 'Lint created unparseable xml file...'
+ print 'File contents:' + with open(result_path) as f: + print f.read() + raise + + _ProcessResultFile() + msg = ('\nLint found %d new issues.\n' + ' - For full explanation, please refer to %s\n' + ' - For more information about lint and how to fix lint issues,' + ' please refer to %s\n' % + (num_issues, _RebasePath(result_path), _LINT_MD_URL)) + if not silent: + print >> sys.stderr, msg + if can_fail_build: + raise Exception('Lint failed.') + + +def main(): + parser = argparse.ArgumentParser() + build_utils.AddDepfileOption(parser) + + parser.add_argument('--lint-path', required=True, + help='Path to lint executable.') + parser.add_argument('--product-dir', required=True, + help='Path to product dir.') + parser.add_argument('--result-path', required=True, + help='Path to XML lint result file.') + parser.add_argument('--cache-dir', required=True, + help='Path to the directory in which the android cache ' + 'directory tree should be stored.') + parser.add_argument('--platform-xml-path', required=True, + help='Path to api-platforms.xml') + parser.add_argument('--android-sdk-version', + help='Version (API level) of the Android SDK used for ' + 'building.') + parser.add_argument('--create-cache', action='store_true', + help='Mark the lint cache file as an output rather than ' + 'an input.') + parser.add_argument('--can-fail-build', action='store_true', + help='If set, script will exit with nonzero exit status' + ' if lint errors are present') + parser.add_argument('--config-path', + help='Path to lint suppressions file.') + parser.add_argument('--enable', action='store_true', + help='Run lint instead of just touching stamp.') + parser.add_argument('--jar-path', + help='Jar file containing class files.') + parser.add_argument('--java-sources-file', + help='File containing a list of java files.') + parser.add_argument('--manifest-path', + help='Path to AndroidManifest.xml') + parser.add_argument('--classpath', default=[], action='append', + help='GYP-list of classpath .jar files') + parser.add_argument('--processed-config-path', + help='Path to processed lint suppressions file.') + parser.add_argument('--resource-dir', + help='Path to resource dir.') + parser.add_argument('--resource-sources', default=[], action='append', + help='GYP-list of resource sources (directories with ' + 'resources or archives created by resource-generating ' + 'tasks.') + parser.add_argument('--silent', action='store_true', + help='If set, script will not log anything.') + parser.add_argument('--src-dirs', + help='Directories containing java files.') + parser.add_argument('--stamp', + help='Path to touch on success.') + + args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:])) + + if args.enable: + sources = [] + if args.src_dirs: + src_dirs = build_utils.ParseGnList(args.src_dirs) + sources = build_utils.FindInDirectories(src_dirs, '*.java') + elif args.java_sources_file: + sources.extend(build_utils.ReadSourcesList(args.java_sources_file)) + + if args.config_path and not args.processed_config_path: + parser.error('--config-path specified without --processed-config-path') + elif args.processed_config_path and not args.config_path: + parser.error('--processed-config-path specified without --config-path') + + input_paths = [ + args.lint_path, + args.platform_xml_path, + ] + if args.config_path: + input_paths.append(args.config_path) + if args.jar_path: + input_paths.append(args.jar_path) + if args.manifest_path: + input_paths.append(args.manifest_path) + if sources: + input_paths.extend(sources) + classpath 
+    for gyp_list in args.classpath:
+      classpath.extend(build_utils.ParseGnList(gyp_list))
+    input_paths.extend(classpath)
+
+    resource_sources = []
+    if args.resource_dir:
+      # Backward compatibility with GYP.
+      resource_sources += [ args.resource_dir ]
+
+    for gyp_list in args.resource_sources:
+      resource_sources += build_utils.ParseGnList(gyp_list)
+
+    for resource_source in resource_sources:
+      if os.path.isdir(resource_source):
+        input_paths.extend(build_utils.FindInDirectory(resource_source, '*'))
+      else:
+        input_paths.append(resource_source)
+
+    input_strings = []
+    if args.android_sdk_version:
+      input_strings.append(args.android_sdk_version)
+    if args.processed_config_path:
+      input_strings.append(args.processed_config_path)
+
+    output_paths = [ args.result_path ]
+
+    build_utils.CallAndWriteDepfileIfStale(
+        lambda: _OnStaleMd5(args.lint_path,
+                            args.config_path,
+                            args.processed_config_path,
+                            args.manifest_path, args.result_path,
+                            args.product_dir, sources,
+                            args.jar_path,
+                            args.cache_dir,
+                            args.android_sdk_version,
+                            resource_sources,
+                            classpath=classpath,
+                            can_fail_build=args.can_fail_build,
+                            silent=args.silent),
+        args,
+        input_paths=input_paths,
+        input_strings=input_strings,
+        output_paths=output_paths,
+        depfile_deps=classpath)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/locale_pak_resources.py b/build/android/gyp/locale_pak_resources.py
new file mode 100644
index 00000000000..63d33431835
--- /dev/null
+++ b/build/android/gyp/locale_pak_resources.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a resources.zip for locale .pak files.
+
+Places the locale .pak files into appropriate resource configs
+(e.g. en-GB.pak -> res/raw-en/en_gb.lpak). Also generates a locale_paks
+TypedArray so that resource files can be enumerated at runtime.
+"""
+
+import collections
+import optparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+# This should stay in sync with:
+# base/android/java/src/org/chromium/base/LocaleUtils.java
+_CHROME_TO_ANDROID_LOCALE_MAP = {
+    'he': 'iw',
+    'id': 'in',
+    'fil': 'tl',
+}
+
+
+def ToResourceFileName(name):
+  """Returns the resource-compatible file name for the given file."""
+  # Resource file names must consist of [a-z0-9_.].
+  # Changes the extension to .lpak so that compression can be toggled
+  # separately for locale pak files vs other pak files.
+  return name.replace('-', '_').replace('.pak', '.lpak').lower()
+
+
+def CreateLocalePaksXml(names):
+  """Creates the contents for the locale-paks.xml files."""
+  VALUES_FILE_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+  <array name="locale_paks">%s
+  </array>
+</resources>
+'''
+  VALUES_ITEM_TEMPLATE = '''
+    <item>@raw/%s</item>'''
+
+  res_names = (os.path.splitext(name)[0] for name in names)
+  items = ''.join((VALUES_ITEM_TEMPLATE % name for name in res_names))
+  return VALUES_FILE_TEMPLATE % items
+
+
+def ComputeMappings(sources):
+  """Computes the mappings of sources -> resources.
+
+  Returns a tuple of:
+    - mappings: List of (src, dest) paths
+    - lang_to_locale_map: Map of language -> list of resource names
+      e.g. "en" -> ["en_gb.lpak"]
"en" -> ["en_gb.lpak"] + """ + lang_to_locale_map = collections.defaultdict(list) + mappings = [] + for src_path in sources: + basename = os.path.basename(src_path) + name = os.path.splitext(basename)[0] + res_name = ToResourceFileName(basename) + if name == 'en-US': + dest_dir = 'raw' + else: + # Chrome's uses different region mapping logic from Android, so include + # all regions for each language. + android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(name, name) + lang = android_locale[0:2] + dest_dir = 'raw-' + lang + lang_to_locale_map[lang].append(res_name) + mappings.append((src_path, os.path.join(dest_dir, res_name))) + return mappings, lang_to_locale_map + + +def main(): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--locale-paks', help='List of files for res/raw-LOCALE') + parser.add_option('--resources-zip', help='Path to output resources.zip') + parser.add_option('--print-languages', + action='store_true', + help='Print out the list of languages that cover the given locale paks ' + '(using Android\'s language codes)') + + options, _ = parser.parse_args() + build_utils.CheckOptions(options, parser, + required=['locale_paks']) + + sources = build_utils.ParseGnList(options.locale_paks) + + if options.depfile: + deps = sources + build_utils.GetPythonDependencies() + build_utils.WriteDepfile(options.depfile, deps) + + mappings, lang_to_locale_map = ComputeMappings(sources) + if options.print_languages: + print '\n'.join(sorted(lang_to_locale_map)) + + if options.resources_zip: + with zipfile.ZipFile(options.resources_zip, 'w', zipfile.ZIP_STORED) as out: + for mapping in mappings: + out.write(mapping[0], mapping[1]) + + # Create TypedArray resources so ResourceExtractor can enumerate files. + def WriteValuesFile(lang, names): + dest_dir = 'values' + if lang: + dest_dir += '-' + lang + # Always extract en-US.lpak since it's the fallback. + xml = CreateLocalePaksXml(names + ['en_us.lpak']) + out.writestr(os.path.join(dest_dir, 'locale-paks.xml'), xml) + + for lang, names in lang_to_locale_map.iteritems(): + WriteValuesFile(lang, names) + WriteValuesFile(None, []) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/main_dex_list.py b/build/android/gyp/main_dex_list.py new file mode 100644 index 00000000000..10a9961c1e9 --- /dev/null +++ b/build/android/gyp/main_dex_list.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+import argparse
+import json
+import os
+import sys
+import tempfile
+
+from util import build_utils
+
+sys.path.append(os.path.abspath(os.path.join(
+    os.path.dirname(__file__), os.pardir)))
+from pylib import constants
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--android-sdk-tools', required=True,
+                      help='Android sdk build tools directory.')
+  parser.add_argument('--main-dex-rules-path', action='append', default=[],
+                      dest='main_dex_rules_paths',
+                      help='A file containing a list of proguard rules to use '
+                           'in determining the classes to include in the '
+                           'main dex.')
+  parser.add_argument('--main-dex-list-path', required=True,
+                      help='The main dex list file to generate.')
+  parser.add_argument('--enabled-configurations',
+                      help='The build configurations for which a main dex list'
+                           ' should be generated.')
+  parser.add_argument('--configuration-name',
+                      help='The current build configuration.')
+  parser.add_argument('--multidex-configuration-path',
+                      help='A JSON file containing multidex build '
+                           'configuration.')
+  parser.add_argument('--inputs',
+                      help='JARs for which a main dex list should be '
+                           'generated.')
+  parser.add_argument('paths', nargs='*', default=[],
+                      help='JARs for which a main dex list should be '
+                           'generated.')
+
+  args = parser.parse_args(build_utils.ExpandFileArgs(args))
+
+  if args.multidex_configuration_path:
+    with open(args.multidex_configuration_path) as multidex_config_file:
+      multidex_config = json.loads(multidex_config_file.read())
+
+    if not multidex_config.get('enabled', False):
+      return 0
+
+  if args.inputs:
+    args.paths.extend(build_utils.ParseGnList(args.inputs))
+
+  shrinked_android_jar = os.path.abspath(
+      os.path.join(args.android_sdk_tools, 'lib', 'shrinkedAndroid.jar'))
+  dx_jar = os.path.abspath(
+      os.path.join(args.android_sdk_tools, 'lib', 'dx.jar'))
+  rules_file = os.path.abspath(
+      os.path.join(args.android_sdk_tools, 'mainDexClasses.rules'))
+
+  proguard_cmd = [
+      constants.PROGUARD_SCRIPT_PATH,
+      '-forceprocessing',
+      '-dontwarn', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
+      '-libraryjars', shrinked_android_jar,
+      '-include', rules_file,
+  ]
+  for m in args.main_dex_rules_paths:
+    proguard_cmd.extend(['-include', m])
+
+  main_dex_list_cmd = [
+      'java', '-cp', dx_jar,
+      'com.android.multidex.MainDexListBuilder',
+  ]
+
+  input_paths = list(args.paths)
+  input_paths += [
+      shrinked_android_jar,
+      dx_jar,
+      rules_file,
+  ]
+  input_paths += args.main_dex_rules_paths
+
+  input_strings = [
+      proguard_cmd,
+      main_dex_list_cmd,
+  ]
+
+  output_paths = [
+      args.main_dex_list_path,
+  ]
+
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda: _OnStaleMd5(proguard_cmd, main_dex_list_cmd, args.paths,
+                          args.main_dex_list_path),
+      args,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths)
+
+  return 0
+
+
+def _OnStaleMd5(proguard_cmd, main_dex_list_cmd, paths, main_dex_list_path):
+  paths_arg = ':'.join(paths)
+  main_dex_list = ''
+  try:
+    with tempfile.NamedTemporaryFile(suffix='.jar') as temp_jar:
+      proguard_cmd += [
+          '-injars', paths_arg,
+          '-outjars', temp_jar.name
+      ]
+      build_utils.CheckOutput(proguard_cmd, print_stderr=False)
+
+      main_dex_list_cmd += [
+          temp_jar.name, paths_arg
+      ]
+      main_dex_list = build_utils.CheckOutput(main_dex_list_cmd)
+  except build_utils.CalledProcessError as e:
+    if 'output jar is empty' in e.output:
+      pass
+    elif "input doesn't contain any classes" in e.output:
+      pass
+    else:
+      raise
+
+  with open(main_dex_list_path, 'w') as main_dex_list_file:
+    main_dex_list_file.write(main_dex_list)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
diff --git a/build/android/gyp/pack_relocations.py b/build/android/gyp/pack_relocations.py
new file mode 100644
index 00000000000..c2d02d72c05
--- /dev/null
+++ b/build/android/gyp/pack_relocations.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pack relocations in a library (or copy unchanged).
+
+If --enable-packing and --configuration-name=='Release', invoke the
+relocation_packer tool to pack the .rel.dyn or .rela.dyn section in the given
+library files. This step is inserted after the libraries are stripped.
+
+If --enable-packing is zero, the script copies files verbatim, with no
+attempt to pack relocations.
+"""
+
+import ast
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
+def PackLibraryRelocations(android_pack_relocations, library_path, output_path):
+  shutil.copy(library_path, output_path)
+  pack_command = [android_pack_relocations, output_path]
+  build_utils.CheckOutput(pack_command)
+
+
+def CopyLibraryUnchanged(library_path, output_path):
+  shutil.copy(library_path, output_path)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--clear-dir', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                         'before copying files to it. This is highly '
+                         'recommended to ensure that no stale files are left '
+                         'in the directory.')
+
+  parser.add_option('--configuration-name',
+                    default='Release',
+                    help='Gyp configuration name (e.g. Debug, Release)')
+  parser.add_option('--enable-packing',
+                    choices=['0', '1'],
+                    help=('Pack relocations if 1 and configuration name is '
+                          '\'Release\', otherwise plain file copy'))
+  parser.add_option('--android-pack-relocations',
+                    help='Path to the relocations packer binary')
+  parser.add_option('--stripped-libraries-dir',
+                    help='Directory for stripped libraries')
+  parser.add_option('--packed-libraries-dir',
+                    help='Directory for packed libraries')
+  parser.add_option('--libraries', action='append',
+                    help='List of libraries in Python dictionary format')
+  parser.add_option('--stamp', help='Path to touch on success')
+  parser.add_option('--filelistjson',
+                    help='Output path of filelist.json to write')
+
+  options, _ = parser.parse_args(args)
+  enable_packing = (options.enable_packing == '1' and
+                    options.configuration_name == 'Release')
+
+  libraries = []
+  for libs_arg in options.libraries:
+    libraries += ast.literal_eval(libs_arg)
+
+  if options.clear_dir:
+    build_utils.DeleteDirectory(options.packed_libraries_dir)
+
+  build_utils.MakeDirectory(options.packed_libraries_dir)
+
+  output_paths = []
+  for library in libraries:
+    library_path = os.path.join(options.stripped_libraries_dir, library)
+    output_path = os.path.join(
+        options.packed_libraries_dir, os.path.basename(library))
+    output_paths.append(output_path)
+
+    if enable_packing:
+      PackLibraryRelocations(options.android_pack_relocations,
+                             library_path,
+                             output_path)
+    else:
+      CopyLibraryUnchanged(library_path, output_path)
+
+  if options.filelistjson:
+    build_utils.WriteJson({ 'files': output_paths }, options.filelistjson)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        libraries + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/package_resources.py b/build/android/gyp/package_resources.py
new file mode 100644
index 00000000000..53ba92c3c6d
--- /dev/null
+++ b/build/android/gyp/package_resources.py
@@ -0,0 +1,325 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=C0301
+"""Package resources into an apk.
+
+See https://android.googlesource.com/platform/tools/base/+/master/legacy/ant-tasks/src/main/java/com/android/ant/AaptExecTask.java
+and
+https://android.googlesource.com/platform/sdk/+/master/files/ant/build.xml
+"""
+# pylint: enable=C0301
+
+import optparse
+import os
+import re
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
+# List is generated from the chrome_apk.apk_intermediates.ap_ via:
+#     unzip -l $FILE_AP_ | cut -c31- | grep res/draw | cut -d'/' -f 2 | sort \
+#         | uniq | grep -- -tvdpi- | cut -c10-
+# and then manually sorted.
+# Note that we can't just do a cross-product of dimensions because the
+# filenames become too big and aapt fails to create the files.
+# This leaves all default drawables (mdpi) in the main apk. Android gets
+# upset though if any drawables are missing from the default drawables/
+# directory.
+DENSITY_SPLITS = {
+    'hdpi': (
+        'hdpi-v4', # Order matters for output file names.
+        'ldrtl-hdpi-v4',
+        'sw600dp-hdpi-v13',
+        'ldrtl-hdpi-v17',
+        'ldrtl-sw600dp-hdpi-v17',
+        'hdpi-v21',
+    ),
+    'xhdpi': (
+        'xhdpi-v4',
+        'ldrtl-xhdpi-v4',
+        'sw600dp-xhdpi-v13',
+        'ldrtl-xhdpi-v17',
+        'ldrtl-sw600dp-xhdpi-v17',
+        'xhdpi-v21',
+    ),
+    'xxhdpi': (
+        'xxhdpi-v4',
+        'ldrtl-xxhdpi-v4',
+        'sw600dp-xxhdpi-v13',
+        'ldrtl-xxhdpi-v17',
+        'ldrtl-sw600dp-xxhdpi-v17',
+        'xxhdpi-v21',
+    ),
+    'xxxhdpi': (
+        'xxxhdpi-v4',
+        'ldrtl-xxxhdpi-v4',
+        'sw600dp-xxxhdpi-v13',
+        'ldrtl-xxxhdpi-v17',
+        'ldrtl-sw600dp-xxxhdpi-v17',
+        'xxxhdpi-v21',
+    ),
+    'tvdpi': (
+        'tvdpi-v4',
+        'sw600dp-tvdpi-v13',
+        'ldrtl-sw600dp-tvdpi-v17',
+    ),
+}
+
+
+def _ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionsParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--android-sdk-jar',
+                    help='path to the Android SDK jar.')
+  parser.add_option('--aapt-path',
+                    help='path to the Android aapt tool')
+
+  parser.add_option('--configuration-name',
+                    help='Gyp\'s configuration name (Debug or Release).')
+
+  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
+  parser.add_option('--version-code', help='Version code for apk.')
+  parser.add_option('--version-name', help='Version name for apk.')
+  parser.add_option(
+      '--shared-resources',
+      action='store_true',
+      help='Make a resource package that can be loaded by a different '
+           'application at runtime to access the package\'s resources.')
+  parser.add_option(
+      '--app-as-shared-lib',
+      action='store_true',
+      help='Make a resource package that can be loaded as shared library')
+  parser.add_option('--resource-zips',
+                    default='[]',
+                    help='zip files containing resources to be packaged')
+  parser.add_option('--asset-dir',
+                    help='directories containing assets to be packaged')
+  parser.add_option('--no-compress', help='disables compression for the '
+                    'given comma-separated list of extensions')
+  parser.add_option(
+      '--create-density-splits',
+      action='store_true',
+      help='Enables density splits')
+  parser.add_option('--language-splits',
+                    default='[]',
+                    help='GYP list of languages to create splits for')
+
+  parser.add_option('--apk-path',
+                    help='Path to output (partial) apk.')
+
+  options, positional_args = parser.parse_args(args)
+
+  if positional_args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('android_sdk_jar', 'aapt_path', 'configuration_name',
+                      'android_manifest', 'version_code', 'version_name',
+                      'apk_path')
+
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  options.resource_zips = build_utils.ParseGnList(options.resource_zips)
+  options.language_splits = build_utils.ParseGnList(options.language_splits)
+  return options
+
+
+def MoveImagesToNonMdpiFolders(res_root):
+  """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+  Why? http://crbug.com/289843
+  """
+  for src_dir_name in os.listdir(res_root):
+    src_components = src_dir_name.split('-')
+    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+      continue
+    src_dir = os.path.join(res_root, src_dir_name)
+    if not os.path.isdir(src_dir):
+      continue
+    dst_components = [c for c in src_components if c != 'mdpi']
+    assert dst_components != src_components
+    dst_dir_name = '-'.join(dst_components)
+    dst_dir = os.path.join(res_root, dst_dir_name)
+    build_utils.MakeDirectory(dst_dir)
+    for src_file_name in os.listdir(src_dir):
+      if not src_file_name.endswith('.png'):
+        continue
+      src_file = os.path.join(src_dir, src_file_name)
+      dst_file = os.path.join(dst_dir, src_file_name)
+      assert not os.path.lexists(dst_file)
+      shutil.move(src_file, dst_file)
+
+
+def PackageArgsForExtractedZip(d):
+  """Returns the aapt args for an extracted resources zip.
+
+  A resources zip either contains the resources for a single target or for
+  multiple targets. If it is multiple targets merged into one, the actual
+  resource directories will be contained in the subdirectories 0, 1, 2, ...
+  """
+  subdirs = [os.path.join(d, s) for s in os.listdir(d)]
+  subdirs = [s for s in subdirs if os.path.isdir(s)]
+  is_multi = '0' in [os.path.basename(s) for s in subdirs]
+  if is_multi:
+    res_dirs = sorted(subdirs, key=lambda p: int(os.path.basename(p)))
+  else:
+    res_dirs = [d]
+  package_command = []
+  for d in res_dirs:
+    MoveImagesToNonMdpiFolders(d)
+    package_command += ['-S', d]
+  return package_command
+
+
+def _GenerateDensitySplitPaths(apk_path):
+  for density, config in DENSITY_SPLITS.iteritems():
+    src_path = '%s_%s' % (apk_path, '_'.join(config))
+    dst_path = '%s_%s' % (apk_path, density)
+    yield src_path, dst_path
+
+
+def _GenerateLanguageSplitOutputPaths(apk_path, languages):
+  for lang in languages:
+    yield '%s_%s' % (apk_path, lang)
+
+
+def RenameDensitySplits(apk_path):
+  """Renames all density splits to have shorter / predictable names."""
+  for src_path, dst_path in _GenerateDensitySplitPaths(apk_path):
+    shutil.move(src_path, dst_path)
+
+
+def CheckForMissedConfigs(apk_path, check_density, languages):
+  """Raises an exception if apk_path contains any unexpected configs."""
+  triggers = []
+  if check_density:
+    triggers.extend(re.compile('-%s' % density) for density in DENSITY_SPLITS)
+  if languages:
+    triggers.extend(re.compile(r'-%s\b' % lang) for lang in languages)
+  with zipfile.ZipFile(apk_path) as main_apk_zip:
+    for name in main_apk_zip.namelist():
+      for trigger in triggers:
+        if trigger.search(name) and 'mipmap-' not in name:
+          raise Exception(('Found config in main apk that should have been ' +
+                           'put into a split: %s\nYou need to update ' +
+                           'package_resources.py to include this new ' +
+                           'config (trigger=%s)') % (name, trigger.pattern))
+
+
+def _ConstructMostAaptArgs(options):
+  package_command = [
+      options.aapt_path,
+      'package',
+      '--version-code', options.version_code,
+      '--version-name', options.version_name,
+      '-M', options.android_manifest,
+      '--no-crunch',
+      '-f',
+      '--auto-add-overlay',
+      '--no-version-vectors',
+      '-I', options.android_sdk_jar,
+      '-F', options.apk_path,
+      '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN,
+  ]
+
+  if options.no_compress:
+    for ext in options.no_compress.split(','):
+      package_command += ['-0', ext]
+
+  if options.shared_resources:
+    package_command.append('--shared-lib')
+
+  if options.app_as_shared_lib:
+    package_command.append('--app-as-shared-lib')
+
+  if options.asset_dir and os.path.exists(options.asset_dir):
+    package_command += ['-A', options.asset_dir]
+
+  if options.create_density_splits:
+    for config in DENSITY_SPLITS.itervalues():
+      package_command.extend(('--split', ','.join(config)))
+
+  if options.language_splits:
+    for lang in options.language_splits:
+      package_command.extend(('--split', lang))
+
+  if 'Debug' in options.configuration_name:
+    package_command += ['--debug-mode']
+
+  return package_command
+
+
+def _OnStaleMd5(package_command, options):
+  with build_utils.TempDir() as temp_dir:
+    if options.resource_zips:
+      dep_zips = options.resource_zips
+      for z in dep_zips:
+        subdir = os.path.join(temp_dir, os.path.basename(z))
+        if os.path.exists(subdir):
+          raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+        build_utils.ExtractAll(z, path=subdir)
+        package_command += PackageArgsForExtractedZip(subdir)
+
+    build_utils.CheckOutput(
+        package_command, print_stdout=False, print_stderr=False)
+
+    if options.create_density_splits or options.language_splits:
+      CheckForMissedConfigs(options.apk_path, options.create_density_splits,
+                            options.language_splits)
+
+    if options.create_density_splits:
+      RenameDensitySplits(options.apk_path)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  package_command = _ConstructMostAaptArgs(options)
+
+  output_paths = [ options.apk_path ]
+
+  if options.create_density_splits:
+    for _, dst_path in _GenerateDensitySplitPaths(options.apk_path):
+      output_paths.append(dst_path)
+  output_paths.extend(
+      _GenerateLanguageSplitOutputPaths(options.apk_path,
+                                        options.language_splits))
+
+  input_paths = [ options.android_manifest ] + options.resource_zips
+
+  input_strings = []
+  input_strings.extend(package_command)
+
+  # md5_check.py intentionally does not include file paths in its md5, so in
+  # order to repackage resources when an asset's name changes, we add the
+  # asset paths to input_strings as well. These paths are stable from build
+  # to build unless the assets themselves change.
+  if options.asset_dir and os.path.exists(options.asset_dir):
+    asset_paths = []
+    for root, _, filenames in os.walk(options.asset_dir):
+      asset_paths.extend(os.path.join(root, f) for f in filenames)
+    input_paths.extend(asset_paths)
+    input_strings.extend(sorted(asset_paths))
+
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda: _OnStaleMd5(package_command, options),
+      options,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/build/android/gyp/process_resources.py b/build/android/gyp/process_resources.py
new file mode 100644
index 00000000000..2d4638f8d81
--- /dev/null
+++ b/build/android/gyp/process_resources.py
@@ -0,0 +1,541 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resources to generate R.java, and prepare for packaging.
+
+This will crunch images and generate v14 compatible resources
+(see generate_v14_compatible_resources.py).
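+
+Rough sketch of the flow (illustrative values): aapt is first run in '-m'
+mode to emit an R.txt, whose lines look like
+
+  int string hello 0x7f040000
+
+and are then re-emitted into per-package R.java files as
+
+  public static final int hello = 0x7f040000;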
+""" + +import codecs +import collections +import optparse +import os +import re +import shutil +import sys +import xml.etree.ElementTree + +import generate_v14_compatible_resources + +from util import build_utils + +# Import jinja2 from third_party/jinja2 +sys.path.insert(1, + os.path.join(os.path.dirname(__file__), '../../../third_party')) +from jinja2 import Template # pylint: disable=F0401 + + +# Represents a line from a R.txt file. +TextSymbolsEntry = collections.namedtuple('RTextEntry', + ('java_type', 'resource_type', 'name', 'value')) + + +def _ParseArgs(args): + """Parses command line options. + + Returns: + An options object as from optparse.OptionsParser.parse_args() + """ + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--android-sdk-jar', + help='the path to android jar file.') + parser.add_option('--aapt-path', + help='path to the Android aapt tool') + parser.add_option('--non-constant-id', action='store_true') + + parser.add_option('--android-manifest', help='AndroidManifest.xml path') + parser.add_option('--custom-package', help='Java package for R.java') + parser.add_option( + '--shared-resources', + action='store_true', + help='Make a resource package that can be loaded by a different' + 'application at runtime to access the package\'s resources.') + parser.add_option( + '--app-as-shared-lib', + action='store_true', + help='Make a resource package that can be loaded as shared library.') + + parser.add_option('--resource-dirs', + help='Directories containing resources of this target.') + parser.add_option('--dependencies-res-zips', + help='Resources from dependents.') + + parser.add_option('--resource-zip-out', + help='Path for output zipped resources.') + + parser.add_option('--R-dir', + help='directory to hold generated R.java.') + parser.add_option('--srcjar-out', + help='Path to srcjar to contain generated R.java.') + parser.add_option('--r-text-out', + help='Path to store the R.txt file generated by appt.') + + parser.add_option('--proguard-file', + help='Path to proguard.txt generated file') + + parser.add_option( + '--v14-skip', + action="store_true", + help='Do not generate nor verify v14 resources') + + parser.add_option( + '--extra-res-packages', + help='Additional package names to generate R.java files for') + parser.add_option( + '--extra-r-text-files', + help='For each additional package, the R.txt file should contain a ' + 'list of resources to be included in the R.java file in the format ' + 'generated by aapt') + parser.add_option( + '--include-all-resources', + action='store_true', + help='Include every resource ID in every generated R.java file ' + '(ignoring R.txt).') + + parser.add_option( + '--all-resources-zip-out', + help='Path for output of all resources. This includes resources in ' + 'dependencies.') + + parser.add_option('--stamp', help='File to touch on success') + + options, positional_args = parser.parse_args(args) + + if positional_args: + parser.error('No positional arguments should be given.') + + # Check that required options have been provided. 
+  required_options = (
+      'android_sdk_jar',
+      'aapt_path',
+      'android_manifest',
+      'dependencies_res_zips',
+      'resource_dirs',
+      'resource_zip_out',
+  )
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  if (options.R_dir is None) == (options.srcjar_out is None):
+    raise Exception('Exactly one of --R-dir or --srcjar-out must be specified.')
+
+  options.resource_dirs = build_utils.ParseGnList(options.resource_dirs)
+  options.dependencies_res_zips = (
+      build_utils.ParseGnList(options.dependencies_res_zips))
+
+  # Don't use [] as the default value since some scripts explicitly pass "".
+  if options.extra_res_packages:
+    options.extra_res_packages = (
+        build_utils.ParseGnList(options.extra_res_packages))
+  else:
+    options.extra_res_packages = []
+
+  if options.extra_r_text_files:
+    options.extra_r_text_files = (
+        build_utils.ParseGnList(options.extra_r_text_files))
+  else:
+    options.extra_r_text_files = []
+
+  return options
+
+
+def CreateRJavaFiles(srcjar_dir, main_r_txt_file, packages, r_txt_files,
+                     shared_resources):
+  assert len(packages) == len(r_txt_files), 'Need one R.txt file per package'
+
+  # Map of (resource_type, name) -> Entry.
+  # Contains the correct values for resources.
+  all_resources = {}
+  for entry in _ParseTextSymbolsFile(main_r_txt_file):
+    all_resources[(entry.resource_type, entry.name)] = entry
+
+  # Map of package_name->resource_type->entry
+  resources_by_package = (
+      collections.defaultdict(lambda: collections.defaultdict(list)))
+  # Build the R.java files using each package's R.txt file, but replacing
+  # each entry's placeholder value with correct values from all_resources.
+  for package, r_txt_file in zip(packages, r_txt_files):
+    if package in resources_by_package:
+      raise Exception(('Package name "%s" appeared twice. All '
+                       'android_resources() targets must use unique package '
+                       'names, or no package name at all.') % package)
+    resources_by_type = resources_by_package[package]
+    # The sub-R.txt files have the wrong values at this point. Read them to
+    # figure out which entries belong to them, but use the values from the
+    # main R.txt file.
+    for entry in _ParseTextSymbolsFile(r_txt_file):
+      entry = all_resources[(entry.resource_type, entry.name)]
+      resources_by_type[entry.resource_type].append(entry)
+
+  for package, resources_by_type in resources_by_package.iteritems():
+    package_r_java_dir = os.path.join(srcjar_dir, *package.split('.'))
+    build_utils.MakeDirectory(package_r_java_dir)
+    package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+    java_file_contents = _CreateRJavaFile(
+        package, resources_by_type, shared_resources)
+    with open(package_r_java_path, 'w') as f:
+      f.write(java_file_contents)
+
+
+def _ParseTextSymbolsFile(path):
+  """Given an R.txt file, returns a list of TextSymbolsEntry."""
+  ret = []
+  with open(path) as f:
+    for line in f:
+      m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
+      if not m:
+        raise Exception('Unexpected line in R.txt: %s' % line)
+      java_type, resource_type, name, value = m.groups()
+      ret.append(TextSymbolsEntry(java_type, resource_type, name, value))
+  return ret
+
+
+def _CreateRJavaFile(package, resources_by_type, shared_resources):
+  """Generates the contents of an R.java file."""
+  # Keep these assignments all on one line to make diffing against regular
+  # aapt-generated files easier.
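+  # For an entry of type "drawable" named "icon" (an illustrative example),
+  # the create_id template below renders to:
+  #   drawable.icon = (drawable.icon & 0x00ffffff) | (packageId << 24);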
+  create_id = ('{{ e.resource_type }}.{{ e.name }} = '
+               '({{ e.resource_type }}.{{ e.name }} & 0x00ffffff) |'
+               ' (packageId << 24);')
+  create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] = '
+                   '({{ e.resource_type }}.{{ e.name }}[i] & 0x00ffffff) |'
+                   ' (packageId << 24);')
+  # Here we diverge from what aapt does. Because we have so many
+  # resources, the onResourcesLoaded method was exceeding the 64KB limit that
+  # Java imposes. For this reason we split onResourcesLoaded into different
+  # methods for each resource type.
+  template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+    {% for resource_type in resource_types %}
+    public static final class {{ resource_type }} {
+        {% for e in resources[resource_type] %}
+        {% if shared_resources %}
+        public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
+        {% else %}
+        public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
+        {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+    {% if shared_resources %}
+    public static void onResourcesLoaded(int packageId) {
+        {% for resource_type in resource_types %}
+        onResourcesLoaded{{ resource_type|title }}(packageId);
+        {% for e in resources[resource_type] %}
+        {% if e.java_type == 'int[]' %}
+        for(int i = 0; i < {{ e.resource_type }}.{{ e.name }}.length; ++i) {
+            """ + create_id_arr + """
+        }
+        {% endif %}
+        {% endfor %}
+        {% endfor %}
+    }
+    {% for res_type in resource_types %}
+    private static void onResourcesLoaded{{ res_type|title }}(int packageId) {
+        {% for e in resources[res_type] %}
+        {% if res_type != 'styleable' and e.java_type != 'int[]' %}
+        """ + create_id + """
+        {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+    {% endif %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+  return template.render(package=package,
+                         resources=resources_by_type,
+                         resource_types=sorted(resources_by_type),
+                         shared_resources=shared_resources)
+
+
+def CrunchDirectory(aapt, input_dir, output_dir):
+  """Crunches the images in input_dir and its subdirectories into output_dir.
+
+  If an image is already optimized, crunching often increases image size. In
+  this case, the crunched image is overwritten with the original image.
+  """
+  aapt_cmd = [aapt,
+              'crunch',
+              '-C', output_dir,
+              '-S', input_dir,
+              '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]
+  build_utils.CheckOutput(aapt_cmd, stderr_filter=FilterCrunchStderr,
+                          fail_func=DidCrunchFail)
+
+  # Check for images whose size increased during crunching and replace them
+  # with their originals (except for 9-patches, which must be crunched).
+  for dir_, _, files in os.walk(output_dir):
+    for crunched in files:
+      if crunched.endswith('.9.png'):
+        continue
+      if not crunched.endswith('.png'):
+        raise Exception('Unexpected file in crunched dir: ' + crunched)
+      crunched = os.path.join(dir_, crunched)
+      original = os.path.join(input_dir, os.path.relpath(crunched, output_dir))
+      original_size = os.path.getsize(original)
+      crunched_size = os.path.getsize(crunched)
+      if original_size < crunched_size:
+        shutil.copyfile(original, crunched)
+
+
+def FilterCrunchStderr(stderr):
+  """Filters out lines from aapt crunch's stderr that can safely be ignored."""
+  filtered_lines = []
+  for line in stderr.splitlines(True):
+    # Ignore this libpng warning, which is a known non-error condition.
+    # http://crbug.com/364355
+    if ('libpng warning: iCCP: Not recognizing known sRGB profile that has '
+        + 'been edited' in line):
+      continue
+    filtered_lines.append(line)
+  return ''.join(filtered_lines)
+
+
+def DidCrunchFail(returncode, stderr):
+  """Determines whether aapt crunch failed from its return code and output.
+
+  Because aapt's return code cannot be trusted, any output to stderr is
+  an indication that aapt has failed (http://crbug.com/314885).
+  """
+  return returncode != 0 or stderr
+
+
+def ZipResources(resource_dirs, zip_path):
+  # Python zipfile does not provide a way to replace a file (it just writes
+  # another file with the same name). So, first collect all the files to put
+  # in the zip (with proper overriding), and then zip them.
+  files_to_zip = dict()
+  for d in resource_dirs:
+    for root, _, files in os.walk(d):
+      for f in files:
+        archive_path = f
+        parent_dir = os.path.relpath(root, d)
+        if parent_dir != '.':
+          archive_path = os.path.join(parent_dir, f)
+        path = os.path.join(root, f)
+        files_to_zip[archive_path] = path
+  build_utils.DoZip(files_to_zip.iteritems(), zip_path)
+
+
+def CombineZips(zip_files, output_path):
+  # When packaging resources, if the top-level directories in the zip file are
+  # of the form 0, 1, ..., then each subdirectory will be passed to aapt as a
+  # resources directory. While some resources just clobber others (image files,
+  # etc), other resources (particularly .xml files) need to be more
+  # intelligently merged. That merging is left up to aapt.
+  def path_transform(name, src_zip):
+    return '%d/%s' % (zip_files.index(src_zip), name)
+
+  build_utils.MergeZips(output_path, zip_files, path_transform=path_transform)
+
+
+def _ExtractPackageFromManifest(manifest_path):
+  doc = xml.etree.ElementTree.parse(manifest_path)
+  return doc.getroot().get('package')
+
+
+def _OnStaleMd5(options):
+  aapt = options.aapt_path
+  with build_utils.TempDir() as temp_dir:
+    deps_dir = os.path.join(temp_dir, 'deps')
+    build_utils.MakeDirectory(deps_dir)
+    v14_dir = os.path.join(temp_dir, 'v14')
+    build_utils.MakeDirectory(v14_dir)
+
+    gen_dir = os.path.join(temp_dir, 'gen')
+    build_utils.MakeDirectory(gen_dir)
+    r_txt_path = os.path.join(gen_dir, 'R.txt')
+    srcjar_dir = os.path.join(temp_dir, 'java')
+
+    input_resource_dirs = options.resource_dirs
+
+    if not options.v14_skip:
+      for resource_dir in input_resource_dirs:
+        generate_v14_compatible_resources.GenerateV14Resources(
+            resource_dir,
+            v14_dir)
+
+    dep_zips = options.dependencies_res_zips
+    dep_subdirs = []
+    for z in dep_zips:
+      subdir = os.path.join(deps_dir, os.path.basename(z))
+      if os.path.exists(subdir):
+        raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+      build_utils.ExtractAll(z, path=subdir)
+      dep_subdirs.append(subdir)
+
+    # Generate R.java. This R.java contains non-final constants and is used
+    # only while compiling the library jar (e.g. chromium_content.jar). When
+    # building an apk, a new R.java file with the correct resource -> ID
+    # mappings will be generated by merging the resources from all libraries
+    # and the main apk project.
+    package_command = [aapt,
+                       'package',
+                       '-m',
+                       '-M', options.android_manifest,
+                       '--auto-add-overlay',
+                       '--no-version-vectors',
+                       '-I', options.android_sdk_jar,
+                       '--output-text-symbols', gen_dir,
+                       '-J', gen_dir, # Required for R.txt generation.
+                       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]
+
+    # aapt supports only the "--include-all-resources" mode, where each R.java
+    # file ends up with all symbols, rather than only those that it had at the
+    # time it was originally generated. This subtle difference has no effect
+    # when compiling, but can lead to increased unused symbols in the
+    # resulting R.class files.
+    # TODO(agrieve): See if proguard makes this difference actually translate
+    # into a size difference. If not, we can delete all of our custom R.java
+    # template code above (and make include_all_resources the default).
+    if options.include_all_resources:
+      srcjar_dir = gen_dir
+      if options.extra_res_packages:
+        colon_separated = ':'.join(options.extra_res_packages)
+        package_command += ['--extra-packages', colon_separated]
+      if options.non_constant_id:
+        package_command.append('--non-constant-id')
+      if options.custom_package:
+        package_command += ['--custom-package', options.custom_package]
+      if options.shared_resources:
+        package_command.append('--shared-lib')
+      if options.app_as_shared_lib:
+        package_command.append('--app-as-shared-lib')
+
+    for d in input_resource_dirs:
+      package_command += ['-S', d]
+
+    # Adding all dependencies as sources is necessary for @type/foo references
+    # to symbols within dependencies to resolve. However, it has the
+    # side-effect that all Java symbols from dependencies are copied into the
+    # new R.java. E.g.: it enables an arguably incorrect usage of
+    # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be
+    # more correct. This is just how Android works.
+    for d in dep_subdirs:
+      package_command += ['-S', d]
+
+    if options.proguard_file:
+      package_command += ['-G', options.proguard_file]
+    build_utils.CheckOutput(package_command, print_stderr=False)
+
+    # When an empty res/ directory is passed, aapt does not write an R.txt.
+    if not os.path.exists(r_txt_path):
+      build_utils.Touch(r_txt_path)
+
+    if not options.include_all_resources:
+      packages = list(options.extra_res_packages)
+      r_txt_files = list(options.extra_r_text_files)
+
+      cur_package = options.custom_package
+      if not options.custom_package:
+        cur_package = _ExtractPackageFromManifest(options.android_manifest)
+
+      # Don't create a .java file for the current resource target when:
+      # - no package name was provided (either by manifest or build rules),
+      # - there was already a dependent android_resources() with the same
+      #   package (occurs mostly when an apk target and resources target share
+      #   an AndroidManifest.xml)
+      if cur_package != 'dummy.package' and cur_package not in packages:
+        packages.append(cur_package)
+        r_txt_files.append(r_txt_path)
+
+      if packages:
+        shared_resources = options.shared_resources or options.app_as_shared_lib
+        CreateRJavaFiles(srcjar_dir, r_txt_path, packages, r_txt_files,
+                         shared_resources)
+
+    # This is the list of directories with resources to put in the final .zip
+    # file. The order of these is important so that crunched/v14 resources
+    # override the normal ones.
+    zip_resource_dirs = input_resource_dirs + [v14_dir]
+
+    base_crunch_dir = os.path.join(temp_dir, 'crunch')
+
+    # Crunch image resources. This shrinks png files and is necessary for
+    # 9-patch images to display correctly. 'aapt crunch' accepts only a single
+    # directory at a time and deletes everything in the output directory.
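+    # E.g. with input_resource_dirs ['res', 'res_v14'] (illustrative), the
+    # crunched output lands in crunch/0 and crunch/1 respectively.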
+    for idx, input_dir in enumerate(input_resource_dirs):
+      crunch_dir = os.path.join(base_crunch_dir, str(idx))
+      build_utils.MakeDirectory(crunch_dir)
+      zip_resource_dirs.append(crunch_dir)
+      CrunchDirectory(aapt, input_dir, crunch_dir)
+
+    ZipResources(zip_resource_dirs, options.resource_zip_out)
+
+    if options.all_resources_zip_out:
+      CombineZips([options.resource_zip_out] + dep_zips,
+                  options.all_resources_zip_out)
+
+    if options.R_dir:
+      build_utils.DeleteDirectory(options.R_dir)
+      shutil.copytree(srcjar_dir, options.R_dir)
+    else:
+      build_utils.ZipDir(options.srcjar_out, srcjar_dir)
+
+    if options.r_text_out:
+      shutil.copyfile(r_txt_path, options.r_text_out)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  possible_output_paths = [
+      options.resource_zip_out,
+      options.all_resources_zip_out,
+      options.proguard_file,
+      options.r_text_out,
+      options.srcjar_out,
+  ]
+  output_paths = [x for x in possible_output_paths if x]
+
+  # List python deps in input_strings rather than input_paths since their
+  # contents do not change what gets written to the depfile.
+  input_strings = options.extra_res_packages + [
+      options.app_as_shared_lib,
+      options.custom_package,
+      options.include_all_resources,
+      options.non_constant_id,
+      options.shared_resources,
+      options.v14_skip,
+  ]
+
+  input_paths = [
+      options.aapt_path,
+      options.android_manifest,
+      options.android_sdk_jar,
+  ]
+  input_paths.extend(options.dependencies_res_zips)
+  input_paths.extend(options.extra_r_text_files)
+
+  resource_names = []
+  for resource_dir in options.resource_dirs:
+    for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
+      input_paths.append(resource_file)
+      resource_names.append(os.path.relpath(resource_file, resource_dir))
+
+  # Resource filenames matter to the output, so add them to strings as well.
+  # This matters if a file is renamed but not changed (http://crbug.com/597126).
+  input_strings.extend(sorted(resource_names))
+
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda: _OnStaleMd5(options),
+      options,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths,
+      # TODO(agrieve): Remove R_dir when it's no longer used (used only by GYP).
+      force=options.R_dir)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py
new file mode 100644
index 00000000000..8a4ac599a47
--- /dev/null
+++ b/build/android/gyp/proguard.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
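+
+# A typical invocation looks roughly like the following (illustrative only;
+# the real flag values come from the build rules):
+#   proguard.py --proguard-path=<path to proguard script> \
+#       --input-paths=<input jar> --output-path=<obfuscated jar> \
+#       --proguard-configs=<config file> --classpath=<android.jar>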
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import proguard_util
+
+
+_DANGEROUS_OPTIMIZATIONS = [
+    # See crbug.com/625992
+    "code/allocation/variable",
+    # See crbug.com/625994
+    "field/propagation/value",
+    "method/propagation/parameter",
+    "method/propagation/returnvalue",
+]
+
+def _ParseOptions(args):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--proguard-path',
+                    help='Path to the proguard executable.')
+  parser.add_option('--input-paths',
+                    help='Paths to the .jar files proguard should run on.')
+  parser.add_option('--output-path', help='Path to the generated .jar file.')
+  parser.add_option('--proguard-configs',
+                    help='Paths to proguard configuration files.')
+  parser.add_option('--mapping', help='Path to proguard mapping to apply.')
+  parser.add_option('--is-test', action='store_true',
+      help='If true, extra proguard options for instrumentation tests will be '
+           'added.')
+  parser.add_option('--tested-apk-info', help='Path to the proguard .info file '
+                    'for the tested apk')
+  parser.add_option('--classpath', action='append',
+                    help='Classpath for proguard.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--enable-dangerous-optimizations', action='store_true',
+                    help='Enable optimizations which are known to have issues.')
+  parser.add_option('--verbose', '-v', action='store_true',
+                    help='Print all proguard output')
+
+  options, _ = parser.parse_args(args)
+
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGnList(arg)
+  options.classpath = classpath
+
+  return options
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseOptions(args)
+
+  proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
+  proguard.injars(build_utils.ParseGnList(options.input_paths))
+  proguard.configs(build_utils.ParseGnList(options.proguard_configs))
+  proguard.outjar(options.output_path)
+
+  if options.mapping:
+    proguard.mapping(options.mapping)
+
+  if options.tested_apk_info:
+    proguard.tested_apk_info(options.tested_apk_info)
+
+  classpath = list(set(options.classpath))
+  proguard.libraryjars(classpath)
+  proguard.verbose(options.verbose)
+  if not options.enable_dangerous_optimizations:
+    proguard.disable_optimizations(_DANGEROUS_OPTIMIZATIONS)
+
+  input_paths = proguard.GetInputs()
+
+  build_utils.CallAndWriteDepfileIfStale(
+      proguard.CheckOutput,
+      options,
+      input_paths=input_paths,
+      input_strings=proguard.build(),
+      output_paths=[options.output_path])
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/push_libraries.py b/build/android/gyp/push_libraries.py
new file mode 100644
index 00000000000..7d904430ccc
--- /dev/null
+++ b/build/android/gyp/push_libraries.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pushes native libraries to a device.
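+
+Each library is re-pushed only when stale: a per-device stamp file
+(<host_path>.<serial>.push.md5.stamp) records the md5 of what was last
+pushed; see the md5_check.CallAndRecordIfStale() call in DoPush() below.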
+ +""" + +import optparse +import os +import sys + +from util import build_device +from util import build_utils +from util import md5_check + +BUILD_ANDROID_DIR = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir)) +sys.path.append(BUILD_ANDROID_DIR) + +import devil_chromium +from pylib import constants + +def DoPush(options): + libraries = build_utils.ParseGnList(options.libraries) + + device = build_device.GetBuildDeviceFromPath( + options.build_device_configuration) + if not device: + return + + serial_number = device.GetSerialNumber() + # A list so that it is modifiable in Push below. + needs_directory = [True] + for lib in libraries: + device_path = os.path.join(options.device_dir, lib) + host_path = os.path.join(options.libraries_dir, lib) + + def Push(): + if needs_directory: + device.RunShellCommand('mkdir -p ' + options.device_dir) + needs_directory[:] = [] # = False + device.PushChangedFiles([(os.path.abspath(host_path), device_path)]) + + record_path = '%s.%s.push.md5.stamp' % (host_path, serial_number) + md5_check.CallAndRecordIfStale( + Push, + record_path=record_path, + input_paths=[host_path], + input_strings=[device_path]) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = optparse.OptionParser() + parser.add_option('--libraries-dir', + help='Directory that contains stripped libraries.') + parser.add_option('--device-dir', + help='Device directory to push the libraries to.') + parser.add_option('--libraries', + help='List of native libraries.') + parser.add_option('--stamp', help='Path to touch on success.') + parser.add_option('--build-device-configuration', + help='Path to build device configuration.') + parser.add_option('--output-directory', + help='The output directory.') + options, _ = parser.parse_args(args) + + required_options = ['libraries', 'device_dir', 'libraries'] + build_utils.CheckOptions(options, parser, required=required_options) + + devil_chromium.Initialize( + output_directory=os.path.abspath(options.output_directory)) + + DoPush(options) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/test/BUILD.gn b/build/android/gyp/test/BUILD.gn new file mode 100644 index 00000000000..2deac1d56f2 --- /dev/null +++ b/build/android/gyp/test/BUILD.gn @@ -0,0 +1,13 @@ +import("//build/config/android/rules.gni") + +java_library("hello_world_java") { + java_files = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ] +} + +java_binary("hello_world") { + deps = [ + ":hello_world_java", + ] + java_files = [ "java/org/chromium/helloworld/HelloWorldMain.java" ] + main_class = "org.chromium.helloworld.HelloWorldMain" +} diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java new file mode 100644 index 00000000000..10860d8332d --- /dev/null +++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java @@ -0,0 +1,15 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+package org.chromium.helloworld;
+
+public class HelloWorldMain {
+    public static void main(String[] args) {
+        if (args.length > 0) {
+            System.exit(Integer.parseInt(args[0]));
+        }
+        HelloWorldPrinter.print();
+    }
+}
+
diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 00000000000..b09673e21f4
--- /dev/null
+++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldPrinter {
+    public static void print() {
+        System.out.println("Hello, world!");
+    }
+}
+
diff --git a/build/android/gyp/touch.py b/build/android/gyp/touch.py
new file mode 100644
index 00000000000..7b4375e40ab
--- /dev/null
+++ b/build/android/gyp/touch.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from util import build_utils
+
+def main(argv):
+  for f in argv[1:]:
+    build_utils.Touch(f)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/util/__init__.py b/build/android/gyp/util/__init__.py
new file mode 100644
index 00000000000..727e987e6b6
--- /dev/null
+++ b/build/android/gyp/util/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/gyp/util/build_device.py b/build/android/gyp/util/build_device.py
new file mode 100644
index 00000000000..6a703c64e6f
--- /dev/null
+++ b/build/android/gyp/util/build_device.py
@@ -0,0 +1,102 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A simple device interface for build steps."""
+
+import logging
+import os
+import re
+import sys
+
+from util import build_utils
+
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+
+
+def GetAttachedDevices():
+  return [a.GetDeviceSerial()
+          for a in adb_wrapper.AdbWrapper.Devices()]
+
+
+class BuildDevice(object):
+  def __init__(self, configuration):
+    self.id = configuration['id']
+    self.description = configuration['description']
+    self.install_metadata = configuration['install_metadata']
+    assert all(isinstance(entry, dict) for entry in self.install_metadata), (
+        'Invalid BuildDevice configuration')
+    self.device = device_utils.DeviceUtils(self.id)
+
+  def RunShellCommand(self, *args, **kwargs):
+    return self.device.RunShellCommand(*args, **kwargs)
+
+  def PushChangedFiles(self, *args, **kwargs):
+    return self.device.PushChangedFiles(*args, **kwargs)
+
+  def GetSerialNumber(self):
+    return self.id
+
+  def Install(self, *args, **kwargs):
+    return self.device.Install(*args, **kwargs)
+
+  def InstallSplitApk(self, *args, **kwargs):
+    return self.device.InstallSplitApk(*args, **kwargs)
+
+  def GetInstallMetadata(self, apk_package, refresh=False):
+    """Gets the metadata on the device for a given apk.
+
+    Args:
+      apk_package: A string with the package name for which to get metadata.
+      refresh: A boolean indicating whether to re-read package metadata from
+        the device, or use the values from the current configuration.
+    """
+    if refresh:
+      self.install_metadata = self.device.StatDirectory(
+          '/data/app/', as_root=True)
+    # Matches names like: org.chromium.chrome.apk, org.chromium.chrome-1.apk
+    apk_pattern = re.compile('%s(-[0-9]*)?(\.apk)?$' % re.escape(apk_package))
+    return next(
+        (entry for entry in self.install_metadata
+         if apk_pattern.match(entry['filename'])),
+        None)
+
+
+def GetConfigurationForDevice(device_id):
+  device = device_utils.DeviceUtils(device_id)
+  configuration = None
+  has_root = False
+  is_online = device.IsOnline()
+  if is_online:
+    has_root = device.HasRoot()
+    configuration = {
+        'id': device_id,
+        'description': device.build_description,
+        'install_metadata': device.StatDirectory('/data/app/', as_root=True),
+    }
+  return configuration, is_online, has_root
+
+
+def WriteConfigurations(configurations, path):
+  # Currently we only support installing to the first device.
+  build_utils.WriteJson(configurations[:1], path, only_if_changed=True)
+
+
+def ReadConfigurations(path):
+  return build_utils.ReadJson(path)
+
+
+def GetBuildDevice(configurations):
+  assert len(configurations) == 1
+  return BuildDevice(configurations[0])
+
+
+def GetBuildDeviceFromPath(path):
+  configurations = ReadConfigurations(path)
+  if len(configurations) > 0:
+    return GetBuildDevice(configurations)
+  return None
diff --git a/build/android/gyp/util/build_utils.py b/build/android/gyp/util/build_utils.py
new file mode 100644
index 00000000000..d2ae39fb2ba
--- /dev/null
+++ b/build/android/gyp/util/build_utils.py
@@ -0,0 +1,538 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import ast
+import contextlib
+import fnmatch
+import json
+import os
+import pipes
+import re
+import shlex
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+# Some clients do not add //build/android/gyp to PYTHONPATH.
+import md5_check # pylint: disable=relative-import
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+                             os.pardir, os.pardir, os.pardir))
+import gn_helpers
+
+COLORAMA_ROOT = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                             'third_party', 'colorama', 'src')
+# aapt should ignore OWNERS files in addition to the default ignore pattern.
+AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' +
+                       '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp')
+HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
+_HERMETIC_FILE_ATTR = (0644 << 16L)
+
+
+@contextlib.contextmanager
+def TempDir():
+  dirname = tempfile.mkdtemp()
+  try:
+    yield dirname
+  finally:
+    shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+  try:
+    os.makedirs(dir_path)
+  except OSError:
+    pass
+
+
+def DeleteDirectory(dir_path):
+  if os.path.exists(dir_path):
+    shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+  if fail_if_missing and not os.path.exists(path):
+    raise Exception(path + ' doesn\'t exist.')
+
+  MakeDirectory(os.path.dirname(path))
+  with open(path, 'a'):
+    os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter):
+  files = []
+  for root, _dirnames, filenames in os.walk(directory):
+    matched_files = fnmatch.filter(filenames, filename_filter)
+    files.extend((os.path.join(root, f) for f in matched_files))
+  return files
+
+
+def FindInDirectories(directories, filename_filter):
+  all_files = []
+  for directory in directories:
+    all_files.extend(FindInDirectory(directory, filename_filter))
+  return all_files
+
+
+def ParseGnList(gn_string):
+  """Converts a command-line parameter into a list.
+
+  If the input starts with a '[' it is assumed to be a GN-formatted list and
+  it will be parsed accordingly. When empty an empty list will be returned.
+  Otherwise, the parameter will be treated as a single raw string (not
+  GN-formatted in that it's not assumed to have literal quotes that must be
+  removed) and a list will be returned containing that string.
+
+  The common use for this behavior is in the Android build where things can
+  take lists of @FileArg references that are expanded via ExpandFileArgs.
+  """
+  if gn_string.startswith('['):
+    parser = gn_helpers.GNValueParser(gn_string)
+    return parser.ParseList()
+  if len(gn_string):
+    return [ gn_string ]
+  return []
+
+
+def CheckOptions(options, parser, required=None):
+  if not required:
+    return
+  for option_name in required:
+    if getattr(options, option_name) is None:
+      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+  old_dump = None
+  if os.path.exists(path):
+    with open(path, 'r') as oldfile:
+      old_dump = oldfile.read()
+
+  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+  if not only_if_changed or old_dump != new_dump:
+    with open(path, 'w') as outfile:
+      outfile.write(new_dump)
+
+
+def ReadJson(path):
+  with open(path, 'r') as jsonfile:
+    return json.load(jsonfile)
+
+
+class CalledProcessError(Exception):
+  """This exception is raised when the process run by CheckOutput
+  exits with a non-zero exit code."""
+
+  def __init__(self, cwd, args, output):
+    super(CalledProcessError, self).__init__()
+    self.cwd = cwd
+    self.args = args
+    self.output = output
+
+  def __str__(self):
+    # A user should be able to simply copy and paste the command that failed
+    # into their shell.
+    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+        ' '.join(map(pipes.quote, self.args)))
+    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+# This can be used in most cases like subprocess.check_output(). The output,
+# particularly when the command fails, better highlights the command's failure.
+# If the command fails, raises a build_utils.CalledProcessError.
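+# Example (see IsDeviceReady() below for a real caller):
+#   device_state = CheckOutput(['adb', 'get-state'])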
+def CheckOutput(args, cwd=None, env=None,
+                print_stdout=False, print_stderr=True,
+                stdout_filter=None,
+                stderr_filter=None,
+                fail_func=lambda returncode, stderr: returncode != 0):
+  if not cwd:
+    cwd = os.getcwd()
+
+  child = subprocess.Popen(args,
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)
+  stdout, stderr = child.communicate()
+
+  if stdout_filter is not None:
+    stdout = stdout_filter(stdout)
+
+  if stderr_filter is not None:
+    stderr = stderr_filter(stderr)
+
+  if fail_func(child.returncode, stderr):
+    raise CalledProcessError(cwd, args, stdout + stderr)
+
+  if print_stdout:
+    sys.stdout.write(stdout)
+  if print_stderr:
+    sys.stderr.write(stderr)
+
+  return stdout
+
+
+def GetModifiedTime(path):
+  # For a symlink, the modified time should be the greater of the link's
+  # modified time and the modified time of the target.
+  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def IsTimeStale(output, inputs):
+  if not os.path.exists(output):
+    return True
+
+  output_time = GetModifiedTime(output)
+  for i in inputs:
+    if GetModifiedTime(i) > output_time:
+      return True
+  return False
+
+
+def IsDeviceReady():
+  device_state = CheckOutput(['adb', 'get-state'])
+  return device_state.strip() == 'device'
+
+
+def CheckZipPath(name):
+  if os.path.normpath(name) != name:
+    raise Exception('Non-canonical zip path: %s' % name)
+  if os.path.isabs(name):
+    raise Exception('Absolute zip path: %s' % name)
+
+
+def IsSymlink(zip_file, name):
+  zi = zip_file.getinfo(name)
+
+  # The two high-order bytes of ZipInfo.external_attr represent
+  # UNIX permissions and file type bits.
+  return stat.S_ISLNK(zi.external_attr >> 16L)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
+               predicate=None):
+  if path is None:
+    path = os.getcwd()
+  elif not os.path.exists(path):
+    MakeDirectory(path)
+
+  if not zipfile.is_zipfile(zip_path):
+    raise Exception('Invalid zip file: %s' % zip_path)
+
+  with zipfile.ZipFile(zip_path) as z:
+    for name in z.namelist():
+      if name.endswith('/'):
+        continue
+      if pattern is not None:
+        if not fnmatch.fnmatch(name, pattern):
+          continue
+      if predicate and not predicate(name):
+        continue
+      CheckZipPath(name)
+      if no_clobber:
+        output_path = os.path.join(path, name)
+        if os.path.exists(output_path):
+          raise Exception(
+              'Path already exists from zip: %s %s %s'
+              % (zip_path, name, output_path))
+      if IsSymlink(z, name):
+        dest = os.path.join(path, name)
+        MakeDirectory(os.path.dirname(dest))
+        os.symlink(z.read(name), dest)
+      else:
+        z.extract(name, path)
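AddToZipHermetic, defined next, pins both the timestamp and the UNIX file attributes so the archive bytes depend only on the content. A minimal sketch of the effect (temp paths only, not from this patch): building the same archive twice yields byte-identical files, which is what lets md5-based staleness checks conclude "no change".

import os
import tempfile
import zipfile

_TS = (2001, 1, 1, 0, 0, 0)  # same value as HERMETIC_TIMESTAMP above

def _WriteHermetic(path):
  # With a fixed date_time (and, in build_utils, a fixed external_attr),
  # the archive is a pure function of its contents.
  with zipfile.ZipFile(path, 'w') as z:
    info = zipfile.ZipInfo(filename='hello.txt', date_time=_TS)
    z.writestr(info, 'hello world')

tmp = tempfile.mkdtemp()
a, b = os.path.join(tmp, 'a.zip'), os.path.join(tmp, 'b.zip')
_WriteHermetic(a)
_WriteHermetic(b)
assert open(a, 'rb').read() == open(b, 'rb').read()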
+def AddToZipHermetic(zip_file, zip_path, src_path=None, data=None,
+                     compress=None):
+  """Adds a file to the given ZipFile with a hard-coded modified time.
+
+  Args:
+    zip_file: ZipFile instance to add the file to.
+    zip_path: Destination path within the zip file.
+    src_path: Path of the source file. Mutually exclusive with |data|.
+    data: File data as a string.
+    compress: Whether to enable compression. Default is taken from the
+        ZipFile constructor.
+  """
+  assert (src_path is None) != (data is None), (
+      '|src_path| and |data| are mutually exclusive.')
+  CheckZipPath(zip_path)
+  zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
+  zipinfo.external_attr = _HERMETIC_FILE_ATTR
+
+  if src_path and os.path.islink(src_path):
+    zipinfo.filename = zip_path
+    zipinfo.external_attr |= stat.S_IFLNK << 16L  # mark as a symlink
+    zip_file.writestr(zipinfo, os.readlink(src_path))
+    return
+
+  if src_path:
+    with open(src_path) as f:
+      data = f.read()
+
+  # zipfile will deflate even when it makes the file bigger. To avoid
+  # growing files, disable compression at an arbitrary cut off point.
+  if len(data) < 16:
+    compress = False
+
+  # None converts to ZIP_STORED, when passed explicitly rather than the
+  # default passed to the ZipFile constructor.
+  compress_type = zip_file.compression
+  if compress is not None:
+    compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+  zip_file.writestr(zipinfo, data, compress_type)
+
+
+def DoZip(inputs, output, base_dir=None):
+  """Creates a zip file from a list of files.
+
+  Args:
+    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+    output: Destination .zip file.
+    base_dir: Prefix to strip from inputs.
+  """
+  input_tuples = []
+  for tup in inputs:
+    if isinstance(tup, basestring):
+      tup = (os.path.relpath(tup, base_dir), tup)
+    input_tuples.append(tup)
+
+  # Sort by zip path to ensure stable zip ordering.
+  input_tuples.sort(key=lambda tup: tup[0])
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for zip_path, fs_path in input_tuples:
+      AddToZipHermetic(outfile, zip_path, src_path=fs_path)
+
+
+def ZipDir(output, base_dir):
+  """Creates a zip file from a directory."""
+  inputs = []
+  for root, _, files in os.walk(base_dir):
+    for f in files:
+      inputs.append(os.path.join(root, f))
+  DoZip(inputs, output, base_dir)
+
+
+def MatchesGlob(path, filters):
+  """Returns whether the given path matches any of the given glob patterns."""
+  return filters and any(fnmatch.fnmatch(path, f) for f in filters)
+
+
+def MergeZips(output, inputs, exclude_patterns=None, path_transform=None):
+  path_transform = path_transform or (lambda p, z: p)
+  added_names = set()
+
+  with zipfile.ZipFile(output, 'w') as out_zip:
+    for in_file in inputs:
+      with zipfile.ZipFile(in_file, 'r') as in_zip:
+        in_zip._expected_crc = None
+        for info in in_zip.infolist():
+          # Ignore directories.
+          if info.filename[-1] == '/':
+            continue
+          dst_name = path_transform(info.filename, in_file)
+          already_added = dst_name in added_names
+          if not already_added and not MatchesGlob(dst_name, exclude_patterns):
+            AddToZipHermetic(out_zip, dst_name, data=in_zip.read(info))
+            added_names.add(dst_name)
+
+
+def PrintWarning(message):
+  print 'WARNING: ' + message
+
+
+def PrintBigWarning(message):
+  print '*****     ' * 8
+  PrintWarning(message)
+  print '*****     ' * 8
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+  """Gets the list of all transitive dependencies in sorted order.
+
+  There should be no cycles in the dependency graph.
+
+  Args:
+    top: A list of the top-level nodes.
+    deps_func: A function that takes a node and returns its direct
+      dependencies.
+  Returns:
+    A list of all transitive dependencies of nodes in top, in order (a node
+    will appear in the list at a higher index than all of its dependencies).
+  """
+  def Node(dep):
+    return (dep, deps_func(dep))
+
+  # First: find all deps.
+  unchecked_deps = list(top)
+  all_deps = set(top)
+  while unchecked_deps:
+    dep = unchecked_deps.pop()
+    new_deps = deps_func(dep).difference(all_deps)
+    unchecked_deps.extend(new_deps)
+    all_deps = all_deps.union(new_deps)
+
+  # Then: simple, slow topological sort.
+  sorted_deps = []
+  unsorted_deps = dict(map(Node, all_deps))
+  while unsorted_deps:
+    for library, dependencies in unsorted_deps.items():
+      if not dependencies.intersection(unsorted_deps.keys()):
+        sorted_deps.append(library)
+        del unsorted_deps[library]
+
+  return sorted_deps
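A quick usage sketch of GetSortedTransitiveDependencies above (the graph is made up): deps_func must return a set, and the result orders every node after everything it depends on.

# Hypothetical dependency graph: a -> b -> c, a -> c.
_GRAPH = {
    'a': set(['b', 'c']),
    'b': set(['c']),
    'c': set(),
}

def _Deps(node):
  return _GRAPH[node]

# Uses the build_utils helper defined directly above.
order = GetSortedTransitiveDependencies(['a'], _Deps)
assert order.index('c') < order.index('b') < order.index('a')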
+def GetPythonDependencies():
+  """Gets the paths of imported non-system python modules.
+
+  A path is assumed to be a "system" import if it is outside of chromium's
+  src/. The paths will be relative to the current directory.
+  """
+  module_paths = (m.__file__ for m in sys.modules.itervalues()
+                  if m is not None and hasattr(m, '__file__'))
+
+  abs_module_paths = map(os.path.abspath, module_paths)
+
+  assert os.path.isabs(host_paths.DIR_SOURCE_ROOT)
+  non_system_module_paths = [
+      p for p in abs_module_paths if p.startswith(host_paths.DIR_SOURCE_ROOT)]
+  def ConvertPycToPy(s):
+    if s.endswith('.pyc'):
+      return s[:-1]
+    return s
+
+  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
+  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
+  return sorted(set(non_system_module_paths))
+
+
+def AddDepfileOption(parser):
+  # TODO(agrieve): Get rid of this once we've moved to argparse.
+  if hasattr(parser, 'add_option'):
+    func = parser.add_option
+  else:
+    func = parser.add_argument
+  func('--depfile',
+       help='Path to depfile. Must be specified as the action\'s first output.')
+
+
+def WriteDepfile(path, dependencies):
+  with open(path, 'w') as depfile:
+    depfile.write(path)
+    depfile.write(': ')
+    depfile.write(' '.join(dependencies))
+    depfile.write('\n')
+
+
+def ExpandFileArgs(args):
+  """Replaces file-arg placeholders in args.
+
+  These placeholders have the form:
+    @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as json
+  and then extracting the value at [key1][key2]...[keyn].
+
+  Note: This intentionally does not return the list of files that appear in
+  such placeholders. An action that uses file-args *must* know the paths of
+  those files prior to the parsing of the arguments (typically by explicitly
+  listing them in the action's inputs in build files).
+  """
+  new_args = list(args)
+  file_jsons = dict()
+  r = re.compile(r'@FileArg\((.*?)\)')
+  for i, arg in enumerate(args):
+    match = r.search(arg)
+    if not match:
+      continue
+
+    if match.end() != len(arg):
+      raise Exception('Unexpected characters after FileArg: ' + arg)
+
+    lookup_path = match.group(1).split(':')
+    file_path = lookup_path[0]
+    if not file_path in file_jsons:
+      file_jsons[file_path] = ReadJson(file_path)
+
+    expansion = file_jsons[file_path]
+    for k in lookup_path[1:]:
+      expansion = expansion[k]
+
+    # This should match ParseGnList. The output is either a GN-formatted list
+    # or a literal (with no quotes).
+    if isinstance(expansion, list):
+      new_args[i] = arg[:match.start()] + gn_helpers.ToGNString(expansion)
+    else:
+      new_args[i] = arg[:match.start()] + str(expansion)
+
+  return new_args
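A small sketch of the @FileArg expansion above (the JSON content and key path are illustrative, not from this patch): the placeholder is rewritten in place with the value found in the file.

import json
import tempfile

# Write a small JSON file standing in for a build_config.
cfg = tempfile.NamedTemporaryFile(suffix='.json', delete=False)
json.dump({'deps_info': {'name': 'demo'}}, cfg)
cfg.close()

# Uses the ExpandFileArgs helper defined directly above.
args = ['--name=@FileArg(%s:deps_info:name)' % cfg.name]
assert ExpandFileArgs(args) == ['--name=demo']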
+def ReadSourcesList(sources_list_file_name):
+  """Reads a GN-written file containing a list of file names and returns it.
+
+  Note that this function should not be used to parse response files.
+  """
+  with open(sources_list_file_name) as f:
+    return [file_name.strip() for file_name in f]
+
+
+def CallAndWriteDepfileIfStale(function, options, record_path=None,
+                               input_paths=None, input_strings=None,
+                               output_paths=None, force=False,
+                               pass_changes=False,
+                               depfile_deps=None):
+  """Wraps md5_check.CallAndRecordIfStale() and also writes dep & stamp files.
+
+  Depfiles and stamp files are automatically added to output_paths when present
+  in the |options| argument. They are then created after |function| is called.
+
+  By default, only python dependencies are added to the depfile. If there are
+  other input paths that are not captured by GN deps, then they should be
+  listed in depfile_deps. It's important to write paths to the depfile that
+  are already captured by GN deps since GN args can cause GN deps to change,
+  and such changes are not immediately reflected in depfiles
+  (http://crbug.com/589311).
+  """
+  if not output_paths:
+    raise Exception('At least one output_path must be specified.')
+  input_paths = list(input_paths or [])
+  input_strings = list(input_strings or [])
+  output_paths = list(output_paths or [])
+
+  python_deps = None
+  if hasattr(options, 'depfile') and options.depfile:
+    python_deps = GetPythonDependencies()
+    input_paths += python_deps
+    output_paths += [options.depfile]
+
+  stamp_file = hasattr(options, 'stamp') and options.stamp
+  if stamp_file:
+    output_paths += [stamp_file]
+
+  def on_stale_md5(changes):
+    args = (changes,) if pass_changes else ()
+    function(*args)
+    if python_deps is not None:
+      all_depfile_deps = list(python_deps)
+      if depfile_deps:
+        all_depfile_deps.extend(depfile_deps)
+      WriteDepfile(options.depfile, all_depfile_deps)
+    if stamp_file:
+      Touch(stamp_file)
+
+  md5_check.CallAndRecordIfStale(
+      on_stale_md5,
+      record_path=record_path,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths,
+      force=force,
+      pass_changes=True)
+
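To make the contract above concrete, here is a hedged sketch of how a typical build/android/gyp action script could wire this up (the option names and the DoWork body are illustrative, not from this patch):

# Hypothetical action script built on the helpers above.
import optparse
import sys

def main(args):
  parser = optparse.OptionParser()
  AddDepfileOption(parser)
  parser.add_option('--input', help='Input file.')
  parser.add_option('--output', help='Output file.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args(args)

  def DoWork():
    # The real work: regenerate --output from --input.
    with open(options.input) as f:
      data = f.read()
    with open(options.output, 'w') as f:
      f.write(data)

  # Re-runs DoWork only when inputs/outputs are stale; the depfile and the
  # stamp are written automatically because they are present on |options|.
  CallAndWriteDepfileIfStale(
      DoWork,
      options,
      input_paths=[options.input],
      input_strings=['v1'],
      output_paths=[options.output])

if __name__ == '__main__':
  main(sys.argv[1:])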
diff --git a/build/android/gyp/util/md5_check.py b/build/android/gyp/util/md5_check.py
new file mode 100644
index 00000000000..76591249bac
--- /dev/null
+++ b/build/android/gyp/util/md5_check.py
@@ -0,0 +1,410 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+
+# When set and a difference is detected, a diff of what changed is printed.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
+def CallAndRecordIfStale(
+    function, record_path=None, input_paths=None, input_strings=None,
+    output_paths=None, force=False, pass_changes=False):
+  """Calls function if outputs are stale.
+
+  Outputs are considered stale if:
+  - any output_paths are missing, or
+  - the contents of any file within input_paths has changed, or
+  - the contents of input_strings has changed.
+
+  To debug which files are out-of-date, set the environment variable:
+     PRINT_BUILD_EXPLANATIONS=1
+
+  Args:
+    function: The function to call.
+    record_path: Path to record metadata.
+      Defaults to output_paths[0] + '.md5.stamp'
+    input_paths: List of paths to calculate an md5 sum on.
+    input_strings: List of strings to record verbatim.
+    output_paths: List of output paths.
+    force: Whether to treat outputs as missing regardless of whether they
+      actually are.
+    pass_changes: Whether to pass a Changes instance to |function|.
+  """
+  assert record_path or output_paths
+  input_paths = input_paths or []
+  input_strings = input_strings or []
+  output_paths = output_paths or []
+  record_path = record_path or output_paths[0] + '.md5.stamp'
+
+  assert record_path.endswith('.stamp'), (
+      'record paths must end in \'.stamp\' so that they are easy to find '
+      'and delete')
+
+  new_metadata = _Metadata()
+  new_metadata.AddStrings(input_strings)
+
+  for path in input_paths:
+    if _IsZipFile(path):
+      entries = _ExtractZipEntries(path)
+      new_metadata.AddZipFile(path, entries)
+    else:
+      new_metadata.AddFile(path, _Md5ForPath(path))
+
+  old_metadata = None
+  force = force or _FORCE_REBUILD
+  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+  # When outputs are missing, don't bother gathering change information.
+  if not missing_outputs and os.path.exists(record_path):
+    with open(record_path, 'r') as jsonfile:
+      try:
+        old_metadata = _Metadata.FromFile(jsonfile)
+      except:  # pylint: disable=bare-except
+        pass  # Not yet using new file format.
+
+  changes = Changes(old_metadata, new_metadata, force, missing_outputs)
+  if not changes.HasChanges():
+    return
+
+  if PRINT_EXPLANATIONS:
+    print '=' * 80
+    print 'Target is stale: %s' % record_path
+    print changes.DescribeDifference()
+    print '=' * 80
+
+  args = (changes,) if pass_changes else ()
+  function(*args)
+
+  with open(record_path, 'w') as f:
+    new_metadata.ToFile(f)
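The Changes object defined below is what |function| receives when pass_changes=True. A hedged sketch of a consumer (the file names and the recompile step are hypothetical):

def _ExampleIncrementalCompile(input_paths, output_jar):
  # Hypothetical incremental action using the Changes API below: recompile
  # only the changed inputs when possible, else rebuild everything.
  def on_stale_md5(changes):
    if changes.AddedOrModifiedOnly():
      stale = list(changes.IterChangedPaths())
    else:
      stale = list(changes.IterAllPaths())  # removals: do a full rebuild
    for path in stale:
      print 'would recompile:', path

  CallAndRecordIfStale(
      on_stale_md5,
      input_paths=input_paths,
      output_paths=[output_jar],
      pass_changes=True)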
+class Changes(object):
+  """Provides an API for querying what changed between runs."""
+
+  def __init__(self, old_metadata, new_metadata, force, missing_outputs):
+    self.old_metadata = old_metadata
+    self.new_metadata = new_metadata
+    self.force = force
+    self.missing_outputs = missing_outputs
+
+  def _GetOldTag(self, path, subpath=None):
+    return self.old_metadata and self.old_metadata.GetTag(path, subpath)
+
+  def HasChanges(self):
+    """Returns whether any changes exist."""
+    return (self.force or
+            not self.old_metadata or
+            self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5() or
+            self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+
+  def AddedOrModifiedOnly(self):
+    """Returns whether the only changes were from added or modified (sub)files.
+
+    No missing outputs, no removed paths/subpaths.
+    """
+    if (self.force or
+        not self.old_metadata or
+        self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5()):
+      return False
+    if any(self.IterRemovedPaths()):
+      return False
+    for path in self.IterModifiedPaths():
+      if any(self.IterRemovedSubpaths(path)):
+        return False
+    return True
+
+  def IterAllPaths(self):
+    """Generator for all paths."""
+    return self.new_metadata.IterPaths()
+
+  def IterAllSubpaths(self, path):
+    """Generator for all subpaths of the given zip path."""
+    return self.new_metadata.IterSubpaths(path)
+
+  def IterAddedPaths(self):
+    """Generator for paths that were added."""
+    for path in self.new_metadata.IterPaths():
+      if self._GetOldTag(path) is None:
+        yield path
+
+  def IterAddedSubpaths(self, path):
+    """Generator for paths that were added within the given zip file."""
+    for subpath in self.new_metadata.IterSubpaths(path):
+      if self._GetOldTag(path, subpath) is None:
+        yield subpath
+
+  def IterRemovedPaths(self):
+    """Generator for paths that were removed."""
+    if self.old_metadata:
+      for path in self.old_metadata.IterPaths():
+        if self.new_metadata.GetTag(path) is None:
+          yield path
+
+  def IterRemovedSubpaths(self, path):
+    """Generator for paths that were removed within the given zip file."""
+    if self.old_metadata:
+      for subpath in self.old_metadata.IterSubpaths(path):
+        if self.new_metadata.GetTag(path, subpath) is None:
+          yield subpath
+
+  def IterModifiedPaths(self):
+    """Generator for paths whose contents have changed."""
+    for path in self.new_metadata.IterPaths():
+      old_tag = self._GetOldTag(path)
+      new_tag = self.new_metadata.GetTag(path)
+      if old_tag is not None and old_tag != new_tag:
+        yield path
+
+  def IterModifiedSubpaths(self, path):
+    """Generator for paths within a zip file whose contents have changed."""
+    for subpath in self.new_metadata.IterSubpaths(path):
+      old_tag = self._GetOldTag(path, subpath)
+      new_tag = self.new_metadata.GetTag(path, subpath)
+      if old_tag is not None and old_tag != new_tag:
+        yield subpath
+
+  def IterChangedPaths(self):
+    """Generator for all changed paths (added/removed/modified)."""
+    return itertools.chain(self.IterRemovedPaths(),
+                           self.IterModifiedPaths(),
+                           self.IterAddedPaths())
+
+  def IterChangedSubpaths(self, path):
+    """Generator for paths within a zip that were added/removed/modified."""
+    return itertools.chain(self.IterRemovedSubpaths(path),
+                           self.IterModifiedSubpaths(path),
+                           self.IterAddedSubpaths(path))
+
+  def DescribeDifference(self):
+    """Returns a human-readable description of what changed."""
+    if self.force:
+      return 'force=True'
+    elif self.missing_outputs:
+      return 'Outputs do not exist:\n  ' + '\n  '.join(self.missing_outputs)
+    elif self.old_metadata is None:
+      return 'Previous stamp file not found.'
+
+    if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5():
+      ndiff = difflib.ndiff(self.old_metadata.GetStrings(),
+                            self.new_metadata.GetStrings())
+      changed = [s for s in ndiff if not s.startswith(' ')]
+      return 'Input strings changed:\n  ' + '\n  '.join(changed)
+
+    if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5():
+      return "There's no difference."
+ + lines = [] + lines.extend('Added: ' + p for p in self.IterAddedPaths()) + lines.extend('Removed: ' + p for p in self.IterRemovedPaths()) + for path in self.IterModifiedPaths(): + lines.append('Modified: ' + path) + lines.extend(' -> Subpath added: ' + p + for p in self.IterAddedSubpaths(path)) + lines.extend(' -> Subpath removed: ' + p + for p in self.IterRemovedSubpaths(path)) + lines.extend(' -> Subpath modified: ' + p + for p in self.IterModifiedSubpaths(path)) + if lines: + return 'Input files changed:\n ' + '\n '.join(lines) + return 'I have no idea what changed (there is a bug).' + + +class _Metadata(object): + """Data model for tracking change metadata.""" + # Schema: + # { + # "files-md5": "VALUE", + # "strings-md5": "VALUE", + # "input-files": [ + # { + # "path": "path.jar", + # "tag": "{MD5 of entries}", + # "entries": [ + # { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ... + # ] + # }, { + # "path": "path.txt", + # "tag": "{MD5}", + # } + # ], + # "input-strings": ["a", "b", ...], + # } + def __init__(self): + self._files_md5 = None + self._strings_md5 = None + self._files = [] + self._strings = [] + # Map of (path, subpath) -> entry. Created upon first call to _GetEntry(). + self._file_map = None + + @classmethod + def FromFile(cls, fileobj): + """Returns a _Metadata initialized from a file object.""" + ret = cls() + obj = json.load(fileobj) + ret._files_md5 = obj['files-md5'] + ret._strings_md5 = obj['strings-md5'] + ret._files = obj['input-files'] + ret._strings = obj['input-strings'] + return ret + + def ToFile(self, fileobj): + """Serializes metadata to the given file object.""" + obj = { + "files-md5": self.FilesMd5(), + "strings-md5": self.StringsMd5(), + "input-files": self._files, + "input-strings": self._strings, + } + json.dump(obj, fileobj, indent=2) + + def _AssertNotQueried(self): + assert self._files_md5 is None + assert self._strings_md5 is None + assert self._file_map is None + + def AddStrings(self, values): + self._AssertNotQueried() + self._strings.extend(str(v) for v in values) + + def AddFile(self, path, tag): + """Adds metadata for a non-zip file. + + Args: + path: Path to the file. + tag: A short string representative of the file contents. + """ + self._AssertNotQueried() + self._files.append({ + 'path': path, + 'tag': tag, + }) + + def AddZipFile(self, path, entries): + """Adds metadata for a zip file. + + Args: + path: Path to the file. + entries: List of (subpath, tag) tuples for entries within the zip. + """ + self._AssertNotQueried() + tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries), + (e[1] for e in entries))) + self._files.append({ + 'path': path, + 'tag': tag, + 'entries': [{"path": e[0], "tag": e[1]} for e in entries], + }) + + def GetStrings(self): + """Returns the list of input strings.""" + return self._strings + + def FilesMd5(self): + """Lazily computes and returns the aggregate md5 of input files.""" + if self._files_md5 is None: + # Omit paths from md5 since temporary files have random names. 
+ self._files_md5 = _ComputeInlineMd5( + self.GetTag(p) for p in sorted(self.IterPaths())) + return self._files_md5 + + def StringsMd5(self): + """Lazily computes and returns the aggregate md5 of input strings.""" + if self._strings_md5 is None: + self._strings_md5 = _ComputeInlineMd5(self._strings) + return self._strings_md5 + + def _GetEntry(self, path, subpath=None): + """Returns the JSON entry for the given path / subpath.""" + if self._file_map is None: + self._file_map = {} + for entry in self._files: + self._file_map[(entry['path'], None)] = entry + for subentry in entry.get('entries', ()): + self._file_map[(entry['path'], subentry['path'])] = subentry + return self._file_map.get((path, subpath)) + + def GetTag(self, path, subpath=None): + """Returns the tag for the given path / subpath.""" + ret = self._GetEntry(path, subpath) + return ret and ret['tag'] + + def IterPaths(self): + """Returns a generator for all top-level paths.""" + return (e['path'] for e in self._files) + + def IterSubpaths(self, path): + """Returns a generator for all subpaths in the given zip. + + If the given path is not a zip file or doesn't exist, returns an empty + iterable. + """ + outer_entry = self._GetEntry(path) + if not outer_entry: + return () + subentries = outer_entry.get('entries', []) + return (entry['path'] for entry in subentries) + + +def _UpdateMd5ForFile(md5, path, block_size=2**16): + with open(path, 'rb') as infile: + while True: + data = infile.read(block_size) + if not data: + break + md5.update(data) + + +def _UpdateMd5ForDirectory(md5, dir_path): + for root, _, files in os.walk(dir_path): + for f in files: + _UpdateMd5ForFile(md5, os.path.join(root, f)) + + +def _Md5ForPath(path): + md5 = hashlib.md5() + if os.path.isdir(path): + _UpdateMd5ForDirectory(md5, path) + else: + _UpdateMd5ForFile(md5, path) + return md5.hexdigest() + + +def _ComputeInlineMd5(iterable): + """Computes the md5 of the concatenated parameters.""" + md5 = hashlib.md5() + for item in iterable: + md5.update(str(item)) + return md5.hexdigest() + + +def _IsZipFile(path): + """Returns whether to treat the given file as a zip file.""" + # ijar doesn't set the CRC32 field. + if path.endswith('.interface.jar'): + return False + return path[-4:] in ('.zip', '.apk', '.jar') or path.endswith('.srcjar') + + +def _ExtractZipEntries(path): + """Returns a list of (path, CRC32) of all files within |path|.""" + entries = [] + with zipfile.ZipFile(path) as zip_file: + for zip_info in zip_file.infolist(): + # Skip directories and empty files. + if zip_info.CRC: + entries.append( + (zip_info.filename, zip_info.CRC + zip_info.compress_type)) + return entries diff --git a/build/android/gyp/util/md5_check_test.py b/build/android/gyp/util/md5_check_test.py new file mode 100644 index 00000000000..312d4a98cbb --- /dev/null +++ b/build/android/gyp/util/md5_check_test.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
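Before the tests: a standalone sketch of the zip awareness they exercise, using the private helper defined above (illustrative only; _ExtractZipEntries is internal to md5_check):

import tempfile
import zipfile

import md5_check

# md5_check records zips entry-by-entry, so an edit inside an archive
# surfaces as a subpath change rather than an opaque file-level change.
tmp = tempfile.NamedTemporaryFile(suffix='.zip', delete=False)
with zipfile.ZipFile(tmp.name, 'w') as z:
  z.writestr('a.txt', 'aaa')
entries = md5_check._ExtractZipEntries(tmp.name)
assert [e[0] for e in entries] == ['a.txt']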
+ +import fnmatch +import tempfile +import unittest +import zipfile + +import md5_check # pylint: disable=W0403 + + +def _WriteZipFile(path, entries): + with zipfile.ZipFile(path, 'w') as zip_file: + for subpath, data in entries: + zip_file.writestr(subpath, data) + + +class TestMd5Check(unittest.TestCase): + def setUp(self): + self.called = False + self.changes = None + + def testCallAndRecordIfStale(self): + input_strings = ['string1', 'string2'] + input_file1 = tempfile.NamedTemporaryFile(suffix='.txt') + input_file2 = tempfile.NamedTemporaryFile(suffix='.zip') + file1_contents = 'input file 1' + input_file1.write(file1_contents) + input_file1.flush() + # Test out empty zip file to start. + _WriteZipFile(input_file2.name, []) + input_files = [input_file1.name, input_file2.name] + + record_path = tempfile.NamedTemporaryFile(suffix='.stamp') + + def CheckCallAndRecord(should_call, message, force=False, + outputs_specified=False, outputs_missing=False, + expected_changes=None, added_or_modified_only=None): + output_paths = None + if outputs_specified: + output_file1 = tempfile.NamedTemporaryFile() + if outputs_missing: + output_file1.close() # Gets deleted on close(). + output_paths = [output_file1.name] + + self.called = False + self.changes = None + if expected_changes or added_or_modified_only is not None: + def MarkCalled(changes): + self.called = True + self.changes = changes + else: + def MarkCalled(): + self.called = True + + md5_check.CallAndRecordIfStale( + MarkCalled, + record_path=record_path.name, + input_paths=input_files, + input_strings=input_strings, + output_paths=output_paths, + force=force, + pass_changes=(expected_changes or added_or_modified_only) is not None) + self.assertEqual(should_call, self.called, message) + if expected_changes: + description = self.changes.DescribeDifference() + self.assertTrue(fnmatch.fnmatch(description, expected_changes), + 'Expected %s to match %s' % ( + repr(description), repr(expected_changes))) + if should_call and added_or_modified_only is not None: + self.assertEqual(added_or_modified_only, + self.changes.AddedOrModifiedOnly()) + + CheckCallAndRecord(True, 'should call when record doesn\'t exist', + expected_changes='Previous stamp file not found.', + added_or_modified_only=False) + CheckCallAndRecord(False, 'should not call when nothing changed') + CheckCallAndRecord(False, 'should not call when nothing changed #2', + outputs_specified=True, outputs_missing=False) + CheckCallAndRecord(True, 'should call when output missing', + outputs_specified=True, outputs_missing=True, + expected_changes='Outputs do not exist:*', + added_or_modified_only=False) + CheckCallAndRecord(True, force=True, message='should call when forced', + expected_changes='force=True', + added_or_modified_only=False) + + input_file1.write('some more input') + input_file1.flush() + CheckCallAndRecord(True, 'changed input file should trigger call', + expected_changes='*Modified: %s' % input_file1.name, + added_or_modified_only=True) + + input_files = input_files[::-1] + CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call') + + input_files = input_files[:1] + CheckCallAndRecord(True, 'removing file should trigger call', + expected_changes='*Removed: %s' % input_file1.name, + added_or_modified_only=False) + + input_files.append(input_file1.name) + CheckCallAndRecord(True, 'added input file should trigger call', + expected_changes='*Added: %s' % input_file1.name, + added_or_modified_only=True) + + input_strings[0] = input_strings[0] + ' a bit longer' + 
CheckCallAndRecord(True, 'changed input string should trigger call', + expected_changes='*Input strings changed*', + added_or_modified_only=False) + + input_strings = input_strings[::-1] + CheckCallAndRecord(True, 'reordering of string inputs should trigger call', + expected_changes='*Input strings changed*') + + input_strings = input_strings[:1] + CheckCallAndRecord(True, 'removing a string should trigger call') + + input_strings.append('a brand new string') + CheckCallAndRecord(True, 'added input string should trigger call') + + _WriteZipFile(input_file2.name, [('path/1.txt', '1')]) + CheckCallAndRecord(True, 'added subpath should trigger call', + expected_changes='*Modified: %s*Subpath added: %s' % ( + input_file2.name, 'path/1.txt'), + added_or_modified_only=True) + _WriteZipFile(input_file2.name, [('path/1.txt', '2')]) + CheckCallAndRecord(True, 'changed subpath should trigger call', + expected_changes='*Modified: %s*Subpath modified: %s' % ( + input_file2.name, 'path/1.txt'), + added_or_modified_only=True) + CheckCallAndRecord(False, 'should not call when nothing changed') + + _WriteZipFile(input_file2.name, []) + CheckCallAndRecord(True, 'removed subpath should trigger call', + expected_changes='*Modified: %s*Subpath removed: %s' % ( + input_file2.name, 'path/1.txt'), + added_or_modified_only=False) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/android/gyp/util/proguard_util.py b/build/android/gyp/util/proguard_util.py new file mode 100644 index 00000000000..1027a684c96 --- /dev/null +++ b/build/android/gyp/util/proguard_util.py @@ -0,0 +1,184 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import re +from util import build_utils + + +class _ProguardOutputFilter(object): + """ProGuard outputs boring stuff to stdout (proguard version, jar path, etc) + as well as interesting stuff (notes, warnings, etc). If stdout is entirely + boring, this class suppresses the output. 
+ """ + + IGNORE_RE = re.compile( + r'(?:Pro.*version|Note:|Reading|Preparing|.*:.*(?:MANIFEST\.MF|\.empty))') + + def __init__(self): + self._last_line_ignored = False + + def __call__(self, output): + ret = [] + for line in output.splitlines(True): + if not line.startswith(' '): + self._last_line_ignored = bool(self.IGNORE_RE.match(line)) + elif 'You should check if you need to specify' in line: + self._last_line_ignored = True + + if not self._last_line_ignored: + ret.append(line) + return ''.join(ret) + + +class ProguardCmdBuilder(object): + def __init__(self, proguard_jar): + assert os.path.exists(proguard_jar) + self._proguard_jar_path = proguard_jar + self._tested_apk_info_path = None + self._tested_apk_info = None + self._mapping = None + self._libraries = None + self._injars = None + self._configs = None + self._outjar = None + self._cmd = None + self._verbose = False + self._disabled_optimizations = [] + + def outjar(self, path): + assert self._cmd is None + assert self._outjar is None + self._outjar = path + + def tested_apk_info(self, tested_apk_info_path): + assert self._cmd is None + assert self._tested_apk_info is None + self._tested_apk_info_path = tested_apk_info_path + + def mapping(self, path): + assert self._cmd is None + assert self._mapping is None + assert os.path.exists(path), path + self._mapping = path + + def libraryjars(self, paths): + assert self._cmd is None + assert self._libraries is None + for p in paths: + assert os.path.exists(p), p + self._libraries = paths + + def injars(self, paths): + assert self._cmd is None + assert self._injars is None + for p in paths: + assert os.path.exists(p), p + self._injars = paths + + def configs(self, paths): + assert self._cmd is None + assert self._configs is None + for p in paths: + assert os.path.exists(p), p + self._configs = paths + + def verbose(self, verbose): + assert self._cmd is None + self._verbose = verbose + + def disable_optimizations(self, optimizations): + assert self._cmd is None + self._disabled_optimizations += optimizations + + def build(self): + if self._cmd: + return self._cmd + assert self._injars is not None + assert self._outjar is not None + assert self._configs is not None + cmd = [ + 'java', '-jar', self._proguard_jar_path, + '-forceprocessing', + ] + if self._tested_apk_info_path: + tested_apk_info = build_utils.ReadJson(self._tested_apk_info_path) + self._configs += tested_apk_info['configs'] + + if self._mapping: + cmd += [ + '-applymapping', self._mapping, + ] + + if self._libraries: + cmd += [ + '-libraryjars', ':'.join(self._libraries), + ] + + for optimization in self._disabled_optimizations: + cmd += [ '-optimizations', '!' + optimization ] + + cmd += [ + '-injars', ':'.join(self._injars) + ] + + for config_file in self._configs: + cmd += ['-include', config_file] + + # The output jar must be specified after inputs. 
+ cmd += [ + '-outjars', self._outjar, + '-dump', self._outjar + '.dump', + '-printseeds', self._outjar + '.seeds', + '-printusage', self._outjar + '.usage', + '-printmapping', self._outjar + '.mapping', + ] + + if self._verbose: + cmd.append('-verbose') + + self._cmd = cmd + return self._cmd + + def GetInputs(self): + self.build() + inputs = [self._proguard_jar_path] + self._configs + self._injars + if self._mapping: + inputs.append(self._mapping) + if self._libraries: + inputs += self._libraries + if self._tested_apk_info_path: + inputs += [self._tested_apk_info_path] + return inputs + + + def CheckOutput(self): + self.build() + # Proguard will skip writing these files if they would be empty. Create + # empty versions of them all now so that they are updated as the build + # expects. + open(self._outjar + '.dump', 'w').close() + open(self._outjar + '.seeds', 'w').close() + open(self._outjar + '.usage', 'w').close() + open(self._outjar + '.mapping', 'w').close() + # Warning: and Error: are sent to stderr, but messages and Note: are sent + # to stdout. + stdout_filter = None + stderr_filter = None + if not self._verbose: + stdout_filter = _ProguardOutputFilter() + stderr_filter = _ProguardOutputFilter() + build_utils.CheckOutput(self._cmd, print_stdout=True, + print_stderr=True, + stdout_filter=stdout_filter, + stderr_filter=stderr_filter) + + this_info = { + 'inputs': self._injars, + 'configs': self._configs, + 'mapping': self._outjar + '.mapping', + } + + build_utils.WriteJson(this_info, self._outjar + '.info') + diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py new file mode 100644 index 00000000000..02985297753 --- /dev/null +++ b/build/android/gyp/write_build_config.py @@ -0,0 +1,642 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Writes a build_config file. + +The build_config file for a target is a json file containing information about +how to build that target based on the target's dependencies. This includes +things like: the javac classpath, the list of android resources dependencies, +etc. It also includes the information needed to create the build_config for +other targets that depend on that one. + +Android build scripts should not refer to the build_config directly, and the +build specification should instead pass information in using the special +file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing +of values in a json dict in a file and looks like this: + --python-arg=@FileArg(build_config_path:javac:classpath) + +Note: If paths to input files are passed in this way, it is important that: + 1. inputs/deps of the action ensure that the files are available the first + time the action runs. + 2. Either (a) or (b) + a. inputs/deps ensure that the action runs whenever one of the files changes + b. the files are added to the action's depfile +""" + +import itertools +import optparse +import os +import sys +import xml.dom.minidom + +from util import build_utils +from util import md5_check + + +# Types that should never be used as a dependency of another build config. +_ROOT_TYPES = ('android_apk', 'deps_dex', 'java_binary', 'resource_rewriter') +# Types that should not allow code deps to pass through. 
+_RESOURCE_TYPES = ('android_assets', 'android_resources')
+
+
+class AndroidManifest(object):
+  def __init__(self, path):
+    self.path = path
+    dom = xml.dom.minidom.parse(path)
+    manifests = dom.getElementsByTagName('manifest')
+    assert len(manifests) == 1
+    self.manifest = manifests[0]
+
+  def GetInstrumentation(self):
+    instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+    if len(instrumentation_els) == 0:
+      return None
+    if len(instrumentation_els) != 1:
+      raise Exception(
+          'More than one <instrumentation> element found in %s' % self.path)
+    return instrumentation_els[0]
+
+  def CheckInstrumentation(self, expected_package):
+    instr = self.GetInstrumentation()
+    if not instr:
+      raise Exception('No <instrumentation> elements found in %s' % self.path)
+    instrumented_package = instr.getAttributeNS(
+        'http://schemas.android.com/apk/res/android', 'targetPackage')
+    if instrumented_package != expected_package:
+      raise Exception(
+          'Wrong instrumented package. Expected %s, got %s'
+          % (expected_package, instrumented_package))
+
+  def GetPackageName(self):
+    return self.manifest.getAttribute('package')
+
+
+dep_config_cache = {}
+def GetDepConfig(path):
+  if not path in dep_config_cache:
+    dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
+  return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+  return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths):
+  def GetDeps(path):
+    return set(GetDepConfig(path)['deps_configs'])
+  return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+
+
+class Deps(object):
+  def __init__(self, direct_deps_config_paths):
+    self.all_deps_config_paths = GetAllDepsConfigsInOrder(
+        direct_deps_config_paths)
+    self.direct_deps_configs = [
+        GetDepConfig(p) for p in direct_deps_config_paths]
+    self.all_deps_configs = [
+        GetDepConfig(p) for p in self.all_deps_config_paths]
+    self.direct_deps_config_paths = direct_deps_config_paths
+
+  def All(self, wanted_type=None):
+    if wanted_type is None:
+      return self.all_deps_configs
+    return DepsOfType(wanted_type, self.all_deps_configs)
+
+  def Direct(self, wanted_type=None):
+    if wanted_type is None:
+      return self.direct_deps_configs
+    return DepsOfType(wanted_type, self.direct_deps_configs)
+
+  def AllConfigPaths(self):
+    return self.all_deps_config_paths
+
+  def RemoveNonDirectDep(self, path):
+    if path in self.direct_deps_config_paths:
+      raise Exception('Cannot remove direct dep.')
+    self.all_deps_config_paths.remove(path)
+    self.all_deps_configs.remove(GetDepConfig(path))
+
+  def PrebuiltJarPaths(self):
+    ret = []
+    for config in self.Direct('java_library'):
+      if config['is_prebuilt']:
+        ret.append(config['jar_path'])
+      ret.extend(Deps(config['deps_configs']).PrebuiltJarPaths())
+    return ret
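To ground the Deps traversal above, a hedged sketch of two minimal build_config files and what the class yields (the paths and fields are illustrative; real build_configs carry much more):

import json
import os
import tempfile

def _WriteFakeConfig(directory, name, deps_configs):
  # Minimal deps_info stanza, just enough for GetDepConfig/Deps.
  path = os.path.join(directory, name + '.build_config')
  with open(path, 'w') as f:
    json.dump({'deps_info': {'name': name, 'path': path,
                             'type': 'java_library',
                             'deps_configs': deps_configs}}, f)
  return path

tmp = tempfile.mkdtemp()
base = _WriteFakeConfig(tmp, 'base', [])
ui = _WriteFakeConfig(tmp, 'ui', [base])
deps = Deps([ui])  # the Deps class defined above
assert [c['name'] for c in deps.Direct()] == ['ui']
# Transitive deps come back dependency-first.
assert [c['name'] for c in deps.All('java_library')] == ['base', 'ui']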
+def _MergeAssets(all_assets):
+  """Merges all assets from the given deps.
+
+  Returns:
+    A tuple of lists: (compressed, uncompressed)
+    Each tuple entry is a list of "srcPath:zipPath". srcPath is the path of
+    the asset to add, and zipPath is the location within the zip (excluding
+    the assets/ prefix).
+  """
+  compressed = {}
+  uncompressed = {}
+  for asset_dep in all_assets:
+    entry = asset_dep['assets']
+    disable_compression = entry.get('disable_compression', False)
+    dest_map = uncompressed if disable_compression else compressed
+    other_map = compressed if disable_compression else uncompressed
+    outputs = entry.get('outputs', [])
+    for src, dest in itertools.izip_longest(entry['sources'], outputs):
+      if not dest:
+        dest = os.path.basename(src)
+      # Merge so that each path shows up in only one of the lists, and that
+      # deps of the same target override previous ones.
+      other_map.pop(dest, 0)
+      dest_map[dest] = src
+
+  def create_list(asset_map):
+    ret = ['%s:%s' % (src, dest) for dest, src in asset_map.iteritems()]
+    # Sort to ensure deterministic ordering.
+    ret.sort()
+    return ret
+
+  return create_list(compressed), create_list(uncompressed)
+
+
+def _ResolveGroups(configs):
+  """Returns a list of configs with all groups inlined."""
+  ret = list(configs)
+  while True:
+    groups = DepsOfType('group', ret)
+    if not groups:
+      return ret
+    for config in groups:
+      index = ret.index(config)
+      expanded_configs = [GetDepConfig(p) for p in config['deps_configs']]
+      ret[index:index + 1] = expanded_configs
+
+
+def _FilterDepsPaths(dep_paths, target_type):
+  """Resolves all groups and trims dependency branches that we never want.
+
+  E.g. When a resource or asset depends on an apk target, the intent is to
+  include the .apk as a resource/asset, not to have the apk's classpath added.
+  """
+  configs = [GetDepConfig(p) for p in dep_paths]
+  configs = _ResolveGroups(configs)
+  # Don't allow root targets to be considered as a dep.
+  configs = [c for c in configs if c['type'] not in _ROOT_TYPES]
+
+  # Don't allow java libraries to cross through assets/resources.
+  if target_type in _RESOURCE_TYPES:
+    configs = [c for c in configs if c['type'] in _RESOURCE_TYPES]
+  return [c['path'] for c in configs]
+
+
+def _AsInterfaceJar(jar_path):
+  return jar_path[:-3] + 'interface.jar'
+
+
+def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_files):
+  ret = []
+  for path in runtime_deps_files:
+    with open(path) as f:
+      for line in f:
+        line = line.rstrip()
+        if not line.endswith('.so'):
+          continue
+        ret.append(os.path.normpath(line))
+  ret.reverse()
+  return ret
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--build-config', help='Path to build_config output.')
+  parser.add_option(
+      '--type',
+      help='Type of this target (e.g. android_library).')
+  parser.add_option(
+      '--deps-configs',
+      help='List of paths to dependencies\' build_config files.
') + + # android_resources options + parser.add_option('--srcjar', help='Path to target\'s resources srcjar.') + parser.add_option('--resources-zip', help='Path to target\'s resources zip.') + parser.add_option('--r-text', help='Path to target\'s R.txt file.') + parser.add_option('--package-name', + help='Java package name for these resources.') + parser.add_option('--android-manifest', help='Path to android manifest.') + parser.add_option('--is-locale-resource', action='store_true', + help='Whether it is locale resource.') + parser.add_option('--resource-dirs', action='append', default=[], + help='GYP-list of resource dirs') + + # android_assets options + parser.add_option('--asset-sources', help='List of asset sources.') + parser.add_option('--asset-renaming-sources', + help='List of asset sources with custom destinations.') + parser.add_option('--asset-renaming-destinations', + help='List of asset custom destinations.') + parser.add_option('--disable-asset-compression', action='store_true', + help='Whether to disable asset compression.') + + # java library options + parser.add_option('--jar-path', help='Path to target\'s jar output.') + parser.add_option('--java-sources-file', help='Path to .sources file') + parser.add_option('--bundled-srcjars', + help='GYP-list of .srcjars that have been included in this java_library.') + parser.add_option('--supports-android', action='store_true', + help='Whether this library supports running on the Android platform.') + parser.add_option('--requires-android', action='store_true', + help='Whether this library requires running on the Android platform.') + parser.add_option('--bypass-platform-checks', action='store_true', + help='Bypass checks for support/require Android platform.') + parser.add_option('--extra-classpath-jars', + help='GYP-list of .jar files to include on the classpath when compiling, ' + 'but not to include in the final binary.') + + # android library options + parser.add_option('--dex-path', help='Path to target\'s dex output.') + + # native library options + parser.add_option('--shared-libraries-runtime-deps', + help='Path to file containing runtime deps for shared ' + 'libraries.') + + # apk options + parser.add_option('--apk-path', help='Path to the target\'s apk output.') + parser.add_option('--incremental-apk-path', + help="Path to the target's incremental apk output.") + parser.add_option('--incremental-install-script-path', + help="Path to the target's generated incremental install " + "script.") + + parser.add_option('--tested-apk-config', + help='Path to the build config of the tested apk (for an instrumentation ' + 'test apk).') + parser.add_option('--proguard-enabled', action='store_true', + help='Whether proguard is enabled for this apk.') + parser.add_option('--proguard-info', + help='Path to the proguard .info output for this apk.') + parser.add_option('--has-alternative-locale-resource', action='store_true', + help='Whether there is alternative-locale-resource in direct deps') + parser.add_option('--fail', + help='GYP-list of error message lines to fail with.') + + options, args = parser.parse_args(argv) + + if args: + parser.error('No positional arguments should be given.') + if options.fail: + parser.error('\n'.join(build_utils.ParseGnList(options.fail))) + + required_options_map = { + 'java_binary': ['build_config', 'jar_path'], + 'java_library': ['build_config', 'jar_path'], + 'java_prebuilt': ['build_config', 'jar_path'], + 'android_assets': ['build_config'], + 'android_resources': ['build_config', 
'resources_zip'],
+    'android_apk': ['build_config', 'jar_path', 'dex_path', 'resources_zip'],
+    'deps_dex': ['build_config', 'dex_path'],
+    'resource_rewriter': ['build_config'],
+    'group': ['build_config'],
+  }
+  required_options = required_options_map.get(options.type)
+  if not required_options:
+    raise Exception('Unknown type: <%s>' % options.type)
+
+  build_utils.CheckOptions(options, parser, required_options)
+
+  # Java prebuilts are the same as libraries except for in gradle files.
+  is_java_prebuilt = options.type == 'java_prebuilt'
+  if is_java_prebuilt:
+    options.type = 'java_library'
+
+  if options.type == 'java_library':
+    if options.supports_android and not options.dex_path:
+      raise Exception('java_library that supports Android requires a dex path.')
+
+    if options.requires_android and not options.supports_android:
+      raise Exception(
+          '--supports-android is required when using --requires-android')
+
+  direct_deps_config_paths = build_utils.ParseGnList(options.deps_configs)
+  direct_deps_config_paths = _FilterDepsPaths(direct_deps_config_paths,
+                                              options.type)
+
+  deps = Deps(direct_deps_config_paths)
+  all_inputs = deps.AllConfigPaths() + build_utils.GetPythonDependencies()
+
+  # Remove other locale resources if there is an alternative_locale_resource
+  # in direct deps.
+  if options.has_alternative_locale_resource:
+    alternative = [r['path'] for r in deps.Direct('android_resources')
+                   if r.get('is_locale_resource')]
+    # We can only have one locale resource in direct deps.
+    if len(alternative) != 1:
+      raise Exception('Expected exactly one locale resource in direct deps, '
+                      'got %d' % len(alternative))
+    unwanted = [r['path'] for r in deps.All('android_resources')
+                if r.get('is_locale_resource') and r['path'] not in alternative]
+    for p in unwanted:
+      deps.RemoveNonDirectDep(p)
+
+
+  direct_library_deps = deps.Direct('java_library')
+  all_library_deps = deps.All('java_library')
+
+  all_resources_deps = deps.All('android_resources')
+  # Resources should be ordered with the highest-level dependency first so that
+  # overrides are done correctly.
+  all_resources_deps.reverse()
+
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_deps = Deps([options.tested_apk_config])
+    tested_apk_resources_deps = tested_apk_deps.All('android_resources')
+    all_resources_deps = [
+        d for d in all_resources_deps if not d in tested_apk_resources_deps]
+
+  # Initialize some common config.
+  # Any value that needs to be queryable by dependents must go within deps_info.
+  config = {
+    'deps_info': {
+      'name': os.path.basename(options.build_config),
+      'path': options.build_config,
+      'type': options.type,
+      'deps_configs': direct_deps_config_paths
+    },
+    # Info needed only by generate_gradle.py.
+    'gradle': {}
+  }
+  deps_info = config['deps_info']
+  gradle = config['gradle']
+
+  # Required for generating gradle files.
+ if options.type == 'java_library': + deps_info['is_prebuilt'] = is_java_prebuilt + + if options.android_manifest: + gradle['android_manifest'] = options.android_manifest + if options.type in ('java_binary', 'java_library', 'android_apk'): + if options.java_sources_file: + gradle['java_sources_file'] = options.java_sources_file + if options.bundled_srcjars: + gradle['bundled_srcjars'] = ( + build_utils.ParseGnList(options.bundled_srcjars)) + + gradle['dependent_prebuilt_jars'] = deps.PrebuiltJarPaths() + + gradle['dependent_android_projects'] = [] + gradle['dependent_java_projects'] = [] + for c in direct_library_deps: + if not c['is_prebuilt']: + if c['requires_android']: + gradle['dependent_android_projects'].append(c['path']) + else: + gradle['dependent_java_projects'].append(c['path']) + + + if (options.type in ('java_binary', 'java_library') and + not options.bypass_platform_checks): + deps_info['requires_android'] = options.requires_android + deps_info['supports_android'] = options.supports_android + + deps_require_android = (all_resources_deps + + [d['name'] for d in all_library_deps if d['requires_android']]) + deps_not_support_android = ( + [d['name'] for d in all_library_deps if not d['supports_android']]) + + if deps_require_android and not options.requires_android: + raise Exception('Some deps require building for the Android platform: ' + + str(deps_require_android)) + + if deps_not_support_android and options.supports_android: + raise Exception('Not all deps support the Android platform: ' + + str(deps_not_support_android)) + + if options.type in ('java_binary', 'java_library', 'android_apk'): + deps_info['jar_path'] = options.jar_path + if options.type == 'android_apk' or options.supports_android: + deps_info['dex_path'] = options.dex_path + if options.type == 'android_apk': + deps_info['apk_path'] = options.apk_path + deps_info['incremental_apk_path'] = options.incremental_apk_path + deps_info['incremental_install_script_path'] = ( + options.incremental_install_script_path) + + # Classpath values filled in below (after applying tested_apk_config). + config['javac'] = {} + + + if options.type in ('java_binary', 'java_library'): + # Only resources might have srcjars (normal srcjar targets are listed in + # srcjar_deps). A resource's srcjar contains the R.java file for those + # resources, and (like Android's default build system) we allow a library to + # refer to the resources in any of its dependents. + config['javac']['srcjars'] = [ + c['srcjar'] for c in all_resources_deps if 'srcjar' in c] + + # Used to strip out R.class for android_prebuilt()s. + if options.type == 'java_library': + config['javac']['resource_packages'] = [ + c['package_name'] for c in all_resources_deps if 'package_name' in c] + + if options.type == 'android_apk': + # Apks will get their resources srcjar explicitly passed to the java step. 
+ config['javac']['srcjars'] = [] + + if options.type == 'android_assets': + all_asset_sources = [] + if options.asset_renaming_sources: + all_asset_sources.extend( + build_utils.ParseGnList(options.asset_renaming_sources)) + if options.asset_sources: + all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources)) + + deps_info['assets'] = { + 'sources': all_asset_sources + } + if options.asset_renaming_destinations: + deps_info['assets']['outputs'] = ( + build_utils.ParseGnList(options.asset_renaming_destinations)) + if options.disable_asset_compression: + deps_info['assets']['disable_compression'] = True + + if options.type == 'android_resources': + deps_info['resources_zip'] = options.resources_zip + if options.srcjar: + deps_info['srcjar'] = options.srcjar + if options.android_manifest: + manifest = AndroidManifest(options.android_manifest) + deps_info['package_name'] = manifest.GetPackageName() + if options.package_name: + deps_info['package_name'] = options.package_name + if options.r_text: + deps_info['r_text'] = options.r_text + if options.is_locale_resource: + deps_info['is_locale_resource'] = True + + deps_info['resources_dirs'] = [] + if options.resource_dirs: + for gyp_list in options.resource_dirs: + deps_info['resources_dirs'].extend(build_utils.ParseGnList(gyp_list)) + + if options.supports_android and options.type in ('android_apk', + 'java_library'): + # Lint all resources that are not already linted by a dependent library. + owned_resource_dirs = set() + owned_resource_zips = set() + for c in all_resources_deps: + # Always use resources_dirs in favour of resources_zips so that lint error + # messages have paths that are closer to reality (and to avoid needing to + # extract during lint). + if c['resources_dirs']: + owned_resource_dirs.update(c['resources_dirs']) + else: + owned_resource_zips.add(c['resources_zip']) + + for c in all_library_deps: + if c['supports_android']: + owned_resource_dirs.difference_update(c['owned_resources_dirs']) + owned_resource_zips.difference_update(c['owned_resources_zips']) + deps_info['owned_resources_dirs'] = list(owned_resource_dirs) + deps_info['owned_resources_zips'] = list(owned_resource_zips) + + if options.type in ('android_resources','android_apk', 'resource_rewriter'): + config['resources'] = {} + config['resources']['dependency_zips'] = [ + c['resources_zip'] for c in all_resources_deps] + config['resources']['extra_package_names'] = [] + config['resources']['extra_r_text_files'] = [] + + if options.type == 'android_apk' or options.type == 'resource_rewriter': + config['resources']['extra_package_names'] = [ + c['package_name'] for c in all_resources_deps if 'package_name' in c] + config['resources']['extra_r_text_files'] = [ + c['r_text'] for c in all_resources_deps if 'r_text' in c] + + if options.type in ['android_apk', 'deps_dex']: + deps_dex_files = [c['dex_path'] for c in all_library_deps] + + if options.type in ('java_binary', 'java_library', 'android_apk'): + javac_classpath = [c['jar_path'] for c in direct_library_deps] + java_full_classpath = [c['jar_path'] for c in all_library_deps] + + if options.extra_classpath_jars: + extra_jars = build_utils.ParseGnList(options.extra_classpath_jars) + deps_info['extra_classpath_jars'] = extra_jars + javac_classpath += extra_jars + + # The java code for an instrumentation test apk is assembled differently for + # ProGuard vs. non-ProGuard. + # + # Without ProGuard: Each library's jar is dexed separately and then combined + # into a single classes.dex. 
A test apk will include all dex files not already
+  # present in the apk-under-test. At runtime all test code lives in the test
+  # apk, and the program code lives in the apk-under-test.
+  #
+  # With ProGuard: Each library's .jar file is fed into ProGuard, which outputs
+  # a single .jar, which is then dexed into a classes.dex. A test apk includes
+  # all jar files from the program and the tests because having them separate
+  # doesn't work with ProGuard's whole-program optimizations. Although the
+  # apk-under-test still has all of its code in its classes.dex, none of it is
+  # used at runtime because the copy of it within the test apk takes
+  # precedence.
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_config = GetDepConfig(options.tested_apk_config)
+
+    expected_tested_package = tested_apk_config['package_name']
+    AndroidManifest(options.android_manifest).CheckInstrumentation(
+        expected_tested_package)
+    if options.proguard_enabled:
+      # Add all tested classes to the test's classpath to ensure that the
+      # test's java code is a superset of the tested apk's java code.
+      java_full_classpath += [
+          jar for jar in tested_apk_config['java']['full_classpath']
+          if jar not in java_full_classpath]
+
+    if tested_apk_config['proguard_enabled']:
+      assert options.proguard_enabled, ('proguard must be enabled for '
+          'instrumentation apks if it\'s enabled for the tested apk.')
+
+    # Include in the classpath classes that are added directly to the apk under
+    # test (those that are not a part of a java_library).
+    javac_classpath.append(tested_apk_config['jar_path'])
+    java_full_classpath.append(tested_apk_config['jar_path'])
+
+    # Exclude dex files from the test apk that exist within the apk under test.
+    # TODO(agrieve): When proguard is enabled, this filtering logic happens
+    #     within proguard_util.py. Move the logic for the proguard case into
+    #     here as well.
+    tested_apk_library_deps = tested_apk_deps.All('java_library')
+    tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
+    deps_dex_files = [
+        p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
+
+  if options.type == 'android_apk':
+    deps_info['proguard_enabled'] = options.proguard_enabled
+    deps_info['proguard_info'] = options.proguard_info
+    config['proguard'] = {}
+    proguard_config = config['proguard']
+    proguard_config['input_paths'] = [options.jar_path] + java_full_classpath
+    extra_jars = set()
+    for c in all_library_deps:
+      extra_jars.update(c.get('extra_classpath_jars', ()))
+    proguard_config['lib_paths'] = list(extra_jars)
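A toy restatement of the two rules just applied (the jar and dex names are invented): with ProGuard the test apk's classpath becomes a superset of the tested apk's, and without it the tested apk's dex files are excluded from the test apk.

# Tested apk already ships base.dex; the test apk keeps only its own dex.
deps_dex = ['base.dex', 'test_support.dex']
tested_apk_dex = ['base.dex']
assert [p for p in deps_dex if p not in tested_apk_dex] == ['test_support.dex']

# With ProGuard, the test classpath absorbs the tested apk's classpath.
java_full = ['base.jar', 'test.jar']
java_full += [j for j in ['base.jar', 'chrome.jar'] if j not in java_full]
assert java_full == ['base.jar', 'test.jar', 'chrome.jar']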
+  # Dependencies for the final dex file of an apk or a 'deps_dex'.
+  if options.type in ['android_apk', 'deps_dex']:
+    config['final_dex'] = {}
+    dex_config = config['final_dex']
+    dex_config['dependency_dex_files'] = deps_dex_files
+
+  if options.type in ('java_binary', 'java_library', 'android_apk'):
+    config['javac']['classpath'] = javac_classpath
+    config['javac']['interface_classpath'] = [
+        _AsInterfaceJar(p) for p in javac_classpath]
+    deps_info['java'] = {
+      'full_classpath': java_full_classpath
+    }
+
+  if options.type == 'android_apk':
+    dependency_jars = [c['jar_path'] for c in all_library_deps]
+    all_interface_jars = [
+        _AsInterfaceJar(p) for p in dependency_jars + [options.jar_path]]
+    config['dist_jar'] = {
+      'dependency_jars': dependency_jars,
+      'all_interface_jars': all_interface_jars,
+    }
+    manifest = AndroidManifest(options.android_manifest)
+    deps_info['package_name'] = manifest.GetPackageName()
+    if not options.tested_apk_config and manifest.GetInstrumentation():
+      # This must then have instrumentation only for itself.
+      manifest.CheckInstrumentation(manifest.GetPackageName())
+
+    library_paths = []
+    java_libraries_list = None
+    runtime_deps_files = build_utils.ParseGnList(
+        options.shared_libraries_runtime_deps or '[]')
+    if runtime_deps_files:
+      library_paths = _ExtractSharedLibsFromRuntimeDeps(runtime_deps_files)
+      # Create a java literal array with the "base" library names:
+      # e.g. libfoo.so -> foo
+      java_libraries_list = ('{%s}' % ','.join(
+          ['"%s"' % s[3:-3] for s in library_paths]))
+
+    all_inputs.extend(runtime_deps_files)
+    config['native'] = {
+      'libraries': library_paths,
+      'java_libraries_list': java_libraries_list,
+    }
+    config['assets'], config['uncompressed_assets'] = (
+        _MergeAssets(deps.All('android_assets')))
+
+  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, all_inputs)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
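The WriteJson call above leaves a per-target .build_config file that later build steps (and dependent targets, via GetDepConfig) read back. A trimmed, hypothetical example of what an android_apk target's file might contain, using only keys built up in the code above; every value is invented:

    {
      "deps_info": {
        "package_name": "org.example.app",
        "proguard_enabled": false
      },
      "javac": {"classpath": ["out/gen/base.jar"]},
      "final_dex": {"dependency_dex_files": ["out/gen/base.dex.jar"]},
      "native": {
        "libraries": ["libexample.so"],
        "java_libraries_list": "{\"example\"}"
      }
    }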
diff --git a/build/android/gyp/write_ordered_libraries.py b/build/android/gyp/write_ordered_libraries.py
new file mode 100644
index 00000000000..fd25e8e6521
--- /dev/null
+++ b/build/android/gyp/write_ordered_libraries.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes dependency ordered list of native libraries.
+
+The list excludes any Android system libraries, as those are not bundled with
+the APK.
+
+This list of libraries is used for several steps of building an APK.
+In the component build, the --input-libraries only needs to be the top-level
+library (e.g. libcontent_shell_content_view). This will then use readelf to
+inspect the shared libraries and determine the full list of (non-system)
+libraries that should be included in the APK.
+"""
+
+# TODO(cjhopman): See if we can expose the list of library dependencies from
+# gyp, rather than calculating it ourselves.
+# http://crbug.com/225558
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_utils
+
+_readelf = None
+_library_dirs = None
+
+_library_re = re.compile(
+    '.*NEEDED.*Shared library: \[(?P<library_name>.+)\]')
+
+
+def SetReadelfPath(path):
+  global _readelf
+  _readelf = path
+
+
+def SetLibraryDirs(dirs):
+  global _library_dirs
+  _library_dirs = dirs
+
+
+def FullLibraryPath(library_name):
+  assert _library_dirs is not None
+  for directory in _library_dirs:
+    path = '%s/%s' % (directory, library_name)
+    if os.path.exists(path):
+      return path
+  return library_name
+
+
+def IsSystemLibrary(library_name):
+  # If the library doesn't exist in the libraries directory, assume that it is
+  # an Android system library.
+  return not os.path.exists(FullLibraryPath(library_name))
+
+
+def CallReadElf(library_or_executable):
+  assert _readelf is not None
+  readelf_cmd = [_readelf,
+                 '-d',
+                 FullLibraryPath(library_or_executable)]
+  return build_utils.CheckOutput(readelf_cmd)
+
+
+def GetDependencies(library_or_executable):
+  elf = CallReadElf(library_or_executable)
+  return set(_library_re.findall(elf))
+
+
+def GetNonSystemDependencies(library_name):
+  all_deps = GetDependencies(library_name)
+  return set((lib for lib in all_deps if not IsSystemLibrary(lib)))
+
+
+def GetSortedTransitiveDependencies(libraries):
+  """Returns all transitive library dependencies in dependency order."""
+  return build_utils.GetSortedTransitiveDependencies(
+      libraries, GetNonSystemDependencies)
+
+
+def GetSortedTransitiveDependenciesForBinaries(binaries):
+  if binaries[0].endswith('.so'):
+    libraries = [os.path.basename(lib) for lib in binaries]
+  else:
+    assert len(binaries) == 1
+    all_deps = GetDependencies(binaries[0])
+    libraries = [lib for lib in all_deps if not IsSystemLibrary(lib)]
+
+  return GetSortedTransitiveDependencies(libraries)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--input-libraries',
+      help='A list of top-level input libraries.')
+  parser.add_option('--libraries-dir',
+      help='The directory which contains shared libraries.')
+  parser.add_option('--readelf', help='Path to the readelf binary.')
+  parser.add_option('--output', help='Path to the generated .json file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+
+  SetReadelfPath(options.readelf)
+  SetLibraryDirs(options.libraries_dir.split(','))
+
+  libraries = build_utils.ParseGnList(options.input_libraries)
+  if len(libraries):
+    libraries = GetSortedTransitiveDependenciesForBinaries(libraries)
+
+  # Convert to "base" library names: e.g. libfoo.so -> foo
+  java_libraries_list = (
+      '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))
+
+  out_json = {
+      'libraries': libraries,
+      'lib_paths': [FullLibraryPath(l) for l in libraries],
+      'java_libraries_list': java_libraries_list
+  }
+  build_utils.WriteJson(
+      out_json,
+      options.output,
+      only_if_changed=True)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        libraries + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
+
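For reference, the `readelf -d` output parsed by write_ordered_libraries.py contains NEEDED lines roughly like the sample below, and the named group pulls out just the library name. A quick self-contained check (the sample line is illustrative, not real readelf output):

    import re

    _library_re = re.compile(
        r'.*NEEDED.*Shared library: \[(?P<library_name>.+)\]')
    sample = ' 0x00000001 (NEEDED)      Shared library: [libbase.cr.so]'
    print(_library_re.findall(sample))  # -> ['libbase.cr.so']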
diff --git a/build/android/gyp/zip.py b/build/android/gyp/zip.py
new file mode 100644
index 00000000000..51322dfd5b2
--- /dev/null
+++ b/build/android/gyp/zip.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Archives a set of files.
+"""
+
+import optparse
+import sys
+
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--input-dir', help='Directory of files to archive.')
+  parser.add_option('--output', help='Path to output archive.')
+  options, _ = parser.parse_args()
+
+  inputs = build_utils.FindInDirectory(options.input_dir, '*')
+  build_utils.DoZip(inputs, options.output, options.input_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/host_heartbeat.py b/build/android/host_heartbeat.py
new file mode 100644
index 00000000000..89905928ec8
--- /dev/null
+++ b/build/android/host_heartbeat.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sends a heartbeat pulse to the currently online Android devices.
+This heartbeat lets the devices know that they are connected to a host.
+"""
+# pylint: disable=W0702
+
+import sys
+import time
+
+import devil_chromium
+from devil.android import device_utils
+
+PULSE_PERIOD = 20
+
+def main():
+  devil_chromium.Initialize()
+
+  while True:
+    try:
+      devices = device_utils.DeviceUtils.HealthyDevices(blacklist=None)
+      for d in devices:
+        d.RunShellCommand(['touch', '/sdcard/host_heartbeat'],
+                          check_return=True)
+    except:
+      # Keep the heartbeat running, ignoring all errors.
+      pass
+    time.sleep(PULSE_PERIOD)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/increase_size_for_speed.gypi b/build/android/increase_size_for_speed.gypi
new file mode 100644
index 00000000000..c5600b1b1a8
--- /dev/null
+++ b/build/android/increase_size_for_speed.gypi
@@ -0,0 +1,42 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to optimize a target for speed
+# rather than for size on Android.
+# This is used in some carefully tailored targets and is not meant
+# to be included everywhere. Before adding the template to another target,
+# please ask in chromium-dev@. See crbug.com/411909
+
+{
+  'configurations': {
+    'Release': {
+      'target_conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['OS=="android"', {
+              'cflags!': ['-Os'],
+              'cflags': ['-O2'],
+            }],
+            # Do not merge -Os and -O2 in GCC LTO.
+            # LTO merges all optimization options at link-time. -O2 takes
+            # precedence over -Os. Avoid using LTO simultaneously
+            # on -Os and -O2 parts for that reason.
+            ['OS=="android" and clang==0 and use_lto==1', {
+              'cflags!': [
+                '-flto',
+                '-ffat-lto-objects',
+              ],
+            }],
+            ['OS=="android" and clang==0 and use_lto_o2==1', {
+              'cflags': [
+                '-flto',
+                '-ffat-lto-objects',
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+  },
+}
diff --git a/build/android/incremental_install/BUILD.gn b/build/android/incremental_install/BUILD.gn
new file mode 100644
index 00000000000..3bb4696ea4b
--- /dev/null
+++ b/build/android/incremental_install/BUILD.gn
@@ -0,0 +1,19 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+android_library("bootstrap_java") {
+  # Use .dex rather than .dex.jar to be usable by package_apk().
+  dex_path = "$target_gen_dir/bootstrap.dex"
+  java_files = [
+    "java/org/chromium/incrementalinstall/BootstrapApplication.java",
+    "java/org/chromium/incrementalinstall/BootstrapInstrumentation.java",
+    "java/org/chromium/incrementalinstall/ClassLoaderPatcher.java",
+    "java/org/chromium/incrementalinstall/LockFile.java",
+    "java/org/chromium/incrementalinstall/Reflect.java",
+  ]
+  emma_never_instrument = true
+  run_findbugs_override = false
+}
diff --git a/build/android/incremental_install/__init__.py b/build/android/incremental_install/__init__.py
new file mode 100644
index 00000000000..1aaf0e179ff
--- /dev/null
+++ b/build/android/incremental_install/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/incremental_install/create_install_script.py b/build/android/incremental_install/create_install_script.py
new file mode 100644
index 00000000000..d8afc0d54b3
--- /dev/null
+++ b/build/android/incremental_install/create_install_script.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a script to run an "_incremental" .apk."""
+
+import argparse
+import os
+import pprint
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+
+from pylib.constants import host_paths
+from util import build_utils
+
+
+SCRIPT_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by:
+# //build/android/incremental_install/create_install_script.py
+
+import os
+import subprocess
+import sys
+
+
+def _ResolvePath(path):
+  script_directory = os.path.dirname(__file__)
+  return os.path.abspath(os.path.join(script_directory, path))
+
+
+# Exported to allow test runner to be able to install incremental apks.
+def GetInstallParameters():
+  apk_path = {apk_path}
+  dex_files = {dex_files}
+  dont_even_try = {dont_even_try}
+  native_libs = {native_libs}
+  show_proguard_warning = {show_proguard_warning}
+  splits = {splits}
+
+  return dict(apk_path=_ResolvePath(apk_path),
+              dex_files=[_ResolvePath(p) for p in dex_files],
+              dont_even_try=dont_even_try,
+              native_libs=[_ResolvePath(p) for p in native_libs],
+              show_proguard_warning=show_proguard_warning,
+              splits=[_ResolvePath(p) for p in splits])
+
+
+def main():
+  output_directory = {output_directory}
+  cmd_path = {cmd_path}
+  params = GetInstallParameters()
+  cmd_args = [
+      _ResolvePath(cmd_path),
+      '--output-directory', _ResolvePath(output_directory),
+  ]
+  for native_lib in params['native_libs']:
+    cmd_args.extend(('--native_lib', native_lib))
+  for dex_path in params['dex_files']:
+    cmd_args.extend(('--dex-file', dex_path))
+  for split in params['splits']:
+    cmd_args.extend(('--split', split))
+  cmd_args.append(params['apk_path'])
+  if params['dont_even_try']:
+    cmd_args.extend(('--dont-even-try', params['dont_even_try']))
+  if params['show_proguard_warning']:
+    cmd_args.append('--show-proguard-warning')
+  return subprocess.call(cmd_args + sys.argv[1:])
+
+if __name__ == '__main__':
+  sys.exit(main())
+"""
+
+
+def _ParseArgs(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--script-output-path',
+                      help='Output path for executable script.',
+                      required=True)
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.',
+                      default='.')
+  parser.add_argument('--apk-path',
+                      help='Path to the .apk to install.',
+                      required=True)
+  parser.add_argument('--split',
+                      action='append',
+                      dest='splits',
+                      default=[],
+                      help='A glob matching the apk splits. '
+                           'Can be specified multiple times.')
+  parser.add_argument('--native-libs',
+                      action='append',
+                      default=[],
+                      help='GYP-list of paths to native libraries. Can be '
+                           'repeated.')
+  parser.add_argument('--dex-file',
+                      action='append',
+                      default=[],
+                      dest='dex_files',
+                      help='List of dex files to include.')
+  parser.add_argument('--dex-file-list',
+                      help='GYP-list of dex files.')
+  parser.add_argument('--show-proguard-warning',
+                      action='store_true',
+                      default=False,
+                      help='Print a warning about proguard being disabled')
+  parser.add_argument('--dont-even-try',
+                      help='Prints this message and exits.')
+
+  options = parser.parse_args(args)
+  options.dex_files += build_utils.ParseGnList(options.dex_file_list)
+  all_libs = []
+  for gyp_list in options.native_libs:
+    all_libs.extend(build_utils.ParseGnList(gyp_list))
+  options.native_libs = all_libs
+  return options
+
+
+def main(args):
+  options = _ParseArgs(args)
+
+  def relativize(path):
+    script_dir = os.path.dirname(options.script_output_path)
+    return path and os.path.relpath(path, script_dir)
+
+  installer_path = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', 'android',
+                                'incremental_install', 'installer.py')
+  pformat = pprint.pformat
+  template_args = {
+      'cmd_path': pformat(relativize(installer_path)),
+      'apk_path': pformat(relativize(options.apk_path)),
+      'output_directory': pformat(relativize(options.output_directory)),
+      'native_libs': pformat([relativize(p) for p in options.native_libs]),
+      'dex_files': pformat([relativize(p) for p in options.dex_files]),
+      'dont_even_try': pformat(options.dont_even_try),
+      'show_proguard_warning': pformat(options.show_proguard_warning),
+      'splits': pformat([relativize(p) for p in options.splits]),
+  }
+
+  with open(options.script_output_path, 'w') as script:
+    script.write(SCRIPT_TEMPLATE.format(**template_args))
+
+  os.chmod(options.script_output_path, 0750)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
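Each template value is rendered with pprint.pformat, which guarantees the substituted text lands in the generated script as a valid Python literal (strings come out quoted, lists spelled out, None as None). A quick illustration with made-up values:

    import pprint

    print(pprint.pformat('../apks/App_incremental.apk'))  # '../apks/App_incremental.apk' (quoted)
    print(pprint.pformat(['../gen/app.dex.jar']))         # ['../gen/app.dex.jar']
    print(pprint.pformat(None))                           # None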
+""" + +import argparse +import os +import sys +from xml.etree import ElementTree + +sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'gyp')) +from util import build_utils + +_ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android' +ElementTree.register_namespace('android', _ANDROID_NAMESPACE) + +_INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication' +_META_DATA_APP_NAME = 'incremental-install-real-app' +_META_DATA_INSTRUMENTATION_NAME = 'incremental-install-real-instrumentation' +_DEFAULT_APPLICATION_CLASS = 'android.app.Application' +_DEFAULT_INSTRUMENTATION_CLASS = 'android.app.Instrumentation' + + +def _AddNamespace(name): + """Adds the android namespace prefix to the given identifier.""" + return '{%s}%s' % (_ANDROID_NAMESPACE, name) + +def _ParseArgs(): + parser = argparse.ArgumentParser() + build_utils.AddDepfileOption(parser) + parser.add_argument('--src-manifest', + help='The main manifest of the app', + required=True) + parser.add_argument('--out-manifest', + help='The output manifest', + required=True) + parser.add_argument('--disable-isolated-processes', + help='Changes all android:isolatedProcess to false. ' + 'This is required on Android M+', + action='store_true') + return parser.parse_args() + + +def _CreateMetaData(parent, name, value): + meta_data_node = ElementTree.SubElement(parent, 'meta-data') + meta_data_node.set(_AddNamespace('name'), name) + meta_data_node.set(_AddNamespace('value'), value) + + +def _ProcessManifest(main_manifest, disable_isolated_processes): + """Returns a transformed AndroidManifest.xml for use with _incremental apks. + + Args: + main_manifest: Manifest contents to transform. + disable_isolated_processes: Whether to set all isolatedProcess attributes to + false + + Returns: + The transformed AndroidManifest.xml. + """ + if disable_isolated_processes: + main_manifest = main_manifest.replace('isolatedProcess="true"', + 'isolatedProcess="false"') + + doc = ElementTree.fromstring(main_manifest) + app_node = doc.find('application') + if app_node is None: + app_node = ElementTree.SubElement(doc, 'application') + + real_app_class = app_node.get(_AddNamespace('name'), + _DEFAULT_APPLICATION_CLASS) + app_node.set(_AddNamespace('name'), _INCREMENTAL_APP_NAME) + _CreateMetaData(app_node, _META_DATA_APP_NAME, real_app_class) + + # Seems to be a bug in ElementTree, as doc.find() doesn't work here. 
+  instrumentation_nodes = doc.findall('instrumentation')
+  if instrumentation_nodes:
+    instrumentation_node = instrumentation_nodes[0]
+    real_instrumentation_class = instrumentation_node.get(_AddNamespace('name'))
+    instrumentation_node.set(_AddNamespace('name'),
+                             _DEFAULT_INSTRUMENTATION_CLASS)
+    _CreateMetaData(app_node, _META_DATA_INSTRUMENTATION_NAME,
+                    real_instrumentation_class)
+
+  return ElementTree.tostring(doc, encoding='UTF-8')
+
+
+def main():
+  options = _ParseArgs()
+  with open(options.src_manifest) as f:
+    main_manifest_data = f.read()
+  new_manifest_data = _ProcessManifest(main_manifest_data,
+                                       options.disable_isolated_processes)
+  with open(options.out_manifest, 'w') as f:
+    f.write(new_manifest_data)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        [options.src_manifest] + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  main()
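Concretely, _ProcessManifest() swaps the application class for the bootstrap one and stashes the original in a meta-data entry that BootstrapApplication reads back at runtime. A hypothetical before/after (package and class names invented):

    <!-- Before -->
    <application android:name="org.example.FooApplication">

    <!-- After -->
    <application android:name="org.chromium.incrementalinstall.BootstrapApplication">
        <meta-data android:name="incremental-install-real-app"
                   android:value="org.example.FooApplication"/>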
diff --git a/build/android/incremental_install/installer.py b/build/android/incremental_install/installer.py
new file mode 100644
index 00000000000..2f4e994d486
--- /dev/null
+++ b/build/android/incremental_install/installer.py
@@ -0,0 +1,316 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install *_incremental.apk targets as well as their dependent files."""
+
+import argparse
+import glob
+import logging
+import os
+import posixpath
+import shutil
+import sys
+import zipfile
+
+sys.path.append(
+    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_utils
+from devil.android.sdk import version_codes
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.utils import time_profile
+
+prev_sys_path = list(sys.path)
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+from util import build_utils
+sys.path = prev_sys_path
+
+
+def _DeviceCachePath(device):
+  file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
+  return os.path.join(constants.GetOutDirectory(), file_name)
+
+
+def _TransformDexPaths(paths):
+  """Given paths like ["/a/b/c", "/a/c/d"], returns ["b.c", "c.d"]."""
+  if len(paths) == 1:
+    return [os.path.basename(paths[0])]
+
+  prefix_len = len(os.path.commonprefix(paths))
+  return [p[prefix_len:].replace(os.sep, '.') for p in paths]
+
+
+def _Execute(concurrently, *funcs):
+  """Calls all functions in |funcs| concurrently or in sequence."""
+  timer = time_profile.TimeProfile()
+  if concurrently:
+    reraiser_thread.RunAsync(funcs)
+  else:
+    for f in funcs:
+      f()
+  timer.Stop(log=False)
+  return timer
+
+
+def _GetDeviceIncrementalDir(package):
+  """Returns the device path to put incremental files for the given package."""
+  return '/data/local/tmp/incremental-app-%s' % package
+
+
+def _HasClasses(jar_path):
+  """Returns whether the given jar contains classes.dex."""
+  with zipfile.ZipFile(jar_path) as jar:
+    return 'classes.dex' in jar.namelist()
+
+
+def Uninstall(device, package, enable_device_cache=False):
+  """Uninstalls and removes all incremental files for the given package."""
+  main_timer = time_profile.TimeProfile()
+  device.Uninstall(package)
+  if enable_device_cache:
+    # Uninstall is rare, so just wipe the cache in this case.
+    cache_path = _DeviceCachePath(device)
+    if os.path.exists(cache_path):
+      os.unlink(cache_path)
+  device.RunShellCommand(['rm', '-rf', _GetDeviceIncrementalDir(package)],
+                         check_return=True)
+  logging.info('Uninstall took %s seconds.', main_timer.GetDelta())
+
+
+def Install(device, apk, split_globs=None, native_libs=None, dex_files=None,
+            enable_device_cache=False, use_concurrency=True,
+            show_proguard_warning=False, permissions=(),
+            allow_downgrade=True):
+  """Installs the given incremental apk and all required supporting files.
+
+  Args:
+    device: A DeviceUtils instance.
+    apk: The path to the apk, or an ApkHelper instance.
+    split_globs: Glob patterns for any required apk splits (optional).
+    native_libs: List of app's native libraries (optional).
+    dex_files: List of .dex.jar files that comprise the app's Dalvik code.
+    enable_device_cache: Whether to enable on-device caching of checksums.
+    use_concurrency: Whether to speed things up using multiple threads.
+    show_proguard_warning: Whether to print a warning about Proguard not being
+        enabled after installing.
+    permissions: A list of the permissions to grant, or None to grant all
+        non-blacklisted permissions in the manifest.
+    allow_downgrade: Whether to allow installing an apk with a lower version
+        number than the one already installed on the device.
+  """
+  main_timer = time_profile.TimeProfile()
+  install_timer = time_profile.TimeProfile()
+  push_native_timer = time_profile.TimeProfile()
+  push_dex_timer = time_profile.TimeProfile()
+
+  apk = apk_helper.ToHelper(apk)
+  apk_package = apk.GetPackageName()
+  device_incremental_dir = _GetDeviceIncrementalDir(apk_package)
+
+  # Install .apk(s) if any of them have changed.
+  def do_install():
+    install_timer.Start()
+    if split_globs:
+      splits = []
+      for split_glob in split_globs:
+        splits.extend(glob.glob(split_glob))
+      device.InstallSplitApk(apk, splits, reinstall=True,
+                             allow_cached_props=True, permissions=permissions,
+                             allow_downgrade=allow_downgrade)
+    else:
+      device.Install(apk, reinstall=True, permissions=permissions,
+                     allow_downgrade=allow_downgrade)
+    install_timer.Stop(log=False)
+
+  # Push .so and .dex files to the device (if they have changed).
+  def do_push_files():
+    if native_libs:
+      push_native_timer.Start()
+      with build_utils.TempDir() as temp_dir:
+        device_lib_dir = posixpath.join(device_incremental_dir, 'lib')
+        for path in native_libs:
+          # Note: Can't use symlinks as they don't work when
+          # "adb push parent_dir" is used (like we do here).
+          shutil.copy(path, os.path.join(temp_dir, os.path.basename(path)))
+        device.PushChangedFiles([(temp_dir, device_lib_dir)],
+                                delete_device_stale=True)
+      push_native_timer.Stop(log=False)
+
+    if dex_files:
+      push_dex_timer.Start()
+      # Put all .dex files to be pushed into a temporary directory so that we
+      # can use delete_device_stale=True.
+      with build_utils.TempDir() as temp_dir:
+        device_dex_dir = posixpath.join(device_incremental_dir, 'dex')
+        # Ensure no two files have the same name.
+        transformed_names = _TransformDexPaths(dex_files)
+        for src_path, dest_name in zip(dex_files, transformed_names):
+          # Binary targets with no extra classes create .dex.jar without a
+          # classes.dex (which Android chokes on).
+          if _HasClasses(src_path):
+            shutil.copy(src_path, os.path.join(temp_dir, dest_name))
+        device.PushChangedFiles([(temp_dir, device_dex_dir)],
+                                delete_device_stale=True)
+      push_dex_timer.Stop(log=False)
+
+  def check_selinux():
+    # Marshmallow has no filesystem access whatsoever. It might be possible to
+    # get things working on Lollipop, but attempts so far have failed.
+    # http://crbug.com/558818
+    has_selinux = device.build_version_sdk >= version_codes.LOLLIPOP
+    if has_selinux and apk.HasIsolatedProcesses():
+      raise Exception('Cannot use incremental installs on Android L+ without '
+                      'first disabling isolated processes.\n'
+                      'To do so, use GN arg:\n'
+                      '    disable_incremental_isolated_processes=true')
+
+  cache_path = _DeviceCachePath(device)
+  def restore_cache():
+    if not enable_device_cache:
+      logging.info('Ignoring device cache')
+      return
+    if os.path.exists(cache_path):
+      logging.info('Using device cache: %s', cache_path)
+      with open(cache_path) as f:
+        device.LoadCacheData(f.read())
+      # Delete the cached file so that any exceptions cause it to be cleared.
+      os.unlink(cache_path)
+    else:
+      logging.info('No device cache present: %s', cache_path)
+
+  def save_cache():
+    with open(cache_path, 'w') as f:
+      f.write(device.DumpCacheData())
+    logging.info('Wrote device cache: %s', cache_path)
+
+  # Create 2 lock files:
+  # * install.lock tells the app to pause on start-up (until we release it).
+  # * firstrun.lock is used by the app to pause all secondary processes until
+  #   the primary process finishes loading the .dex / .so files.
+  def create_lock_files():
+    # Creates or zeros out lock files.
+    cmd = ('D="%s";'
+           'mkdir -p $D &&'
+           'echo -n >$D/install.lock 2>$D/firstrun.lock')
+    device.RunShellCommand(cmd % device_incremental_dir, check_return=True)
+
+  # The firstrun.lock is released by the app itself.
+  def release_installer_lock():
+    device.RunShellCommand('echo > %s/install.lock' % device_incremental_dir,
+                           check_return=True)
+
+  # Concurrency here speeds things up quite a bit, but DeviceUtils hasn't
+  # been designed for multi-threading. Enabling only because this is a
+  # developer-only tool.
+  setup_timer = _Execute(
+      use_concurrency, create_lock_files, restore_cache, check_selinux)
+
+  _Execute(use_concurrency, do_install, do_push_files)
+
+  finalize_timer = _Execute(use_concurrency, release_installer_lock, save_cache)
+
+  logging.info(
+      'Took %s seconds (setup=%s, install=%s, libs=%s, dex=%s, finalize=%s)',
+      main_timer.GetDelta(), setup_timer.GetDelta(), install_timer.GetDelta(),
+      push_native_timer.GetDelta(), push_dex_timer.GetDelta(),
+      finalize_timer.GetDelta())
+  if show_proguard_warning:
+    logging.warning('Target had proguard enabled, but incremental install uses '
+                    'non-proguarded .dex files. Performance characteristics '
+                    'may differ.')
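This handshake relies on LockFile.installerLockExists() on the Java side treating a missing or zero-length file as "still locked", which is why create_lock_files() only truncates the files and release_installer_lock() only writes a byte. The same invariant restated in Python (a sketch, not part of the build):

    import os

    def installer_lock_exists(path):
        # Mirrors LockFile.installerLockExists(): a missing or empty file
        # means the installer still holds the lock.
        return not os.path.exists(path) or os.path.getsize(path) == 0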
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('apk_path',
+                      help='The path to the APK to install.')
+  parser.add_argument('--split',
+                      action='append',
+                      dest='splits',
+                      help='A glob matching the apk splits. '
+                           'Can be specified multiple times.')
+  parser.add_argument('--native_lib',
+                      dest='native_libs',
+                      help='Path to native library (repeatable)',
+                      action='append',
+                      default=[])
+  parser.add_argument('--dex-file',
+                      dest='dex_files',
+                      help='Path to dex files (repeatable)',
+                      action='append',
+                      default=[])
+  parser.add_argument('-d', '--device', dest='device',
+                      help='Target device for apk to install on.')
+  parser.add_argument('--uninstall',
+                      action='store_true',
+                      default=False,
+                      help='Remove the app and all side-loaded files.')
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.')
+  parser.add_argument('--no-threading',
+                      action='store_false',
+                      default=True,
+                      dest='threading',
+                      help='Do not install and push concurrently')
+  parser.add_argument('--no-cache',
+                      action='store_false',
+                      default=True,
+                      dest='cache',
+                      help='Do not use cached information about what files are '
+                           'currently on the target device.')
+  parser.add_argument('--show-proguard-warning',
+                      action='store_true',
+                      default=False,
+                      help='Print a warning about proguard being disabled')
+  parser.add_argument('--dont-even-try',
+                      help='Prints this message and exits.')
+  parser.add_argument('-v',
+                      '--verbose',
+                      dest='verbose_count',
+                      default=0,
+                      action='count',
+                      help='Verbose level (multiple times for more)')
+  parser.add_argument('--disable-downgrade',
+                      action='store_false',
+                      default=True,
+                      dest='allow_downgrade',
+                      help='Disable install of apk with lower version number '
+                           'than the version already on the device.')
+
+  args = parser.parse_args()
+
+  run_tests_helper.SetLogLevel(args.verbose_count)
+  constants.SetBuildType('Debug')
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+
+  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())
+
+  if args.dont_even_try:
+    logging.fatal(args.dont_even_try)
+    return 1
+
+  # Retries are annoying when commands fail for legitimate reasons. Might want
+  # to enable them if this is ever used on bots though.
+  device = device_utils.DeviceUtils.HealthyDevices(
+      device_arg=args.device,
+      default_retries=0,
+      enable_device_files_cache=True)[0]
+
+  apk = apk_helper.ToHelper(args.apk_path)
+  if args.uninstall:
+    Uninstall(device, apk.GetPackageName(), enable_device_cache=args.cache)
+  else:
+    Install(device, apk, split_globs=args.splits, native_libs=args.native_libs,
+            dex_files=args.dex_files, enable_device_cache=args.cache,
+            use_concurrency=args.threading,
+            show_proguard_warning=args.show_proguard_warning,
+            allow_downgrade=args.allow_downgrade)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
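The module can also be driven programmatically instead of through the generated wrapper script; a minimal sketch, assuming exactly one healthy device is attached and that the (invented) paths exist in the output directory:

    import devil_chromium
    from devil.android import device_utils

    import installer  # this module

    devil_chromium.Initialize()
    device = device_utils.DeviceUtils.HealthyDevices(default_retries=0)[0]
    installer.Install(device, 'out/Debug/apks/App_incremental.apk',
                      dex_files=['out/Debug/gen/app.dex.jar'])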
diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
new file mode 100644
index 00000000000..1fb5e40b83b
--- /dev/null
+++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
@@ -0,0 +1,282 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Application;
+import android.app.Instrumentation;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageManager;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.os.Bundle;
+import android.util.Log;
+
+import java.io.File;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Application that replaces itself with another Application (as defined in
+ * an AndroidManifest.xml meta-data tag). It loads the other application only
+ * after side-loading its .so and .dex files from /data/local/tmp.
+ *
+ * This class is highly dependent on the private implementation details of
+ * Android's ActivityThread.java. However, it has been tested to work with
+ * JellyBean through Marshmallow.
+ */
+public final class BootstrapApplication extends Application {
+    private static final String TAG = "cr.incrementalinstall";
+    private static final String MANAGED_DIR_PREFIX = "/data/local/tmp/incremental-app-";
+    private static final String REAL_APP_META_DATA_NAME = "incremental-install-real-app";
+    private static final String REAL_INSTRUMENTATION_META_DATA_NAME =
+            "incremental-install-real-instrumentation";
+
+    private ClassLoaderPatcher mClassLoaderPatcher;
+    private Application mRealApplication;
+    private Instrumentation mOrigInstrumentation;
+    private Instrumentation mRealInstrumentation;
+    private Object mStashedProviderList;
+    private Object mActivityThread;
+
+    @Override
+    protected void attachBaseContext(Context context) {
+        super.attachBaseContext(context);
+        try {
+            mActivityThread = Reflect.invokeMethod(Class.forName("android.app.ActivityThread"),
+                    "currentActivityThread");
+            mClassLoaderPatcher = new ClassLoaderPatcher(context);
+
+            mOrigInstrumentation =
+                    (Instrumentation) Reflect.getField(mActivityThread, "mInstrumentation");
+            Context instContext = mOrigInstrumentation.getContext();
+            if (instContext == null) {
+                instContext = context;
+            }
+
+            // When running with an instrumentation that lives in a different package from the
+            // application, we must load the dex files and native libraries from both packages.
+            // This logic likely won't work when the instrumentation is incremental, but the app is
+            // non-incremental. This configuration isn't used right now though.
+            String appPackageName = getPackageName();
+            String instPackageName = instContext.getPackageName();
+            boolean instPackageNameDiffers = !appPackageName.equals(instPackageName);
+            Log.i(TAG, "App PackageName: " + appPackageName);
+            if (instPackageNameDiffers) {
+                Log.i(TAG, "Inst PackageName: " + instPackageName);
+            }
+
+            File appIncrementalRootDir = new File(MANAGED_DIR_PREFIX + appPackageName);
+            File appLibDir = new File(appIncrementalRootDir, "lib");
+            File appDexDir = new File(appIncrementalRootDir, "dex");
+            File appInstallLockFile = new File(appIncrementalRootDir, "install.lock");
+            File appFirstRunLockFile = new File(appIncrementalRootDir, "firstrun.lock");
+            File instIncrementalRootDir = new File(MANAGED_DIR_PREFIX + instPackageName);
+            File instLibDir = new File(instIncrementalRootDir, "lib");
+            File instDexDir = new File(instIncrementalRootDir, "dex");
+            File instInstallLockFile = new File(instIncrementalRootDir, "install.lock");
+            File instFirstRunLockFile = new File(instIncrementalRootDir, "firstrun.lock");
+
+            boolean isFirstRun = LockFile.installerLockExists(appFirstRunLockFile)
+                    || (instPackageNameDiffers
+                            && LockFile.installerLockExists(instFirstRunLockFile));
+            if (isFirstRun) {
+                if (mClassLoaderPatcher.mIsPrimaryProcess) {
+                    // Wait for incremental_install.py to finish.
+                    LockFile.waitForInstallerLock(appInstallLockFile, 30 * 1000);
+                    LockFile.waitForInstallerLock(instInstallLockFile, 30 * 1000);
+                } else {
+                    // Wait for the browser process to create the optimized dex files
+                    // and copy the library files.
+                    LockFile.waitForInstallerLock(appFirstRunLockFile, 60 * 1000);
+                    LockFile.waitForInstallerLock(instFirstRunLockFile, 60 * 1000);
+                }
+            }
+
+            mClassLoaderPatcher.importNativeLibs(instLibDir);
+            mClassLoaderPatcher.loadDexFiles(instDexDir);
+            if (instPackageNameDiffers) {
+                mClassLoaderPatcher.importNativeLibs(appLibDir);
+                mClassLoaderPatcher.loadDexFiles(appDexDir);
+            }
+
+            if (isFirstRun && mClassLoaderPatcher.mIsPrimaryProcess) {
+                LockFile.clearInstallerLock(appFirstRunLockFile);
+                if (instPackageNameDiffers) {
+                    LockFile.clearInstallerLock(instFirstRunLockFile);
+                }
+            }
+
+            // mInstrumentationAppDir is one of a set of fields that is initialized only when
+            // instrumentation is active.
+            if (Reflect.getField(mActivityThread, "mInstrumentationAppDir") != null) {
+                String realInstrumentationName =
+                        getClassNameFromMetadata(REAL_INSTRUMENTATION_META_DATA_NAME, instContext);
+                initInstrumentation(realInstrumentationName);
+            } else {
+                Log.i(TAG, "No instrumentation active.");
+            }
+
+            // Even when instrumentation is not enabled, ActivityThread uses a default
+            // Instrumentation instance internally. We hook it here in order to hook into the
+            // call to Instrumentation.onCreate().
+            Reflect.setField(mActivityThread, "mInstrumentation",
+                    new BootstrapInstrumentation(this));
+
+            // attachBaseContext() is called from ActivityThread#handleBindApplication() and
+            // Application#mApplication is changed right after we return. Thus, we cannot swap
+            // the Application instances until onCreate() is called.
+            String realApplicationName = getClassNameFromMetadata(REAL_APP_META_DATA_NAME, context);
+            Log.i(TAG, "Instantiating " + realApplicationName);
+            mRealApplication =
+                    (Application) Reflect.newInstance(Class.forName(realApplicationName));
+            Reflect.invokeMethod(mRealApplication, "attachBaseContext", context);
+
+            // Between attachBaseContext() and onCreate(), ActivityThread tries to instantiate
+            // all ContentProviders.
+            // The ContentProviders break without the correct Application
+            // class being installed, so temporarily pretend there are no providers, and then
+            // instantiate them explicitly within onCreate().
+            disableContentProviders();
+            Log.i(TAG, "Waiting for Instrumentation.onCreate");
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    /**
+     * Returns the fully-qualified class name for the given key, stored in a
+     * <meta> within the manifest.
+     */
+    private static String getClassNameFromMetadata(String key, Context context)
+            throws NameNotFoundException {
+        String pkgName = context.getPackageName();
+        ApplicationInfo appInfo = context.getPackageManager().getApplicationInfo(pkgName,
+                PackageManager.GET_META_DATA);
+        String value = appInfo.metaData.getString(key);
+        if (value != null && !value.contains(".")) {
+            value = pkgName + "." + value;
+        }
+        return value;
+    }
+
+    /**
+     * Instantiates and initializes mRealInstrumentation (the real Instrumentation class).
+     */
+    private void initInstrumentation(String realInstrumentationName)
+            throws ReflectiveOperationException {
+        if (realInstrumentationName == null) {
+            // This is the case when an incremental app is used as a target for an instrumentation
+            // test. In this case, ActivityThread can instantiate the proper class just fine since
+            // it exists within the test apk (as opposed to the incremental apk-under-test).
+            Log.i(TAG, "Running with external instrumentation");
+            mRealInstrumentation = mOrigInstrumentation;
+            return;
+        }
+        // For unit tests, the instrumentation class is replaced in the manifest by a build step
+        // because ActivityThread tries to instantiate it before we get a chance to load the
+        // incremental dex files.
+        Log.i(TAG, "Instantiating instrumentation " + realInstrumentationName);
+        mRealInstrumentation = (Instrumentation) Reflect.newInstance(
+                Class.forName(realInstrumentationName));
+
+        // Initialize the fields that are set by Instrumentation.init().
+        String[] initFields = {"mThread", "mMessageQueue", "mInstrContext", "mAppContext",
+                "mWatcher", "mUiAutomationConnection"};
+        for (String fieldName : initFields) {
+            Reflect.setField(mRealInstrumentation, fieldName,
+                    Reflect.getField(mOrigInstrumentation, fieldName));
+        }
+        // But make sure the correct ComponentName is used.
+        ComponentName newName = new ComponentName(
+                mOrigInstrumentation.getComponentName().getPackageName(),
+                realInstrumentationName);
+        Reflect.setField(mRealInstrumentation, "mComponent", newName);
+    }
+
+    /**
+     * Called by BootstrapInstrumentation from Instrumentation.onCreate().
+     * This happens regardless of whether or not instrumentation is enabled.
+     */
+    void onInstrumentationCreate(Bundle arguments) {
+        Log.i(TAG, "Instrumentation.onCreate() called. Swapping references.");
+        try {
+            swapApplicationReferences();
+            enableContentProviders();
+            if (mRealInstrumentation != null) {
+                Reflect.setField(mActivityThread, "mInstrumentation", mRealInstrumentation);
+                mRealInstrumentation.onCreate(arguments);
+            }
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    @Override
+    public void onCreate() {
+        super.onCreate();
+        try {
+            Log.i(TAG, "Application.onCreate() called.");
+            mRealApplication.onCreate();
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    /**
+     * Nulls out ActivityThread.mBoundApplication.providers.
+     */
+    private void disableContentProviders() throws ReflectiveOperationException {
+        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+        mStashedProviderList = Reflect.getField(data, "providers");
+        Reflect.setField(data, "providers", null);
+    }
+
+    /**
+     * Restores the value of ActivityThread.mBoundApplication.providers, and invokes
+     * ActivityThread#installContentProviders().
+     */
+    private void enableContentProviders() throws ReflectiveOperationException {
+        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+        Reflect.setField(data, "providers", mStashedProviderList);
+        if (mStashedProviderList != null && mClassLoaderPatcher.mIsPrimaryProcess) {
+            Log.i(TAG, "Instantiating content providers");
+            Reflect.invokeMethod(mActivityThread, "installContentProviders", mRealApplication,
+                    mStashedProviderList);
+        }
+        mStashedProviderList = null;
+    }
+
+    /**
+     * Changes all fields within framework classes that have stored a reference to this
+     * BootstrapApplication to instead store references to mRealApplication.
+     * @throws NoSuchFieldException
+     */
+    @SuppressWarnings("unchecked")
+    private void swapApplicationReferences() throws ReflectiveOperationException {
+        if (Reflect.getField(mActivityThread, "mInitialApplication") == this) {
+            Reflect.setField(mActivityThread, "mInitialApplication", mRealApplication);
+        }
+
+        List<Application> allApplications =
+                (List<Application>) Reflect.getField(mActivityThread, "mAllApplications");
+        for (int i = 0; i < allApplications.size(); i++) {
+            if (allApplications.get(i) == this) {
+                allApplications.set(i, mRealApplication);
+            }
+        }
+
+        for (String fieldName : new String[] { "mPackages", "mResourcePackages" }) {
+            Map<String, WeakReference<?>> packageMap =
+                    (Map<String, WeakReference<?>>) Reflect.getField(mActivityThread, fieldName);
+            for (Map.Entry<String, WeakReference<?>> entry : packageMap.entrySet()) {
+                Object loadedApk = entry.getValue().get();
+                if (loadedApk != null && Reflect.getField(loadedApk, "mApplication") == this) {
+                    Reflect.setField(loadedApk, "mApplication", mRealApplication);
+                    Reflect.setField(mRealApplication, "mLoadedApk", loadedApk);
+                }
+            }
+        }
+    }
+}
diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
new file mode 100644
index 00000000000..f1974064999
--- /dev/null
+++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
@@ -0,0 +1,25 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+import android.os.Bundle;
+
+/**
+ * Notifies BootstrapApplication of the call to Instrumentation.onCreate().
+ */
+public final class BootstrapInstrumentation extends Instrumentation {
+    private final BootstrapApplication mApp;
+
+    BootstrapInstrumentation(BootstrapApplication app) {
+        mApp = app;
+    }
+
+    @Override
+    public void onCreate(Bundle arguments) {
+        super.onCreate(arguments);
+        mApp.onInstrumentationCreate(arguments);
+    }
+}
diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
new file mode 100644
index 00000000000..c04779b4de3
--- /dev/null
+++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
@@ -0,0 +1,247 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.content.Context;
+import android.os.Build;
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Provides the ability to add native libraries and .dex files to an existing class loader.
+ * Tested with Jellybean MR2 - Marshmallow.
+ */
+final class ClassLoaderPatcher {
+    private static final String TAG = "cr.incrementalinstall";
+    private final File mAppFilesSubDir;
+    private final ClassLoader mClassLoader;
+    private final Object mLibcoreOs;
+    private final int mProcessUid;
+    final boolean mIsPrimaryProcess;
+
+    ClassLoaderPatcher(Context context) throws ReflectiveOperationException {
+        mAppFilesSubDir =
+                new File(context.getApplicationInfo().dataDir, "incremental-install-files");
+        mClassLoader = context.getClassLoader();
+        mLibcoreOs = Reflect.getField(Class.forName("libcore.io.Libcore"), "os");
+        mProcessUid = (Integer) Reflect.invokeMethod(mLibcoreOs, "getuid");
+        mIsPrimaryProcess = context.getApplicationInfo().uid == mProcessUid;
+        Log.i(TAG, "uid=" + mProcessUid + " (isPrimary=" + mIsPrimaryProcess + ")");
+    }
+
+    /**
+     * Loads all dex files within |dexDir| into the app's ClassLoader.
+     */
+    void loadDexFiles(File dexDir) throws ReflectiveOperationException, FileNotFoundException {
+        Log.i(TAG, "Installing dex files from: " + dexDir);
+        File[] dexFilesArr = dexDir.listFiles();
+        if (dexFilesArr == null) {
+            throw new FileNotFoundException("Dex dir does not exist: " + dexDir);
+        }
+        // The optimized dex files will be owned by this process' user.
+        // Store them within the app's data dir rather than on /data/local/tmp
+        // so that they are still deleted (by the OS) when we uninstall
+        // (even on a non-rooted device).
+        File incrementalDexesDir = new File(mAppFilesSubDir, "optimized-dexes");
+        File isolatedDexesDir = new File(mAppFilesSubDir, "isolated-dexes");
+        File optimizedDir;
+
+        if (mIsPrimaryProcess) {
+            ensureAppFilesSubDirExists();
+            // Allows isolated processes to access the same files.
+            incrementalDexesDir.mkdir();
+            incrementalDexesDir.setReadable(true, false);
+            incrementalDexesDir.setExecutable(true, false);
+            // Create a directory for isolated processes to create directories in.
+            isolatedDexesDir.mkdir();
+            isolatedDexesDir.setWritable(true, false);
+            isolatedDexesDir.setExecutable(true, false);
+
+            optimizedDir = incrementalDexesDir;
+        } else {
+            // There is a UID check of the directory in dalvik.system.DexFile():
+            // https://android.googlesource.com/platform/libcore/+/45e0260/dalvik/src/main/java/dalvik/system/DexFile.java#101
+            // Rather than have each isolated process run DexOpt though, we use
+            // symlinks within the directory to point at the browser process'
+            // optimized dex files.
+            optimizedDir = new File(isolatedDexesDir, "isolated-" + mProcessUid);
+            optimizedDir.mkdir();
+            // Always wipe it out and re-create for simplicity.
+            Log.i(TAG, "Creating dex file symlinks for isolated process");
+            for (File f : optimizedDir.listFiles()) {
+                f.delete();
+            }
+            for (File f : incrementalDexesDir.listFiles()) {
+                String to = "../../" + incrementalDexesDir.getName() + "/" + f.getName();
+                File from = new File(optimizedDir, f.getName());
+                createSymlink(to, from);
+            }
+        }
+
+        Log.i(TAG, "Code cache dir: " + optimizedDir);
+        // TODO(agrieve): Might need to record classpath ordering if we ever have duplicate
+        // class names (since then order will matter here).
+        Log.i(TAG, "Loading " + dexFilesArr.length + " dex files");
+
+        Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+        Object[] dexElements = (Object[]) Reflect.getField(dexPathList, "dexElements");
+        dexElements = addDexElements(dexFilesArr, optimizedDir, dexElements);
+        Reflect.setField(dexPathList, "dexElements", dexElements);
+    }
+
+    /**
+     * Sets up all libraries within |libDir| to be loadable by System.loadLibrary().
+     */
+    void importNativeLibs(File libDir) throws ReflectiveOperationException, IOException {
+        Log.i(TAG, "Importing native libraries from: " + libDir);
+        if (!libDir.exists()) {
+            Log.i(TAG, "No native libs exist.");
+            return;
+        }
+        // The library copying is not necessary on older devices, but we do it anyway to
+        // simplify things (it's fast compared to dexing).
+        // https://code.google.com/p/android/issues/detail?id=79480
+        File localLibsDir = new File(mAppFilesSubDir, "lib");
+        File copyLibsLockFile = new File(mAppFilesSubDir, "libcopy.lock");
+        if (mIsPrimaryProcess) {
+            // Primary process: Copies native libraries into the app's data directory.
+            ensureAppFilesSubDirExists();
+            LockFile lockFile = LockFile.acquireRuntimeLock(copyLibsLockFile);
+            if (lockFile == null) {
+                LockFile.waitForRuntimeLock(copyLibsLockFile, 10 * 1000);
+            } else {
+                try {
+                    localLibsDir.mkdir();
+                    localLibsDir.setReadable(true, false);
+                    localLibsDir.setExecutable(true, false);
+                    copyChangedFiles(libDir, localLibsDir);
+                } finally {
+                    lockFile.release();
+                }
+            }
+        } else {
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                // TODO: Work around this issue by using APK splits to install each dex / lib.
+                throw new RuntimeException("Incremental install does not work on Android M+ "
+                        + "with isolated processes. Use the gn arg:\n"
+                        + "    disable_incremental_isolated_processes=true\n"
+                        + "and try again.");
+            }
+            // Other processes: Waits for primary process to finish copying.
+            LockFile.waitForRuntimeLock(copyLibsLockFile, 10 * 1000);
+        }
+        addNativeLibrarySearchPath(localLibsDir);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void addNativeLibrarySearchPath(File nativeLibDir)
+            throws ReflectiveOperationException {
+        Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+        Object currentDirs = Reflect.getField(dexPathList, "nativeLibraryDirectories");
+        File[] newDirs = new File[] { nativeLibDir };
+        // Switched from an array to an ArrayList in Lollipop.
+        if (currentDirs instanceof List) {
+            List<File> dirsAsList = (List<File>) currentDirs;
+            dirsAsList.add(0, nativeLibDir);
+        } else {
+            File[] dirsAsArray = (File[]) currentDirs;
+            Reflect.setField(dexPathList, "nativeLibraryDirectories",
+                    Reflect.concatArrays(newDirs, newDirs, dirsAsArray));
+        }
+
+        Object[] nativeLibraryPathElements;
+        try {
+            nativeLibraryPathElements =
+                    (Object[]) Reflect.getField(dexPathList, "nativeLibraryPathElements");
+        } catch (NoSuchFieldException e) {
+            // This field doesn't exist pre-M.
+            return;
+        }
+        Object[] additionalElements = makeNativePathElements(newDirs);
+        Reflect.setField(dexPathList, "nativeLibraryPathElements",
+                Reflect.concatArrays(nativeLibraryPathElements, additionalElements,
+                        nativeLibraryPathElements));
+    }
+
+    private static void copyChangedFiles(File srcDir, File dstDir) throws IOException {
+        // No need to delete stale libs since libraries are loaded explicitly.
+        int numNotChanged = 0;
+        for (File f : srcDir.listFiles()) {
+            // Note: Tried using hardlinks, but resulted in EACCES exceptions.
+            File dest = new File(dstDir, f.getName());
+            if (!copyIfModified(f, dest)) {
+                numNotChanged++;
+            }
+        }
+        if (numNotChanged > 0) {
+            Log.i(TAG, numNotChanged + " libs already up to date.");
+        }
+    }
+
+    private static boolean copyIfModified(File src, File dest) throws IOException {
+        long lastModified = src.lastModified();
+        if (dest.exists() && dest.lastModified() == lastModified) {
+            return false;
+        }
+        Log.i(TAG, "Copying " + src + " -> " + dest);
+        FileInputStream istream = new FileInputStream(src);
+        FileOutputStream ostream = new FileOutputStream(dest);
+        ostream.getChannel().transferFrom(istream.getChannel(), 0, istream.getChannel().size());
+        istream.close();
+        ostream.close();
+        dest.setReadable(true, false);
+        dest.setExecutable(true, false);
+        dest.setLastModified(lastModified);
+        return true;
+    }
+
+    private void ensureAppFilesSubDirExists() {
+        mAppFilesSubDir.mkdir();
+        mAppFilesSubDir.setExecutable(true, false);
+    }
+
+    private void createSymlink(String to, File from) throws ReflectiveOperationException {
+        Reflect.invokeMethod(mLibcoreOs, "symlink", to, from.getAbsolutePath());
+    }
+
+    private static Object[] makeNativePathElements(File[] paths)
+            throws ReflectiveOperationException {
+        Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+        Object[] entries = new Object[paths.length];
+        for (int i = 0; i < paths.length; ++i) {
+            entries[i] = Reflect.newInstance(entryClazz, paths[i], true, null, null);
+        }
+        return entries;
+    }
+
+    private Object[] addDexElements(File[] files, File optimizedDirectory, Object[] curDexElements)
+            throws ReflectiveOperationException {
+        Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+        Class<?> clazz = Class.forName("dalvik.system.DexPathList");
+        Object[] ret =
+                Reflect.concatArrays(curDexElements, curDexElements, new Object[files.length]);
+        File emptyDir = new File("");
+        for (int i = 0; i < files.length; ++i) {
+            File file = files[i];
+            Object dexFile;
+            if (Build.VERSION.CODENAME.equals("N")
+                    || Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
+                // loadDexFile requires that ret contain all previously added elements.
+                dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory,
+                        mClassLoader, ret);
+            } else {
+                dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory);
+            }
+            ret[curDexElements.length + i] =
+                    Reflect.newInstance(entryClazz, emptyDir, false, file, dexFile);
+        }
+        return ret;
+    }
+}
diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
new file mode 100644
index 00000000000..6e48f3b1ea7
--- /dev/null
+++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
@@ -0,0 +1,129 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileLock;
+import java.util.concurrent.Callable;
+
+/**
+ * Helpers for dealing with .lock files used during install / first run.
+ */
+final class LockFile {
+    private static final String TAG = "cr.incrementalinstall";
+
+    private final File mFile;
+    private final FileOutputStream mOutputStream;
+    private final FileLock mFileLock;
+
+    private LockFile(File file, FileOutputStream outputStream, FileLock fileLock) {
+        mFile = file;
+        mOutputStream = outputStream;
+        mFileLock = fileLock;
+    }
+
+    /**
+     * Clears the lock file by writing to it (making it non-zero in length).
+     */
+    static void clearInstallerLock(File lockFile) throws IOException {
+        Log.i(TAG, "Clearing " + lockFile);
+        // On Android M+, we can't delete files in /data/local/tmp, so we write to it instead.
+        FileOutputStream os = new FileOutputStream(lockFile);
+        os.write(1);
+        os.close();
+    }
+
+    /**
+     * Waits for the given file to be non-zero in length.
+     */
+    static void waitForInstallerLock(final File file, long timeoutMs) {
+        pollingWait(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return !installerLockExists(file);
+            }
+        }, file, timeoutMs);
+    }
+
+    /**
+     * Polls until the given callable returns true, or the timeout expires.
+     */
+    private static void pollingWait(Callable<Boolean> func, File file, long timeoutMs) {
+        long pollIntervalMs = 200;
+        for (int i = 0; i < timeoutMs / pollIntervalMs; i++) {
+            try {
+                if (func.call()) {
+                    if (i > 0) {
+                        Log.i(TAG, "Finished waiting on lock file: " + file);
+                    }
+                    return;
+                } else if (i == 0) {
+                    Log.i(TAG, "Waiting on lock file: " + file);
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            try {
+                Thread.sleep(pollIntervalMs);
+            } catch (InterruptedException e) {
+                // Should never happen.
+            }
+        }
+        throw new RuntimeException("Timed out waiting for lock file: " + file);
+    }
+
+    /**
+     * Returns whether the given lock file is missing or is in the locked state.
+     */
+    static boolean installerLockExists(File file) {
+        return !file.exists() || file.length() == 0;
+    }
+
+    /**
+     * Attempts to acquire a lock for the given file.
+     * @return Returns the FileLock if it was acquired, or null otherwise.
+     */
+     */
+    static LockFile acquireRuntimeLock(File file) {
+        try {
+            FileOutputStream outputStream = new FileOutputStream(file);
+            FileLock lock = outputStream.getChannel().tryLock();
+            if (lock != null) {
+                Log.i(TAG, "Created lock file: " + file);
+                return new LockFile(file, outputStream, lock);
+            }
+            outputStream.close();
+        } catch (IOException e) {
+            // We didn't get the lock; log and fall through to return null.
+            Log.w(TAG, "Exception trying to acquire lock " + file, e);
+        }
+        return null;
+    }
+
+    /**
+     * Waits for the given file to not exist.
+     */
+    static void waitForRuntimeLock(final File file, long timeoutMs) {
+        pollingWait(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return !file.exists();
+            }
+        }, file, timeoutMs);
+    }
+
+    /**
+     * Releases and deletes the lock file.
+     */
+    void release() throws IOException {
+        Log.i(TAG, "Deleting lock file: " + mFile);
+        mFileLock.release();
+        mOutputStream.close();
+        if (!mFile.delete()) {
+            throw new IOException("Failed to delete lock file: " + mFile);
+        }
+    }
+} diff --git a/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java b/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java new file mode 100644 index 00000000000..c64dc1e8a31 --- /dev/null +++ b/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java @@ -0,0 +1,142 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+
+/**
+ * Reflection helper methods.
+ */
+final class Reflect {
+    /**
+     * Sets the value of an object's field (even if it's not visible).
+     *
+     * @param instance The object containing the field to set.
+     * @param name The name of the field to set.
+     * @param value The new value for the field.
+     */
+    static void setField(Object instance, String name, Object value)
+            throws ReflectiveOperationException {
+        Field field = findField(instance, name);
+        field.setAccessible(true);
+        field.set(instance, value);
+    }
+
+    /**
+     * Retrieves the value of an object's field (even if it's not visible).
+     *
+     * @param instance The object containing the field to read.
+     * @param name The name of the field to read.
+     * @return The field's value. Primitive values are returned as their boxed
+     *         type.
+     */
+    static Object getField(Object instance, String name) throws ReflectiveOperationException {
+        Field field = findField(instance, name);
+        field.setAccessible(true);
+        return field.get(instance);
+    }
+
+    /**
+     * Concatenates two arrays into a new array. The arrays must be of the same
+     * type.
+     */
+    static Object[] concatArrays(Object[] arrType, Object[] left, Object[] right) {
+        Object[] result = (Object[]) Array.newInstance(
+                arrType.getClass().getComponentType(), left.length + right.length);
+        System.arraycopy(left, 0, result, 0, left.length);
+        System.arraycopy(right, 0, result, left.length, right.length);
+        return result;
+    }
+
+    /**
+     * Invokes a method with zero or more parameters. For static methods, use the Class as the
+     * instance.
+     */
+    static Object invokeMethod(Object instance, String name, Object... params)
+            throws ReflectiveOperationException {
+        boolean isStatic = instance instanceof Class;
+        Class<?> clazz = isStatic ?
(Class<?>) instance : instance.getClass();
+        Method method = findMethod(clazz, name, params);
+        method.setAccessible(true);
+        return method.invoke(instance, params);
+    }
+
+    /**
+     * Calls a constructor with zero or more parameters.
+     */
+    static Object newInstance(Class<?> clazz, Object... params)
+            throws ReflectiveOperationException {
+        Constructor<?> constructor = findConstructor(clazz, params);
+        constructor.setAccessible(true);
+        return constructor.newInstance(params);
+    }
+
+    private static Field findField(Object instance, String name) throws NoSuchFieldException {
+        boolean isStatic = instance instanceof Class;
+        Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+        for (; clazz != null; clazz = clazz.getSuperclass()) {
+            try {
+                return clazz.getDeclaredField(name);
+            } catch (NoSuchFieldException e) {
+                // Need to look in the super class.
+            }
+        }
+        throw new NoSuchFieldException("Field " + name + " not found in " + instance.getClass());
+    }
+
+    private static Method findMethod(Class<?> clazz, String name, Object... params)
+            throws NoSuchMethodException {
+        for (; clazz != null; clazz = clazz.getSuperclass()) {
+            for (Method method : clazz.getDeclaredMethods()) {
+                if (method.getName().equals(name)
+                        && areParametersCompatible(method.getParameterTypes(), params)) {
+                    return method;
+                }
+            }
+        }
+        throw new NoSuchMethodException("Method " + name + " with parameters "
+                + Arrays.asList(params) + " not found in " + clazz);
+    }
+
+    private static Constructor<?> findConstructor(Class<?> clazz, Object... params)
+            throws NoSuchMethodException {
+        for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
+            if (areParametersCompatible(constructor.getParameterTypes(), params)) {
+                return constructor;
+            }
+        }
+        throw new NoSuchMethodException("Constructor with parameters " + Arrays.asList(params)
+                + " not found in " + clazz);
+    }
+
+    private static boolean areParametersCompatible(Class<?>[] paramTypes, Object... params) {
+        if (params.length != paramTypes.length) {
+            return false;
+        }
+        for (int i = 0; i < params.length; i++) {
+            if (!isAssignableFrom(paramTypes[i], params[i])) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    private static boolean isAssignableFrom(Class<?> left, Object right) {
+        if (right == null) {
+            return !left.isPrimitive();
+        }
+        Class<?> rightClazz = right.getClass();
+        if (left.isPrimitive()) {
+            // TODO(agrieve): Fill in the rest as needed.
+            return left == boolean.class && rightClazz == Boolean.class
+                    || left == int.class && rightClazz == Integer.class;
+        }
+        return left.isAssignableFrom(rightClazz);
+    }
+} diff --git a/build/android/insert_chromium_version.gypi b/build/android/insert_chromium_version.gypi new file mode 100644 index 00000000000..a6ff9081086 --- /dev/null +++ b/build/android/insert_chromium_version.gypi @@ -0,0 +1,53 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# inserts a chromium version string into native libraries.
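+#
+# For reference, the action below expands to an invocation along these lines
+# (paths and version string illustrative):
+#   python build/android/gyp/insert_chromium_version.py \
+#     --android-objcopy=/path/to/objcopy \
+#     --stripped-libraries-dir=out/Release/lib.stripped \
+#     --libraries=@FileArg(libraries.json:libraries) \
+#     --version-string=50.0.2661.0 \
+#     --stamp=out/Release/insert_version.stamp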
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'action_name': 'insert_chromium_version',
+#    'actions': [
+#      'variables': {
+#        'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#        'stripped_libraries_dir': 'the directory containing the native libraries',
+#        'input_paths': 'files to be added to the list of inputs',
+#        'stamp': 'file to touch when the action is complete',
+#        'version_string': 'chromium version string to be inserted',
+#      },
+#      'includes': [ '../../build/android/insert_chromium_version.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'message': 'Inserting chromium version string into native libraries',
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/insert_chromium_version.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/insert_chromium_version.py',
+    '--android-objcopy=<(android_objcopy)',
+    '--stripped-libraries-dir=<(stripped_libraries_dir)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--version-string=<(version_string)',
+    '--stamp=<(stamp)',
+  ],
+  'conditions': [
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+} diff --git a/build/android/install_emulator_deps.py b/build/android/install_emulator_deps.py new file mode 100644 index 00000000000..acd2093b1fc --- /dev/null +++ b/build/android/install_emulator_deps.py @@ -0,0 +1,318 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs deps for using SDK emulator for testing.
+
+The script will download the SDK and system images if they are not present,
+and install and enable KVM if virtualization has been enabled in the BIOS.
+"""
+
+
+import logging
+import optparse
+import os
+import re
+import sys
+
+import devil_chromium
+from devil.utils import cmd_helper
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib import pexpect
+
+# Android API level
+DEFAULT_ANDROID_API_LEVEL = constants.ANDROID_SDK_VERSION
+# Android ABI/Arch
+DEFAULT_ABI = 'x86'
+
+# Default timeouts for downloading SDK components
+DOWNLOAD_SYSTEM_IMAGE_TIMEOUT = 30
+DOWNLOAD_SDK_PLATFORM_TIMEOUT = 60
+
+def CheckSDK():
+  """Check if the SDK is already installed.
+
+  Returns:
+    True if the Android SDK root directory (constants.ANDROID_SDK_ROOT)
+    exists.
+  """
+  return os.path.exists(constants.ANDROID_SDK_ROOT)
+
+
+def CheckSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL, google=False):
+  """Check if the "SDK Platform" for the specified API level is installed.
+  This is necessary in order for the emulator to run when the target
+  is specified.
+
+  Args:
+    api_level: the Android API level to check; defaults to the latest API.
+    google: use Google build system image instead of AOSP build
+
+  Returns:
+    True if the platform is already installed.
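+
+  For example, with api_level=23 a matching 'android list' output line looks
+  like 'id: 9 or "android-23"', or 'id: 10 or "Google Inc.:Google APIs:23"'
+  when google is True (the numeric id varies with the local SDK install).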
+  """
+  android_binary = os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'android')
+  if google:
+    pattern = re.compile('id: [0-9]+ or "Google Inc.:Google APIs:%s"' %
+                         api_level)
+  else:
+    pattern = re.compile('id: [0-9]+ or "android-%d"' % api_level)
+
+  try:
+    exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(
+        [android_binary, 'list'])
+    if exit_code != 0:
+      raise Exception('\'android list\' command failed')
+    for line in stdout.split('\n'):
+      if pattern.match(line):
+        return True
+    return False
+  except OSError:
+    logging.exception('Unable to execute \'android list\'')
+    return False
+
+
+def CheckSystemImage(abi, api_level=DEFAULT_ANDROID_API_LEVEL, google=False):
+  """Check if Android system images have been installed.
+
+  Args:
+    abi: target abi, x86 or arm
+    api_level: the Android API level to check for; defaults to the latest API.
+    google: use Google build system image instead of AOSP build
+
+  Returns:
+    True if the system image has been previously downloaded.
+  """
+  api_target = 'android-%d' % api_level
+  system_image_root = os.path.join(constants.ANDROID_SDK_ROOT,
+                                   'system-images', api_target)
+  if abi == 'x86':
+    if google:
+      return os.path.exists(os.path.join(system_image_root, 'google_apis',
+                                         'x86'))
+    else:
+      return os.path.exists(os.path.join(system_image_root, 'default', 'x86'))
+  elif abi == 'arm':
+    if google:
+      return os.path.exists(os.path.join(system_image_root, 'google_apis',
+                                         'armeabi-v7a'))
+    else:
+      return os.path.exists(os.path.join(system_image_root, 'default',
+                                         'armeabi-v7a'))
+  else:
+    raise Exception('abi option is invalid')
+
+def CheckKVM():
+  """Quickly check whether KVM is enabled.
+
+  Returns:
+    True iff /dev/kvm exists (Linux only).
+  """
+  return os.path.exists('/dev/kvm')
+
+def RunKvmOk():
+  """Run kvm-ok as root to check that KVM is properly enabled after
+  installation of the required packages.
+
+  Returns:
+    True iff KVM is enabled (/dev/kvm exists). On failure, returns False
+    but also prints detailed information explaining why KVM isn't enabled
+    (e.g. CPU doesn't support it, or BIOS disabled it).
+  """
+  try:
+    # Note: kvm-ok is in /usr/sbin, so always use 'sudo' to run it.
+    return not cmd_helper.RunCmd(['sudo', 'kvm-ok'])
+  except OSError:
+    logging.info('kvm-ok not installed')
+    return False
+
+
+def InstallKVM():
+  """Installs KVM packages."""
+  rc = cmd_helper.RunCmd(['sudo', 'apt-get', 'install', 'kvm'])
+  if rc:
+    logging.critical('ERROR: Did not install KVM. Make sure hardware '
+                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
+                     'AMD SVM).')
+  # TODO(navabi): Use modprobe kvm-amd on AMD processors.
+  rc = cmd_helper.RunCmd(['sudo', 'modprobe', 'kvm-intel'])
+  if rc:
+    logging.critical('ERROR: Did not add KVM module to Linux Kernel. Make '
+                     'sure hardware virtualization is enabled in BIOS.')
+  # Now check to ensure KVM acceleration can be used.
+  if not RunKvmOk():
+    logging.critical('ERROR: Can not use KVM acceleration. Make sure hardware '
+                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
+                     'AMD SVM).')
+
+
+def UpdateSDK(api_level, package_name, package_pattern, timeout):
+  """Updates the SDK using a filter index.
+
+  Args:
+    api_level: the Android API level to download for.
+    package_name: logging name of the package that is being updated.
+    package_pattern: the pattern to match the filter index from.
+    timeout: the amount of time to wait for the update command.
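+
+  For example, if 'android list sdk --all' prints a line such as
+  '9- SDK Platform Android 6.0, API 23, revision 3' (illustrative output),
+  the captured filter index is 9 and the resulting command is roughly
+  'android update sdk --no-ui --all --filter 9'.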
+  """
+  android_binary = os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'android')
+
+  list_sdk_repo_command = [android_binary, 'list', 'sdk', '--all']
+
+  exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(list_sdk_repo_command)
+
+  if exit_code != 0:
+    raise Exception('\'android list sdk --all\' command returned %d' %
+                    exit_code)
+
+  for line in stdout.split('\n'):
+    match = package_pattern.match(line)
+    if match:
+      index = match.group(1)
+      logging.info('package %s corresponds to %s with api level %d',
+                   index, package_name, api_level)
+      update_command = [android_binary, 'update', 'sdk', '--no-ui', '--all',
+                        '--filter', index]
+      update_command_str = ' '.join(update_command)
+      logging.info('running update command: %s', update_command_str)
+      update_process = pexpect.spawn(update_command_str)
+
+      if update_process.expect('Do you accept the license') != 0:
+        raise Exception('License agreement check failed')
+      update_process.sendline('y')
+      if update_process.expect(
+          'Done. 1 package installed.', timeout=timeout) == 0:
+        logging.info('Successfully installed %s for API level %d',
+                     package_name, api_level)
+        return
+      else:
+        raise Exception('Failed to install platform update')
+  raise Exception('Could not find android-%d update for the SDK!' % api_level)

+def GetSystemImage(abi, api_level=DEFAULT_ANDROID_API_LEVEL, google=False):
+  """Downloads system image files.
+
+  Args:
+    abi: target abi, x86 or arm
+    api_level: the Android API level to download for.
+    google: use Google build system image instead of AOSP build
+  """
+  logging.info('Downloading the system image into the SDK directory.')
+
+  if abi == 'x86':
+    if google:
+      package_name = 'Google Intel x86 Atom System Image'
+      pattern = re.compile(
+          r'\s*([0-9]+)- Google APIs Intel x86 Atom System Image, Google Inc.'
+          ' API %d.*' % api_level)
+    else:
+      package_name = 'Intel x86 system image'
+      pattern = re.compile(
+          r'\s*([0-9]+)- Intel x86 Atom System Image, Android API %d.*'
+          % api_level)
+  elif abi == 'arm':
+    if google:
+      package_name = 'Google arm system image'
+      pattern = re.compile(
+          r'\s*([0-9]+)- Google APIs ARM EABI v7a System Image, Google Inc. API '
+          '%d.*' % api_level)
+    else:
+      package_name = 'Android arm system image'
+      pattern = re.compile(
+          r'\s*([0-9]+)- ARM EABI v7a System Image, Android API %d.*' % api_level)
+  else:
+    raise Exception('abi option is invalid')
+
+  UpdateSDK(api_level, package_name, pattern, DOWNLOAD_SYSTEM_IMAGE_TIMEOUT)

+def GetSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL, google=False):
+  """Update the SDK to include the platform specified.
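+
+  When google is True, this installs both the base 'SDK Platform' package and
+  the matching 'Google APIs' add-on for the requested API level.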
+
+  Args:
+    api_level: the Android API level to download
+    google: use Google build system image instead of AOSP build
+  """
+  logging.info('Downloading the SDK Platform into the SDK directory.')
+
+  platform_package_pattern = re.compile(
+      r'\s*([0-9]+)- SDK Platform Android [\.,0-9]+, API %d.*' % api_level)
+
+  UpdateSDK(api_level, 'SDK Platform', platform_package_pattern,
+            DOWNLOAD_SDK_PLATFORM_TIMEOUT)
+
+  if google:
+    google_api_package_pattern = re.compile(
+        r'\s*([0-9]+)- Google APIs, Android API %d.*' % api_level)
+    UpdateSDK(api_level, 'Google APIs', google_api_package_pattern,
+              DOWNLOAD_SDK_PLATFORM_TIMEOUT)
+
+
+def main(argv):
+  opt_parser = optparse.OptionParser(
+      description='Install dependencies for running the Android emulator')
+  opt_parser.add_option('--abi',
+                        dest='abi',
+                        help='The targeted abi for emulator system image',
+                        type='string',
+                        default=DEFAULT_ABI)
+  opt_parser.add_option('--api-level',
+                        dest='api_level',
+                        help=('The API level (e.g., 19 for Android 4.4) to '
+                              'ensure is available'),
+                        type='int',
+                        default=DEFAULT_ANDROID_API_LEVEL)
+  opt_parser.add_option('-v',
+                        dest='verbosity',
+                        default=1,
+                        action='count',
+                        help='Verbose level (multiple times for more)')
+  opt_parser.add_option('--google',
+                        dest='google',
+                        action='store_true',
+                        default=False,
+                        help='Install Google System Image instead of AOSP')
+
+  options, _ = opt_parser.parse_args(argv[1:])
+
+  run_tests_helper.SetLogLevel(verbose_count=options.verbosity)
+
+  devil_chromium.Initialize()
+
+  # Calls below will download emulator SDK and/or system images only if needed.
+  if CheckSDK():
+    logging.info('android_emulator_sdk/ exists')
+  else:
+    logging.critical('ERROR: Emulator SDK not installed in %s',
+                     constants.ANDROID_SDK_ROOT)
+    return 1
+
+  # Check target. The target has to be installed in order to run the emulator.
+  if CheckSDKPlatform(options.api_level, options.google):
+    logging.info('SDK platform %s %s android-%d already present, skipping.',
+                 'Google' if options.google else 'AOSP', options.abi,
+                 options.api_level)
+  else:
+    logging.info('SDK platform %s %s android-%d not present, installing.',
+                 'Google' if options.google else 'AOSP', options.abi,
+                 options.api_level)
+    GetSDKPlatform(options.api_level, options.google)
+
+  # Download the system image needed
+  if CheckSystemImage(options.abi, options.api_level, options.google):
+    logging.info('system image for %s %s android-%d already present, skipping.',
+                 'Google' if options.google else 'AOSP', options.abi,
+                 options.api_level)
+  else:
+    GetSystemImage(options.abi, options.api_level, options.google)
+
+  # Make sure KVM packages are installed and enabled.
+  if options.abi == 'x86':
+    if CheckKVM():
+      logging.info('KVM already installed and enabled.')
+    else:
+      logging.warning('KVM is not installed or enabled.')
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv)) diff --git a/build/android/java_cpp_enum.gypi b/build/android/java_cpp_enum.gypi new file mode 100644 index 00000000000..d4abafa812f --- /dev/null +++ b/build/android/java_cpp_enum.gypi @@ -0,0 +1,64 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide an action
+# to generate Java source files from a C++ header file containing annotated
+# enum definitions using a Python script.
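+#
+# For example (illustrative), a C++ header annotated like this:
+#   // A Java counterpart will be generated for this enum.
+#   // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.ui
+#   enum BitmapFormat { BITMAP_FORMAT_NO_CONFIG, BITMAP_FORMAT_ARGB_8888 };
+# produces a BitmapFormat.java exposing matching integer constants.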
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'bitmap_format_java',
+#    'type': 'none',
+#    'variables': {
+#      'source_file': 'ui/android/bitmap_format.h',
+#    },
+#    'includes': [ '../build/android/java_cpp_enum.gypi' ],
+#  },
+#
+# Then have the gyp target which compiles the java code depend on the newly
+# created target.
+
+{
+  'variables': {
+    # Location where all generated Java sources will be placed.
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/enums/<(_target_name)',
+    'generator_path': '<(DEPTH)/build/android/gyp/java_cpp_enum.py',
+    'generator_args': '<(output_dir) <(source_file)',
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      # Ensure that the output directory is used in the class path
+      # when building targets that depend on this one.
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+      # Ensure that the targets depending on this one are rebuilt if the
+      # sources of this one are modified.
+      'additional_input_paths': [
+        '<(source_file)',
+      ],
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'generate_java_constants',
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(generator_path)',
+        '<(source_file)',
+      ],
+      'outputs': [
+        # This is the main reason this is an action and not a rule. Gyp doesn't
+        # properly expand RULE_INPUT_PATH here and so it's impossible to
+        # calculate the list of outputs.
+        '<!@pymod_do_main(java_cpp_enum <(generator_args))',
+      ],
+      'action': [
+        'python', '<(generator_path)', '<@(generator_args)',
+      ],
+      'message': 'Generating Java from cpp header <(source_file)',
+    },
+  ],
+} diff --git a/build/android/java_cpp_template.gypi b/build/android/java_cpp_template.gypi new file mode 100644 --- /dev/null +++ b/build/android/java_cpp_template.gypi
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate Java source files from templates that are processed
+# through the host C pre-processor.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'android_net_java_constants',
+#    'type': 'none',
+#    'sources': [
+#      'net/android/NetError.template',
+#    ],
+#    'variables': {
+#      'package_name': 'org/chromium/net',
+#      'template_deps': ['net/base/net_error_list.h'],
+#    },
+#    'includes': [ '../build/android/java_cpp_template.gypi' ],
+#  },
+#
+# The 'sources' entry should only list template files. Each template should
+# use the 'ClassName.template' format, and will generate
+# 'gen/templates/<package-name>/ClassName.java'. The files which the template
+# depends on, and which are typically included by the template, should be
+# listed in the template_deps variable. Any change to them will force a
+# rebuild of the template, and hence of any source that depends on it.
+#
+
+{
+  # Location where all generated Java sources will be placed.
+  'variables': {
+    'include_path%': '<(DEPTH)',
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/templates/<(_target_name)/<(package_name)',
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      # Ensure that the output directory is used in the class path
+      # when building targets that depend on this one.
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+      # Ensure dependents are rebuilt when sources for this rule change.
+      'additional_input_paths': [
+        '<@(_sources)',
+        '<@(template_deps)',
+      ],
+    },
+  },
+  # Define a single rule that will be applied to each .template file
+  # listed in 'sources'.
+  'rules': [
+    {
+      'rule_name': 'generate_java_constants',
+      'extension': 'template',
+      # Set template_deps as additional dependencies.
+      'variables': {
+        'output_path': '<(output_dir)/<(RULE_INPUT_ROOT).java',
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+        '<@(template_deps)'
+      ],
+      'outputs': [
+        '<(output_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+        '--include-path=<(include_path)',
+        '--output=<(output_path)',
+        '--template=<(RULE_INPUT_PATH)',
+      ],
+      'message': 'Generating Java from cpp template <(RULE_INPUT_PATH)',
+    }
+  ],
+} diff --git a/build/android/java_google_api_keys.gyp b/build/android/java_google_api_keys.gyp new file mode 100644 index 00000000000..df046b6da78 --- /dev/null +++ b/build/android/java_google_api_keys.gyp @@ -0,0 +1,45 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file provides an action to generate Java source files from the Google
+# API keys using a Python script.
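+#
+# The generated file exposes each key to Java callers; its shape is roughly
+# as follows (illustrative, not the exact generated source):
+#   public class GoogleAPIKeys {
+#       private GoogleAPIKeys() {}
+#       public static String getGoogleAPIKey() { return "..."; }
+#   }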
+ +{ + 'targets': [ + { + 'target_name': 'google_api_keys_java', + 'type': 'none', + 'variables': { + # Location where all generated Java sources will be placed. + 'output_dir': '<(SHARED_INTERMEDIATE_DIR)/java_google_api_keys', + 'generator_path': '<(DEPTH)/build/android/gyp/java_google_api_keys.py', + 'output_file': '<(output_dir)/GoogleAPIKeys.java', + }, + 'direct_dependent_settings': { + 'variables': { + # Ensure that the output directory is used in the class path + # when building targets that depend on this one. + 'generated_src_dirs': [ + '<(output_dir)/', + ], + }, + }, + 'actions': [ + { + 'action_name': 'generate_java_google_api_keys', + 'inputs': [ + '<(generator_path)', + ], + 'outputs': [ + '<(output_file)', + ], + 'action': [ + 'python', '<(generator_path)', '--out', '<(output_file)' + ], + 'message': 'Generating Java from Google API Keys header', + }, + ], + }, + ], +} diff --git a/build/android/jinja_template.gypi b/build/android/jinja_template.gypi new file mode 100644 index 00000000000..a893e9ba474 --- /dev/null +++ b/build/android/jinja_template.gypi @@ -0,0 +1,85 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to process one or more +# Jinja templates. +# +# To process a single template file, create a gyp target with the following +# form: +# { +# 'target_name': 'chrome_public_manifest', +# 'type': 'none', +# 'variables': { +# 'jinja_inputs': ['android/java/AndroidManifest.xml'], +# 'jinja_output': '<(SHARED_INTERMEDIATE_DIR)/chrome_public_manifest/AndroidManifest.xml', +# 'jinja_variables': ['app_name=ChromePublic'], +# }, +# 'includes': [ '../build/android/jinja_template.gypi' ], +# }, +# +# To process multiple template files and package the results into a zip file, +# create a gyp target with the following form: +# { +# 'target_name': 'chrome_template_resources', +# 'type': 'none', +# 'variables': { +# 'jinja_inputs_base_dir': 'android/java/res_template', +# 'jinja_inputs': [ +# '<(jinja_inputs_base_dir)/xml/searchable.xml', +# '<(jinja_inputs_base_dir)/xml/syncadapter.xml', +# ], +# 'jinja_outputs_zip': '<(PRODUCT_DIR)/res.java/<(_target_name).zip', +# 'jinja_variables': ['app_name=ChromePublic'], +# }, +# 'includes': [ '../build/android/jinja_template.gypi' ], +# }, +# + +{ + 'actions': [ + { + 'action_name': '<(_target_name)_jinja_template', + 'message': 'processing jinja template', + 'variables': { + 'jinja_output%': '', + 'jinja_outputs_zip%': '', + 'jinja_inputs_base_dir%': '', + 'jinja_includes%': [], + 'jinja_variables%': [], + 'jinja_args': ['--loader-base-dir=<(DEPTH)'], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/jinja_template.py', + '<@(jinja_inputs)', + '<@(jinja_includes)', + ], + 'conditions': [ + ['jinja_output != ""', { + 'outputs': [ '<(jinja_output)' ], + 'variables': { + 'jinja_args': ['--output', '<(jinja_output)'], + }, + }], + ['jinja_outputs_zip != ""', { + 'outputs': [ '<(jinja_outputs_zip)' ], + 'variables': { + 'jinja_args': ['--outputs-zip', '<(jinja_outputs_zip)'], + }, + }], + ['jinja_inputs_base_dir != ""', { + 'variables': { + 'jinja_args': ['--inputs-base-dir', '<(jinja_inputs_base_dir)'], + }, + }], + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/jinja_template.py', + '--inputs', '<(jinja_inputs)', + '--variables', '<(jinja_variables)', + '<@(jinja_args)', + ], + }, + ], +} diff --git 
a/build/android/lighttpd_server.py b/build/android/lighttpd_server.py new file mode 100644 index 00000000000..5c2dde81ee9 --- /dev/null +++ b/build/android/lighttpd_server.py @@ -0,0 +1,256 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+  lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+  """Wraps lighttpd server, providing robust startup.
+
+  Args:
+    document_root: Path to root of this server's hosted files.
+    port: TCP port on the _host_ machine that the server will listen on. If
+        omitted it will attempt to use 9000, or if unavailable it will find
+        a free port from 8001 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+        lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+        base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+        place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  @staticmethod
+  def _GetRandomPort():
+    # The port range for the test server is defined in constants.py.
+    return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+                          constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # If we want a specific port, make sure no one else is listening on it.
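+    # If the port is not fixed, the loop below falls back to random ports
+    # until it finds one that is free.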
+    if self.fixed_port:
+      self._KillProcessListeningOnPort(self.fixed_port)
+    while True:
+      if self.base_config_path:
+        # Read the config
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of |self|
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
+        raise EnvironmentError(
+            'Could not find lighttpd at %s.\n'
+            'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+            % self.lighttpd_path)
+      self.process = pexpect.spawn(self.lighttpd_path,
+                                   ['-D', '-f', self.config_path,
+                                    '-m', self.lighttpd_module_path],
+                                   cwd=self.temp_dir)
+      client_error, server_error = self._TestServerConnection()
+      if not client_error:
+        assert int(open(self.pid_file, 'r').read()) == self.process.pid
+        break
+      self.process.close()
+
+      if self.fixed_port or 'in use' not in server_error:
+        print 'Client error:', client_error
+        print 'Server error:', server_error
+        return False
+      self.port = self._GetRandomPort()
+    return True
+
+  def ShutdownHttpServer(self):
+    """Shuts down our lighttpd processes."""
+    if self.process:
+      self.process.terminate()
+    shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+  def _TestServerConnection(self):
+    # Wait for server to start
+    server_msg = ''
+    for timeout in xrange(1, 5):
+      client_error = None
+      try:
+        with contextlib.closing(httplib.HTTPConnection(
+            '127.0.0.1', self.port, timeout=timeout)) as http:
+          http.set_debuglevel(timeout > 3)
+          http.request('HEAD', '/')
+          r = http.getresponse()
+          r.read()
+          if (r.status == 200 and r.reason == 'OK' and
+              r.getheader('Server') == self.server_tag):
+            return (None, server_msg)
+          client_error = ('Bad response: %s %s version %s\n  ' %
+                          (r.status, r.reason, r.version) +
+                          '\n  '.join([': '.join(h) for h in r.getheaders()]))
+      except (httplib.HTTPException, socket.error) as client_error:
+        pass  # Probably too quick connecting: try again
+      # Check for server startup error messages
+      ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+                               timeout=timeout)
+      if ix == 2:  # stdout spew from the server
+        server_msg += self.process.match.group(0)  # pylint: disable=no-member
+      elif ix == 1:  # EOF -- server has quit so give up.
+        client_error = client_error or 'Server exited'
+        break
+    return (client_error or 'Timeout', server_msg)
+
+  @staticmethod
+  def _KillProcessListeningOnPort(port):
+    """Checks if there is a process listening on port number |port| and
+    terminates it if found.
+
+    Args:
+      port: Port number to check.
+    """
+    if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+      # Give the process some time to terminate and check that it is gone.
+      time.sleep(2)
+      assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+          'Unable to kill process listening on port %d.' % port
+
+  @staticmethod
+  def _GetDefaultBaseConfig():
+    return """server.tag = "%(server_tag)s"
+server.modules = ( "mod_access",
+                   "mod_accesslog",
+                   "mod_alias",
+                   "mod_cgi",
+                   "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names = ( "index.php", "index.pl", "index.cgi",
+                     "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign = (
+  ".gif" => "image/gif",
+  ".jpg" => "image/jpeg",
+  ".jpeg" => "image/jpeg",
+  ".png" => "image/png",
+  ".svg" => "image/svg+xml",
+  ".css" => "text/css",
+  ".html" => "text/html",
+  ".htm" => "text/html",
+  ".xhtml" => "application/xhtml+xml",
+  ".xhtmlmp" => "application/vnd.wap.xhtml+xml",
+  ".js" => "application/x-javascript",
+  ".log" => "text/plain",
+  ".conf" => "text/plain",
+  ".text" => "text/plain",
+  ".txt" => "text/plain",
+  ".dtd" => "text/xml",
+  ".xml" => "text/xml",
+  ".manifest" => "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr = "enable"
+
+##
+# which extensions should not be handled via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate = "enable"
+#dir-listing.encoding = "iso-8859-2"
+#dir-listing.external-css = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header = "enable"
+#debug.log-response-header = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found = "enable"
+
+#### SSL engine
+#ssl.engine = "enable"
+#ssl.pemfile = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi" => "/usr/bin/env",
+               ".pl" => "/usr/bin/env",
+               ".asis" => "/bin/cat",
+               ".php" => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+  server = LighttpdServer(*argv[1:])
+  try:
+    if server.StartupHttpServer():
+      raw_input('Server running at http://127.0.0.1:%s -'
+                ' press Enter to exit it.' % server.port)
+    else:
+      print 'Server exit code:', server.process.exitstatus
+  finally:
+    server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv)) diff --git a/build/android/lint/suppress.py b/build/android/lint/suppress.py new file mode 100644 index 00000000000..32774586720 --- /dev/null +++ b/build/android/lint/suppress.py @@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Add all generated lint_result.xml files to suppressions.xml"""
+
+# pylint: disable=no-member
+
+
+import collections
+import optparse
+import os
+import re
+import sys
+from xml.dom import minidom
+
+_BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(_BUILD_ANDROID_DIR)
+
+from pylib.constants import host_paths
+
+_TMP_DIR_RE = re.compile(r'^/tmp/.*/(SRC_ROOT[0-9]+|PRODUCT_DIR)/')
+_THIS_FILE = os.path.abspath(__file__)
+_CONFIG_PATH = os.path.join(os.path.dirname(_THIS_FILE), 'suppressions.xml')
+_DOC = (
+    '\nSTOP! It looks like you want to suppress some lint errors:\n'
+    '- Have you tried identifying the offending patch?\n'
+    '  Ask the author for a fix and/or revert the patch.\n'
+    '- It is preferred to add suppressions in the code instead of\n'
+    '  sweeping them under the rug here.
See:\n\n'
+    '  http://developer.android.com/tools/debugging/improving-w-lint.html\n'
+    '\n'
+    'Still reading?\n'
+    '- You can edit this file manually to suppress an issue\n'
+    '  globally if it is not applicable to the project.\n'
+    '- You can also automatically add issues found so far in the\n'
+    '  build process by running:\n\n'
+    '  ' + os.path.relpath(_THIS_FILE, host_paths.DIR_SOURCE_ROOT) + '\n\n'
+    '  which will generate this file (comments are not preserved).\n'
+    '  Note: PRODUCT_DIR will be substituted at run-time with actual\n'
+    '  directory path (e.g. out/Debug)\n'
+)
+
+
+_Issue = collections.namedtuple('Issue', ['severity', 'paths', 'regexps'])
+
+
+def _ParseConfigFile(config_path):
+  print 'Parsing %s' % config_path
+  issues_dict = {}
+  dom = minidom.parse(config_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.getAttribute('severity')
+
+    path_elements = (
+        p.attributes.get('path')
+        for p in issue.getElementsByTagName('ignore'))
+    paths = set(p.value for p in path_elements if p)
+
+    regexp_elements = (
+        p.attributes.get('regexp')
+        for p in issue.getElementsByTagName('ignore'))
+    regexps = set(r.value for r in regexp_elements if r)
+
+    issues_dict[issue_id] = _Issue(severity, paths, regexps)
+  return issues_dict
+
+
+def _ParseAndMergeResultFile(result_path, issues_dict):
+  print 'Parsing and merging %s' % result_path
+  dom = minidom.parse(result_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.attributes['severity'].value
+    path = issue.getElementsByTagName('location')[0].attributes['file'].value
+    # Strip temporary file path and use regex instead of path.
+    regexp = re.sub(_TMP_DIR_RE, '', path)
+    if issue_id not in issues_dict:
+      issues_dict[issue_id] = _Issue(severity, set(), set())
+    issues_dict[issue_id].regexps.add(regexp)
+
+
+def _WriteConfigFile(config_path, issues_dict):
+  new_dom = minidom.getDOMImplementation().createDocument(None, 'lint', None)
+  top_element = new_dom.documentElement
+  top_element.appendChild(new_dom.createComment(_DOC))
+  for issue_id, issue in sorted(issues_dict.iteritems(), key=lambda i: i[0]):
+    issue_element = new_dom.createElement('issue')
+    issue_element.attributes['id'] = issue_id
+    if issue.severity:
+      issue_element.attributes['severity'] = issue.severity
+    if issue.severity == 'ignore':
+      print 'Warning: [%s] is suppressed globally.'
% issue_id
+    else:
+      for path in sorted(issue.paths):
+        ignore_element = new_dom.createElement('ignore')
+        ignore_element.attributes['path'] = path
+        issue_element.appendChild(ignore_element)
+      for regexp in sorted(issue.regexps):
+        ignore_element = new_dom.createElement('ignore')
+        ignore_element.attributes['regexp'] = regexp
+        issue_element.appendChild(ignore_element)
+    top_element.appendChild(issue_element)
+
+  with open(config_path, 'w') as f:
+    f.write(new_dom.toprettyxml(indent='  ', encoding='utf-8'))
+  print 'Updated %s' % config_path
+
+
+def _Suppress(config_path, result_path):
+  issues_dict = _ParseConfigFile(config_path)
+  _ParseAndMergeResultFile(result_path, issues_dict)
+  _WriteConfigFile(config_path, issues_dict)
+
+
+def main():
+  parser = optparse.OptionParser(usage='%prog RESULT-FILE')
+  _, args = parser.parse_args()
+
+  if len(args) != 1 or not os.path.exists(args[0]):
+    parser.error('Must provide RESULT-FILE')
+
+  _Suppress(_CONFIG_PATH, args[0])
+
+
+if __name__ == '__main__':
+  main() diff --git a/build/android/lint/suppressions.xml b/build/android/lint/suppressions.xml new file mode 100644 index 00000000000..806f4371df5 --- /dev/null +++ b/build/android/lint/suppressions.xml @@ -0,0 +1,140 @@ diff --git a/build/android/lint_action.gypi b/build/android/lint_action.gypi new file mode 100644 index 00000000000..382666213da --- /dev/null +++ b/build/android/lint_action.gypi @@ -0,0 +1,51 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule to
+# run lint on java/class files.
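+#
+# For reference, entries in the suppressions file referenced below have this
+# shape (illustrative, matching what lint/suppress.py reads and writes):
+#   <issue id="NewApi" severity="ignore"/>
+#   <issue id="UnusedResources">
+#     <ignore regexp="some/path/or/regexp"/>
+#   </issue>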
+
+{
+  'action_name': 'lint_<(_target_name)',
+  'message': 'Linting <(_target_name)',
+  'variables': {
+    'conditions': [
+      ['chromium_code != 0 and android_lint != 0 and never_lint == 0', {
+        'additional_args': ['--enable'],
+      }, {
+        'additional_args': [],
+      }]
+    ],
+    'android_lint_cache_stamp': '<(PRODUCT_DIR)/android_lint_cache/android_lint_cache.stamp',
+    'android_manifest_path%': '<(DEPTH)/build/android/AndroidManifest.xml',
+    'resource_dir%': '<(DEPTH)/build/android/ant/empty/res',
+    'suppressions_file%': '<(DEPTH)/build/android/lint/suppressions.xml',
+    'platform_xml_path': '<(android_sdk_root)/platform-tools/api/api-versions.xml',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/lint.py',
+    '<(android_lint_cache_stamp)',
+    '<(android_manifest_path)',
+    '<(lint_jar_path)',
+    '<(suppressions_file)',
+    '<(platform_xml_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/lint.py',
+    '--lint-path=<(android_sdk_root)/tools/lint',
+    '--config-path=<(suppressions_file)',
+    '--processed-config-path=<(config_path)',
+    '--cache-dir', '<(PRODUCT_DIR)/android_lint_cache',
+    '--platform-xml-path', '<(platform_xml_path)',
+    '--manifest-path=<(android_manifest_path)',
+    '--result-path=<(result_path)',
+    '--resource-dir=<(resource_dir)',
+    '--product-dir=<(PRODUCT_DIR)',
+    '--src-dirs=>(src_dirs)',
+    '--jar-path=<(lint_jar_path)',
+    '--can-fail-build',
+    '--stamp=<(stamp_path)',
+    '<@(additional_args)',
+  ],
+} diff --git a/build/android/locale_pak_resources.gypi b/build/android/locale_pak_resources.gypi new file mode 100644 index 00000000000..020b831ebf8 --- /dev/null +++ b/build/android/locale_pak_resources.gypi @@ -0,0 +1,54 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a resources.zip with locale.pak files placed into appropriate
+# resource configs (e.g. en-GB.pak -> res/raw-en/en_gb.pak). Also generates
+# a locale_paks TypedArray so that resource files can be enumerated at runtime.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the resources will be included with that target.
+#
+# Variables:
+#   locale_pak_files - List of .pak files to process.
+#     Names must be of the form "en.pak" or "en-US.pak".
+#   resources_zip_path - the path of the generated zip file. Optional;
+#     normally you don't need to set this variable.
+#
+# Example
+#   {
+#     'target_name': 'my_locale_resources',
+#     'type': 'none',
+#     'variables': {
+#       'locale_pak_files': ['path1/fr.pak'],
+#     },
+#     'includes': [ '../build/android/locale_pak_resources.gypi' ],
+#   },
+#
+{
+  'variables': {
+    'resources_zip_path%': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'additional_locale_input_paths': ['<(resources_zip_path)'],
+      'dependencies_locale_zip_paths': ['<(resources_zip_path)'],
+    },
+  },
+  'actions': [{
+    'action_name': '<(_target_name)_locale_pak_resources',
+    'inputs': [
+      '<(DEPTH)/build/android/gyp/util/build_utils.py',
+      '<(DEPTH)/build/android/gyp/locale_pak_resources.py',
+      '<@(locale_pak_files)',
+    ],
+    'outputs': [
+      '<(resources_zip_path)',
+    ],
+    'action': [
+      'python', '<(DEPTH)/build/android/gyp/locale_pak_resources.py',
+      '--locale-paks', '<(locale_pak_files)',
+      '--resources-zip', '<(resources_zip_path)',
+    ],
+  }],
+} diff --git a/build/android/main_dex_action.gypi b/build/android/main_dex_action.gypi new file mode 100644 index 00000000000..40764189db1 --- /dev/null +++ b/build/android/main_dex_action.gypi @@ -0,0 +1,46 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# generates a list of classes that must be kept in the main dex file.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'action_name': 'some name for the action',
+#    'actions': [
+#      'variables': {
+#        'jar_paths': ['path to jar', ...],
+#        'output_path': 'output path',
+#      },
+#      'includes': [ 'relative/path/to/main_dex_action.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'message': 'Generating main dex classes list for <(output_path)',
+  'variables': {
+    'jar_paths%': [],
+    'output_path%': '',
+    'main_dex_list_script': '<(DEPTH)/build/android/gyp/main_dex_list.py',
+    'main_dex_rules_path': '<(DEPTH)/build/android/main_dex_classes.flags',
+  },
+  'inputs': [
+    '<@(jar_paths)',
+    '<(main_dex_list_script)',
+    '<(main_dex_rules_path)',
+    '<(multidex_configuration_path)',
+  ],
+  'outputs': [
+    '<(output_path)',
+  ],
+  'action': [
+    'python', '<(main_dex_list_script)',
+    '--main-dex-list-path', '<(output_path)',
+    '--android-sdk-tools', '<(android_sdk_tools)',
+    '--main-dex-rules-path', '<(main_dex_rules_path)',
+    '--multidex-configuration-path', '<(multidex_configuration_path)',
+    '<@(jar_paths)',
+  ]
+} diff --git a/build/android/main_dex_classes.flags b/build/android/main_dex_classes.flags new file mode 100644 index 00000000000..81152dcc1ea --- /dev/null +++ b/build/android/main_dex_classes.flags @@ -0,0 +1,12 @@
+-keep @**.MainDex class * {
+  *;
+}
+
+-keepclasseswithmembers class * {
+  public static ** asInterface(android.os.IBinder);
+}
+
+# Required when code coverage is enabled.
+-keep class com.vladium.** {
+  *;
+} diff --git a/build/android/method_count.py b/build/android/method_count.py new file mode 100644 index 00000000000..fdbdaf5606f --- /dev/null +++ b/build/android/method_count.py @@ -0,0 +1,129 @@
+#! /usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
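+#
+# Usage sketch (illustrative invocation and paths):
+#   build/android/method_count.py --apk-name MyApp out/Release/apks/MyApp.apk
+# Prints perf-result style totals for methods/fields/strings/types, plus the
+# derived DexCache size, for a .dex file or a zip/apk/jar containing .dex.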
+ +import argparse +import collections +import os +import re +import shutil +import sys +import tempfile +import zipfile + +import devil_chromium +from devil.android.sdk import dexdump +from pylib.constants import host_paths + +sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', 'util', 'lib', + 'common')) +import perf_tests_results_helper # pylint: disable=import-error + +# Example dexdump output: +# DEX file header: +# magic : 'dex\n035\0' +# checksum : b664fc68 +# signature : ae73...87f1 +# file_size : 4579656 +# header_size : 112 +# link_size : 0 +# link_off : 0 (0x000000) +# string_ids_size : 46148 +# string_ids_off : 112 (0x000070) +# type_ids_size : 5730 +# type_ids_off : 184704 (0x02d180) +# proto_ids_size : 8289 +# proto_ids_off : 207624 (0x032b08) +# field_ids_size : 17854 +# field_ids_off : 307092 (0x04af94) +# method_ids_size : 33699 +# method_ids_off : 449924 (0x06dd84) +# class_defs_size : 2616 +# class_defs_off : 719516 (0x0afa9c) +# data_size : 3776428 +# data_off : 803228 (0x0c419c) + +# For what these mean, refer to: +# https://source.android.com/devices/tech/dalvik/dex-format.html + + +_CONTRIBUTORS_TO_DEX_CACHE = {'type_ids_size': 'types', + 'string_ids_size': 'strings', + 'method_ids_size': 'methods', + 'field_ids_size': 'fields'} + + +def _ExtractSizesFromDexFile(dex_path): + counts = {} + for line in dexdump.DexDump(dex_path, file_summary=True): + if not line.strip(): + # Each method, type, field, and string contributes 4 bytes (1 reference) + # to our DexCache size. + counts['dex_cache_size'] = ( + sum(counts[x] for x in _CONTRIBUTORS_TO_DEX_CACHE)) * 4 + return counts + m = re.match(r'([a-z_]+_size) *: (\d+)', line) + if m: + counts[m.group(1)] = int(m.group(2)) + raise Exception('Unexpected end of output.') + + +def _ExtractSizesFromZip(path): + tmpdir = tempfile.mkdtemp(suffix='_dex_extract') + try: + counts = collections.defaultdict(int) + with zipfile.ZipFile(path, 'r') as z: + for subpath in z.namelist(): + if not subpath.endswith('.dex'): + continue + extracted_path = z.extract(subpath, tmpdir) + cur_counts = _ExtractSizesFromDexFile(extracted_path) + for k in cur_counts: + counts[k] += cur_counts[k] + return dict(counts) + finally: + shutil.rmtree(tmpdir) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + '--apk-name', help='Name of the APK to which the dexfile corresponds.') + parser.add_argument('dexfile') + + args = parser.parse_args() + + devil_chromium.Initialize() + + if not args.apk_name: + dirname, basename = os.path.split(args.dexfile) + while basename: + if 'apk' in basename: + args.apk_name = basename + break + dirname, basename = os.path.split(dirname) + else: + parser.error( + 'Unable to determine apk name from %s, ' + 'and --apk-name was not provided.' 
% args.dexfile) + + if os.path.splitext(args.dexfile)[1] in ('.zip', '.apk', '.jar'): + sizes = _ExtractSizesFromZip(args.dexfile) + else: + sizes = _ExtractSizesFromDexFile(args.dexfile) + + def print_result(name, value_key, description=None): + perf_tests_results_helper.PrintPerfResult( + '%s_%s' % (args.apk_name, name), 'total', [sizes[value_key]], + description or name) + + for dex_header_name, readable_name in _CONTRIBUTORS_TO_DEX_CACHE.iteritems(): + print_result(readable_name, dex_header_name) + print_result( + 'DexCache_size', 'dex_cache_size', 'bytes of permanent dirty memory') + return 0 + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/native_app_dependencies.gypi b/build/android/native_app_dependencies.gypi new file mode 100644 index 00000000000..4651ac36e54 --- /dev/null +++ b/build/android/native_app_dependencies.gypi @@ -0,0 +1,76 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide a rule +# to strip and place dependent shared libraries required by a native binary in a +# single folder that can later be pushed to the device. +# +# NOTE: consider packaging your binary as an apk instead of running a native +# library. +# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'target_that_depends_on_my_binary', +# 'type': 'none', +# 'dependencies': [ +# 'my_binary', +# ], +# 'variables': { +# 'native_binary': '<(PRODUCT_DIR)/my_binary', +# 'output_dir': 'location to place binary and dependent libraries' +# }, +# 'includes': [ '../../build/android/native_app_dependencies.gypi' ], +# }, +# + +{ + 'variables': { + 'include_main_binary%': 1, + 'extra_files%': [], + }, + 'conditions': [ + ['android_must_copy_system_libraries == 1', { + 'dependencies': [ + '<(DEPTH)/build/android/setup.gyp:copy_system_libraries', + ], + 'variables': { + 'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)', + 'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json', + }, + 'actions': [ + { + 'variables': { + 'input_libraries': ['<(native_binary)'], + }, + 'includes': ['../../build/android/write_ordered_libraries.gypi'], + }, + { + 'action_name': 'stripping native libraries', + 'variables': { + 'stripped_libraries_dir%': '<(output_dir)', + 'input_paths': ['<(native_binary)'], + 'stamp': '<(intermediate_dir)/strip.stamp', + }, + 'includes': ['../../build/android/strip_native_libraries.gypi'], + }, + ], + }], + ['extra_files!=[]', { + 'copies': [ + { + 'destination': '<(output_dir)', + 'files': [ '<@(extra_files)' ], + } + ], + }], + ['include_main_binary==1', { + 'copies': [ + { + 'destination': '<(output_dir)', + 'files': [ '<(native_binary)' ], + } + ], + }], + ], +} diff --git a/build/android/ndk.gyp b/build/android/ndk.gyp new file mode 100644 index 00000000000..b491db242b4 --- /dev/null +++ b/build/android/ndk.gyp @@ -0,0 +1,26 @@ +# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
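+#
+# Dependents pull in the NDK cpufeatures helper by listing this target, e.g.
+# (illustrative):
+#   'dependencies': [ '<(DEPTH)/build/android/ndk.gyp:cpu_features' ],
+# C/C++ sources can then #include <cpu-features.h>.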
+
+{
+  'targets': [
+    {
+      'target_name': 'cpu_features',
+      'type': 'static_library',
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(android_ndk_root)/sources/android/cpufeatures',
+        ],
+      },
+      'sources': [
+        '<(android_ndk_root)/sources/android/cpufeatures/cpu-features.c',
+      ],
+      'variables': {
+        'clang_warning_flags': [
+          # cpu-features.c has a few unused functions on x86 b/26403333
+          '-Wno-unused-function',
+        ],
+      },
+    },
+  ],
+} diff --git a/build/android/pack_relocations.gypi b/build/android/pack_relocations.gypi new file mode 100644 index 00000000000..61b4e2c4704 --- /dev/null +++ b/build/android/pack_relocations.gypi @@ -0,0 +1,77 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# packs relocations in Release builds of native libraries.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'action_name': 'pack_relocations',
+#    'actions': [
+#      'variables': {
+#        'enable_packing': 'pack relocations if 1, plain file copy if 0',
+#        'exclude_packing_list': 'names of libraries explicitly not packed',
+#        'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#        'input_paths': 'files to be added to the list of inputs',
+#        'stamp': 'file to touch when the action is complete',
+#        'stripped_libraries_dir': 'directory holding stripped libraries',
+#        'packed_libraries_dir': 'directory holding packed libraries',
+#      },
+#      'includes': [ '../../build/android/pack_relocations.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/pack_relocations.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'conditions': [
+    ['enable_packing == 1', {
+      'message': 'Packing relocations for <(_target_name)',
+      'dependencies': [
+        '<(DEPTH)/third_party/android_platform/relocation_packer.gyp:android_relocation_packer#host',
+      ],
+      'inputs': [
+        '<(PRODUCT_DIR)/android_relocation_packer',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/pack_relocations.py',
+        '--configuration-name=<(CONFIGURATION_NAME)',
+        '--enable-packing=1',
+        '--exclude-packing-list=<@(exclude_packing_list)',
+        '--android-pack-relocations=<(PRODUCT_DIR)/android_relocation_packer',
+        '--stripped-libraries-dir=<(stripped_libraries_dir)',
+        '--packed-libraries-dir=<(packed_libraries_dir)',
+        '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+        '--stamp=<(stamp)',
+      ],
+    }, {
+      'message': 'Copying libraries (no relocation packing) for <(_target_name)',
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/pack_relocations.py',
+        '--configuration-name=<(CONFIGURATION_NAME)',
+        '--enable-packing=0',
+        '--stripped-libraries-dir=<(stripped_libraries_dir)',
+        '--packed-libraries-dir=<(packed_libraries_dir)',
+        '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+        '--stamp=<(stamp)',
+      ],
+    }],
+    ['android_must_copy_system_libraries == 1', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
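+      # Nothing ever creates the .fake file, so declaring it as an output
+      # keeps this action permanently out of date and it reruns every build.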
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+} diff --git a/build/android/package_resources_action.gypi b/build/android/package_resources_action.gypi new file mode 100644 index 00000000000..a83c02dc77f --- /dev/null +++ b/build/android/package_resources_action.gypi @@ -0,0 +1,105 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is a helper to java_apk.gypi. It should be used to create an
+# action that runs ApkBuilder via ANT.
+#
+# Required variables:
+#   apk_name - File name (minus path & extension) of the output apk.
+#   android_manifest_path - Path to AndroidManifest.xml.
+#   app_manifest_version_name - set the app's 'human readable' version number.
+#   app_manifest_version_code - set the app's version number.
+# Optional variables:
+#   asset_location - The directory where assets are located (if any).
+#   create_density_splits - Whether to create density-based apk splits. Splits
+#     are supported only for minSdkVersion >= 21.
+#   language_splits - List of languages to create apk splits for.
+#   resource_zips - List of paths to resource zip files.
+#   shared_resources - Make a resource package that can be loaded by a different
+#     application at runtime to access the package's resources.
+#   app_as_shared_library - Make a resource package that can be loaded as shared
+#     library.
+#   extensions_to_not_compress - E.g.: 'pak,dat,bin'
+#   extra_inputs - List of extra action inputs.
+{
+  'variables': {
+    'asset_location%': '',
+    'create_density_splits%': 0,
+    'resource_zips%': [],
+    'shared_resources%': 0,
+    'app_as_shared_library%': 0,
+    'extensions_to_not_compress%': '',
+    'extra_inputs%': [],
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+  },
+  'action_name': 'package_resources_<(apk_name)',
+  'message': 'packaging resources for <(apk_name)',
+  'inputs': [
+    # TODO: This isn't always rerun correctly, http://crbug.com/351928
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/package_resources.py',
+    '<(android_manifest_path)',
+    '<@(extra_inputs)',
+  ],
+  'outputs': [
+    '<(resource_packaged_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/package_resources.py',
+    '--android-sdk-jar', '<(android_sdk_jar)',
+    '--aapt-path', '<(android_aapt_path)',
+    '--configuration-name', '<(CONFIGURATION_NAME)',
+    '--android-manifest', '<(android_manifest_path)',
+    '--version-code', '<(app_manifest_version_code)',
+    '--version-name', '<(app_manifest_version_name)',
+    '--no-compress', '<(extensions_to_not_compress)',
+    '--apk-path', '<(resource_packaged_apk_path)',
+  ],
+  'conditions': [
+    ['shared_resources == 1', {
+      'action': [
+        '--shared-resources',
+      ],
+    }],
+    ['app_as_shared_library == 1', {
+      'action': [
+        '--app-as-shared-lib',
+      ],
+    }],
+    ['asset_location != ""', {
+      'action': [
+        '--asset-dir', '<(asset_location)',
+      ],
+    }],
+    ['create_density_splits == 1', {
+      'action': [
+        '--create-density-splits',
+      ],
+      'outputs': [
+        '<(resource_packaged_apk_path)_hdpi',
+        '<(resource_packaged_apk_path)_xhdpi',
+        '<(resource_packaged_apk_path)_xxhdpi',
+        '<(resource_packaged_apk_path)_xxxhdpi',
+        '<(resource_packaged_apk_path)_tvdpi',
+      ],
+    }],
+    ['language_splits != []', {
+      'action': [
+        '--language-splits=<(language_splits)',
+      ],
+      'outputs': [
+        "<!@(python <(DEPTH)/build/apply_locales.py '<(resource_packaged_apk_path)_ZZLOCALE' <(language_splits))",
+      ],
+    }],
+    ['resource_zips != []', {
+      'action': [
+        '--resource-zips', '>(resource_zips)',
+      ],
+      'inputs': [
+        '>@(resource_zips)',
+      ],
+    }],
+  ],
+} diff
diff --git a/build/android/play_services/LICENSE.sha1 b/build/android/play_services/LICENSE.sha1
new file mode 100644
index 00000000000..8e606a72162
--- /dev/null
+++ b/build/android/play_services/LICENSE.sha1
@@ -0,0 +1 @@
+11cc73d4b7fa82560fbf5bbc1095dbac30308e7c
\ No newline at end of file
diff --git a/build/android/play_services/__init__.py b/build/android/play_services/__init__.py
new file mode 100644
index 00000000000..50b23dff631
--- /dev/null
+++ b/build/android/play_services/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/play_services/config.json b/build/android/play_services/config.json
new file mode 100644
index 00000000000..fb38ef9d042
--- /dev/null
+++ b/build/android/play_services/config.json
@@ -0,0 +1,4 @@
+{
+  "version_number": 8487000,
+  "version_xml_path": "res/values/version.xml"
+}
diff --git a/build/android/play_services/google_play_services_library.zip.sha1 b/build/android/play_services/google_play_services_library.zip.sha1
new file mode 100644
index 00000000000..fbd34e4ec66
--- /dev/null
+++ b/build/android/play_services/google_play_services_library.zip.sha1
@@ -0,0 +1 @@
+1db2c536157710a4fe7edb59454e0b8f8b7e51bd
\ No newline at end of file
diff --git a/build/android/play_services/preprocess.py b/build/android/play_services/preprocess.py
new file mode 100644
index 00000000000..ef7d97c7cbd
--- /dev/null
+++ b/build/android/play_services/preprocess.py
@@ -0,0 +1,292 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Prepares the Google Play services split client libraries before usage by
+Chrome's build system.
+
+We need to preprocess Google Play services before using it in Chrome
+builds for two main reasons:
+
+- Getting rid of unused resources: unsupported languages, unused
+drawables, etc.
+
+- Merging the different jars so that they can be proguarded more
+easily. This is necessary since debug and test apks get very close
+to the dex limit.
+
+The script is supposed to be used with the maven repository that can be
+obtained by downloading the "extra-google-m2repository" from the Android SDK
+Manager. It also supports importing from already extracted AAR files using the
+--is-extracted-repo flag. The expected directory structure in that case would
+look like:
+
+    REPOSITORY_DIR
+    +-- CLIENT_1
+    |   +-- <content of the first AAR file>
+    +-- CLIENT_2
+    +-- etc.
+
+The output is a directory with the following structure:
+
+    OUT_DIR
+    +-- google-play-services.jar
+    +-- res
+    |   +-- CLIENT_1
+    |   |   +-- color
+    |   |   +-- values
+    |   |   +-- etc.
+    |   +-- CLIENT_2
+    |       +-- ...
+    +-- stub
+        +-- res/[.git-keep-directory]
+        +-- src/android/UnusedStub.java
+
+Requires the `jar` utility in the path.
+'''
+
+import argparse
+import glob
+import itertools
+import os
+import shutil
+import stat
+import sys
+import tempfile
+import zipfile
+
+from datetime import datetime
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+import devil_chromium
+from devil.utils import cmd_helper
+from play_services import utils
+from pylib.utils import argparse_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(description=(
+      "Prepares the Google Play services split client libraries before usage "
+      "by Chrome's build system. See the script's documentation for more "
+      "detailed help."))
+  argparse_utils.CustomHelpAction.EnableFor(parser)
+  required_args = parser.add_argument_group('required named arguments')
+  required_args.add_argument('-r',
+                             '--repository',
+                             help=('the Google Play services repository '
+                                   'location'),
+                             required=True,
+                             metavar='FILE')
+  required_args.add_argument('-o',
+                             '--out-dir',
+                             help='the output directory',
+                             required=True,
+                             metavar='FILE')
+  required_args.add_argument('-c',
+                             '--config-file',
+                             help='the config file path',
+                             required=True,
+                             metavar='FILE')
+  parser.add_argument('-x',
+                      '--is-extracted-repo',
+                      action='store_true',
+                      help='the provided repository is not made of AAR files')
+  parser.add_argument('--config-help',
+                      action='custom_help',
+                      custom_help_text=utils.ConfigParser.__doc__,
+                      help='show the configuration file format help')
+
+  args = parser.parse_args()
+
+  devil_chromium.Initialize()
+
+  return ProcessGooglePlayServices(args.repository,
+                                   args.out_dir,
+                                   args.config_file,
+                                   args.is_extracted_repo)
+
+
+def ProcessGooglePlayServices(repo, out_dir, config_path, is_extracted_repo):
+  config = utils.ConfigParser(config_path)
+
+  tmp_root = tempfile.mkdtemp()
+  try:
+    tmp_paths = _SetupTempDir(tmp_root)
+
+    if is_extracted_repo:
+      _ImportFromExtractedRepo(config, tmp_paths, repo)
+    else:
+      _ImportFromAars(config, tmp_paths, repo)
+
+    _GenerateCombinedJar(tmp_paths)
+    _ProcessResources(config, tmp_paths, repo)
+    _BuildOutput(config, tmp_paths, out_dir)
+  finally:
+    shutil.rmtree(tmp_root)
+
+  return 0
+
+
+def _SetupTempDir(tmp_root):
+  tmp_paths = {
+      'root': tmp_root,
+      'imported_clients': os.path.join(tmp_root, 'imported_clients'),
+      'extracted_jars': os.path.join(tmp_root, 'jar'),
+      'combined_jar': os.path.join(tmp_root, 'google-play-services.jar'),
+  }
+  os.mkdir(tmp_paths['imported_clients'])
+  os.mkdir(tmp_paths['extracted_jars'])
+
+  return tmp_paths
+
+
+def _SetupOutputDir(out_dir):
+  out_paths = {
+      'root': out_dir,
+      'res': os.path.join(out_dir, 'res'),
+      'jar': os.path.join(out_dir, 'google-play-services.jar'),
+      'stub': os.path.join(out_dir, 'stub'),
+  }
+
+  shutil.rmtree(out_paths['jar'], ignore_errors=True)
+  shutil.rmtree(out_paths['res'], ignore_errors=True)
+  shutil.rmtree(out_paths['stub'], ignore_errors=True)
+
+  return out_paths
+
+
+def _MakeWritable(dir_path):
+  for root, dirs, files in os.walk(dir_path):
+    for path in itertools.chain(dirs, files):
+      st = os.stat(os.path.join(root, path))
+      os.chmod(os.path.join(root, path), st.st_mode | stat.S_IWUSR)
+
+
+# E.g. turn "base_1p" into "base"
+def _RemovePartySuffix(client):
+  return client[:-3] if client[-3:] == '_1p' else client
+
+
+def _ImportFromAars(config, tmp_paths, repo):
+  for client in config.clients:
+    client_name = _RemovePartySuffix(client)
+    aar_name = 'client_' + client + '.aar'
+    aar_path = os.path.join(repo, client_name, aar_name)
+    aar_out_path = os.path.join(tmp_paths['imported_clients'], client)
+    _ExtractAll(aar_path, aar_out_path)
+
+    client_jar_path = os.path.join(aar_out_path, 'classes.jar')
+    _ExtractAll(client_jar_path, tmp_paths['extracted_jars'])
+
+
+def _ImportFromExtractedRepo(config, tmp_paths, repo):
+  # Import the clients
+  try:
+    for client in config.clients:
+      client_out_dir = os.path.join(tmp_paths['imported_clients'], client)
+      shutil.copytree(os.path.join(repo, client), client_out_dir)
+
+      client_jar_path = os.path.join(client_out_dir, 'classes.jar')
+      _ExtractAll(client_jar_path, tmp_paths['extracted_jars'])
+  finally:
+    _MakeWritable(tmp_paths['imported_clients'])
+
+
+def _GenerateCombinedJar(tmp_paths):
+  out_file_name = tmp_paths['combined_jar']
+  working_dir = tmp_paths['extracted_jars']
+  cmd_helper.Call(['jar', '-cf', out_file_name, '-C', working_dir, '.'])
+
+
+def _ProcessResources(config, tmp_paths, repo):
+  LOCALIZED_VALUES_BASE_NAME = 'values-'
+  locale_whitelist = set(config.locale_whitelist)
+
+  # The directory structure here is:
+  #   <imported_clients_dir>/<client_name>_1p/res/<res_type>/<res_file>.xml
+  for client_dir in os.listdir(tmp_paths['imported_clients']):
+    client_prefix = _RemovePartySuffix(client_dir) + '_'
+
+    res_path = os.path.join(tmp_paths['imported_clients'], client_dir, 'res')
+    if not os.path.isdir(res_path):
+      continue
+    for res_type in os.listdir(res_path):
+      res_type_path = os.path.join(res_path, res_type)
+
+      if res_type.startswith('drawable'):
+        shutil.rmtree(res_type_path)
+        continue
+
+      if res_type.startswith(LOCALIZED_VALUES_BASE_NAME):
+        dir_locale = res_type[len(LOCALIZED_VALUES_BASE_NAME):]
+        if dir_locale not in locale_whitelist:
+          shutil.rmtree(res_type_path)
+          continue
+
+      if res_type.startswith('values'):
+        # Beginning with v3, resource file names are not necessarily unique,
+        # and would overwrite each other when merged at build time. Prefix
+        # each "values" resource file with its client name.
+        for res_file in os.listdir(res_type_path):
+          os.rename(os.path.join(res_type_path, res_file),
+                    os.path.join(res_type_path, client_prefix + res_file))
+
+  # Reimport files from the whitelist.
+  for res_path in config.resource_whitelist:
+    for whitelisted_file in glob.glob(os.path.join(repo, res_path)):
+      resolved_file = os.path.relpath(whitelisted_file, repo)
+      rebased_res = os.path.join(tmp_paths['imported_clients'], resolved_file)
+
+      if not os.path.exists(os.path.dirname(rebased_res)):
+        os.makedirs(os.path.dirname(rebased_res))
+
+      shutil.copy(os.path.join(repo, whitelisted_file), rebased_res)
+
+
+def _BuildOutput(config, tmp_paths, out_dir):
+  generation_date = datetime.utcnow()
+  version_xml_path = os.path.join(tmp_paths['imported_clients'],
+                                  config.version_xml_path)
+  play_services_full_version = utils.GetVersionNumberFromLibraryResources(
+      version_xml_path)
+
+  out_paths = _SetupOutputDir(out_dir)
+
+  # Copy the resources to the output dir
+  for client in config.clients:
+    res_in_tmp_dir = os.path.join(tmp_paths['imported_clients'], client, 'res')
+    if os.path.isdir(res_in_tmp_dir) and os.listdir(res_in_tmp_dir):
+      res_in_final_dir = os.path.join(out_paths['res'], client)
+      shutil.copytree(res_in_tmp_dir, res_in_final_dir)
+
+  # Copy the jar
+  shutil.copyfile(tmp_paths['combined_jar'], out_paths['jar'])
+
+  # Write the java dummy stub. Needed for gyp to create the resource jar
+  stub_location = os.path.join(out_paths['stub'], 'src', 'android')
+  os.makedirs(stub_location)
+  with open(os.path.join(stub_location, 'UnusedStub.java'), 'w') as stub:
+    stub.write('package android;'
+               'public final class UnusedStub {'
+               ' private UnusedStub() {}'
+               '}')
+
+  # Create the main res directory. It is needed by gyp
+  stub_res_location = os.path.join(out_paths['stub'], 'res')
+  os.makedirs(stub_res_location)
+  with open(os.path.join(stub_res_location, '.res-stamp'), 'w') as stamp:
+    content_str = 'google_play_services_version: %s\nutc_date: %s\n'
+    stamp.write(content_str % (play_services_full_version, generation_date))
+
+  config.UpdateVersionNumber(play_services_full_version)
+
+
+def _ExtractAll(zip_path, out_path):
+  with zipfile.ZipFile(zip_path, 'r') as zip_file:
+    zip_file.extractall(out_path)
+
+if __name__ == '__main__':
+  sys.exit(main())
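_GenerateCombinedJar shells out to the JDK's `jar` tool, which is why the module docstring requires `jar` on the path. A rough standard-library equivalent of `jar -cf out.jar -C dir .`, under the assumption that a manifest-less archive is acceptable for an intermediate build artifact (the helper name is mine, not part of preprocess.py):

    import os
    import zipfile

    def MergeExtractedJars(root_dir, jar_path):
      # Zips every file under root_dir into jar_path, mirroring
      # `jar -cf jar_path -C root_dir .` except that no META-INF/MANIFEST.MF
      # is generated.
      with zipfile.ZipFile(jar_path, 'w', zipfile.ZIP_DEFLATED) as jar:
        for dirpath, _, filenames in os.walk(root_dir):
          for name in filenames:
            abs_path = os.path.join(dirpath, name)
            jar.write(abs_path, os.path.relpath(abs_path, root_dir))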
diff --git a/build/android/play_services/update.py b/build/android/play_services/update.py
new file mode 100644
index 00000000000..8a703254bab
--- /dev/null
+++ b/build/android/play_services/update.py
@@ -0,0 +1,515 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''
+Script to help uploading and downloading the Google Play services library to
+and from Google Cloud Storage.
+'''
+
+import argparse
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+import devil_chromium
+from devil.utils import cmd_helper
+from play_services import utils
+from pylib import constants
+from pylib.constants import host_paths
+from pylib.utils import logging_utils
+
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build'))
+import find_depot_tools  # pylint: disable=import-error,unused-import
+import breakpad
+import download_from_google_storage
+import upload_to_google_storage
+
+
+# Directory where the SHA1 files for the zip and the license are stored.
+# It should be managed by git to provide information about new versions.
+SHA1_DIRECTORY = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', 'android',
+                              'play_services')
+
+# Default bucket used for storing the files.
+GMS_CLOUD_STORAGE = 'chromium-android-tools/play-services'
+
+# Path to the default configuration file. It exposes the currently installed
+# version of the library in a human readable way.
+CONFIG_DEFAULT_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build',
+                                   'android', 'play_services', 'config.json')
+
+LICENSE_FILE_NAME = 'LICENSE'
+ZIP_FILE_NAME = 'google_play_services_library.zip'
+GMS_PACKAGE_ID = 'extra-google-google_play_services'  # used by sdk manager
+
+LICENSE_PATTERN = re.compile(r'^Pkg\.License=(?P<text>.*)$', re.MULTILINE)
+
+
+def main(raw_args):
+  parser = argparse.ArgumentParser(
+      description=__doc__ + 'Please see the subcommand help for more details.',
+      formatter_class=utils.DefaultsRawHelpFormatter)
+  subparsers = parser.add_subparsers(title='commands')
+
+  # Download arguments
+  parser_download = subparsers.add_parser(
+      'download',
+      help='download the library from the cloud storage',
+      description=Download.__doc__,
+      formatter_class=utils.DefaultsRawHelpFormatter)
+  parser_download.set_defaults(func=Download)
+  AddBasicArguments(parser_download)
+  AddBucketArguments(parser_download)
+
+  # SDK Update arguments
+  parser_sdk = subparsers.add_parser(
+      'sdk',
+      help='get the latest Google Play services SDK using Android SDK Manager',
+      description=UpdateSdk.__doc__,
+      formatter_class=utils.DefaultsRawHelpFormatter)
+  parser_sdk.set_defaults(func=UpdateSdk)
+  AddBasicArguments(parser_sdk)
+
+  # Upload arguments
+  parser_upload = subparsers.add_parser(
+      'upload',
+      help='upload the library to the cloud storage',
+      description=Upload.__doc__,
+      formatter_class=utils.DefaultsRawHelpFormatter)
+
+  parser_upload.add_argument('--skip-git',
+                             action='store_true',
+                             help="don't commit the changes at the end")
+  parser_upload.set_defaults(func=Upload)
+  AddBasicArguments(parser_upload)
+  AddBucketArguments(parser_upload)
+
+  args = parser.parse_args(raw_args)
+  if args.verbose:
+    logging.basicConfig(level=logging.DEBUG)
+  logging_utils.ColorStreamHandler.MakeDefault(not _IsBotEnvironment())
+  devil_chromium.Initialize()
+  return args.func(args)
+
+
+def AddBasicArguments(parser):
+  '''
+  Defines the common arguments on the subparsers rather than on the main
+  parser. This allows putting arguments after the command:
+  `foo.py upload --debug --force` instead of `foo.py --debug upload --force`.
+  '''
+
+  parser.add_argument('--sdk-root',
+                      help='base path to the Android SDK tools root',
+                      default=constants.ANDROID_SDK_ROOT)
+
+  parser.add_argument('-v', '--verbose',
+                      action='store_true',
+                      help='print debug information')
+
+
+def AddBucketArguments(parser):
+  parser.add_argument('--bucket',
+                      help='name of the bucket where the files are stored',
+                      default=GMS_CLOUD_STORAGE)
+
+  parser.add_argument('--config',
+                      help='JSON Configuration file',
+                      default=CONFIG_DEFAULT_PATH)
+
+  parser.add_argument('--dry-run',
+                      action='store_true',
+                      help=('run the script in dry run mode. Files will be '
+                            'copied to a local directory instead of the '
+                            'cloud storage. The bucket name will be used as '
+                            'the path to that directory, relative to the '
+                            'repository root.'))
+
+  parser.add_argument('-f', '--force',
+                      action='store_true',
+                      help='run even if the library is already up to date')
+
+
+def Download(args):
+  '''
+  Downloads the Google Play services library from a Google Cloud Storage bucket
+  and installs it to
+  //third_party/android_tools/sdk/extras/google/google_play_services.
+
+  A license check will be made, and the user might have to accept the license
+  if that has not been done before.
+  '''
+
+  if not os.path.isdir(args.sdk_root):
+    logging.debug('Did not find the Android SDK root directory at "%s".',
+                  args.sdk_root)
+    if not args.force:
+      logging.info('Skipping, not on an Android checkout.')
+      return 0
+
+  config = utils.ConfigParser(args.config)
+  paths = PlayServicesPaths(args.sdk_root, config.version_xml_path)
+
+  if os.path.isdir(paths.package) and not os.access(paths.package, os.W_OK):
+    logging.error('Failed updating the Google Play Services library. '
+                  'The location is not writable. Please remove the '
+                  'directory (%s) and try again.', paths.package)
+    return -2
+
+  new_lib_zip_sha1 = os.path.join(SHA1_DIRECTORY, ZIP_FILE_NAME + '.sha1')
+
+  logging.debug('Comparing zip hashes: %s and %s', new_lib_zip_sha1,
+                paths.lib_zip_sha1)
+  if utils.FileEquals(new_lib_zip_sha1, paths.lib_zip_sha1) and not args.force:
+    logging.info('Skipping, the Google Play services library is up to date.')
+    return 0
+
+  bucket_path = _VerifyBucketPathFormat(args.bucket,
+                                        config.version_number,
+                                        args.dry_run)
+
+  tmp_root = tempfile.mkdtemp()
+  try:
+    # Set up the destination directory.
+    if not os.path.isdir(paths.package):
+      os.makedirs(paths.package)
+
+    # Download the license file from bucket/{version_number}/license.sha1.
+    new_license = os.path.join(tmp_root, LICENSE_FILE_NAME)
+
+    license_sha1 = os.path.join(SHA1_DIRECTORY, LICENSE_FILE_NAME + '.sha1')
+    _DownloadFromBucket(bucket_path, license_sha1, new_license,
+                        args.verbose, args.dry_run)
+
+    if (not _IsBotEnvironment() and
+        not _CheckLicenseAgreement(new_license, paths.license,
+                                   config.version_number)):
+      logging.warning('Your version of the Google Play services library is '
+                      'not up to date. You might run into issues building '
+                      'or running the app. Please run `%s download` to '
+                      'retry downloading it.', __file__)
+      return 0
+
+    new_lib_zip = os.path.join(tmp_root, ZIP_FILE_NAME)
+    _DownloadFromBucket(bucket_path, new_lib_zip_sha1, new_lib_zip,
+                        args.verbose, args.dry_run)
+
+    try:
+      # We remove the current version of the Google Play services SDK.
+      if os.path.exists(paths.package):
+        shutil.rmtree(paths.package)
+      os.makedirs(paths.package)
+
+      logging.debug('Extracting the library to %s', paths.lib)
+      with zipfile.ZipFile(new_lib_zip, "r") as new_lib_zip_file:
+        new_lib_zip_file.extractall(paths.lib)
+
+      logging.debug('Copying %s to %s', new_license, paths.license)
+      shutil.copy(new_license, paths.license)
+
+      logging.debug('Copying %s to %s', new_lib_zip_sha1, paths.lib_zip_sha1)
+      shutil.copy(new_lib_zip_sha1, paths.lib_zip_sha1)
+
+      logging.info('Update complete.')
+
+    except Exception as e:  # pylint: disable=broad-except
+      logging.error('Failed updating the Google Play Services library. '
+                    'An error occurred while installing the new version in '
+                    'the SDK directory: %s ', e)
+      return -3
+  finally:
+    shutil.rmtree(tmp_root)
+
+  return 0
+
+
+def UpdateSdk(args):
+  '''
+  Uses the Android SDK Manager to download the latest Google Play services SDK
+  locally. Its usual installation path is
+  //third_party/android_tools/sdk/extras/google/google_play_services
+  '''
+
+  # This function should not run on bots and could fail for many user
+  # and setup related reasons. Also, exceptions here are not caught, so we
+  # disable breakpad to avoid spamming the logs.
+  breakpad.IS_ENABLED = False
+
+  sdk_manager = os.path.join(args.sdk_root, 'tools', 'android')
+  cmd = [sdk_manager, 'update', 'sdk', '--no-ui', '--filter', GMS_PACKAGE_ID]
+  cmd_helper.Call(cmd)
+  # If no update is needed, it still returns successfully so we just do nothing.
+
+  return 0
+
+
+def Upload(args):
+  '''
+  Uploads the library from the local Google Play services SDK to a Google Cloud
+  Storage bucket.
+
+  By default, a local commit will be made at the end of the operation.
+  '''
+
+  # This function should not run on bots and could fail for many user
+  # and setup related reasons. Also, exceptions here are not caught, so we
+  # disable breakpad to avoid spamming the logs.
+  breakpad.IS_ENABLED = False
+
+  config = utils.ConfigParser(args.config)
+  paths = PlayServicesPaths(args.sdk_root, config.version_xml_path)
+
+  if not args.skip_git and utils.IsRepoDirty(host_paths.DIR_SOURCE_ROOT):
+    logging.error('The repo is dirty. Please commit or stash your changes.')
+    return -1
+
+  new_version_number = utils.GetVersionNumberFromLibraryResources(
+      paths.version_xml)
+  logging.debug('comparing versions: new=%d, old=%s',
+                new_version_number, config.version_number)
+  if new_version_number <= config.version_number and not args.force:
+    logging.info('The checked in version of the library is already the latest '
+                 'one. No update is needed. Please rerun with --force to skip '
+                 'this check.')
+    return 0
+
+  tmp_root = tempfile.mkdtemp()
+  try:
+    new_lib_zip = os.path.join(tmp_root, ZIP_FILE_NAME)
+    new_license = os.path.join(tmp_root, LICENSE_FILE_NAME)
+
+    # Need to strip '.zip' from the file name here.
+    shutil.make_archive(new_lib_zip[:-4], 'zip', paths.lib)
+    _ExtractLicenseFile(new_license, paths.source_prop)
+
+    bucket_path = _VerifyBucketPathFormat(args.bucket, new_version_number,
+                                          args.dry_run)
+    files_to_upload = [new_lib_zip, new_license]
+    logging.debug('Uploading %s to %s', files_to_upload, bucket_path)
+    _UploadToBucket(bucket_path, files_to_upload, args.dry_run)
+
+    new_lib_zip_sha1 = os.path.join(SHA1_DIRECTORY,
+                                    ZIP_FILE_NAME + '.sha1')
+    new_license_sha1 = os.path.join(SHA1_DIRECTORY,
+                                    LICENSE_FILE_NAME + '.sha1')
+    shutil.copy(new_lib_zip + '.sha1', new_lib_zip_sha1)
+    shutil.copy(new_license + '.sha1', new_license_sha1)
+  finally:
+    shutil.rmtree(tmp_root)
+
+  config.UpdateVersionNumber(new_version_number)
+
+  if not args.skip_git:
+    commit_message = ('Update the Google Play services dependency to %s\n'
+                      '\n') % new_version_number
+    utils.MakeLocalCommit(host_paths.DIR_SOURCE_ROOT,
+                          [new_lib_zip_sha1, new_license_sha1, config.path],
+                          commit_message)
+
+  return 0
+
+
+def _DownloadFromBucket(bucket_path, sha1_file, destination, verbose,
+                        is_dry_run):
+  '''Downloads the file designated by the provided sha1 from a cloud bucket.'''
+
+  download_from_google_storage.download_from_google_storage(
+      input_filename=sha1_file,
+      base_url=bucket_path,
+      gsutil=_InitGsutil(is_dry_run),
+      num_threads=1,
+      directory=None,
+      recursive=False,
+      force=False,
+      output=destination,
+      ignore_errors=False,
+      sha1_file=sha1_file,
+      verbose=verbose,
+      auto_platform=True,
+      extract=False)
+
+
+def _UploadToBucket(bucket_path, files_to_upload, is_dry_run):
+  '''Uploads the files designated by the provided paths to a cloud bucket.'''
+
+  upload_to_google_storage.upload_to_google_storage(
+      input_filenames=files_to_upload,
+      base_url=bucket_path,
+      gsutil=_InitGsutil(is_dry_run),
+      force=False,
+      use_md5=False,
+      num_threads=1,
+      skip_hashing=False,
+      gzip=None)
+
+
+def _InitGsutil(is_dry_run):
+  '''Initializes the Gsutil object as the regular or dummy version for dry
+  runs.'''
+
+  if is_dry_run:
+    return DummyGsutil()
+  else:
+    return download_from_google_storage.Gsutil(
+        download_from_google_storage.GSUTIL_DEFAULT_PATH)
+
+
+def _ExtractLicenseFile(license_path, prop_file_path):
+  with open(prop_file_path, 'r') as prop_file:
+    prop_file_content = prop_file.read()
+
+  match = LICENSE_PATTERN.search(prop_file_content)
+  if not match:
+    raise AttributeError('The license was not found in ' +
+                         os.path.abspath(prop_file_path))
+
+  with open(license_path, 'w') as license_file:
+    license_file.write(match.group('text'))
+
+
+def _CheckLicenseAgreement(expected_license_path, actual_license_path,
+                           version_number):
+  '''
+  Checks that the new license is the one already accepted by the user. If it
+  isn't, it prompts the user to accept it. Returns whether the expected license
+  has been accepted.
+  '''
+
+  if utils.FileEquals(expected_license_path, actual_license_path):
+    return True
+
+  with open(expected_license_path) as license_file:
+    # Uses plain print rather than logging to make sure this is not formatted
+    # by the logger.
+    print ('Updating the Google Play services SDK to '
+           'version %d.' % version_number)
+
+    # The output is buffered when running as part of gclient hooks. We split
+    # the text here and flush it explicitly to avoid having part of it
+    # dropped.
+    # Note: text contains *escaped* new lines, so we split by '\\n', not '\n'.
+    for license_part in license_file.read().split('\\n'):
+      print license_part
+      sys.stdout.flush()
+
+  # Need to put the prompt on a separate line, otherwise the gclient hook
+  # buffer only prints it after we have received an input.
+  print 'Do you accept the license? [y/n]: '
+  sys.stdout.flush()
+  return raw_input('> ') in ('Y', 'y')
+
+
+def _IsBotEnvironment():
+  return bool(os.environ.get('CHROME_HEADLESS'))
+
+
+def _VerifyBucketPathFormat(bucket_name, version_number, is_dry_run):
+  '''
+  Formats and checks the download/upload path depending on whether we are
+  running in dry run mode or not. Returns a supposedly safe path to use with
+  Gsutil.
+  '''
+
+  if is_dry_run:
+    bucket_path = os.path.abspath(os.path.join(bucket_name,
+                                               str(version_number)))
+    if not os.path.isdir(bucket_path):
+      os.makedirs(bucket_path)
+  else:
+    if bucket_name.startswith('gs://'):
+      # We enforce the syntax without gs:// for consistency with the standalone
+      # download/upload scripts and to make the dry run transition easier.
+      raise AttributeError('Please provide the bucket name without the gs:// '
+                           'prefix (e.g. %s)' % GMS_CLOUD_STORAGE)
+    bucket_path = 'gs://%s/%d' % (bucket_name, version_number)
+
+  return bucket_path
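Both transfer directions above lean on depot_tools' `.sha1` pointer convention: the repository checks in only a 40-character hex digest (see LICENSE.sha1 and google_play_services_library.zip.sha1 earlier in this change), and the artifact itself lives in the bucket under that digest. An illustrative helper for producing such a pointer file; the function is mine, not part of update.py:

    import hashlib

    def WriteSha1Pointer(artifact_path, pointer_path):
      # Writes the digest that download_from_google_storage later resolves,
      # e.g. the content of google_play_services_library.zip.sha1.
      digest = hashlib.sha1()
      with open(artifact_path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
          digest.update(chunk)
      with open(pointer_path, 'w') as f:
        f.write(digest.hexdigest())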
+
+
+class PlayServicesPaths(object):
+  '''
+  Describes the different paths to be used in the update process.
+
+  Filesystem hierarchy                               | Exposed property / notes
+  ---------------------------------------------------|-------------------------
+  [sdk_root]                                         | sdk_root / (1)
+   +- extras                                         |
+      +- google                                      |
+         +- google_play_services                     | package / (2)
+            +- source.properties                     | source_prop / (3)
+            +- LICENSE                               | license / (4)
+            +- google_play_services_library.zip.sha1 | lib_zip_sha1 / (5)
+            +- libproject                            |
+               +- google-play-services_lib           | lib / (6)
+                  +- res                             |
+                     +- values                       |
+                        +- version.xml               | version_xml / (7)
+
+  Notes:
+
+  1. sdk_root: Path provided as a parameter to the script (--sdk-root).
+  2. package: This directory contains the Google Play services SDK itself.
+     When downloaded via the Android SDK manager, it will contain
+     documentation, samples and other files in addition to the library. When
+     the update script downloads the library from our cloud storage, it is
+     cleared.
+  3. source_prop: File created by the Android SDK manager that contains
+     the package information, such as the version info and the license.
+  4. license: File created by the update script. Contains the license accepted
+     by the user.
+  5. lib_zip_sha1: sha1 of the library zip that has been installed by the
+     update script. It is compared with the one required by the config file to
+     check if an update is necessary.
+  6. lib: Contains the library itself: jar and resources. This is what is
+     downloaded from the cloud storage.
+  7. version_xml: File that contains the exact Google Play services library
+     version, the one that we track. The version looks like 811500 and is used
+     in the code and the on-device APK, as opposed to the SDK package version,
+     which looks like 27.0.0 and is used only by the Android SDK manager.
+  '''
+
+  def __init__(self, sdk_root, version_xml_path):
+    relative_package = os.path.join('extras', 'google', 'google_play_services')
+    relative_lib = os.path.join(relative_package, 'libproject',
+                                'google-play-services_lib')
+    self.sdk_root = sdk_root
+
+    self.package = os.path.join(sdk_root, relative_package)
+    self.lib_zip_sha1 = os.path.join(self.package, ZIP_FILE_NAME + '.sha1')
+    self.license = os.path.join(self.package, LICENSE_FILE_NAME)
+    self.source_prop = os.path.join(self.package, 'source.properties')
+
+    self.lib = os.path.join(sdk_root, relative_lib)
+    self.version_xml = os.path.join(self.lib, version_xml_path)
+
+
+class DummyGsutil(download_from_google_storage.Gsutil):
+  '''
+  Class that replaces Gsutil to use a local directory instead of an online
+  bucket. It relies on the fact that Gsutil commands are very similar to shell
+  ones, so for the ones used here (ls, cp), it works to just use them with a
+  local directory.
+  '''
+
+  def __init__(self):
+    super(DummyGsutil, self).__init__(
+        download_from_google_storage.GSUTIL_DEFAULT_PATH)
+
+  def call(self, *args):
+    logging.debug('Calling command "%s"', str(args))
+    return cmd_helper.GetCmdStatusOutputAndError(args)
+
+  def check_call(self, *args):
+    logging.debug('Calling command "%s"', str(args))
+    return cmd_helper.GetCmdStatusOutputAndError(args)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/play_services/update_test.py b/build/android/play_services/update_test.py
new file mode 100644
index 00000000000..fd6815405f7
--- /dev/null
+++ b/build/android/play_services/update_test.py
@@ -0,0 +1,416 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Unittests for update.py.
+ +They set up a temporary directory that is used to mock a bucket, the directory +containing the configuration files and the android sdk directory. + +Tests run the script with various inputs and check the status of the filesystem +''' + +import shutil +import tempfile +import unittest +import os +import sys +import zipfile +import contextlib + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir)) +from play_services import update + + +class TestFunctions(unittest.TestCase): + DEFAULT_CONFIG_VERSION = 42 + DEFAULT_LICENSE = 'Default License' + DEFAULT_ZIP_SHA1 = 'zip0and0filling0to0forty0chars0000000000' + + def __init__(self, *args, **kwargs): + super(TestFunctions, self).__init__(*args, **kwargs) + self.paths = None # Initialized in SetUpWorkdir + self.workdir = None # Initialized in setUp + + #override + def setUp(self): + self.workdir = tempfile.mkdtemp() + + #override + def tearDown(self): + shutil.rmtree(self.workdir) + self.workdir = None + + def testUpload(self): + version = 1337 + self.SetUpWorkdir( + xml_version=version, + gms_lib=True, + source_prop=True) + + status = update.main([ + 'upload', + '--dry-run', + '--skip-git', + '--bucket', self.paths.bucket, + '--config', self.paths.config_file, + '--sdk-root', self.paths.sdk_root + ]) + self.assertEqual(status, 0, 'the command should have succeeded.') + + # bucket should contain license, name = license.sha1 + self.assertTrue(os.path.isfile(self.paths.config_license_sha1)) + license_sha1 = _GetFileContent(self.paths.config_license_sha1) + bucket_license = os.path.join(self.paths.bucket, str(version), + license_sha1) + self.assertTrue(os.path.isfile(bucket_license)) + self.assertEqual(_GetFileContent(bucket_license), self.DEFAULT_LICENSE) + + # bucket should contain zip, name = zip.sha1 + self.assertTrue(os.path.isfile(self.paths.config_zip_sha1)) + bucket_zip = os.path.join(self.paths.bucket, str(version), + _GetFileContent(self.paths.config_zip_sha1)) + self.assertTrue(os.path.isfile(bucket_zip)) + + # unzip, should contain expected files + with zipfile.ZipFile(bucket_zip, "r") as bucket_zip_file: + self.assertEqual(bucket_zip_file.namelist(), + ['dummy_file', 'res/values/version.xml']) + + def testUploadAlreadyLatestVersion(self): + self.SetUpWorkdir( + xml_version=self.DEFAULT_CONFIG_VERSION, + gms_lib=True, + source_prop=True) + + status = update.main([ + 'upload', + '--dry-run', + '--skip-git', + '--bucket', self.paths.bucket, + '--config', self.paths.config_file, + '--sdk-root', self.paths.sdk_root, + ]) + self.assertEqual(status, 0, 'the command should have succeeded.') + + # bucket should be empty + self.assertFalse(os.listdir(self.paths.bucket)) + self.assertFalse(os.path.isfile(self.paths.config_license_sha1)) + self.assertFalse(os.path.isfile(self.paths.config_zip_sha1)) + + def testDownload(self): + self.SetUpWorkdir(populate_bucket=True) + + with _MockedInput('y'): + status = update.main([ + 'download', + '--dry-run', + '--bucket', self.paths.bucket, + '--config', self.paths.config_file, + '--sdk-root', self.paths.sdk_root, + ]) + + self.assertEqual(status, 0, 'the command should have succeeded.') + + # sdk_root should contain zip contents, zip sha1, license + self.assertTrue(os.path.isfile(os.path.join(self.paths.gms_lib, + 'dummy_file'))) + self.assertTrue(os.path.isfile(self.paths.gms_root_sha1)) + self.assertTrue(os.path.isfile(self.paths.gms_root_license)) + self.assertEquals(_GetFileContent(self.paths.gms_root_license), + self.DEFAULT_LICENSE) + + def testDownloadBot(self): + 
self.SetUpWorkdir(populate_bucket=True, bot_env=True) + + # No need to type 'y' on bots + status = update.main([ + 'download', + '--dry-run', + '--bucket', self.paths.bucket, + '--config', self.paths.config_file, + '--sdk-root', self.paths.sdk_root, + ]) + + self.assertEqual(status, 0, 'the command should have succeeded.') + + # sdk_root should contain zip contents, zip sha1, license + self.assertTrue(os.path.isfile(os.path.join(self.paths.gms_lib, + 'dummy_file'))) + self.assertTrue(os.path.isfile(self.paths.gms_root_sha1)) + self.assertTrue(os.path.isfile(self.paths.gms_root_license)) + self.assertEquals(_GetFileContent(self.paths.gms_root_license), + self.DEFAULT_LICENSE) + + def testDownloadAlreadyUpToDate(self): + self.SetUpWorkdir( + populate_bucket=True, + existing_zip_sha1=self.DEFAULT_ZIP_SHA1) + + status = update.main([ + 'download', + '--dry-run', + '--bucket', self.paths.bucket, + '--config', self.paths.config_file, + '--sdk-root', self.paths.sdk_root, + ]) + + self.assertEqual(status, 0, 'the command should have succeeded.') + + # there should not be new files downloaded to sdk_root + self.assertFalse(os.path.isfile(os.path.join(self.paths.gms_lib, + 'dummy_file'))) + self.assertFalse(os.path.isfile(self.paths.gms_root_license)) + + def testDownloadAcceptedLicense(self): + self.SetUpWorkdir( + populate_bucket=True, + existing_license=self.DEFAULT_LICENSE) + + # License already accepted, no need to type + status = update.main([ + 'download', + '--dry-run', + '--bucket', self.paths.bucket, + '--config', self.paths.config_file, + '--sdk-root', self.paths.sdk_root, + ]) + + self.assertEqual(status, 0, 'the command should have succeeded.') + + # sdk_root should contain zip contents, zip sha1, license + self.assertTrue(os.path.isfile(os.path.join(self.paths.gms_lib, + 'dummy_file'))) + self.assertTrue(os.path.isfile(self.paths.gms_root_sha1)) + self.assertTrue(os.path.isfile(self.paths.gms_root_license)) + self.assertEquals(_GetFileContent(self.paths.gms_root_license), + self.DEFAULT_LICENSE) + + def testDownloadNewLicense(self): + self.SetUpWorkdir( + populate_bucket=True, + existing_license='Old license') + + with _MockedInput('y'): + status = update.main([ + 'download', + '--dry-run', + '--bucket', self.paths.bucket, + '--config', self.paths.config_file, + '--sdk-root', self.paths.sdk_root, + ]) + + self.assertEqual(status, 0, 'the command should have succeeded.') + + # sdk_root should contain zip contents, zip sha1, NEW license + self.assertTrue(os.path.isfile(os.path.join(self.paths.gms_lib, + 'dummy_file'))) + self.assertTrue(os.path.isfile(self.paths.gms_root_sha1)) + self.assertTrue(os.path.isfile(self.paths.gms_root_license)) + self.assertEquals(_GetFileContent(self.paths.gms_root_license), + self.DEFAULT_LICENSE) + + def testDownloadRefusedLicense(self): + self.SetUpWorkdir( + populate_bucket=True, + existing_license='Old license') + + with _MockedInput('n'): + status = update.main([ + 'download', + '--dry-run', + '--bucket', self.paths.bucket, + '--config', self.paths.config_file, + '--sdk-root', self.paths.sdk_root, + ]) + + self.assertEqual(status, 0, 'the command should have succeeded.') + + # there should not be new files downloaded to sdk_root + self.assertFalse(os.path.isfile(os.path.join(self.paths.gms_lib, + 'dummy_file'))) + self.assertEquals(_GetFileContent(self.paths.gms_root_license), + 'Old license') + + def testDownloadNoAndroidSDK(self): + self.SetUpWorkdir( + populate_bucket=True, + existing_license='Old license') + + non_existing_sdk_root = 
os.path.join(self.workdir, 'non_existing_sdk_root')
+    # Should not run, no typing needed
+    status = update.main([
+        'download',
+        '--dry-run',
+        '--bucket', self.paths.bucket,
+        '--config', self.paths.config_file,
+        '--sdk-root', non_existing_sdk_root,
+    ])
+
+    self.assertEqual(status, 0, 'the command should have succeeded.')
+    self.assertFalse(os.path.isdir(non_existing_sdk_root))
+
+  def SetUpWorkdir(self,
+                   bot_env=False,
+                   config_version=DEFAULT_CONFIG_VERSION,
+                   existing_license=None,
+                   existing_zip_sha1=None,
+                   gms_lib=False,
+                   populate_bucket=False,
+                   source_prop=None,
+                   xml_version=None):
+    '''Prepares the workdir by putting it in the specified state.
+
+    Args:
+      - general
+        bot_env: sets or unsets CHROME_HEADLESS
+
+      - bucket
+        populate_bucket: boolean. Populate the bucket with a zip and license
+          file. The sha1s will be copied to the config directory.
+
+      - config
+        config_version: number. Version of the current SDK. Defaults to
+          `self.DEFAULT_CONFIG_VERSION`
+
+      - sdk_root
+        existing_license: string. Create a LICENSE file setting the specified
+          text as content of the currently accepted license.
+        existing_zip_sha1: string. Create a sha1 file setting the specified
+          hash as hash of the SDK supposed to be installed.
+        gms_lib: boolean. Create a dummy file in the location of the play
+          services SDK.
+        source_prop: boolean. Create a source.properties file that contains
+          the license to upload.
+        xml_version: number. Create a version.xml file with the specified
+          version that is used when uploading.
+    '''
+    self.paths = Paths(self.workdir)
+
+    # Create the main directories
+    _MakeDirs(self.paths.sdk_root)
+    _MakeDirs(self.paths.config_dir)
+    _MakeDirs(self.paths.bucket)
+
+    # The SHA1 directory is not configurable via command line argument, so
+    # point the script at the test's config directory directly.
+    update.SHA1_DIRECTORY = self.paths.config_dir
+
+    os.environ['CHROME_HEADLESS'] = '1' if bot_env else ''
+
+    if config_version:
+      _MakeDirs(os.path.dirname(self.paths.config_file))
+      with open(self.paths.config_file, 'w') as stream:
+        stream.write(('{"version_number":%d,'
+                      '"version_xml_path": "res/values/version.xml"}'
+                      '\n') % config_version)
+
+    if existing_license:
+      _MakeDirs(self.paths.gms_root)
+      with open(self.paths.gms_root_license, 'w') as stream:
+        stream.write(existing_license)
+
+    if existing_zip_sha1:
+      _MakeDirs(self.paths.gms_root)
+      with open(self.paths.gms_root_sha1, 'w') as stream:
+        stream.write(existing_zip_sha1)
+
+    if gms_lib:
+      _MakeDirs(self.paths.gms_lib)
+      with open(os.path.join(self.paths.gms_lib, 'dummy_file'), 'w') as stream:
+        stream.write('foo\n')
+
+    if source_prop:
+      _MakeDirs(os.path.dirname(self.paths.source_prop))
+      with open(self.paths.source_prop, 'w') as stream:
+        stream.write('Foo=Bar\n'
+                     'Pkg.License=%s\n'
+                     'Baz=Fizz\n' % self.DEFAULT_LICENSE)
+
+    if populate_bucket:
+      _MakeDirs(self.paths.config_dir)
+      bucket_dir = os.path.join(self.paths.bucket, str(config_version))
+      _MakeDirs(bucket_dir)
+
+      # TODO(dgn) should we use real sha1s? Comparison with the real sha1 is
+      # done but does not do anything other than displaying a message.
+      config_license_sha1 = 'license0and0filling0to0forty0chars000000'
+      with open(self.paths.config_license_sha1, 'w') as stream:
+        stream.write(config_license_sha1)
+
+      with open(os.path.join(bucket_dir, config_license_sha1), 'w') as stream:
+        stream.write(self.DEFAULT_LICENSE)
+
+      config_zip_sha1 = self.DEFAULT_ZIP_SHA1
+      with open(self.paths.config_zip_sha1, 'w') as stream:
+        stream.write(config_zip_sha1)
+
+      pre_zip_lib = os.path.join(self.workdir, 'pre_zip_lib')
+      post_zip_lib = os.path.join(bucket_dir, config_zip_sha1)
+      _MakeDirs(pre_zip_lib)
+      with open(os.path.join(pre_zip_lib, 'dummy_file'), 'w') as stream:
+        stream.write('foo\n')
+      shutil.make_archive(post_zip_lib, 'zip', pre_zip_lib)
+      # make_archive appends .zip
+      shutil.move(post_zip_lib + '.zip', post_zip_lib)
+
+    if xml_version:
+      _MakeDirs(os.path.dirname(self.paths.xml_version))
+      with open(self.paths.xml_version, 'w') as stream:
+        stream.write(
+            '<?xml version="1.0" encoding="utf-8"?>\n'
+            '<resources>\n'
+            '    <integer name="google_play_services_version">%d</integer>\n'
+            '</resources>\n' % xml_version)
+
+
+class Paths(object):
+  '''Declaration of the paths commonly manipulated in the tests.'''
+
+  def __init__(self, workdir):
+    self.bucket = os.path.join(workdir, 'bucket')
+
+    self.config_dir = os.path.join(workdir, 'config')
+    self.config_file = os.path.join(self.config_dir, 'config.json')
+    self.config_license_sha1 = os.path.join(self.config_dir, 'LICENSE.sha1')
+    self.config_zip_sha1 = os.path.join(
+        self.config_dir,
+        'google_play_services_library.zip.sha1')
+
+    self.sdk_root = os.path.join(workdir, 'sdk_root')
+    self.gms_root = os.path.join(self.sdk_root, 'extras', 'google',
+                                 'google_play_services')
+    self.gms_root_sha1 = os.path.join(self.gms_root,
+                                      'google_play_services_library.zip.sha1')
+    self.gms_root_license = os.path.join(self.gms_root, 'LICENSE')
+    self.source_prop = os.path.join(self.gms_root, 'source.properties')
+    self.gms_lib = os.path.join(self.gms_root, 'libproject',
+                                'google-play-services_lib')
+    self.xml_version = os.path.join(self.gms_lib, 'res', 'values',
+                                    'version.xml')
+
+
+def _GetFileContent(file_path):
+  with open(file_path, 'r') as stream:
+    return stream.read()
+
+
+def _MakeDirs(path):
+  '''Avoids having to do the error handling everywhere.'''
+  if not os.path.exists(path):
+    os.makedirs(path)
+
+
+@contextlib.contextmanager
+def _MockedInput(typed_string):
+  '''Makes raw_input return |typed_string| while inside the context.'''
+  try:
+    original_raw_input = __builtins__.raw_input
+    __builtins__.raw_input = lambda _: typed_string
+    yield
+  finally:
+    __builtins__.raw_input = original_raw_input
+
+
+if __name__ == '__main__':
+  unittest.main()
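_MockedInput above swaps raw_input in __builtins__ by hand so the tests can answer the license prompt. The same effect could be had with the mock package (a separate install on Python 2, bundled as unittest.mock on Python 3); a hypothetical sketch, not something these tests actually use:

    import mock  # third-party on Python 2; unittest.mock in Python 3

    with mock.patch('__builtin__.raw_input', return_value='y'):
      # Anything reading from the prompt now sees 'y'.
      assert raw_input('> ') == 'y'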
diff --git a/build/android/play_services/utils.py b/build/android/play_services/utils.py
new file mode 100644
index 00000000000..acc6cf45096
--- /dev/null
+++ b/build/android/play_services/utils.py
@@ -0,0 +1,170 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''
+Utility functions for all things related to manipulating Google Play services
+related files.
+'''
+
+import argparse
+import filecmp
+import json
+import logging
+import os
+import re
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from devil.utils import cmd_helper
+
+
+_XML_VERSION_NUMBER_PATTERN = re.compile(
+    r'<integer name="google_play_services_version">(\d+)<\/integer>')
+
+
+class DefaultsRawHelpFormatter(argparse.ArgumentDefaultsHelpFormatter,
+                               argparse.RawDescriptionHelpFormatter):
+  '''
+  Combines the features of RawDescriptionHelpFormatter and
+  ArgumentDefaultsHelpFormatter, providing defaults for the arguments and raw
+  text for the description.
+  '''
+  pass
+
+
+class ConfigParser(object):
+  '''Reads and writes the configuration files for play services related scripts
+
+  The configuration files are JSON files. Here is the data they are expected
+  to contain:
+
+  - version_number
+    Number. Mirrors @integer/google_play_services_version from the library.
+    Example: 815000
+
+  - sdk_version
+    Version of the Play Services SDK to retrieve, when preprocessing the
+    library from a maven/gradle repository.
+    Example: "8.1.0"
+
+  - clients
+    List of strings. Name of the clients (or play services modules) to
+    include when preprocessing the library.
+    Example: ["play-services-base", "play-services-cast"]
+
+  - version_xml_path
+    String. Path to the version.xml file describing the current version.
+    Should be relative to the library base directory.
+    Example: "res/values/version.xml"
+
+  - locale_whitelist
+    List of strings. List of locales to keep from the resources. Can be
+    obtained by generating an android build and looking at the content of
+    `out/Debug/gen/chrome/java/res`; or looking at the android section in
+    `//chrome/app/generated_resources.grd`
+    Example: ["am", "ar", "bg", "ca", "cs"]
+
+  - resource_whitelist
+    List of strings. List of resource files to explicitly keep in the final
+    output. Use it to keep drawables for example, as we currently remove them
+    all.
+    Example: ["play-services-base/res/drawables/foobar.xml"]
+  '''
+  _VERSION_NUMBER_KEY = 'version_number'
+
+  def __init__(self, path):
+    self.path = path
+    self._data = {}
+
+    with open(path, 'r') as stream:
+      self._data = json.load(stream)
+
+  @property
+  def version_number(self):
+    return self._data.get(self._VERSION_NUMBER_KEY)
+
+  @property
+  def sdk_version(self):
+    return self._data.get('sdk_version')
+
+  @property
+  def clients(self):
+    return self._data.get('clients') or []
+
+  @property
+  def version_xml_path(self):
+    return self._data.get('version_xml_path')
+
+  @property
+  def locale_whitelist(self):
+    return self._data.get('locale_whitelist') or []
+
+  @property
+  def resource_whitelist(self):
+    return self._data.get('resource_whitelist') or []
+
+  def UpdateVersionNumber(self, new_version_number):
+    '''Updates the version number and saves it in the configuration file.'''
+
+    with open(self.path, 'w') as stream:
+      self._data[self._VERSION_NUMBER_KEY] = new_version_number
+      stream.write(DumpTrimmedJson(self._data))
+
+
+def DumpTrimmedJson(json_data):
+  '''
+  Default formatting when dumping json to string has trailing spaces and lacks
+  a new line at the end. This function fixes that.
+  '''
+
+  out = json.dumps(json_data, sort_keys=True, indent=2)
+  out = out.replace(' ' + os.linesep, os.linesep)
+  return out + os.linesep
+
+
+def FileEquals(expected_file, actual_file):
+  '''
+  Returns whether the two files are equal. Returns False if either of the
+  files doesn't exist.
+  '''
+
+  if not os.path.isfile(actual_file) or not os.path.isfile(expected_file):
+    return False
+  return filecmp.cmp(expected_file, actual_file)
+
+
+def IsRepoDirty(repo_root):
+  '''Returns True if there are staged or modified files, False otherwise.'''
+
+  # diff-index returns 1 if there are staged changes or modified files,
+  # 0 otherwise
+  cmd = ['git', 'diff-index', '--quiet', 'HEAD']
+  return cmd_helper.Call(cmd, cwd=repo_root) == 1
+
+
+def GetVersionNumberFromLibraryResources(version_xml):
+  '''
+  Extracts a Google Play services version number from its version.xml file.
+  '''
+
+  with open(version_xml, 'r') as version_file:
+    version_file_content = version_file.read()
+
+  match = _XML_VERSION_NUMBER_PATTERN.search(version_file_content)
+  if not match:
+    raise AttributeError('A value for google_play_services_version was not '
+                         'found in ' + version_xml)
+  return int(match.group(1))
+
+
+def MakeLocalCommit(repo_root, files_to_commit, message):
+  '''Makes a local git commit.'''
+
+  logging.debug('Staging files (%s) for commit.', files_to_commit)
+  if cmd_helper.Call(['git', 'add'] + files_to_commit, cwd=repo_root) != 0:
+    raise Exception('The local commit failed.')
+
+  logging.debug('Committing.')
+  if cmd_helper.Call(['git', 'commit', '-m', message], cwd=repo_root) != 0:
+    raise Exception('The local commit failed.')
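To make the version.xml contract concrete, here is a small round trip through GetVersionNumberFromLibraryResources. The sample content mirrors what update_test.SetUpWorkdir writes, and the import assumes build/android is on sys.path:

    import tempfile

    from play_services import utils  # assumes build/android on sys.path

    sample = ('<?xml version="1.0" encoding="utf-8"?>\n'
              '<resources>\n'
              '    <integer name="google_play_services_version">8487000'
              '</integer>\n'
              '</resources>\n')

    with tempfile.NamedTemporaryFile(suffix='.xml', delete=False) as xml_file:
      xml_file.write(sample)

    # The regex in utils extracts the integer resource's value.
    assert utils.GetVersionNumberFromLibraryResources(xml_file.name) == 8487000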
+ ''' + + if not os.path.isfile(actual_file) or not os.path.isfile(expected_file): + return False + return filecmp.cmp(expected_file, actual_file) + + +def IsRepoDirty(repo_root): + '''Returns True if there are no staged or modified files, False otherwise.''' + + # diff-index returns 1 if there are staged changes or modified files, + # 0 otherwise + cmd = ['git', 'diff-index', '--quiet', 'HEAD'] + return cmd_helper.Call(cmd, cwd=repo_root) == 1 + + +def GetVersionNumberFromLibraryResources(version_xml): + ''' + Extracts a Google Play services version number from its version.xml file. + ''' + + with open(version_xml, 'r') as version_file: + version_file_content = version_file.read() + + match = _XML_VERSION_NUMBER_PATTERN.search(version_file_content) + if not match: + raise AttributeError('A value for google_play_services_version was not ' + 'found in ' + version_xml) + return int(match.group(1)) + + +def MakeLocalCommit(repo_root, files_to_commit, message): + '''Makes a local git commit.''' + + logging.debug('Staging files (%s) for commit.', files_to_commit) + if cmd_helper.Call(['git', 'add'] + files_to_commit, cwd=repo_root) != 0: + raise Exception('The local commit failed.') + + logging.debug('Committing.') + if cmd_helper.Call(['git', 'commit', '-m', message], cwd=repo_root) != 0: + raise Exception('The local commit failed.') diff --git a/build/android/provision_devices.py b/build/android/provision_devices.py new file mode 100644 index 00000000000..d079edeeb5c --- /dev/null +++ b/build/android/provision_devices.py @@ -0,0 +1,559 @@ +#!/usr/bin/env python +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Provisions Android devices with settings required for bots. + +Usage: + ./provision_devices.py [-d ] +""" + +import argparse +import datetime +import json +import logging +import os +import posixpath +import re +import subprocess +import sys +import time + +# Import _strptime before threaded code. datetime.datetime.strptime is +# threadsafe except for the initial import of the _strptime module. +# See crbug.com/584730 and https://bugs.python.org/issue7980. +import _strptime # pylint: disable=unused-import + +import devil_chromium +from devil.android import battery_utils +from devil.android import device_blacklist +from devil.android import device_errors +from devil.android import device_temp_file +from devil.android import device_utils +from devil.android.sdk import keyevent +from devil.android.sdk import version_codes +from devil.constants import exit_codes +from devil.utils import run_tests_helper +from devil.utils import timeout_retry +from pylib import constants +from pylib import device_settings +from pylib.constants import host_paths + +_SYSTEM_WEBVIEW_PATHS = ['/system/app/webview', '/system/app/WebViewGoogle'] +_CHROME_PACKAGE_REGEX = re.compile('.*chrom.*') +_TOMBSTONE_REGEX = re.compile('tombstone.*') + + +class _DEFAULT_TIMEOUTS(object): + # L can take a while to reboot after a wipe. 
+ LOLLIPOP = 600 + PRE_LOLLIPOP = 180 + + HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP) + + +class _PHASES(object): + WIPE = 'wipe' + PROPERTIES = 'properties' + FINISH = 'finish' + + ALL = [WIPE, PROPERTIES, FINISH] + + +def ProvisionDevices(args): + blacklist = (device_blacklist.Blacklist(args.blacklist_file) + if args.blacklist_file + else None) + devices = [d for d in device_utils.DeviceUtils.HealthyDevices(blacklist) + if not args.emulators or d.adb.is_emulator] + if args.device: + devices = [d for d in devices if d == args.device] + if not devices: + raise device_errors.DeviceUnreachableError(args.device) + parallel_devices = device_utils.DeviceUtils.parallel(devices) + if args.emulators: + parallel_devices.pMap(SetProperties, args) + else: + parallel_devices.pMap(ProvisionDevice, blacklist, args) + if args.auto_reconnect: + _LaunchHostHeartbeat() + blacklisted_devices = blacklist.Read() if blacklist else [] + if args.output_device_blacklist: + with open(args.output_device_blacklist, 'w') as f: + json.dump(blacklisted_devices, f) + if all(d in blacklisted_devices for d in devices): + raise device_errors.NoDevicesError + return 0 + + +def ProvisionDevice(device, blacklist, options): + def should_run_phase(phase_name): + return not options.phases or phase_name in options.phases + + def run_phase(phase_func, reboot_timeout, reboot=True): + try: + device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0) + except device_errors.CommandTimeoutError: + logging.error('Device did not finish booting. Will try to reboot.') + device.Reboot(timeout=reboot_timeout) + phase_func(device, options) + if reboot: + device.Reboot(False, retries=0) + device.adb.WaitForDevice() + + try: + if options.reboot_timeout: + reboot_timeout = options.reboot_timeout + elif device.build_version_sdk >= version_codes.LOLLIPOP: + reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP + else: + reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP + + if should_run_phase(_PHASES.WIPE): + if (options.chrome_specific_wipe or device.IsUserBuild() or + device.build_version_sdk >= version_codes.MARSHMALLOW): + run_phase(WipeChromeData, reboot_timeout) + else: + run_phase(WipeDevice, reboot_timeout) + + if should_run_phase(_PHASES.PROPERTIES): + run_phase(SetProperties, reboot_timeout) + + if should_run_phase(_PHASES.FINISH): + run_phase(FinishProvisioning, reboot_timeout, reboot=False) + + if options.chrome_specific_wipe: + package = "com.google.android.gms" + version_name = device.GetApplicationVersion(package) + logging.info("Version name for %s is %s", package, version_name) + + CheckExternalStorage(device) + + except device_errors.CommandTimeoutError: + logging.exception('Timed out waiting for device %s. Adding to blacklist.', + str(device)) + if blacklist: + blacklist.Extend([str(device)], reason='provision_timeout') + + except device_errors.CommandFailedError: + logging.exception('Failed to provision device %s. Adding to blacklist.', + str(device)) + if blacklist: + blacklist.Extend([str(device)], reason='provision_failure') + +def CheckExternalStorage(device): + """Checks that storage is writable and if not makes it writable. + + Arguments: + device: The device to check. + """ + try: + with device_temp_file.DeviceTempFile( + device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f: + device.WriteFile(f.name, 'test') + except device_errors.CommandFailedError: + logging.info('External storage not writable. 
Remounting / as RW')
+    device.RunShellCommand(['mount', '-o', 'remount,rw', '/'],
+                           check_return=True, as_root=True)
+    device.EnableRoot()
+    with device_temp_file.DeviceTempFile(
+        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
+      device.WriteFile(f.name, 'test')
+
+
+def WipeChromeData(device, options):
+  """Wipes Chrome-specific data from the device.
+
+  (1) Uninstalls any app whose name matches *chrom*, except
+      com.android.chrome, which is the Chrome stable package. Doing so also
+      removes the corresponding dirs under /data/data/ and /data/app/.
+  (2) Removes any dir under /data/app-lib/ whose name matches *chrom*.
+  (3) Removes any files under /data/tombstones/ whose name matches
+      "tombstone*".
+  (4) Removes /data/local.prop if there is any.
+  (5) Removes /data/local/chrome-command-line if there is any.
+  (6) Removes anything under /data/local/.config/ if the dir exists
+      (this is telemetry related).
+  (7) Removes anything under /data/local/tmp/.
+
+  Arguments:
+    device: the device to wipe
+    options: the command line options (only options.skip_wipe is used here)
+  """
+  if options.skip_wipe:
+    return
+
+  try:
+    if device.IsUserBuild():
+      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
+                        constants.PACKAGE_INFO['chrome_stable'].package)
+      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
+                             check_return=True)
+      device.RunShellCommand('rm -rf /data/local/tmp/*', check_return=True)
+    else:
+      device.EnableRoot()
+      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
+                        constants.PACKAGE_INFO['chrome_stable'].package)
+      _WipeUnderDirIfMatch(device, '/data/app-lib/', _CHROME_PACKAGE_REGEX)
+      _WipeUnderDirIfMatch(device, '/data/tombstones/', _TOMBSTONE_REGEX)
+
+      _WipeFileOrDir(device, '/data/local.prop')
+      _WipeFileOrDir(device, '/data/local/chrome-command-line')
+      _WipeFileOrDir(device, '/data/local/.config/')
+      _WipeFileOrDir(device, '/data/local/tmp/')
+      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
+                             check_return=True)
+  except device_errors.CommandFailedError:
+    logging.exception('Possible failure while wiping the device. '
+                      'Attempting to continue.')
+
+
+def WipeDevice(device, options):
+  """Wipes data from the device, keeping only the adb_keys for authorization.
+
+  After wiping data on a device that has been authorized, adb can still
+  communicate with the device, but after reboot the device will need to be
+  re-authorized because the adb keys file is stored in /data/misc/adb/.
+  Thus, the adb_keys file is rewritten so the device does not need to be
+  re-authorized.
+
+  Arguments:
+    device: the device to wipe
+    options: the command line options (options.skip_wipe and
+      options.adb_key_files are used here)
+  """
+  if options.skip_wipe:
+    return
+
+  try:
+    device.EnableRoot()
+    device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
+    if device_authorized:
+      adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
+                                 as_root=True).splitlines()
+    device.RunShellCommand(['wipe', 'data'],
+                           as_root=True, check_return=True)
+    device.adb.WaitForDevice()
+
+    if device_authorized:
+      adb_keys_set = set(adb_keys)
+      for adb_key_file in options.adb_key_files or []:
+        try:
+          with open(adb_key_file, 'r') as f:
+            adb_public_keys = f.readlines()
+          adb_keys_set.update(adb_public_keys)
+        except IOError:
+          logging.warning('Unable to find adb keys file %s.', adb_key_file)
+      _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
+  except device_errors.CommandFailedError:
+    logging.exception('Possible failure while wiping the device. '
+                      'Attempting to continue.')
+
+
+def _WriteAdbKeysFile(device, adb_keys_string):
+  dir_path = posixpath.dirname(constants.ADB_KEYS_FILE)
+  device.RunShellCommand(['mkdir', '-p', dir_path],
+                         as_root=True, check_return=True)
+  device.RunShellCommand(['restorecon', dir_path],
+                         as_root=True, check_return=True)
+  device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
+  device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
+                         as_root=True, check_return=True)
+
+
+def SetProperties(device, options):
+  try:
+    device.EnableRoot()
+  except device_errors.CommandFailedError as e:
+    logging.warning(str(e))
+
+  if not device.IsUserBuild():
+    _ConfigureLocalProperties(device, options.enable_java_debug)
+  else:
+    logging.warning('Cannot configure properties in user builds.')
+  device_settings.ConfigureContentSettings(
+      device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
+  if options.disable_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_LOCATION_SETTINGS)
+
+  if options.disable_mock_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)
+
+  device_settings.SetLockScreenSettings(device)
+  if options.disable_network:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.NETWORK_DISABLED_SETTINGS)
+    if device.build_version_sdk >= version_codes.MARSHMALLOW:
+      # Ensure that NFC is also switched off.
+      device.RunShellCommand(['svc', 'nfc', 'disable'],
+                             as_root=True, check_return=True)
+
+  if options.disable_system_chrome:
+    # The system chrome version on the device interferes with some tests.
+    device.RunShellCommand(['pm', 'disable', 'com.android.chrome'],
+                           check_return=True)
+
+  if options.remove_system_webview:
+    if any(device.PathExists(p) for p in _SYSTEM_WEBVIEW_PATHS):
+      logging.info('System WebView exists and needs to be removed')
+      if device.HasRoot():
+        # Disable Marshmallow's Verity security feature.
+        if device.build_version_sdk >= version_codes.MARSHMALLOW:
+          device.adb.DisableVerity()
+          device.Reboot()
+          device.WaitUntilFullyBooted()
+          device.EnableRoot()
+
+        # This is required, e.g., to replace the system webview on a device.
+        device.adb.Remount()
+        device.RunShellCommand(['stop'], check_return=True)
+        device.RunShellCommand(['rm', '-rf'] + _SYSTEM_WEBVIEW_PATHS,
+                               check_return=True)
+        device.RunShellCommand(['start'], check_return=True)
+      else:
+        logging.warning('Cannot remove system webview from a non-rooted device')
+    else:
+      logging.info('System WebView already removed')
+
+  # Some device types can momentarily disappear after setting properties.
+  device.adb.WaitForDevice()
+
+
+def _ConfigureLocalProperties(device, java_debug=True):
+  """Set standard readonly testing device properties prior to reboot."""
+  local_props = [
+      'persist.sys.usb.config=adb',
+      'ro.monkey=1',
+      'ro.test_harness=1',
+      'ro.audio.silent=1',
+      'ro.setupwizard.mode=DISABLED',
+  ]
+  if java_debug:
+    local_props.append(
+        '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
+    local_props.append('debug.checkjni=1')
+  try:
+    device.WriteFile(
+        device.LOCAL_PROPERTIES_PATH,
+        '\n'.join(local_props), as_root=True)
+    # Android will not respect the local props file if it is world writable.
+ device.RunShellCommand( + ['chmod', '644', device.LOCAL_PROPERTIES_PATH], + as_root=True, check_return=True) + except device_errors.CommandFailedError: + logging.exception('Failed to configure local properties.') + + +def FinishProvisioning(device, options): + # The lockscreen can't be disabled on user builds, so send a keyevent + # to unlock it. + if device.IsUserBuild(): + device.SendKeyEvent(keyevent.KEYCODE_MENU) + + if options.min_battery_level is not None: + battery = battery_utils.BatteryUtils(device) + try: + battery.ChargeDeviceToLevel(options.min_battery_level) + except device_errors.DeviceChargingError: + device.Reboot() + battery.ChargeDeviceToLevel(options.min_battery_level) + + if options.max_battery_temp is not None: + try: + battery = battery_utils.BatteryUtils(device) + battery.LetBatteryCoolToTemperature(options.max_battery_temp) + except device_errors.CommandFailedError: + logging.exception('Unable to let battery cool to specified temperature.') + + def _set_and_verify_date(): + if device.build_version_sdk >= version_codes.MARSHMALLOW: + date_format = '%m%d%H%M%Y.%S' + set_date_command = ['date', '-u'] + get_date_command = ['date', '-u'] + else: + date_format = '%Y%m%d.%H%M%S' + set_date_command = ['date', '-s'] + get_date_command = ['date'] + + # TODO(jbudorick): This is wrong on pre-M devices -- get/set are + # dealing in local time, but we're setting based on GMT. + strgmtime = time.strftime(date_format, time.gmtime()) + set_date_command.append(strgmtime) + device.RunShellCommand(set_date_command, as_root=True, check_return=True) + + get_date_command.append('+"%Y%m%d.%H%M%S"') + device_time = device.RunShellCommand( + get_date_command, as_root=True, single_line=True).replace('"', '') + device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S") + correct_time = datetime.datetime.strptime(strgmtime, date_format) + tdelta = (correct_time - device_time).seconds + if tdelta <= 1: + logging.info('Date/time successfully set on %s', device) + return True + else: + logging.error('Date mismatch. Device: %s Correct: %s', + device_time.isoformat(), correct_time.isoformat()) + return False + + # Sometimes the date is not set correctly on the devices. Retry on failure. + if device.IsUserBuild(): + # TODO(bpastene): Figure out how to set the date & time on user builds. 
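+ # (Setting the clock below runs `date` with as_root=True, and root is
+ # unavailable on user builds, so this branch is a no-op for now.)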
+ pass
+ else:
+ if not timeout_retry.WaitFor(
+ _set_and_verify_date, wait_period=1, max_tries=2):
+ raise device_errors.CommandFailedError(
+ 'Failed to set date & time.', device_serial=str(device))
+
+ props = device.RunShellCommand('getprop', check_return=True)
+ for prop in props:
+ logging.info(' %s', prop)
+ if options.auto_reconnect:
+ _PushAndLaunchAdbReboot(device, options.target)
+
+
+def _UninstallIfMatch(device, pattern, app_to_keep):
+ installed_packages = device.RunShellCommand(['pm', 'list', 'packages'])
+ installed_system_packages = [
+ pkg.split(':')[1] for pkg in device.RunShellCommand(['pm', 'list',
+ 'packages', '-s'])]
+ for package_output in installed_packages:
+ package = package_output.split(':')[1]
+ if pattern.match(package) and package != app_to_keep:
+ if not device.IsUserBuild() or package not in installed_system_packages:
+ device.Uninstall(package)
+
+
+def _WipeUnderDirIfMatch(device, path, pattern):
+ for filename in device.ListDirectory(path):
+ if pattern.match(filename):
+ _WipeFileOrDir(device, posixpath.join(path, filename))
+
+
+def _WipeFileOrDir(device, path):
+ if device.PathExists(path):
+ device.RunShellCommand(['rm', '-rf', path], check_return=True)
+
+
+def _PushAndLaunchAdbReboot(device, target):
+ """Pushes and launches the adb_reboot binary on the device.
+
+ Arguments:
+ device: The DeviceUtils instance for the device to which the adb_reboot
+ binary should be pushed.
+ target: The build target (e.g. Debug or Release) which helps in
+ locating the adb_reboot binary.
+ """
+ logging.info('Will push and launch adb_reboot on %s', str(device))
+ # Kill adb_reboot if it is already running.
+ device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)
+ # Push adb_reboot.
+ logging.info(' Pushing adb_reboot ...')
+ adb_reboot = os.path.join(host_paths.DIR_SOURCE_ROOT,
+ 'out/%s/adb_reboot' % target)
+ device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')])
+ # Launch adb_reboot.
+ logging.info(' Launching adb_reboot ...')
+ device.RunShellCommand(
+ ['/data/local/tmp/adb_reboot'],
+ check_return=True)
+
+
+def _LaunchHostHeartbeat():
+ # Kill any existing host_heartbeat instances.
+ KillHostHeartbeat()
+ # Launch a new host_heartbeat.
+ logging.info('Spawning host heartbeat...')
+ subprocess.Popen([os.path.join(host_paths.DIR_SOURCE_ROOT,
+ 'build/android/host_heartbeat.py')])
+
+def KillHostHeartbeat():
+ ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
+ stdout, _ = ps.communicate()
+ matches = re.findall('\\n.*host_heartbeat.*', stdout)
+ for match in matches:
+ logging.info('Killing a running host_heartbeat instance.')
+ pid = re.findall(r'(\S+)', match)[1]
+ subprocess.call(['kill', str(pid)])
+
+def main():
+ # Recommended options on perf bots:
+ # --disable-network
+ # TODO(tonyg): We eventually want network on. However, currently radios
+ # can cause perfbots to drain faster than they charge.
+ # --min-battery-level 95
+ # Some perf bots run benchmarks with USB charging disabled which leads
+ # to gradual draining of the battery. We must wait for a full charge
+ # before starting a run in order to keep the devices online.
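+ # Taken together, an illustrative perf-bot invocation built from the
+ # flags above (example values, not enforced defaults) would be:
+ # provision_devices.py --disable-network --min-battery-level 95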
+ + parser = argparse.ArgumentParser( + description='Provision Android devices with settings required for bots.') + parser.add_argument('-d', '--device', metavar='SERIAL', + help='the serial number of the device to be provisioned' + ' (the default is to provision all devices attached)') + parser.add_argument('--adb-path', + help='Absolute path to the adb binary to use.') + parser.add_argument('--blacklist-file', help='Device blacklist JSON file.') + parser.add_argument('--phase', action='append', choices=_PHASES.ALL, + dest='phases', + help='Phases of provisioning to run. ' + '(If omitted, all phases will be run.)') + parser.add_argument('--skip-wipe', action='store_true', default=False, + help="don't wipe device data during provisioning") + parser.add_argument('--reboot-timeout', metavar='SECS', type=int, + help='when wiping the device, max number of seconds to' + ' wait after each reboot ' + '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT) + parser.add_argument('--min-battery-level', type=int, metavar='NUM', + help='wait for the device to reach this minimum battery' + ' level before trying to continue') + parser.add_argument('--disable-location', action='store_true', + help='disable Google location services on devices') + parser.add_argument('--disable-mock-location', action='store_true', + default=False, help='Set ALLOW_MOCK_LOCATION to false') + parser.add_argument('--disable-network', action='store_true', + help='disable network access on devices') + parser.add_argument('--disable-java-debug', action='store_false', + dest='enable_java_debug', default=True, + help='disable Java property asserts and JNI checking') + parser.add_argument('--disable-system-chrome', action='store_true', + help='Disable the system chrome from devices.') + parser.add_argument('--remove-system-webview', action='store_true', + help='Remove the system webview from devices.') + parser.add_argument('-t', '--target', default='Debug', + help='the build target (default: %(default)s)') + parser.add_argument('-r', '--auto-reconnect', action='store_true', + help='push binary which will reboot the device on adb' + ' disconnections') + parser.add_argument('--adb-key-files', type=str, nargs='+', + help='list of adb keys to push to device') + parser.add_argument('-v', '--verbose', action='count', default=1, + help='Log more information.') + parser.add_argument('--max-battery-temp', type=int, metavar='NUM', + help='Wait for the battery to have this temp or lower.') + parser.add_argument('--output-device-blacklist', + help='Json file to output the device blacklist.') + parser.add_argument('--chrome-specific-wipe', action='store_true', + help='only wipe chrome specific data during provisioning') + parser.add_argument('--emulators', action='store_true', + help='provision only emulators and ignore usb devices') + args = parser.parse_args() + constants.SetBuildType(args.target) + + run_tests_helper.SetLogLevel(args.verbose) + + devil_chromium.Initialize(adb_path=args.adb_path) + + try: + return ProvisionDevices(args) + except (device_errors.DeviceUnreachableError, device_errors.NoDevicesError): + return exit_codes.INFRA + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/push_libraries.gypi b/build/android/push_libraries.gypi new file mode 100644 index 00000000000..8bce79884c4 --- /dev/null +++ b/build/android/push_libraries.gypi @@ -0,0 +1,49 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide a rule that +# pushes stripped shared libraries to the attached Android device. This should +# only be used with the gyp_managed_install flag set. +# +# To use this, create a gyp target with the following form: +# { +# 'actions': [ +# 'variables': { +# 'ordered_libraries_file': 'file generated by write_ordered_libraries' +# 'strip_stamp': 'stamp from strip action to block on' +# 'libraries_source_dir': 'location where stripped libraries are stored' +# 'device_library_dir': 'location on the device where to put pushed libraries', +# 'push_stamp': 'file to touch when the action is complete' +# 'configuration_name': 'The build CONFIGURATION_NAME' +# }, +# 'includes': [ '../../build/android/push_libraries.gypi' ], +# ], +# }, +# + +{ + 'action_name': 'push_libraries_<(_target_name)', + 'message': 'Pushing libraries to device for <(_target_name)', + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/util/md5_check.py', + '<(DEPTH)/build/android/gyp/push_libraries.py', + '<(strip_stamp)', + '<(strip_additional_stamp)', + '<(build_device_config_path)', + '<(pack_relocations_stamp)', + ], + 'outputs': [ + '<(push_stamp)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/push_libraries.py', + '--build-device-configuration=<(build_device_config_path)', + '--libraries-dir=<(libraries_source_dir)', + '--device-dir=<(device_library_dir)', + '--libraries=@FileArg(<(ordered_libraries_file):libraries)', + '--stamp=<(push_stamp)', + '--output-directory=<(PRODUCT_DIR)', + ], +} diff --git a/build/android/pylib/OWNERS b/build/android/pylib/OWNERS new file mode 100644 index 00000000000..dbbbba7f275 --- /dev/null +++ b/build/android/pylib/OWNERS @@ -0,0 +1,4 @@ +jbudorick@chromium.org +klundberg@chromium.org +navabi@chromium.org +skyostil@chromium.org diff --git a/build/android/pylib/__init__.py b/build/android/pylib/__init__.py new file mode 100644 index 00000000000..16ee312e7c6 --- /dev/null +++ b/build/android/pylib/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import sys + +_DEVIL_PATH = os.path.abspath(os.path.join( + os.path.dirname(__file__), '..', '..', '..', 'third_party', 'catapult', + 'devil')) + +if _DEVIL_PATH not in sys.path: + sys.path.append(_DEVIL_PATH) diff --git a/build/android/pylib/base/__init__.py b/build/android/pylib/base/__init__.py new file mode 100644 index 00000000000..727e987e6b6 --- /dev/null +++ b/build/android/pylib/base/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + diff --git a/build/android/pylib/base/base_test_result.py b/build/android/pylib/base/base_test_result.py new file mode 100644 index 00000000000..333211fa321 --- /dev/null +++ b/build/android/pylib/base/base_test_result.py @@ -0,0 +1,236 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Module containing base test results classes.""" + +import threading + + +class ResultType(object): + """Class enumerating test types.""" + PASS = 'PASS' + SKIP = 'SKIP' + FAIL = 'FAIL' + CRASH = 'CRASH' + TIMEOUT = 'TIMEOUT' + UNKNOWN = 'UNKNOWN' + + @staticmethod + def GetTypes(): + """Get a list of all test types.""" + return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL, + ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN] + + +class BaseTestResult(object): + """Base class for a single test result.""" + + def __init__(self, name, test_type, duration=0, log=''): + """Construct a BaseTestResult. + + Args: + name: Name of the test which defines uniqueness. + test_type: Type of the test result as defined in ResultType. + duration: Time it took for the test to run in milliseconds. + log: An optional string listing any errors. + """ + assert name + assert test_type in ResultType.GetTypes() + self._name = name + self._test_type = test_type + self._duration = duration + self._log = log + self._tombstones = None + + def __str__(self): + return self._name + + def __repr__(self): + return self._name + + def __cmp__(self, other): + # pylint: disable=W0212 + return cmp(self._name, other._name) + + def __hash__(self): + return hash(self._name) + + def SetName(self, name): + """Set the test name. + + Because we're putting this into a set, this should only be used if moving + this test result into another set. + """ + self._name = name + + def GetName(self): + """Get the test name.""" + return self._name + + def SetType(self, test_type): + """Set the test result type.""" + assert test_type in ResultType.GetTypes() + self._test_type = test_type + + def GetType(self): + """Get the test result type.""" + return self._test_type + + def GetDuration(self): + """Get the test duration.""" + return self._duration + + def SetLog(self, log): + """Set the test log.""" + self._log = log + + def GetLog(self): + """Get the test log.""" + return self._log + + def SetTombstones(self, tombstones): + self._tombstones = tombstones + + def GetTombstones(self): + return self._tombstones + +class TestRunResults(object): + """Set of results for a test run.""" + + def __init__(self): + self._results = set() + self._results_lock = threading.RLock() + + def GetLogs(self): + """Get the string representation of all test logs.""" + with self._results_lock: + s = [] + for test_type in ResultType.GetTypes(): + if test_type != ResultType.PASS: + for t in sorted(self._GetType(test_type)): + log = t.GetLog() + if log: + s.append('[%s] %s:' % (test_type, t)) + s.append(log) + return '\n'.join(s) + + def GetGtestForm(self): + """Get the gtest string representation of this object.""" + with self._results_lock: + s = [] + plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s) + tests = lambda n: plural(n, 'test', 'tests') + + s.append('[==========] %s ran.' % (tests(len(self.GetAll())))) + s.append('[ PASSED ] %s.' 
% (tests(len(self.GetPass())))) + + skipped = self.GetSkip() + if skipped: + s.append('[ SKIPPED ] Skipped %s, listed below:' % tests(len(skipped))) + for t in sorted(skipped): + s.append('[ SKIPPED ] %s' % str(t)) + + all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(), + self.GetUnknown()) + if all_failures: + s.append('[ FAILED ] %s, listed below:' % tests(len(all_failures))) + for t in sorted(self.GetFail()): + s.append('[ FAILED ] %s' % str(t)) + for t in sorted(self.GetCrash()): + s.append('[ FAILED ] %s (CRASHED)' % str(t)) + for t in sorted(self.GetTimeout()): + s.append('[ FAILED ] %s (TIMEOUT)' % str(t)) + for t in sorted(self.GetUnknown()): + s.append('[ FAILED ] %s (UNKNOWN)' % str(t)) + s.append('') + s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS')) + return '\n'.join(s) + + def GetShortForm(self): + """Get the short string representation of this object.""" + with self._results_lock: + s = [] + s.append('ALL: %d' % len(self._results)) + for test_type in ResultType.GetTypes(): + s.append('%s: %d' % (test_type, len(self._GetType(test_type)))) + return ''.join([x.ljust(15) for x in s]) + + def __str__(self): + return self.GetGtestForm() + + def AddResult(self, result): + """Add |result| to the set. + + Args: + result: An instance of BaseTestResult. + """ + assert isinstance(result, BaseTestResult) + with self._results_lock: + self._results.discard(result) + self._results.add(result) + + def AddResults(self, results): + """Add |results| to the set. + + Args: + results: An iterable of BaseTestResult objects. + """ + with self._results_lock: + for t in results: + self.AddResult(t) + + def AddTestRunResults(self, results): + """Add the set of test results from |results|. + + Args: + results: An instance of TestRunResults. + """ + assert isinstance(results, TestRunResults), ( + 'Expected TestRunResult object: %s' % type(results)) + with self._results_lock: + # pylint: disable=W0212 + self._results.update(results._results) + + def GetAll(self): + """Get the set of all test results.""" + with self._results_lock: + return self._results.copy() + + def _GetType(self, test_type): + """Get the set of test results with the given test type.""" + with self._results_lock: + return set(t for t in self._results if t.GetType() == test_type) + + def GetPass(self): + """Get the set of all passed test results.""" + return self._GetType(ResultType.PASS) + + def GetSkip(self): + """Get the set of all skipped test results.""" + return self._GetType(ResultType.SKIP) + + def GetFail(self): + """Get the set of all failed test results.""" + return self._GetType(ResultType.FAIL) + + def GetCrash(self): + """Get the set of all crashed test results.""" + return self._GetType(ResultType.CRASH) + + def GetTimeout(self): + """Get the set of all timed out test results.""" + return self._GetType(ResultType.TIMEOUT) + + def GetUnknown(self): + """Get the set of all unknown test results.""" + return self._GetType(ResultType.UNKNOWN) + + def GetNotPass(self): + """Get the set of all non-passed test results.""" + return self.GetAll() - self.GetPass() + + def DidRunPass(self): + """Return whether the test run was successful.""" + return not self.GetNotPass() - self.GetSkip() + diff --git a/build/android/pylib/base/base_test_result_unittest.py b/build/android/pylib/base/base_test_result_unittest.py new file mode 100644 index 00000000000..6f0cba7726e --- /dev/null +++ b/build/android/pylib/base/base_test_result_unittest.py @@ -0,0 +1,82 @@ +# Copyright (c) 2013 The Chromium Authors. 
All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Unittests for TestRunResults.""" + +import unittest + +from pylib.base.base_test_result import BaseTestResult +from pylib.base.base_test_result import TestRunResults +from pylib.base.base_test_result import ResultType + + +class TestTestRunResults(unittest.TestCase): + def setUp(self): + self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1') + other_p1 = BaseTestResult('p1', ResultType.PASS) + self.p2 = BaseTestResult('p2', ResultType.PASS) + self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1') + self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1') + self.u1 = BaseTestResult('u1', ResultType.UNKNOWN) + self.tr = TestRunResults() + self.tr.AddResult(self.p1) + self.tr.AddResult(other_p1) + self.tr.AddResult(self.p2) + self.tr.AddResults(set([self.f1, self.c1, self.u1])) + + def testGetAll(self): + self.assertFalse( + self.tr.GetAll().symmetric_difference( + [self.p1, self.p2, self.f1, self.c1, self.u1])) + + def testGetPass(self): + self.assertFalse(self.tr.GetPass().symmetric_difference( + [self.p1, self.p2])) + + def testGetNotPass(self): + self.assertFalse(self.tr.GetNotPass().symmetric_difference( + [self.f1, self.c1, self.u1])) + + def testGetAddTestRunResults(self): + tr2 = TestRunResults() + other_p1 = BaseTestResult('p1', ResultType.PASS) + f2 = BaseTestResult('f2', ResultType.FAIL) + tr2.AddResult(other_p1) + tr2.AddResult(f2) + tr2.AddTestRunResults(self.tr) + self.assertFalse( + tr2.GetAll().symmetric_difference( + [self.p1, self.p2, self.f1, self.c1, self.u1, f2])) + + def testGetLogs(self): + log_print = ('[FAIL] f1:\n' + 'failure1\n' + '[CRASH] c1:\n' + 'crash1') + self.assertEqual(self.tr.GetLogs(), log_print) + + def testGetShortForm(self): + short_print = ('ALL: 5 PASS: 2 FAIL: 1 ' + 'CRASH: 1 TIMEOUT: 0 UNKNOWN: 1 ') + self.assertEqual(self.tr.GetShortForm(), short_print) + + def testGetGtestForm(self): + gtest_print = ('[==========] 5 tests ran.\n' + '[ PASSED ] 2 tests.\n' + '[ FAILED ] 3 tests, listed below:\n' + '[ FAILED ] f1\n' + '[ FAILED ] c1 (CRASHED)\n' + '[ FAILED ] u1 (UNKNOWN)\n' + '\n' + '3 FAILED TESTS') + self.assertEqual(gtest_print, self.tr.GetGtestForm()) + + def testRunPassed(self): + self.assertFalse(self.tr.DidRunPass()) + tr2 = TestRunResults() + self.assertTrue(tr2.DidRunPass()) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/android/pylib/base/base_test_runner.py b/build/android/pylib/base/base_test_runner.py new file mode 100644 index 00000000000..77d05f7952d --- /dev/null +++ b/build/android/pylib/base/base_test_runner.py @@ -0,0 +1,138 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Base class for running tests on a single device.""" + +# TODO(jbudorick) Deprecate and remove this class and all subclasses after +# any relevant parts have been ported to the new environment + test instance +# model. + +import logging + +from devil.android import device_utils +from devil.android import forwarder +from devil.android import ports +from pylib.valgrind_tools import CreateTool +# TODO(frankf): Move this to pylib/utils + + +# A file on device to store ports of net test server. 
The format of the file is
+# test-spawner-server-port:test-server-port
+NET_TEST_SERVER_PORT_INFO_FILE = 'net-test-server-ports'
+
+
+class BaseTestRunner(object):
+ """Base class for running tests on a single device."""
+
+ def __init__(self, device, tool):
+ """
+ Args:
+ device: An instance of DeviceUtils that the tests will run on.
+ tool: Name of the Valgrind tool.
+ """
+ assert isinstance(device, device_utils.DeviceUtils)
+ self.device = device
+ self.device_serial = self.device.adb.GetDeviceSerial()
+ self.tool = CreateTool(tool, self.device)
+ self._http_server = None
+ self._forwarder_device_port = 8000
+ self.forwarder_base_url = ('http://localhost:%d' %
+ self._forwarder_device_port)
+ # The test server spawner port is allocated when calling
+ # LaunchChromeTestServerSpawner, and the test server port is allocated
+ # when starting the server in TestServerThread.
+ self.test_server_spawner_port = 0
+ self.test_server_port = 0
+
+ def _PushTestServerPortInfoToDevice(self):
+ """Pushes the latest port information to the device."""
+ self.device.WriteFile(
+ self.device.GetExternalStoragePath() + '/' +
+ NET_TEST_SERVER_PORT_INFO_FILE,
+ '%d:%d' % (self.test_server_spawner_port, self.test_server_port))
+
+ def RunTest(self, test):
+ """Runs a test. Needs to be overridden.
+
+ Args:
+ test: A test to run.
+
+ Returns:
+ Tuple containing:
+ (base_test_result.TestRunResults, tests to rerun or None)
+ """
+ raise NotImplementedError
+
+ def InstallTestPackage(self):
+ """Installs the test package once before all tests are run."""
+ pass
+
+ def SetUp(self):
+ """Run once before all tests are run."""
+ self.InstallTestPackage()
+
+ def TearDown(self):
+ """Run once after all tests are run."""
+ self.ShutdownHelperToolsForTestSuite()
+
+ def LaunchTestHttpServer(self, document_root, port=None,
+ extra_config_contents=None):
+ """Launches an HTTP server to serve HTTP tests.
+
+ Args:
+ document_root: Document root of the HTTP server.
+ port: port on which the http server should bind.
+ extra_config_contents: Extra config contents for the HTTP server.
+ """
+ import lighttpd_server
+ self._http_server = lighttpd_server.LighttpdServer(
+ document_root, port=port, extra_config_contents=extra_config_contents)
+ if self._http_server.StartupHttpServer():
+ logging.info('http server started: http://localhost:%s',
+ self._http_server.port)
+ else:
+ logging.critical('Failed to start http server')
+ self._ForwardPortsForHttpServer()
+ return (self._forwarder_device_port, self._http_server.port)
+
+ def _ForwardPorts(self, port_pairs):
+ """Forwards the given port pairs."""
+ forwarder.Forwarder.Map(port_pairs, self.device, self.tool)
+
+ def _UnmapPorts(self, port_pairs):
+ """Unmap previously forwarded ports."""
+ for (device_port, _) in port_pairs:
+ forwarder.Forwarder.UnmapDevicePort(device_port, self.device)
+
+ # Deprecated: use _ForwardPorts instead.
+ def StartForwarder(self, port_pairs):
+ """Starts TCP traffic forwarding for the given |port_pairs|.
+
+ Args:
+ port_pairs: A list of (device_port, local_port) tuples to forward.
+ """
+ self._ForwardPorts(port_pairs)
+
+ def _ForwardPortsForHttpServer(self):
+ """Starts a forwarder for the HTTP server.
+
+ The forwarder forwards HTTP requests and responses between host and device.
+ """
+ self._ForwardPorts([(self._forwarder_device_port, self._http_server.port)])
+
+ def _RestartHttpServerForwarderIfNecessary(self):
+ """Restarts the forwarder if it's not open."""
+ # Checks to see if the http server port is still forwarded; if not,
+ # re-establishes the forward.
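+ # (A mapping is typically lost when adb or the device restarts; the
+ # device port then reads as unused and is re-mapped below.)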
+ # TODO(dtrainor): This is not always reliable because sometimes the port + # will be left open even after the forwarder has been killed. + if not ports.IsDevicePortUsed(self.device, self._forwarder_device_port): + self._ForwardPortsForHttpServer() + + def ShutdownHelperToolsForTestSuite(self): + """Shuts down the server and the forwarder.""" + if self._http_server: + self._UnmapPorts([(self._forwarder_device_port, self._http_server.port)]) + self._http_server.ShutdownHttpServer() + diff --git a/build/android/pylib/base/environment.py b/build/android/pylib/base/environment.py new file mode 100644 index 00000000000..3f49f41f299 --- /dev/null +++ b/build/android/pylib/base/environment.py @@ -0,0 +1,34 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +class Environment(object): + """An environment in which tests can be run. + + This is expected to handle all logic that is applicable to an entire specific + environment but is independent of the test type. + + Examples include: + - The local device environment, for running tests on devices attached to + the local machine. + - The local machine environment, for running tests directly on the local + machine. + """ + + def __init__(self): + pass + + def SetUp(self): + raise NotImplementedError + + def TearDown(self): + raise NotImplementedError + + def __enter__(self): + self.SetUp() + return self + + def __exit__(self, _exc_type, _exc_val, _exc_tb): + self.TearDown() + diff --git a/build/android/pylib/base/environment_factory.py b/build/android/pylib/base/environment_factory.py new file mode 100644 index 00000000000..f4fe935e221 --- /dev/null +++ b/build/android/pylib/base/environment_factory.py @@ -0,0 +1,21 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from pylib import constants +from pylib.local.device import local_device_environment +try: + from pylib.remote.device import remote_device_environment +except ImportError: + remote_device_environment = None + +def CreateEnvironment(args, error_func): + + if args.environment == 'local': + if args.command not in constants.LOCAL_MACHINE_TESTS: + return local_device_environment.LocalDeviceEnvironment(args, error_func) + # TODO(jbudorick) Add local machine environment. + if args.environment == 'remote_device' and remote_device_environment: + return remote_device_environment.RemoteDeviceEnvironment(args, + error_func) + error_func('Unable to create %s environment.' % args.environment) diff --git a/build/android/pylib/base/test_collection.py b/build/android/pylib/base/test_collection.py new file mode 100644 index 00000000000..de510272bd7 --- /dev/null +++ b/build/android/pylib/base/test_collection.py @@ -0,0 +1,80 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import threading + +class TestCollection(object): + """A threadsafe collection of tests. + + Args: + tests: List of tests to put in the collection. + """ + + def __init__(self, tests=None): + if not tests: + tests = [] + self._lock = threading.Lock() + self._tests = [] + self._tests_in_progress = 0 + # Used to signal that an item is available or all items have been handled. 
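+ # A single Event covers both conditions; waiters wake up and re-check
+ # under the lock which condition actually holds (see _pop below).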
+ self._item_available_or_all_done = threading.Event()
+ for t in tests:
+ self.add(t)
+
+ def _pop(self):
+ """Pop a test from the collection.
+
+ Waits until a test is available or all tests have been handled.
+
+ Returns:
+ A test or None if all tests have been handled.
+ """
+ while True:
+ # Wait for a test to be available or all tests to have been handled.
+ self._item_available_or_all_done.wait()
+ with self._lock:
+ # Check which of the two conditions triggered the signal.
+ if self._tests_in_progress == 0:
+ return None
+ try:
+ return self._tests.pop(0)
+ except IndexError:
+ # Another thread beat us to the available test, wait again.
+ self._item_available_or_all_done.clear()
+
+ def add(self, test):
+ """Add a test to the collection.
+
+ Args:
+ test: A test to add.
+ """
+ with self._lock:
+ self._tests.append(test)
+ self._item_available_or_all_done.set()
+ self._tests_in_progress += 1
+
+ def test_completed(self):
+ """Indicate that a test has been fully handled."""
+ with self._lock:
+ self._tests_in_progress -= 1
+ if self._tests_in_progress == 0:
+ # All tests have been handled, signal all waiting threads.
+ self._item_available_or_all_done.set()
+
+ def __iter__(self):
+ """Iterate through tests in the collection until all have been handled."""
+ while True:
+ r = self._pop()
+ if r is None:
+ break
+ yield r
+
+ def __len__(self):
+ """Return the number of tests currently in the collection."""
+ return len(self._tests)
+
+ def test_names(self):
+ """Return a list of the names of the tests currently in the collection."""
+ with self._lock:
+ return list(t.test for t in self._tests)
diff --git a/build/android/pylib/base/test_dispatcher.py b/build/android/pylib/base/test_dispatcher.py
new file mode 100644
index 00000000000..327709ca609
--- /dev/null
+++ b/build/android/pylib/base/test_dispatcher.py
@@ -0,0 +1,343 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dispatches tests, either sharding or replicating them.
+
+Performs the following steps:
+* Create a test collection factory, using the given tests
+ - If sharding: test collection factory returns the same shared test collection
+ to all test runners
+ - If replicating: test collection factory returns a unique test collection to
+ each test runner, with the same set of tests in each.
+* Create a test runner for each device.
+* Run each test runner in its own thread, grabbing tests from the test
+ collection until there are no tests left.
+"""
+
+# TODO(jbudorick) Deprecate and remove this class after any relevant parts have
+# been ported to the new environment / test instance model.
+
+import logging
+import threading
+
+from devil.android import device_errors
+from devil.utils import reraiser_thread
+from devil.utils import watchdog_timer
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_collection
+
+
+DEFAULT_TIMEOUT = 7 * 60 # seven minutes
+
+
+class _ThreadSafeCounter(object):
+ """A threadsafe counter."""
+
+ def __init__(self):
+ self._lock = threading.Lock()
+ self._value = 0
+
+ def GetAndIncrement(self):
+ """Get the current value and increment it atomically.
+
+ Returns:
+ The value before incrementing.
+ """ + with self._lock: + pre_increment = self._value + self._value += 1 + return pre_increment + + +class _Test(object): + """Holds a test with additional metadata.""" + + def __init__(self, test, tries=0): + """Initializes the _Test object. + + Args: + test: The test. + tries: Number of tries so far. + """ + self.test = test + self.tries = tries + + +def _RunTestsFromQueue(runner, collection, out_results, watcher, + num_retries, tag_results_with_device=False): + """Runs tests from the collection until empty using the given runner. + + Adds TestRunResults objects to the out_results list and may add tests to the + out_retry list. + + Args: + runner: A TestRunner object used to run the tests. + collection: A TestCollection from which to get _Test objects to run. + out_results: A list to add TestRunResults to. + watcher: A watchdog_timer.WatchdogTimer object, used as a shared timeout. + num_retries: Number of retries for a test. + tag_results_with_device: If True, appends the name of the device on which + the test was run to the test name. Used when replicating to identify + which device ran each copy of the test, and to ensure each copy of the + test is recorded separately. + """ + + def TagTestRunResults(test_run_results): + """Tags all results with the last 4 digits of the device id. + + Used when replicating tests to distinguish the same tests run on different + devices. We use a set to store test results, so the hash (generated from + name and tag) must be unique to be considered different results. + """ + new_test_run_results = base_test_result.TestRunResults() + for test_result in test_run_results.GetAll(): + test_result.SetName('%s_%s' % (runner.device_serial[-4:], + test_result.GetName())) + new_test_run_results.AddResult(test_result) + return new_test_run_results + + for test in collection: + watcher.Reset() + try: + if not runner.device.IsOnline(): + # Device is unresponsive, stop handling tests on this device. + msg = 'Device %s is unresponsive.' % runner.device_serial + logging.warning(msg) + raise device_errors.DeviceUnreachableError(msg) + result, retry = runner.RunTest(test.test) + if tag_results_with_device: + result = TagTestRunResults(result) + test.tries += 1 + if retry and test.tries <= num_retries: + # Retry non-passing results, only record passing results. + pass_results = base_test_result.TestRunResults() + pass_results.AddResults(result.GetPass()) + out_results.append(pass_results) + logging.warning('Will retry test %s, try #%s.', retry, test.tries) + collection.add(_Test(test=retry, tries=test.tries)) + else: + # All tests passed or retry limit reached. Either way, record results. + out_results.append(result) + except: + # An unhandleable exception, ensure tests get run by another device and + # reraise this exception on the main thread. + collection.add(test) + raise + finally: + # Retries count as separate tasks so always mark the popped test as done. + collection.test_completed() + + +def _SetUp(runner_factory, device, out_runners, threadsafe_counter): + """Creates a test runner for each device and calls SetUp() in parallel. + + Note: if a device is unresponsive the corresponding TestRunner will not be + added to out_runners. + + Args: + runner_factory: Callable that takes a device and index and returns a + TestRunner object. + device: The device serial number to set up. + out_runners: List to add the successfully set up TestRunner object. + threadsafe_counter: A _ThreadSafeCounter object used to get shard indices. 
+ """ + try: + index = threadsafe_counter.GetAndIncrement() + logging.warning('Creating shard %s for device %s.', index, device) + runner = runner_factory(device, index) + if runner: + runner.SetUp() + out_runners.append(runner) + else: + logging.info('Device %s is not active. Will not create shard %s.', + str(device), index) + except (device_errors.CommandFailedError, + device_errors.CommandTimeoutError, + device_errors.DeviceUnreachableError): + logging.exception('Failed to create shard for %s', str(device)) + + +def _RunAllTests(runners, test_collection_factory, num_retries, timeout=None, + tag_results_with_device=False): + """Run all tests using the given TestRunners. + + Args: + runners: A list of TestRunner objects. + test_collection_factory: A callable to generate a TestCollection object for + each test runner. + num_retries: Number of retries for a test. + timeout: Watchdog timeout in seconds. + tag_results_with_device: If True, appends the name of the device on which + the test was run to the test name. Used when replicating to identify + which device ran each copy of the test, and to ensure each copy of the + test is recorded separately. + + Returns: + A tuple of (TestRunResults object, exit code) + """ + logging.warning('Running tests with %s test %s.', + len(runners), 'runners' if len(runners) != 1 else 'runner') + results = [] + exit_code = 0 + run_results = base_test_result.TestRunResults() + watcher = watchdog_timer.WatchdogTimer(timeout) + test_collections = [test_collection_factory() for _ in runners] + + threads = [ + reraiser_thread.ReraiserThread( + _RunTestsFromQueue, + [r, tc, results, watcher, num_retries, tag_results_with_device], + name=r.device_serial[-4:]) + for r, tc in zip(runners, test_collections)] + + workers = reraiser_thread.ReraiserThreadGroup(threads) + workers.StartAll() + + try: + workers.JoinAll(watcher) + except device_errors.CommandFailedError: + logging.exception('Command failed on device.') + except device_errors.CommandTimeoutError: + logging.exception('Command timed out on device.') + except device_errors.DeviceUnreachableError: + logging.exception('Device became unreachable.') + + if not all((len(tc) == 0 for tc in test_collections)): + logging.error('Only ran %d tests (all devices are likely offline).', + len(results)) + for tc in test_collections: + run_results.AddResults(base_test_result.BaseTestResult( + t, base_test_result.ResultType.UNKNOWN) for t in tc.test_names()) + + for r in results: + run_results.AddTestRunResults(r) + if not run_results.DidRunPass(): + exit_code = constants.ERROR_EXIT_CODE + return (run_results, exit_code) + + +def _CreateRunners(runner_factory, devices, timeout=None): + """Creates a test runner for each device and calls SetUp() in parallel. + + Note: if a device is unresponsive the corresponding TestRunner will not be + included in the returned list. + + Args: + runner_factory: Callable that takes a device and index and returns a + TestRunner object. + devices: List of device serial numbers as strings. + timeout: Watchdog timeout in seconds, defaults to the default timeout. + + Returns: + A list of TestRunner objects. 
+ """ + logging.warning('Creating %s test %s.', len(devices), + 'runners' if len(devices) != 1 else 'runner') + runners = [] + counter = _ThreadSafeCounter() + threads = reraiser_thread.ReraiserThreadGroup( + [reraiser_thread.ReraiserThread(_SetUp, + [runner_factory, d, runners, counter], + name=str(d)[-4:]) + for d in devices]) + threads.StartAll() + threads.JoinAll(watchdog_timer.WatchdogTimer(timeout)) + return runners + + +def _TearDownRunners(runners, timeout=None): + """Calls TearDown() for each test runner in parallel. + + Args: + runners: A list of TestRunner objects. + timeout: Watchdog timeout in seconds, defaults to the default timeout. + """ + threads = reraiser_thread.ReraiserThreadGroup( + [reraiser_thread.ReraiserThread(r.TearDown, name=r.device_serial[-4:]) + for r in runners]) + threads.StartAll() + threads.JoinAll(watchdog_timer.WatchdogTimer(timeout)) + + +def ApplyMaxPerRun(tests, max_per_run): + """Rearrange the tests so that no group contains more than max_per_run tests. + + Args: + tests: + max_per_run: + + Returns: + A list of tests with no more than max_per_run per run. + """ + tests_expanded = [] + for test_group in tests: + if type(test_group) != str: + # Do not split test objects which are not strings. + tests_expanded.append(test_group) + else: + test_split = test_group.split(':') + for i in range(0, len(test_split), max_per_run): + tests_expanded.append(':'.join(test_split[i:i+max_per_run])) + return tests_expanded + + +def RunTests(tests, runner_factory, devices, shard=True, + test_timeout=DEFAULT_TIMEOUT, setup_timeout=DEFAULT_TIMEOUT, + num_retries=2, max_per_run=256): + """Run all tests on attached devices, retrying tests that don't pass. + + Args: + tests: List of tests to run. + runner_factory: Callable that takes a device and index and returns a + TestRunner object. + devices: List of attached devices. + shard: True if we should shard, False if we should replicate tests. + - Sharding tests will distribute tests across all test runners through a + shared test collection. + - Replicating tests will copy all tests to each test runner through a + unique test collection for each test runner. + test_timeout: Watchdog timeout in seconds for running tests. + setup_timeout: Watchdog timeout in seconds for creating and cleaning up + test runners. + num_retries: Number of retries for a test. + max_per_run: Maximum number of tests to run in any group. + + Returns: + A tuple of (base_test_result.TestRunResults object, exit code). + """ + if not tests: + logging.critical('No tests to run.') + return (base_test_result.TestRunResults(), constants.ERROR_EXIT_CODE) + + tests_expanded = ApplyMaxPerRun(tests, max_per_run) + if shard: + # Generate a shared TestCollection object for all test runners, so they + # draw from a common pool of tests. + shared_test_collection = test_collection.TestCollection( + [_Test(t) for t in tests_expanded]) + test_collection_factory = lambda: shared_test_collection + tag_results_with_device = False + log_string = 'sharded across devices' + else: + # Generate a unique TestCollection object for each test runner, but use + # the same set of tests. 
+ test_collection_factory = lambda: test_collection.TestCollection( + [_Test(t) for t in tests_expanded]) + tag_results_with_device = True + log_string = 'replicated on each device' + + logging.info('Will run %d tests (%s): %s', + len(tests_expanded), log_string, str(tests_expanded)) + runners = _CreateRunners(runner_factory, devices, setup_timeout) + try: + return _RunAllTests(runners, test_collection_factory, + num_retries, test_timeout, tag_results_with_device) + finally: + try: + _TearDownRunners(runners, setup_timeout) + except device_errors.DeviceUnreachableError as e: + logging.warning('Device unresponsive during TearDown: [%s]', e) + except Exception: # pylint: disable=broad-except + logging.exception('Unexpected exception caught during TearDown') diff --git a/build/android/pylib/base/test_dispatcher_unittest.py b/build/android/pylib/base/test_dispatcher_unittest.py new file mode 100644 index 00000000000..186a0721d25 --- /dev/null +++ b/build/android/pylib/base/test_dispatcher_unittest.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Unittests for test_dispatcher.py.""" + +# pylint: disable=no-self-use +# pylint: disable=protected-access + +import unittest + +from pylib.base import base_test_result +from pylib.base import test_collection +from pylib.base import test_dispatcher +from pylib.constants import host_paths + +with host_paths.SysPath(host_paths.DEVIL_PATH): + from devil.android import device_utils + from devil.android.sdk import adb_wrapper + from devil.constants import exit_codes + from devil.utils import watchdog_timer + +with host_paths.SysPath(host_paths.PYMOCK_PATH): + import mock # pylint: disable=import-error + + +class TestException(Exception): + pass + + +def _MockDevice(serial): + d = mock.MagicMock(spec=device_utils.DeviceUtils) + d.__str__.return_value = serial + d.adb = mock.MagicMock(spec=adb_wrapper.AdbWrapper) + d.adb.GetDeviceSerial = mock.MagicMock(return_value=serial) + d.IsOnline = mock.MagicMock(return_value=True) + return d + + +class MockRunner(object): + """A mock TestRunner.""" + def __init__(self, device=None, shard_index=0): + self.device = device or _MockDevice('0') + self.device_serial = self.device.adb.GetDeviceSerial() + self.shard_index = shard_index + self.setups = 0 + self.teardowns = 0 + + def RunTest(self, test): + results = base_test_result.TestRunResults() + results.AddResult( + base_test_result.BaseTestResult(test, base_test_result.ResultType.PASS)) + return (results, None) + + def SetUp(self): + self.setups += 1 + + def TearDown(self): + self.teardowns += 1 + + +class MockRunnerFail(MockRunner): + def RunTest(self, test): + results = base_test_result.TestRunResults() + results.AddResult( + base_test_result.BaseTestResult(test, base_test_result.ResultType.FAIL)) + return (results, test) + + +class MockRunnerFailTwice(MockRunner): + def __init__(self, device=None, shard_index=0): + super(MockRunnerFailTwice, self).__init__(device, shard_index) + self._fails = 0 + + def RunTest(self, test): + self._fails += 1 + results = base_test_result.TestRunResults() + if self._fails <= 2: + results.AddResult(base_test_result.BaseTestResult( + test, base_test_result.ResultType.FAIL)) + return (results, test) + else: + results.AddResult(base_test_result.BaseTestResult( + test, base_test_result.ResultType.PASS)) + return (results, None) + + +class MockRunnerException(MockRunner): + def 
RunTest(self, test): + raise TestException + + +class TestFunctions(unittest.TestCase): + """Tests test_dispatcher._RunTestsFromQueue.""" + @staticmethod + def _RunTests(mock_runner, tests): + results = [] + tests = test_collection.TestCollection( + [test_dispatcher._Test(t) for t in tests]) + test_dispatcher._RunTestsFromQueue(mock_runner, tests, results, + watchdog_timer.WatchdogTimer(None), 2) + run_results = base_test_result.TestRunResults() + for r in results: + run_results.AddTestRunResults(r) + return run_results + + def testRunTestsFromQueue(self): + results = TestFunctions._RunTests(MockRunner(), ['a', 'b']) + self.assertEqual(len(results.GetPass()), 2) + self.assertEqual(len(results.GetNotPass()), 0) + + def testRunTestsFromQueueRetry(self): + results = TestFunctions._RunTests(MockRunnerFail(), ['a', 'b']) + self.assertEqual(len(results.GetPass()), 0) + self.assertEqual(len(results.GetFail()), 2) + + def testRunTestsFromQueueFailTwice(self): + results = TestFunctions._RunTests(MockRunnerFailTwice(), ['a', 'b']) + self.assertEqual(len(results.GetPass()), 2) + self.assertEqual(len(results.GetNotPass()), 0) + + def testSetUp(self): + runners = [] + counter = test_dispatcher._ThreadSafeCounter() + test_dispatcher._SetUp(MockRunner, _MockDevice('0'), runners, counter) + self.assertEqual(len(runners), 1) + self.assertEqual(runners[0].setups, 1) + + def testThreadSafeCounter(self): + counter = test_dispatcher._ThreadSafeCounter() + for i in xrange(5): + self.assertEqual(counter.GetAndIncrement(), i) + + def testApplyMaxPerRun(self): + self.assertEqual( + ['A:B', 'C:D', 'E', 'F:G', 'H:I'], + test_dispatcher.ApplyMaxPerRun(['A:B', 'C:D:E', 'F:G:H:I'], 2)) + + +class TestThreadGroupFunctions(unittest.TestCase): + """Tests test_dispatcher._RunAllTests and test_dispatcher._CreateRunners.""" + def setUp(self): + self.tests = ['a', 'b', 'c', 'd', 'e', 'f', 'g'] + shared_test_collection = test_collection.TestCollection( + [test_dispatcher._Test(t) for t in self.tests]) + self.test_collection_factory = lambda: shared_test_collection + + def testCreate(self): + runners = test_dispatcher._CreateRunners( + MockRunner, [_MockDevice('0'), _MockDevice('1')]) + for runner in runners: + self.assertEqual(runner.setups, 1) + self.assertEqual(set([r.device_serial for r in runners]), + set(['0', '1'])) + self.assertEqual(set([r.shard_index for r in runners]), + set([0, 1])) + + def testRun(self): + runners = [MockRunner(_MockDevice('0')), MockRunner(_MockDevice('1'))] + results, exit_code = test_dispatcher._RunAllTests( + runners, self.test_collection_factory, 0) + self.assertEqual(len(results.GetPass()), len(self.tests)) + self.assertEqual(exit_code, 0) + + def testTearDown(self): + runners = [MockRunner(_MockDevice('0')), MockRunner(_MockDevice('1'))] + test_dispatcher._TearDownRunners(runners) + for runner in runners: + self.assertEqual(runner.teardowns, 1) + + def testRetry(self): + runners = test_dispatcher._CreateRunners( + MockRunnerFail, [_MockDevice('0'), _MockDevice('1')]) + results, exit_code = test_dispatcher._RunAllTests( + runners, self.test_collection_factory, 0) + self.assertEqual(len(results.GetFail()), len(self.tests)) + self.assertEqual(exit_code, exit_codes.ERROR) + + def testReraise(self): + runners = test_dispatcher._CreateRunners( + MockRunnerException, [_MockDevice('0'), _MockDevice('1')]) + with self.assertRaises(TestException): + test_dispatcher._RunAllTests(runners, self.test_collection_factory, 0) + + +class TestShard(unittest.TestCase): + """Tests test_dispatcher.RunTests with 
sharding.""" + @staticmethod + def _RunShard(runner_factory): + return test_dispatcher.RunTests( + ['a', 'b', 'c'], runner_factory, [_MockDevice('0'), _MockDevice('1')], + shard=True) + + def testShard(self): + results, exit_code = TestShard._RunShard(MockRunner) + self.assertEqual(len(results.GetPass()), 3) + self.assertEqual(exit_code, 0) + + def testFailing(self): + results, exit_code = TestShard._RunShard(MockRunnerFail) + self.assertEqual(len(results.GetPass()), 0) + self.assertEqual(len(results.GetFail()), 3) + self.assertEqual(exit_code, exit_codes.ERROR) + + def testNoTests(self): + results, exit_code = test_dispatcher.RunTests( + [], MockRunner, [_MockDevice('0'), _MockDevice('1')], shard=True) + self.assertEqual(len(results.GetAll()), 0) + self.assertEqual(exit_code, exit_codes.ERROR) + + +class TestReplicate(unittest.TestCase): + """Tests test_dispatcher.RunTests with replication.""" + @staticmethod + def _RunReplicate(runner_factory): + return test_dispatcher.RunTests( + ['a', 'b', 'c'], runner_factory, [_MockDevice('0'), _MockDevice('1')], + shard=False) + + def testReplicate(self): + results, exit_code = TestReplicate._RunReplicate(MockRunner) + # We expect 6 results since each test should have been run on every device + self.assertEqual(len(results.GetPass()), 6) + self.assertEqual(exit_code, 0) + + def testFailing(self): + results, exit_code = TestReplicate._RunReplicate(MockRunnerFail) + self.assertEqual(len(results.GetPass()), 0) + self.assertEqual(len(results.GetFail()), 6) + self.assertEqual(exit_code, exit_codes.ERROR) + + def testNoTests(self): + results, exit_code = test_dispatcher.RunTests( + [], MockRunner, [_MockDevice('0'), _MockDevice('1')], shard=False) + self.assertEqual(len(results.GetAll()), 0) + self.assertEqual(exit_code, exit_codes.ERROR) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/android/pylib/base/test_instance.py b/build/android/pylib/base/test_instance.py new file mode 100644 index 00000000000..cdf678f2d28 --- /dev/null +++ b/build/android/pylib/base/test_instance.py @@ -0,0 +1,35 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +class TestInstance(object): + """A type of test. + + This is expected to handle all logic that is test-type specific but + independent of the environment or device. + + Examples include: + - gtests + - instrumentation tests + """ + + def __init__(self): + pass + + def TestType(self): + raise NotImplementedError + + def SetUp(self): + raise NotImplementedError + + def TearDown(self): + raise NotImplementedError + + def __enter__(self): + self.SetUp() + return self + + def __exit__(self, _exc_type, _exc_val, _exc_tb): + self.TearDown() + diff --git a/build/android/pylib/base/test_instance_factory.py b/build/android/pylib/base/test_instance_factory.py new file mode 100644 index 00000000000..5cc37237984 --- /dev/null +++ b/build/android/pylib/base/test_instance_factory.py @@ -0,0 +1,25 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +from pylib.gtest import gtest_test_instance +from pylib.instrumentation import instrumentation_test_instance +from pylib.perf import perf_test_instance +from pylib.uirobot import uirobot_test_instance +from pylib.utils import isolator + + +def CreateTestInstance(args, error_func): + + if args.command == 'gtest': + return gtest_test_instance.GtestTestInstance( + args, isolator.Isolator(), error_func) + elif args.command == 'instrumentation': + return instrumentation_test_instance.InstrumentationTestInstance( + args, isolator.Isolator(), error_func) + elif args.command == 'perf': + return perf_test_instance.PerfTestInstance(args, error_func) + elif args.command == 'uirobot': + return uirobot_test_instance.UirobotTestInstance(args, error_func) + + error_func('Unable to create %s test instance.' % args.command) diff --git a/build/android/pylib/base/test_run.py b/build/android/pylib/base/test_run.py new file mode 100644 index 00000000000..7380e787086 --- /dev/null +++ b/build/android/pylib/base/test_run.py @@ -0,0 +1,39 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +class TestRun(object): + """An execution of a particular test on a particular device. + + This is expected to handle all logic that is specific to the combination of + environment and test type. + + Examples include: + - local gtests + - local instrumentation tests + """ + + def __init__(self, env, test_instance): + self._env = env + self._test_instance = test_instance + + def TestPackage(self): + raise NotImplementedError + + def SetUp(self): + raise NotImplementedError + + def RunTests(self): + raise NotImplementedError + + def TearDown(self): + raise NotImplementedError + + def __enter__(self): + self.SetUp() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.TearDown() + diff --git a/build/android/pylib/base/test_run_factory.py b/build/android/pylib/base/test_run_factory.py new file mode 100644 index 00000000000..46c807633a7 --- /dev/null +++ b/build/android/pylib/base/test_run_factory.py @@ -0,0 +1,70 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
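+
+# Pairs an Environment with a TestInstance and returns the TestRun that knows
+# how to execute that combination; the remote_device variants below are
+# optional and treated as all-or-nothing.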
+ +from pylib.gtest import gtest_test_instance +from pylib.instrumentation import instrumentation_test_instance +from pylib.local.device import local_device_environment +from pylib.local.device import local_device_gtest_run +from pylib.local.device import local_device_instrumentation_test_run +from pylib.local.device import local_device_perf_test_run +from pylib.perf import perf_test_instance +from pylib.uirobot import uirobot_test_instance + +try: + from pylib.remote.device import remote_device_environment + from pylib.remote.device import remote_device_gtest_run + from pylib.remote.device import remote_device_instrumentation_test_run + from pylib.remote.device import remote_device_uirobot_test_run +except ImportError: + remote_device_environment = None + remote_device_gtest_run = None + remote_device_instrumentation_test_run = None + remote_device_uirobot_test_run = None + + +def _CreatePerfTestRun(args, env, test_instance): + if args.print_step: + return local_device_perf_test_run.PrintStep( + env, test_instance) + elif args.output_json_list: + return local_device_perf_test_run.OutputJsonList( + env, test_instance) + return local_device_perf_test_run.LocalDevicePerfTestRun( + env, test_instance) + + +def CreateTestRun(args, env, test_instance, error_func): + if isinstance(env, local_device_environment.LocalDeviceEnvironment): + if isinstance(test_instance, gtest_test_instance.GtestTestInstance): + return local_device_gtest_run.LocalDeviceGtestRun(env, test_instance) + if isinstance(test_instance, + instrumentation_test_instance.InstrumentationTestInstance): + return (local_device_instrumentation_test_run + .LocalDeviceInstrumentationTestRun(env, test_instance)) + if isinstance(test_instance, + perf_test_instance.PerfTestInstance): + return _CreatePerfTestRun(args, env, test_instance) + + if (remote_device_environment + and isinstance(env, remote_device_environment.RemoteDeviceEnvironment)): + # The remote_device modules should be all or nothing. + assert (remote_device_gtest_run + and remote_device_instrumentation_test_run + and remote_device_uirobot_test_run) + + if isinstance(test_instance, gtest_test_instance.GtestTestInstance): + return remote_device_gtest_run.RemoteDeviceGtestTestRun( + env, test_instance) + if isinstance(test_instance, + instrumentation_test_instance.InstrumentationTestInstance): + return (remote_device_instrumentation_test_run + .RemoteDeviceInstrumentationTestRun(env, test_instance)) + if isinstance(test_instance, uirobot_test_instance.UirobotTestInstance): + return remote_device_uirobot_test_run.RemoteDeviceUirobotTestRun( + env, test_instance) + + + error_func('Unable to create test run for %s tests in %s environment' + % (str(test_instance), str(env))) + diff --git a/build/android/pylib/base/test_server.py b/build/android/pylib/base/test_server.py new file mode 100644 index 00000000000..085a51e9bae --- /dev/null +++ b/build/android/pylib/base/test_server.py @@ -0,0 +1,19 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
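+
+# Subclasses implement SetUp/Reset/TearDown. A minimal hypothetical subclass
+# (an illustrative sketch only; EchoTestServer and its process handling are
+# not part of this change) might look like:
+#
+#   class EchoTestServer(TestServer):
+#     def SetUp(self):
+#       self._process = subprocess.Popen(['echo_server'])  # start the server
+#     def Reset(self):
+#       self.TearDown()  # restart to return to a clean state
+#       self.SetUp()
+#     def TearDown(self):
+#       self._process.kill()  # stop the server process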
+
+class TestServer(object):
+  """Base class for any server that needs to be set up for the tests."""
+
+  def __init__(self, *args, **kwargs):
+    pass
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def Reset(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
diff --git a/build/android/pylib/chrome_test_server_spawner.py b/build/android/pylib/chrome_test_server_spawner.py
new file mode 100644
index 00000000000..e1eb6b384f6
--- /dev/null
+++ b/build/android/pylib/chrome_test_server_spawner.py
@@ -0,0 +1,432 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles spawning and killing per-test test
+servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+# pylint: disable=W0702
+
+import BaseHTTPServer
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import sys
+import threading
+import time
+import urlparse
+
+from devil.android import forwarder
+from devil.android import ports
+
+from pylib import constants
+from pylib.constants import host_paths
+
+
+# Paths that are needed to import necessary modules when launching a
+# testserver.
+os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
+    % (os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'),
+       os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
+       os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib',
+                    'src'),
+       os.path.join(host_paths.DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
+       os.path.join(host_paths.DIR_SOURCE_ROOT, 'components', 'sync', 'tools',
+                    'testserver')))
+
+
+SERVER_TYPES = {
+    'http': '',
+    'ftp': '-f',
+    'sync': '',  # Sync uses its own script, and doesn't take a server type arg.
+    'tcpecho': '--tcp-echo',
+    'udpecho': '--udp-echo',
+}
+
+
+# The timeout (in seconds) for starting up the Python test server.
+TEST_SERVER_STARTUP_TIMEOUT = 10
+
+def _WaitUntil(predicate, max_attempts=5):
+  """Blocks until the provided predicate (function) is true.
+
+  Returns:
+    Whether the provided predicate was satisfied once (before the timeout).
+  """
+  sleep_time_sec = 0.025
+  for _ in xrange(1, max_attempts):
+    if predicate():
+      return True
+    time.sleep(sleep_time_sec)
+    sleep_time_sec = min(1, sleep_time_sec * 2)  # Don't wait more than 1 sec.
+  return False
+
+
+def _CheckPortAvailable(port):
+  """Returns True if |port| is available."""
+  return _WaitUntil(lambda: ports.IsHostPortAvailable(port))
+
+
+def _CheckPortNotAvailable(port):
+  """Returns True if |port| is not available."""
+  return _WaitUntil(lambda: not ports.IsHostPortAvailable(port))
+
+
+def _CheckDevicePortStatus(device, port):
+  """Returns whether the provided device port is in use."""
+  return _WaitUntil(lambda: ports.IsDevicePortUsed(device, port))
+
+
+def _GetServerTypeCommandLine(server_type):
+  """Returns the command-line argument for the given server type.
+
+  Args:
+    server_type: the server type to be used (e.g. 'http').
+
+  Returns:
+    A string containing the command-line argument.
+  """
+  if server_type not in SERVER_TYPES:
+    raise NotImplementedError('Unknown server type: %s' % server_type)
+  if server_type == 'udpecho':
+    raise Exception('Please do not run UDP echo tests because we do not have '
+                    'a UDP forwarder tool.')
+  return SERVER_TYPES[server_type]
+
+
+class TestServerThread(threading.Thread):
+  """A thread to run the test server in a separate process."""
+
+  def __init__(self, ready_event, arguments, device, tool):
+    """Initializes TestServerThread with the following arguments.
+
+    Args:
+      ready_event: event which will be set when the test server is ready.
+      arguments: dictionary of arguments to run the test server.
+      device: An instance of DeviceUtils.
+      tool: instance of runtime error detection tool.
+    """
+    threading.Thread.__init__(self)
+    self.wait_event = threading.Event()
+    self.stop_flag = False
+    self.ready_event = ready_event
+    self.ready_event.clear()
+    self.arguments = arguments
+    self.device = device
+    self.tool = tool
+    self.test_server_process = None
+    self.is_ready = False
+    self.host_port = self.arguments['port']
+    assert isinstance(self.host_port, int)
+    # The forwarder device port is now dynamically allocated.
+    self.forwarder_device_port = 0
+    # Anonymous pipe used to get the port info from the test server.
+    self.pipe_in = None
+    self.pipe_out = None
+    self.process = None
+    self.command_line = []
+
+  def _WaitToStartAndGetPortFromTestServer(self):
+    """Waits for the Python test server to start and gets the port it is using.
+
+    The port information is passed by the Python test server over the pipe
+    given by self.pipe_out, and the result is written to |self.host_port|.
+
+    Returns:
+      Whether the port used by the test server was successfully fetched.
+    """
+    assert self.host_port == 0 and self.pipe_out and self.pipe_in
+    (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
+                                   TEST_SERVER_STARTUP_TIMEOUT)
+    if len(in_fds) == 0:
+      logging.error('Timed out waiting for the Python test server to start.')
+      return False
+    # First read the data length as an unsigned 4-byte value. This
+    # is _not_ using network byte ordering since the Python test server packs
+    # size as native byte order and all Chromium platforms so far are
+    # configured to use little-endian.
+    # TODO(jnd): Change the Python test server and local_test_server_*.cc to
+    # use a unified byte order (either big-endian or little-endian).
+    data_length = os.read(self.pipe_in, struct.calcsize('=L'))
+    if data_length:
+      (data_length,) = struct.unpack('=L', data_length)
+      assert data_length
+    if not data_length:
+      logging.error('Failed to get length of server data.')
+      return False
+    port_json = os.read(self.pipe_in, data_length)
+    if not port_json:
+      logging.error('Failed to get server data.')
+      return False
+    logging.info('Got port json data: %s', port_json)
+    port_json = json.loads(port_json)
+    if port_json.has_key('port') and isinstance(port_json['port'], int):
+      self.host_port = port_json['port']
+      return _CheckPortNotAvailable(self.host_port)
+    logging.error('Failed to get port information from the server data.')
+    return False
+
+  def _GenerateCommandLineArguments(self):
+    """Generates the command line to run the test server.
+
+    Note that all options are processed by following the definitions in
+    testserver.py.
+    """
+    if self.command_line:
+      return
+
+    args_copy = dict(self.arguments)
+
+    # Translate the server type.
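+    # (e.g. 'ftp' maps to '-f'; 'http' and 'sync' map to an empty string, in
+    # which case no type flag is appended below.)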
+ type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type')) + if type_cmd: + self.command_line.append(type_cmd) + + # Use a pipe to get the port given by the instance of Python test server + # if the test does not specify the port. + assert self.host_port == args_copy['port'] + if self.host_port == 0: + (self.pipe_in, self.pipe_out) = os.pipe() + self.command_line.append('--startup-pipe=%d' % self.pipe_out) + + # Pass the remaining arguments as-is. + for key, values in args_copy.iteritems(): + if not isinstance(values, list): + values = [values] + for value in values: + if value is None: + self.command_line.append('--%s' % key) + else: + self.command_line.append('--%s=%s' % (key, value)) + + def _CloseUnnecessaryFDsForTestServerProcess(self): + # This is required to avoid subtle deadlocks that could be caused by the + # test server child process inheriting undesirable file descriptors such as + # file lock file descriptors. + for fd in xrange(0, 1024): + if fd != self.pipe_out: + try: + os.close(fd) + except: + pass + + def run(self): + logging.info('Start running the thread!') + self.wait_event.clear() + self._GenerateCommandLineArguments() + command = host_paths.DIR_SOURCE_ROOT + if self.arguments['server-type'] == 'sync': + command = [os.path.join(command, 'components', 'sync', 'tools', + 'testserver', + 'sync_testserver.py')] + self.command_line + else: + command = [os.path.join(command, 'net', 'tools', 'testserver', + 'testserver.py')] + self.command_line + logging.info('Running: %s', command) + + # Disable PYTHONUNBUFFERED because it has a bad interaction with the + # testserver. Remove once this interaction is fixed. + unbuf = os.environ.pop('PYTHONUNBUFFERED', None) + + # Pass DIR_SOURCE_ROOT as the child's working directory so that relative + # paths in the arguments are resolved correctly. + self.process = subprocess.Popen( + command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess, + cwd=host_paths.DIR_SOURCE_ROOT) + if unbuf: + os.environ['PYTHONUNBUFFERED'] = unbuf + if self.process: + if self.pipe_out: + self.is_ready = self._WaitToStartAndGetPortFromTestServer() + else: + self.is_ready = _CheckPortNotAvailable(self.host_port) + if self.is_ready: + forwarder.Forwarder.Map([(0, self.host_port)], self.device, self.tool) + # Check whether the forwarder is ready on the device. + self.is_ready = False + device_port = forwarder.Forwarder.DevicePortForHostPort(self.host_port) + if device_port and _CheckDevicePortStatus(self.device, device_port): + self.is_ready = True + self.forwarder_device_port = device_port + # Wake up the request handler thread. + self.ready_event.set() + # Keep thread running until Stop() gets called. + _WaitUntil(lambda: self.stop_flag, max_attempts=sys.maxint) + if self.process.poll() is None: + self.process.kill() + forwarder.Forwarder.UnmapDevicePort(self.forwarder_device_port, self.device) + self.process = None + self.is_ready = False + if self.pipe_out: + os.close(self.pipe_in) + os.close(self.pipe_out) + self.pipe_in = None + self.pipe_out = None + logging.info('Test-server has died.') + self.wait_event.set() + + def Stop(self): + """Blocks until the loop has finished. + + Note that this must be called in another thread. 
+    """
+    if not self.process:
+      return
+    self.stop_flag = True
+    self.wait_event.wait()
+
+
+class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler used to process HTTP GET/POST requests."""
+
+  def _SendResponse(self, response_code, response_reason, additional_headers,
+                    contents):
+    """Generates a response sent to the client from the provided parameters.
+
+    Args:
+      response_code: the numeric HTTP status code of the response.
+      response_reason: the reason phrase describing the response status.
+      additional_headers: dict of additional headers. Each key is the name of
+                          the header, each value is the content of the header.
+      contents: the string body to send to the client.
+    """
+    self.send_response(response_code, response_reason)
+    self.send_header('Content-Type', 'text/html')
+    # Specify the content-length; without it the HTTP(S) response will not
+    # be completed properly (and the browser keeps expecting data).
+    self.send_header('Content-Length', len(contents))
+    for header_name in additional_headers:
+      self.send_header(header_name, additional_headers[header_name])
+    self.end_headers()
+    self.wfile.write(contents)
+    self.wfile.flush()
+
+  def _StartTestServer(self):
+    """Starts the test server thread."""
+    logging.info('Handling request to spawn a test server.')
+    content_type = self.headers.getheader('content-type')
+    if content_type != 'application/json':
+      raise Exception('Bad content-type for start request.')
+    content_length = self.headers.getheader('content-length')
+    if not content_length:
+      content_length = 0
+    try:
+      content_length = int(content_length)
+    except:
+      raise Exception('Bad content-length for start request.')
+    logging.info(content_length)
+    test_server_argument_json = self.rfile.read(content_length)
+    logging.info(test_server_argument_json)
+    assert not self.server.test_server_instance
+    ready_event = threading.Event()
+    self.server.test_server_instance = TestServerThread(
+        ready_event,
+        json.loads(test_server_argument_json),
+        self.server.device,
+        self.server.tool)
+    self.server.test_server_instance.setDaemon(True)
+    self.server.test_server_instance.start()
+    ready_event.wait()
+    if self.server.test_server_instance.is_ready:
+      self._SendResponse(200, 'OK', {}, json.dumps(
+          {'port': self.server.test_server_instance.forwarder_device_port,
+           'message': 'started'}))
+      logging.info('Test server is running on port: %d.',
+                   self.server.test_server_instance.host_port)
+    else:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encountered a problem while starting a test server.')
+
+  def _KillTestServer(self):
+    """Stops the test server instance."""
+    # There should only ever be one test server at a time. This may do the
+    # wrong thing if we try and start multiple test servers.
+    if not self.server.test_server_instance:
+      return
+    port = self.server.test_server_instance.host_port
+    logging.info('Handling request to kill a test server on port: %d.', port)
+    self.server.test_server_instance.Stop()
+    # Make sure the status of the test server is correct before sending the
+    # response.
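+    # (The server only counts as stopped once its host port is free again;
+    # _CheckPortAvailable polls for exactly that.)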
+    if _CheckPortAvailable(port):
+      self._SendResponse(200, 'OK', {}, 'killed')
+      logging.info('Test server on port %d was killed.', port)
+    else:
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encountered a problem while killing the test server.')
+    self.server.test_server_instance = None
+
+  def do_POST(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    logging.info('Action for POST method is: %s.', action)
+    if action == '/start':
+      self._StartTestServer()
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      logging.info('Encountered unknown request: %s.', action)
+
+  def do_GET(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
+    logging.info('Action for GET method is: %s.', action)
+    for param in params:
+      logging.info('%s=%s', param, params[param][0])
+    if action == '/kill':
+      self._KillTestServer()
+    elif action == '/ping':
+      # The ping handler is used to check whether the spawner server is ready
+      # to serve requests. We don't need to test the status of the test
+      # server when handling a ping request.
+      self._SendResponse(200, 'OK', {}, 'ready')
+      logging.info('Handled ping request and sent response.')
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      logging.info('Encountered unknown request: %s.', action)
+
+
+class SpawningServer(object):
+  """The class used to start/stop an HTTP server."""
+
+  def __init__(self, test_server_spawner_port, device, tool):
+    logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
+    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
+                                            SpawningServerRequestHandler)
+    self.server.device = device
+    self.server.tool = tool
+    self.server.test_server_instance = None
+    self.server.build_type = constants.GetBuildType()
+
+  def _Listen(self):
+    logging.info('Starting test server spawner')
+    self.server.serve_forever()
+
+  def Start(self):
+    """Starts the test server spawner."""
+    listener_thread = threading.Thread(target=self._Listen)
+    listener_thread.setDaemon(True)
+    listener_thread.start()
+
+  def Stop(self):
+    """Stops the test server spawner.
+
+    Also cleans the server state.
+    """
+    self.CleanupState()
+    self.server.shutdown()
+
+  def CleanupState(self):
+    """Cleans up the spawning server state.
+
+    This should be called if the test server spawner is reused,
+    to avoid sharing the test server instance.
+    """
+    if self.server.test_server_instance:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
diff --git a/build/android/pylib/constants/__init__.py b/build/android/pylib/constants/__init__.py
new file mode 100644
index 00000000000..9b25dcd07ca
--- /dev/null
+++ b/build/android/pylib/constants/__init__.py
@@ -0,0 +1,225 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+# TODO(jbudorick): Split these constants into coherent modules.
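+#
+# Output-directory resolution (see GetOutDirectory and CheckOutputDirectory
+# below): an explicit CHROMIUM_OUTPUT_DIR wins; otherwise the path is built
+# from CHROMIUM_OUT_DIR (defaulting to 'out') plus the BUILDTYPE environment
+# variable. Illustrative use:
+#
+#   constants.SetBuildType('Release')
+#   out_dir = constants.GetOutDirectory()  # -> <src>/out/Release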
+ +# pylint: disable=W0212 + +import collections +import glob +import logging +import os +import subprocess + +import devil.android.sdk.keyevent +from devil.android.constants import chrome +from devil.android.sdk import version_codes +from devil.constants import exit_codes + + +keyevent = devil.android.sdk.keyevent + + +DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT', + os.path.abspath(os.path.join(os.path.dirname(__file__), + os.pardir, os.pardir, os.pardir, os.pardir))) + +PACKAGE_INFO = dict(chrome.PACKAGE_INFO) +PACKAGE_INFO.update({ + 'legacy_browser': chrome.PackageInfo( + 'com.google.android.browser', + 'com.android.browser.BrowserActivity', + None, + None), + 'chromecast_shell': chrome.PackageInfo( + 'com.google.android.apps.mediashell', + 'com.google.android.apps.mediashell.MediaShellActivity', + '/data/local/tmp/castshell-command-line', + None), + 'android_webview_shell': chrome.PackageInfo( + 'org.chromium.android_webview.shell', + 'org.chromium.android_webview.shell.AwShellActivity', + '/data/local/tmp/android-webview-command-line', + None), + 'gtest': chrome.PackageInfo( + 'org.chromium.native_test', + 'org.chromium.native_test.NativeUnitTestActivity', + '/data/local/tmp/chrome-native-tests-command-line', + None), + 'components_browsertests': chrome.PackageInfo( + 'org.chromium.components_browsertests_apk', + ('org.chromium.components_browsertests_apk' + + '.ComponentsBrowserTestsActivity'), + '/data/local/tmp/chrome-native-tests-command-line', + None), + 'content_browsertests': chrome.PackageInfo( + 'org.chromium.content_browsertests_apk', + 'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity', + '/data/local/tmp/chrome-native-tests-command-line', + None), + 'chromedriver_webview_shell': chrome.PackageInfo( + 'org.chromium.chromedriver_webview_shell', + 'org.chromium.chromedriver_webview_shell.Main', + None, + None), +}) + + +# Ports arrangement for various test servers used in Chrome for Android. +# Lighttpd server will attempt to use 9000 as default port, if unavailable it +# will find a free port from 8001 - 8999. +LIGHTTPD_DEFAULT_PORT = 9000 +LIGHTTPD_RANDOM_PORT_FIRST = 8001 +LIGHTTPD_RANDOM_PORT_LAST = 8999 +TEST_SYNC_SERVER_PORT = 9031 +TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041 +TEST_POLICY_SERVER_PORT = 9051 + + +TEST_EXECUTABLE_DIR = '/data/local/tmp' +# Directories for common java libraries for SDK build. +# These constants are defined in build/android/ant/common.xml +SDK_BUILD_JAVALIB_DIR = 'lib.java' +SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java' +SDK_BUILD_APKS_DIR = 'apks' + +ADB_KEYS_FILE = '/data/misc/adb/adb_keys' + +PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results') +# The directory on the device where perf test output gets saved to. 
+DEVICE_PERF_OUTPUT_DIR = ( + '/data/data/' + PACKAGE_INFO['chrome'].package + '/files') + +SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots') + +ANDROID_SDK_VERSION = version_codes.MARSHMALLOW +ANDROID_SDK_BUILD_TOOLS_VERSION = '23.0.1' +ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, + 'third_party', 'android_tools', 'sdk') +ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT, + 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION) +ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT, + 'third_party', 'android_tools', 'ndk') + +PROGUARD_SCRIPT_PATH = os.path.join( + ANDROID_SDK_ROOT, 'tools', 'proguard', 'bin', 'proguard.sh') + +PROGUARD_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'proguard') + +BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT, + os.environ.get('CHROMIUM_OUT_DIR', 'out'), + 'bad_devices.json') + +UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com' + +# TODO(jbudorick): Remove once unused. +DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop' + +# TODO(jbudorick): Rework this into testing/buildbot/ +PYTHON_UNIT_TEST_SUITES = { + 'pylib_py_unittests': { + 'path': os.path.join(DIR_SOURCE_ROOT, 'build', 'android'), + 'test_modules': [ + 'devil.android.device_utils_test', + 'devil.android.md5sum_test', + 'devil.utils.cmd_helper_test', + 'pylib.results.json_results_test', + 'pylib.utils.proguard_test', + ] + }, + 'gyp_py_unittests': { + 'path': os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'), + 'test_modules': [ + 'java_cpp_enum_tests', + 'java_google_api_keys_tests', + ] + }, +} + +LOCAL_MACHINE_TESTS = ['junit', 'python'] +VALID_ENVIRONMENTS = ['local', 'remote_device'] +VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey', + 'perf', 'python', 'uirobot'] +VALID_DEVICE_TYPES = ['Android', 'iOS'] + + +def GetBuildType(): + try: + return os.environ['BUILDTYPE'] + except KeyError: + raise EnvironmentError( + 'The BUILDTYPE environment variable has not been set') + + +def SetBuildType(build_type): + os.environ['BUILDTYPE'] = build_type + + +def SetBuildDirectory(build_directory): + os.environ['CHROMIUM_OUT_DIR'] = build_directory + + +def SetOutputDirectory(output_directory): + os.environ['CHROMIUM_OUTPUT_DIR'] = output_directory + + +def GetOutDirectory(build_type=None): + """Returns the out directory where the output binaries are built. + + Args: + build_type: Build type, generally 'Debug' or 'Release'. Defaults to the + globally set build type environment variable BUILDTYPE. + """ + if 'CHROMIUM_OUTPUT_DIR' in os.environ: + return os.path.abspath(os.path.join( + DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR'))) + + return os.path.abspath(os.path.join( + DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'), + GetBuildType() if build_type is None else build_type)) + + +def CheckOutputDirectory(): + """Checks that CHROMIUM_OUT_DIR or CHROMIUM_OUTPUT_DIR is set. + + If neither are set, but the current working directory is a build directory, + then CHROMIUM_OUTPUT_DIR is set to the current working directory. + + Raises: + Exception: If no output directory is detected. + """ + output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR') + out_dir = os.environ.get('CHROMIUM_OUT_DIR') + if not output_dir and not out_dir: + # If CWD is an output directory, then assume it's the desired one. + if os.path.exists('build.ninja'): + output_dir = os.getcwd() + SetOutputDirectory(output_dir) + elif os.environ.get('CHROME_HEADLESS'): + # When running on bots, see if the output directory is obvious. 
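+      # A single out/<config>/build.ninja match identifies the build directory
+      # unambiguously; zero or multiple matches are treated as errors below.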
+      dirs = glob.glob(os.path.join(DIR_SOURCE_ROOT, 'out', '*', 'build.ninja'))
+      if len(dirs) == 1:
+        # Use the directory containing build.ninja, not the file itself.
+        SetOutputDirectory(os.path.dirname(dirs[0]))
+      else:
+        raise Exception('Neither CHROMIUM_OUTPUT_DIR nor CHROMIUM_OUT_DIR '
+                        'has been set. CHROME_HEADLESS detected, but multiple '
+                        'out dirs exist: %r' % dirs)
+    else:
+      raise Exception('Neither CHROMIUM_OUTPUT_DIR nor CHROMIUM_OUT_DIR '
+                      'has been set')
+
+
+# TODO(jbudorick): Convert existing callers to AdbWrapper.GetAdbPath() and
+# remove this.
+def GetAdbPath():
+  from devil.android.sdk import adb_wrapper
+  return adb_wrapper.AdbWrapper.GetAdbPath()
+
+
+# Exit codes
+ERROR_EXIT_CODE = exit_codes.ERROR
+INFRA_EXIT_CODE = exit_codes.INFRA
+WARNING_EXIT_CODE = exit_codes.WARNING
diff --git a/build/android/pylib/constants/host_paths.py b/build/android/pylib/constants/host_paths.py
new file mode 100644
index 00000000000..98aa53dd0b9
--- /dev/null
+++ b/build/android/pylib/constants/host_paths.py
@@ -0,0 +1,38 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import os
+import sys
+
+DIR_SOURCE_ROOT = os.environ.get(
+    'CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                 os.pardir, os.pardir, os.pardir, os.pardir)))
+
+BUILD_COMMON_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common')
+
+# third-party libraries
+ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'android_platform', 'development',
+    'scripts')
+DEVIL_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil')
+PYMOCK_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'pymock')
+
+@contextlib.contextmanager
+def SysPath(path, position=None):
+  if position is None:
+    sys.path.append(path)
+  else:
+    sys.path.insert(position, path)
+  try:
+    yield
+  finally:
+    if sys.path[-1] == path:
+      sys.path.pop()
+    else:
+      sys.path.remove(path)
diff --git a/build/android/pylib/content_settings.py b/build/android/pylib/content_settings.py
new file mode 100644
index 00000000000..3bf11bc490c
--- /dev/null
+++ b/build/android/pylib/content_settings.py
@@ -0,0 +1,80 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class ContentSettings(dict):
+
+  """A dict interface to interact with device content settings.
+
+  Settings are key/value pairs as exposed by 'adb shell content'.
+ """ + + def __init__(self, table, device): + super(ContentSettings, self).__init__() + self._table = table + self._device = device + + @staticmethod + def _GetTypeBinding(value): + if isinstance(value, bool): + return 'b' + if isinstance(value, float): + return 'f' + if isinstance(value, int): + return 'i' + if isinstance(value, long): + return 'l' + if isinstance(value, str): + return 's' + raise ValueError('Unsupported type %s' % type(value)) + + def iteritems(self): + # Example row: + # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05' + for row in self._device.RunShellCommand( + 'content query --uri content://%s' % self._table, as_root=True): + fields = row.split(', ') + key = None + value = None + for field in fields: + k, _, v = field.partition('=') + if k == 'name': + key = v + elif k == 'value': + value = v + if not key: + continue + if not value: + value = '' + yield key, value + + def __getitem__(self, key): + return self._device.RunShellCommand( + 'content query --uri content://%s --where "name=\'%s\'" ' + '--projection value' % (self._table, key), as_root=True).strip() + + def __setitem__(self, key, value): + if key in self: + self._device.RunShellCommand( + 'content update --uri content://%s ' + '--bind value:%s:%s --where "name=\'%s\'"' % ( + self._table, + self._GetTypeBinding(value), value, key), + as_root=True) + else: + self._device.RunShellCommand( + 'content insert --uri content://%s ' + '--bind name:%s:%s --bind value:%s:%s' % ( + self._table, + self._GetTypeBinding(key), key, + self._GetTypeBinding(value), value), + as_root=True) + + def __delitem__(self, key): + self._device.RunShellCommand( + 'content delete --uri content://%s ' + '--bind name:%s:%s' % ( + self._table, + self._GetTypeBinding(key), key), + as_root=True) diff --git a/build/android/pylib/device/__init__.py b/build/android/pylib/device/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/build/android/pylib/device/commands/BUILD.gn b/build/android/pylib/device/commands/BUILD.gn new file mode 100644 index 00000000000..c6d4b42f31f --- /dev/null +++ b/build/android/pylib/device/commands/BUILD.gn @@ -0,0 +1,17 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +group("commands") { + data_deps = [ + ":chromium_commands", + ] +} + +# GYP: //build/android/pylib/device/commands/commands.gyp:chromium_commands +android_library("chromium_commands") { + java_files = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ] + dex_path = "$root_build_dir/lib.java/chromium_commands.dex.jar" +} diff --git a/build/android/pylib/device/commands/commands.gyp b/build/android/pylib/device/commands/commands.gyp new file mode 100644 index 00000000000..b5b5bc86043 --- /dev/null +++ b/build/android/pylib/device/commands/commands.gyp @@ -0,0 +1,20 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +{ + 'targets': [ + { + # GN version: //build/android/pylib/devices/commands:chromium_commands + 'target_name': 'chromium_commands', + 'type': 'none', + 'variables': { + 'add_to_dependents_classpaths': 0, + 'java_in_dir': ['java'], + }, + 'includes': [ + '../../../../../build/java.gypi', + ], + } + ], +} diff --git a/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java new file mode 100644 index 00000000000..7cbbb732ced --- /dev/null +++ b/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java @@ -0,0 +1,95 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.android.commands.unzip; + +import android.util.Log; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.io.PrintStream; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +/** + * Minimal implementation of the command-line unzip utility for Android. + */ +public class Unzip { + + private static final String TAG = "Unzip"; + + public static void main(String[] args) { + try { + (new Unzip()).run(args); + } catch (RuntimeException e) { + Log.e(TAG, e.toString()); + System.exit(1); + } + } + + private void showUsage(PrintStream s) { + s.println("Usage:"); + s.println("unzip [zipfile]"); + } + + @SuppressWarnings("Finally") + private void unzip(String[] args) { + ZipInputStream zis = null; + try { + String zipfile = args[0]; + zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipfile))); + ZipEntry ze = null; + + byte[] bytes = new byte[1024]; + while ((ze = zis.getNextEntry()) != null) { + File outputFile = new File(ze.getName()); + if (ze.isDirectory()) { + if (!outputFile.exists() && !outputFile.mkdirs()) { + throw new RuntimeException( + "Failed to create directory: " + outputFile.toString()); + } + } else { + File parentDir = outputFile.getParentFile(); + if (!parentDir.exists() && !parentDir.mkdirs()) { + throw new RuntimeException( + "Failed to create directory: " + parentDir.toString()); + } + OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile)); + int actual_bytes = 0; + int total_bytes = 0; + while ((actual_bytes = zis.read(bytes)) != -1) { + out.write(bytes, 0, actual_bytes); + total_bytes += actual_bytes; + } + out.close(); + } + zis.closeEntry(); + } + + } catch (IOException e) { + throw new RuntimeException("Error while unzipping: " + e.toString()); + } finally { + try { + if (zis != null) zis.close(); + } catch (IOException e) { + throw new RuntimeException("Error while closing zip: " + e.toString()); + } + } + } + + public void run(String[] args) { + if (args.length != 1) { + showUsage(System.err); + throw new RuntimeException("Incorrect usage."); + } + + unzip(args); + } +} + diff --git a/build/android/pylib/device_settings.py b/build/android/pylib/device_settings.py new file mode 100644 index 00000000000..ab4ad1b900b --- /dev/null +++ b/build/android/pylib/device_settings.py @@ -0,0 +1,199 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
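+#
+# This module drives pylib.content_settings.ContentSettings (defined above),
+# which exposes one settings table as a dict-like object. An illustrative
+# use, assuming a connected DeviceUtils instance named device:
+#
+#   settings = content_settings.ContentSettings('settings/global', device)
+#   settings['airplane_mode_on'] = 1   # issues 'content insert'/'update'
+#   for name, value in settings.iteritems():
+#     logging.info('%s=%s', name, value)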
+
+import logging
+
+from pylib import content_settings
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH = (
+    '/data/data/com.android.providers.settings/databases/settings.db')
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+_COMPATIBLE_BUILD_TYPES = ['userdebug', 'eng']
+
+
+def ConfigureContentSettings(device, desired_settings):
+  """Configures device content settings from a list.
+
+  Many settings are documented at:
+    http://developer.android.com/reference/android/provider/Settings.Global.html
+    http://developer.android.com/reference/android/provider/Settings.Secure.html
+    http://developer.android.com/reference/android/provider/Settings.System.html
+
+  Many others are undocumented.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+    desired_settings: A list of (table, [(key, value), ...]) tuples for all
+        settings to configure.
+  """
+  for table, key_value in desired_settings:
+    settings = content_settings.ContentSettings(table, device)
+    for key, value in key_value:
+      settings[key] = value
+    logging.info('\n%s %s', table, (80 - len(table)) * '-')
+    for key, value in sorted(settings.iteritems()):
+      logging.info('\t%s: %s', key, value)
+
+
+def SetLockScreenSettings(device):
+  """Sets lock screen settings on the device.
+
+  On certain device/Android configurations we need to disable the lock screen
+  in a different database. Additionally, the password type must be set to
+  DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED.
+  Lock screen settings are stored in sqlite on the device in:
+      /data/system/locksettings.db
+
+  IMPORTANT: The first column is used as a primary key so that all rows with
+  the same value for that column are removed from the table prior to inserting
+  the new values.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+
+  Raises:
+    Exception if the setting was not properly set.
+  """
+  if device.build_type not in _COMPATIBLE_BUILD_TYPES:
+    logging.warning('Unable to disable lockscreen on %s builds.',
+                    device.build_type)
+    return
+
+  def get_lock_settings(table):
+    return [(table, 'lockscreen.disabled', '1'),
+            (table, 'lockscreen.password_type', PASSWORD_QUALITY_UNSPECIFIED),
+            (table, 'lockscreen.password_type_alternate',
+             PASSWORD_QUALITY_UNSPECIFIED)]
+
+  if device.FileExists(_LOCK_SCREEN_SETTINGS_PATH):
+    db = _LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('locksettings')
+    columns = ['name', 'user', 'value']
+    generate_values = lambda k, v: [k, '0', v]
+  elif device.FileExists(_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH):
+    db = _ALTERNATE_LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('secure') + get_lock_settings('system')
+    columns = ['name', 'value']
+    generate_values = lambda k, v: [k, v]
+  else:
+    logging.warning('Unable to find database file to set lock screen settings.')
+    return
+
+  for table, key, value in locksettings:
+    # Set the lockscreen setting for the default user '0'.
+    values = generate_values(key, value)
+
+    cmd = """begin transaction;
+delete from '%(table)s' where %(primary_key)s='%(primary_value)s';
+insert into '%(table)s' (%(columns)s) values (%(values)s);
+commit transaction;""" % {
+      'table': table,
+      'primary_key': columns[0],
+      'primary_value': values[0],
+      'columns': ', '.join(columns),
+      'values': ', '.join(["'%s'" % value for value in values])
+    }
+    output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd),
+                                        as_root=True)
+    if output_msg:
+      logging.info(' '.join(output_msg))
+
+
+ENABLE_LOCATION_SETTINGS = [
+  # Note that these must be set in this order for all of them to take effect
+  # and persist across a reboot.
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 1),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is enabled and allowed for tests.
+    ('location_providers_allowed', 'gps,network'),
+  ]),
+  ('com.google.settings/partner', [
+    ('network_location_opt_in', 1),
+  ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 0),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is disabled.
+    ('location_providers_allowed', ''),
+  ]),
+]
+
+ENABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 1),
+  ]),
+]
+
+DISABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 0),
+  ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+  ('settings/global', [
+    ('assisted_gps_enabled', 0),
+
+    # Disable "auto time" and "auto time zone" to keep network-provided time
+    # from overwriting the device's datetime and timezone synchronized from
+    # the host when running tests later. See b/6569849.
+    ('auto_time', 0),
+    ('auto_time_zone', 0),
+
+    ('development_settings_enabled', 1),
+
+    # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
+    # on application crashes and ANRs. If this is disabled, the crash/ANR dialog
+    # will never display the "Report" button.
+ # Type: int ( 0 = disallow, 1 = allow ) + ('send_action_app_error', 0), + + ('stay_on_while_plugged_in', 3), + + ('verifier_verify_adb_installs', 0), + ]), + ('settings/secure', [ + ('allowed_geolocation_origins', + 'http://www.google.co.uk http://www.google.com'), + + # Ensure that we never get random dialogs like "Unfortunately the process + # android.process.acore has stopped", which steal the focus, and make our + # automation fail (because the dialog steals the focus then mistakenly + # receives the injected user input events). + ('anr_show_background', 0), + + ('lockscreen.disabled', 1), + + ('screensaver_enabled', 0), + + ('skip_first_use_hints', 1), + ]), + ('settings/system', [ + # Don't want devices to accidentally rotate the screen as that could + # affect performance measurements. + ('accelerometer_rotation', 0), + + ('lockscreen.disabled', 1), + + # Turn down brightness and disable auto-adjust so that devices run cooler. + ('screen_brightness', 5), + ('screen_brightness_mode', 0), + + ('user_rotation', 0), + ]), +] + +NETWORK_DISABLED_SETTINGS = [ + ('settings/global', [ + ('airplane_mode_on', 1), + ('wifi_on', 0), + ]), +] diff --git a/build/android/pylib/gtest/__init__.py b/build/android/pylib/gtest/__init__.py new file mode 100644 index 00000000000..727e987e6b6 --- /dev/null +++ b/build/android/pylib/gtest/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + diff --git a/build/android/pylib/gtest/filter/OWNERS b/build/android/pylib/gtest/filter/OWNERS new file mode 100644 index 00000000000..72e8ffc0db8 --- /dev/null +++ b/build/android/pylib/gtest/filter/OWNERS @@ -0,0 +1 @@ +* diff --git a/build/android/pylib/gtest/filter/base_unittests_disabled b/build/android/pylib/gtest/filter/base_unittests_disabled new file mode 100644 index 00000000000..533d3e167b7 --- /dev/null +++ b/build/android/pylib/gtest/filter/base_unittests_disabled @@ -0,0 +1,25 @@ +# List of suppressions + +# Android will not support StackTrace. +StackTrace.* +# +# Sometimes this is automatically generated by run_tests.py +VerifyPathControlledByUserTest.Symlinks + +# http://crbug.com/138845 +MessagePumpLibeventTest.TestWatchingFromBadThread + +StringPrintfTest.StringPrintfMisc +StringPrintfTest.StringAppendfString +StringPrintfTest.StringAppendfInt +StringPrintfTest.StringPrintfBounds +# TODO(jrg): Fails on bots. Works locally. Figure out why. 2/6/12 +FieldTrialTest.* +# Flaky? 
+ScopedJavaRefTest.RefCounts
+FileTest.MemoryCorruption
+MessagePumpLibeventTest.QuitOutsideOfRun
+ScopedFD.ScopedFDCrashesOnCloseFailure
+
+# http://crbug.com/245043
+StackContainer.BufferAlignment
diff --git a/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled b/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 00000000000..85e8fd63634
--- /dev/null
+++ b/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions for the emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/build/android/pylib/gtest/filter/breakpad_unittests_disabled b/build/android/pylib/gtest/filter/breakpad_unittests_disabled
new file mode 100644
index 00000000000..cefc64fd5ee
--- /dev/null
+++ b/build/android/pylib/gtest/filter/breakpad_unittests_disabled
@@ -0,0 +1,9 @@
+FileIDStripTest.StripSelf
+# crbug.com/303960
+ExceptionHandlerTest.InstructionPointerMemoryNullPointer
+# crbug.com/171419
+MinidumpWriterTest.MappingInfoContained
+# crbug.com/310088
+MinidumpWriterTest.MinidumpSizeLimit
+# crbug.com/375838
+ElfCoreDumpTest.ValidCoreFile
diff --git a/build/android/pylib/gtest/filter/content_browsertests_disabled b/build/android/pylib/gtest/filter/content_browsertests_disabled
new file mode 100644
index 00000000000..d4fe3b6b5c6
--- /dev/null
+++ b/build/android/pylib/gtest/filter/content_browsertests_disabled
@@ -0,0 +1,56 @@
+# List of suppressions
+# Timeouts
+Http/MediaTest.*
+File/MediaTest.*
+MediaTest.*
+DatabaseTest.*
+
+# Crashes
+RenderFrameHostManagerTest.IgnoreRendererDebugURLsWhenCrashed
+
+# Plugins are not supported.
+BrowserPluginThreadedCompositorPixelTest.* +BrowserPluginHostTest.* +BrowserPluginTest.* +PluginTest.* + +# http://crbug.com/463740 +CrossPlatformAccessibilityBrowserTest.SelectedEditableTextAccessibility + +# http://crbug.com/297230 +DumpAccessibilityTreeTest.AccessibilityAriaLevel +DumpAccessibilityTreeTest.AccessibilityAriaProgressbar +DumpAccessibilityTreeTest.AccessibilityListMarkers +DumpAccessibilityTreeTest.AccessibilityUl +DumpAccessibilityTreeTest.AccessibilityCanvas +DumpAccessibilityTreeTest.AccessibilityDialog +DumpAccessibilityTreeTest.AccessibilityModalDialogClosed +DumpAccessibilityTreeTest.AccessibilityModalDialogInIframeOpened +RenderAccessibilityImplTest.DetachAccessibilityObject + +# http://crbug.com/187500 +RenderViewImplTest.ImeComposition +RenderViewImplTest.InsertCharacters +RenderViewImplTest.OnHandleKeyboardEvent +RenderViewImplTest.OnNavStateChanged +# ZoomLevel is not used on Android +RenderViewImplTest.ZoomLimit +RendererAccessibilityTest.SendFullAccessibilityTreeOnReload +RendererAccessibilityTest.HideAccessibilityObject +RendererAccessibilityTest.ShowAccessibilityObject +RendererAccessibilityTest.TextSelectionShouldSendRoot + +# http://crbug.com/386227 +IndexedDBBrowserTest.VersionChangeCrashResilience + +# http://crbug.com/233118 +IndexedDBBrowserTest.NullKeyPathPersistence + +# http://crbug.com/342525 +IndexedDBBrowserTestSingleProcess.RenderThreadShutdownTest + +# http://crbug.com/338421 +GinBrowserTest.GinAndGarbageCollection + +# http://crbug.com/343604 +MSE_ClearKey/EncryptedMediaTest.ConfigChangeVideo/0 diff --git a/build/android/pylib/gtest/filter/unit_tests_disabled b/build/android/pylib/gtest/filter/unit_tests_disabled new file mode 100644 index 00000000000..d6547b7872d --- /dev/null +++ b/build/android/pylib/gtest/filter/unit_tests_disabled @@ -0,0 +1,115 @@ +# List of suppressions + +# The UDP related tests currently do not work on Android because +# we lack a UDP forwarder tool. +NetworkStatsTestUDP.* + +# Missing test resource of 16MB. +HistoryProfileTest.TypicalProfileVersion + +# crbug.com/139408 +SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies +SQLitePersistentCookieStoreTest.PersistIsPersistent + +# crbug.com/139433 +AutofillTableTest.AutofillProfile* +AutofillTableTest.UpdateAutofillProfile + +# crbug.com/139400 +AutofillProfileTest.* +CreditCardTest.SetInfoExpirationMonth + +# crbug.com/139398 +DownloadItemModelTest.InterruptTooltip + +# Tests crashing in the APK +# l10n_util.cc(655)] Check failed: std::string::npos != pos +DownloadItemModelTest.InterruptStatus +# l10n_util.cc(655)] Check failed: std::string::npos != pos +WebsiteSettingsTest.OnSiteDataAccessed + +# crbug.com/139423 +ValueStoreFrontendTest.GetExistingData + +# crbug.com/139421 +ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall + +# http://crbug.com/139033 +ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways + +# Extension support is limited on Android. 
+# Some of these can be enabled if we register extension related prefs in +# browser_prefs.cc +ExtensionTest.* +ExtensionAPI.* +ExtensionFileUtilTest.* +ExtensionPermissionsTest.* +ExtensionUnpackerTest.* +ActiveTabTest.* +ExtensionAppsPromo.* +ComponentLoaderTest.* +ExtensionFromUserScript.* +ExtensionFromWebApp.* +ExtensionIconManagerTest.* +ExtensionServiceTest.* +ExtensionServiceTestSimple.* +ExtensionSourcePriorityTest.* +ExtensionSpecialStoragePolicyTest.* +ExternalPolicyProviderTest.* +ExternalProviderImplTest.* +MenuManagerTest.* +PageActionControllerTest.* +PermissionsUpdaterTest.* +ImageLoaderTest.* +ImageLoadingTrackerTest.* +ExtensionSettingsFrontendTest.* +ExtensionSettingsSyncTest.* +ExtensionUpdaterTest.* +UserScriptListenerTest.* +WebApplicationTest.GetShortcutInfoForTab +ExtensionActionIconFactoryTest.* + +# crbug.com/139411 +AutocompleteProviderTest.* +HistoryContentsProviderBodyOnlyTest.* +HistoryContentsProviderTest.* +HQPOrderingTest.* +SearchProviderTest.* + +ProtocolHandlerRegistryTest.TestOSRegistrationFailure + +# crbug.com/139418 +SQLiteServerBoundCertStoreTest.TestUpgradeV1 +SQLiteServerBoundCertStoreTest.TestUpgradeV2 + +ProfileSyncComponentsFactoryImplTest.* +PermissionsTest.GetWarningMessages_Plugins +ImageOperations.ResizeShouldAverageColors + +# crbug.com/139643 +VariationsUtilTest.DisableAfterInitialization +VariationsUtilTest.AssociateGoogleVariationID +VariationsUtilTest.NoAssociation + +# crbug.com/141473 +AutofillManagerTest.UpdatePasswordSyncState +AutofillManagerTest.UpdatePasswordGenerationState + +# crbug.com/144227 +ExtensionIconImageTest.* + +# crbug.com/145843 +EntropyProviderTest.UseOneTimeRandomizationSHA1 +EntropyProviderTest.UseOneTimeRandomizationPermuted + +# crbug.com/147500 +ManifestTest.RestrictedKeys + +# crbug.com/152599 +SyncSearchEngineDataTypeControllerTest.* + +# crbug.com/256259 +DiagnosticsModelTest.RunAll + +# Death tests are not supported with apks. +*DeathTest* diff --git a/build/android/pylib/gtest/gtest_config.py b/build/android/pylib/gtest/gtest_config.py new file mode 100644 index 00000000000..9bc2c80f862 --- /dev/null +++ b/build/android/pylib/gtest/gtest_config.py @@ -0,0 +1,56 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Configuration file for android gtest suites.""" + +# Add new suites here before upgrading them to the stable list below. +EXPERIMENTAL_TEST_SUITES = [ + 'components_browsertests', + 'heap_profiler_unittests', + 'devtools_bridge_tests', +] + +TELEMETRY_EXPERIMENTAL_TEST_SUITES = [ + 'telemetry_unittests', +] + +# Do not modify this list without approval of an android owner. +# This list determines which suites are run by default, both for local +# testing and on android trybots running on commit-queue. +STABLE_TEST_SUITES = [ + 'android_webview_unittests', + 'base_unittests', + 'breakpad_unittests', + 'cc_unittests', + 'components_unittests', + 'content_browsertests', + 'content_unittests', + 'events_unittests', + 'gl_tests', + 'gl_unittests', + 'gpu_unittests', + 'ipc_tests', + 'media_unittests', + 'midi_unittests', + 'net_unittests', + 'sandbox_linux_unittests', + 'skia_unittests', + 'sql_unittests', + 'ui_android_unittests', + 'ui_base_unittests', + 'ui_touch_selection_unittests', + 'unit_tests_apk', + 'webkit_unit_tests', +] + +# Tests fail in component=shared_library build, which is required for ASan. 
+# http://crbug.com/344868 +ASAN_EXCLUDED_TEST_SUITES = [ + 'breakpad_unittests', + 'sandbox_linux_unittests', + + # The internal ASAN recipe cannot run step "unit_tests_apk", this is the + # only internal recipe affected. See http://crbug.com/607850 + 'unit_tests_apk', +] diff --git a/build/android/pylib/gtest/gtest_test_instance.py b/build/android/pylib/gtest/gtest_test_instance.py new file mode 100644 index 00000000000..56755baecd2 --- /dev/null +++ b/build/android/pylib/gtest/gtest_test_instance.py @@ -0,0 +1,418 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import logging +import os +import re +import tempfile + +from devil.android import apk_helper +from pylib import constants +from pylib.constants import host_paths +from pylib.base import base_test_result +from pylib.base import test_instance +from pylib.utils import isolator + +with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): + import unittest_util # pylint: disable=import-error + + +BROWSER_TEST_SUITES = [ + 'components_browsertests', + 'content_browsertests', +] + +RUN_IN_SUB_THREAD_TEST_SUITES = ['net_unittests'] + + +_DEFAULT_ISOLATE_FILE_PATHS = { + 'base_unittests': 'base/base_unittests.isolate', + 'blink_heap_unittests': + 'third_party/WebKit/Source/platform/heap/BlinkHeapUnitTests.isolate', + 'blink_platform_unittests': + 'third_party/WebKit/Source/platform/blink_platform_unittests.isolate', + 'cc_perftests': 'cc/cc_perftests.isolate', + 'components_browsertests': 'components/components_browsertests.isolate', + 'components_unittests': 'components/components_unittests.isolate', + 'content_browsertests': 'content/content_browsertests.isolate', + 'content_unittests': 'content/content_unittests.isolate', + 'media_perftests': 'media/media_perftests.isolate', + 'media_unittests': 'media/media_unittests.isolate', + 'midi_unittests': 'media/midi/midi_unittests.isolate', + 'net_unittests': 'net/net_unittests.isolate', + 'sql_unittests': 'sql/sql_unittests.isolate', + 'ui_base_unittests': 'ui/base/ui_base_tests.isolate', + 'unit_tests': 'chrome/unit_tests.isolate', + 'webkit_unit_tests': + 'third_party/WebKit/Source/web/WebKitUnitTests.isolate', +} + + +# Used for filtering large data deps at a finer grain than what's allowed in +# isolate files since pushing deps to devices is expensive. +# Wildcards are allowed. +_DEPS_EXCLUSION_LIST = [ + 'chrome/test/data/extensions/api_test', + 'chrome/test/data/extensions/secure_shell', + 'chrome/test/data/firefox*', + 'chrome/test/data/gpu', + 'chrome/test/data/image_decoding', + 'chrome/test/data/import', + 'chrome/test/data/page_cycler', + 'chrome/test/data/perf', + 'chrome/test/data/pyauto_private', + 'chrome/test/data/safari_import', + 'chrome/test/data/scroll', + 'chrome/test/data/third_party', + 'third_party/hunspell_dictionaries/*.dic', + # crbug.com/258690 + 'webkit/data/bmp_decoder', + 'webkit/data/ico_decoder', +] + + +_EXTRA_NATIVE_TEST_ACTIVITY = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner.' + 'NativeTestActivity') +_EXTRA_RUN_IN_SUB_THREAD = ( + 'org.chromium.native_test.NativeTest.RunInSubThread') +EXTRA_SHARD_NANO_TIMEOUT = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner.' + 'ShardNanoTimeout') +_EXTRA_SHARD_SIZE_LIMIT = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner.' + 'ShardSizeLimit') + +# TODO(jbudorick): Remove these once we're no longer parsing stdout to generate +# results. 
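+# _RE_TEST_STATUS below matches gtest status lines such as:
+#   [ RUN      ] FooTest.Bar
+#   [       OK ] FooTest.Bar (5 ms)
+#   [  FAILED  ] FooTest.Bar (12 ms)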
+_RE_TEST_STATUS = re.compile( + r'\[ +((?:RUN)|(?:FAILED)|(?:OK)|(?:CRASHED)) +\]' + r' ?([^ ]+)?(?: \((\d+) ms\))?$') +# Crash detection constants. +_RE_TEST_ERROR = re.compile(r'FAILURES!!! Tests run: \d+,' + r' Failures: \d+, Errors: 1') +_RE_TEST_CURRENTLY_RUNNING = re.compile(r'\[ERROR:.*?\]' + r' Currently running: (.*)') + +def ParseGTestListTests(raw_list): + """Parses a raw test list as provided by --gtest_list_tests. + + Args: + raw_list: The raw test listing with the following format: + + IPCChannelTest. + SendMessageInChannelConnected + IPCSyncChannelTest. + Simple + DISABLED_SendWithTimeoutMixedOKAndTimeout + + Returns: + A list of all tests. For the above raw listing: + + [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple, + IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout] + """ + ret = [] + current = '' + for test in raw_list: + if not test: + continue + if test[0] != ' ': + test_case = test.split()[0] + if test_case.endswith('.'): + current = test_case + elif not 'YOU HAVE' in test: + test_name = test.split()[0] + ret += [current + test_name] + return ret + + +def ParseGTestOutput(output): + """Parses raw gtest output and returns a list of results. + + Args: + output: A list of output lines. + Returns: + A list of base_test_result.BaseTestResults. + """ + duration = 0 + fallback_result_type = None + log = [] + result_type = None + results = [] + test_name = None + + def handle_possibly_unknown_test(): + if test_name is not None: + results.append(base_test_result.BaseTestResult( + test_name, + fallback_result_type or base_test_result.ResultType.UNKNOWN, + duration, log=('\n'.join(log) if log else ''))) + + for l in output: + logging.info(l) + matcher = _RE_TEST_STATUS.match(l) + if matcher: + if matcher.group(1) == 'RUN': + handle_possibly_unknown_test() + duration = 0 + fallback_result_type = None + log = [] + result_type = None + elif matcher.group(1) == 'OK': + result_type = base_test_result.ResultType.PASS + elif matcher.group(1) == 'FAILED': + result_type = base_test_result.ResultType.FAIL + elif matcher.group(1) == 'CRASHED': + fallback_result_type = base_test_result.ResultType.CRASH + # Be aware that test name and status might not appear on same line. + test_name = matcher.group(2) if matcher.group(2) else test_name + duration = int(matcher.group(3)) if matcher.group(3) else 0 + + else: + # Needs another matcher here to match crashes, like those of DCHECK. + matcher = _RE_TEST_CURRENTLY_RUNNING.match(l) + if matcher: + test_name = matcher.group(1) + result_type = base_test_result.ResultType.CRASH + duration = 0 # Don't know. + + if log is not None: + log.append(l) + + if result_type and test_name: + results.append(base_test_result.BaseTestResult( + test_name, result_type, duration, + log=('\n'.join(log) if log else ''))) + test_name = None + + handle_possibly_unknown_test() + + return results + + +class GtestTestInstance(test_instance.TestInstance): + + def __init__(self, args, isolate_delegate, error_func): + super(GtestTestInstance, self).__init__() + # TODO(jbudorick): Support multiple test suites. 
+ if len(args.suite_name) > 1: + raise ValueError('Platform mode currently supports only 1 gtest suite') + self._extract_test_list_from_filter = args.extract_test_list_from_filter + self._shard_timeout = args.shard_timeout + self._suite = args.suite_name[0] + self._exe_dist_dir = None + + # GYP: + if args.executable_dist_dir: + self._exe_dist_dir = os.path.abspath(args.executable_dist_dir) + else: + # TODO(agrieve): Remove auto-detection once recipes pass flag explicitly. + exe_dist_dir = os.path.join(constants.GetOutDirectory(), + '%s__dist' % self._suite) + + if os.path.exists(exe_dist_dir): + self._exe_dist_dir = exe_dist_dir + + incremental_part = '' + if args.test_apk_incremental_install_script: + incremental_part = '_incremental' + + apk_path = os.path.join( + constants.GetOutDirectory(), '%s_apk' % self._suite, + '%s-debug%s.apk' % (self._suite, incremental_part)) + self._test_apk_incremental_install_script = ( + args.test_apk_incremental_install_script) + if not os.path.exists(apk_path): + self._apk_helper = None + else: + self._apk_helper = apk_helper.ApkHelper(apk_path) + self._extras = { + _EXTRA_NATIVE_TEST_ACTIVITY: self._apk_helper.GetActivityName(), + } + if self._suite in RUN_IN_SUB_THREAD_TEST_SUITES: + self._extras[_EXTRA_RUN_IN_SUB_THREAD] = 1 + if self._suite in BROWSER_TEST_SUITES: + self._extras[_EXTRA_SHARD_SIZE_LIMIT] = 1 + self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e9 * self._shard_timeout) + self._shard_timeout = 10 * self._shard_timeout + + if not self._apk_helper and not self._exe_dist_dir: + error_func('Could not find apk or executable for %s' % self._suite) + + self._data_deps = [] + if args.test_filter: + self._gtest_filter = args.test_filter + elif args.test_filter_file: + with open(args.test_filter_file, 'r') as f: + self._gtest_filter = ':'.join(l.strip() for l in f) + else: + self._gtest_filter = None + + if not args.isolate_file_path: + default_isolate_file_path = _DEFAULT_ISOLATE_FILE_PATHS.get(self._suite) + if default_isolate_file_path: + args.isolate_file_path = os.path.join( + host_paths.DIR_SOURCE_ROOT, default_isolate_file_path) + + if (args.isolate_file_path and + not isolator.IsIsolateEmpty(args.isolate_file_path)): + self._isolate_abs_path = os.path.abspath(args.isolate_file_path) + self._isolate_delegate = isolate_delegate + self._isolated_abs_path = os.path.join( + constants.GetOutDirectory(), '%s.isolated' % self._suite) + else: + logging.warning('No isolate file provided. 
No data deps will be pushed.') + self._isolate_delegate = None + + if args.app_data_files: + self._app_data_files = args.app_data_files + if args.app_data_file_dir: + self._app_data_file_dir = args.app_data_file_dir + else: + self._app_data_file_dir = tempfile.mkdtemp() + logging.critical('Saving app files to %s', self._app_data_file_dir) + else: + self._app_data_files = None + self._app_data_file_dir = None + + self._test_arguments = args.test_arguments + + @property + def activity(self): + return self._apk_helper and self._apk_helper.GetActivityName() + + @property + def apk(self): + return self._apk_helper and self._apk_helper.path + + @property + def apk_helper(self): + return self._apk_helper + + @property + def app_file_dir(self): + return self._app_data_file_dir + + @property + def app_files(self): + return self._app_data_files + + @property + def exe_dist_dir(self): + return self._exe_dist_dir + + @property + def extras(self): + return self._extras + + @property + def gtest_filter(self): + return self._gtest_filter + + @property + def package(self): + return self._apk_helper and self._apk_helper.GetPackageName() + + @property + def permissions(self): + return self._apk_helper and self._apk_helper.GetPermissions() + + @property + def runner(self): + return self._apk_helper and self._apk_helper.GetInstrumentationName() + + @property + def shard_timeout(self): + return self._shard_timeout + + @property + def suite(self): + return self._suite + + @property + def test_apk_incremental_install_script(self): + return self._test_apk_incremental_install_script + + @property + def test_arguments(self): + return self._test_arguments + + @property + def extract_test_list_from_filter(self): + return self._extract_test_list_from_filter + + #override + def TestType(self): + return 'gtest' + + #override + def SetUp(self): + """Map data dependencies via isolate.""" + if self._isolate_delegate: + self._isolate_delegate.Remap( + self._isolate_abs_path, self._isolated_abs_path) + self._isolate_delegate.PurgeExcluded(_DEPS_EXCLUSION_LIST) + self._isolate_delegate.MoveOutputDeps() + dest_dir = None + self._data_deps.extend([ + (self._isolate_delegate.isolate_deps_dir, dest_dir)]) + + + def GetDataDependencies(self): + """Returns the test suite's data dependencies. + + Returns: + A list of (host_path, device_path) tuples to push. If device_path is + None, the client is responsible for determining where to push the file. + """ + return self._data_deps + + def FilterTests(self, test_list, disabled_prefixes=None): + """Filters |test_list| based on prefixes and, if present, a filter string. + + Args: + test_list: The list of tests to filter. + disabled_prefixes: A list of test prefixes to filter. Defaults to + DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_ + Returns: + A filtered list of tests to run. 
+ """ + gtest_filter_strings = [ + self._GenerateDisabledFilterString(disabled_prefixes)] + if self._gtest_filter: + gtest_filter_strings.append(self._gtest_filter) + + filtered_test_list = test_list + for gtest_filter_string in gtest_filter_strings: + logging.debug('Filtering tests using: %s', gtest_filter_string) + filtered_test_list = unittest_util.FilterTestNames( + filtered_test_list, gtest_filter_string) + return filtered_test_list + + def _GenerateDisabledFilterString(self, disabled_prefixes): + disabled_filter_items = [] + + if disabled_prefixes is None: + disabled_prefixes = ['DISABLED_', 'FLAKY_', 'FAILS_', 'PRE_', 'MANUAL_'] + disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes] + disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes] + + disabled_tests_file_path = os.path.join( + host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest', + 'filter', '%s_disabled' % self._suite) + if disabled_tests_file_path and os.path.exists(disabled_tests_file_path): + with open(disabled_tests_file_path) as disabled_tests_file: + disabled_filter_items += [ + '%s' % l for l in (line.strip() for line in disabled_tests_file) + if l and not l.startswith('#')] + + return '*-%s' % ':'.join(disabled_filter_items) + + #override + def TearDown(self): + """Clear the mappings created by SetUp.""" + if self._isolate_delegate: + self._isolate_delegate.Clear() + diff --git a/build/android/pylib/gtest/gtest_test_instance_test.py b/build/android/pylib/gtest/gtest_test_instance_test.py new file mode 100644 index 00000000000..02e29bb4600 --- /dev/null +++ b/build/android/pylib/gtest/gtest_test_instance_test.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import unittest + +from pylib.base import base_test_result +from pylib.gtest import gtest_test_instance + + +class GtestTestInstanceTests(unittest.TestCase): + + def testParseGTestListTests_simple(self): + raw_output = [ + 'TestCaseOne.', + ' testOne', + ' testTwo', + 'TestCaseTwo.', + ' testThree', + ' testFour', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'TestCaseOne.testOne', + 'TestCaseOne.testTwo', + 'TestCaseTwo.testThree', + 'TestCaseTwo.testFour', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_typeParameterized_old(self): + raw_output = [ + 'TPTestCase/WithTypeParam/0.', + ' testOne', + ' testTwo', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'TPTestCase/WithTypeParam/0.testOne', + 'TPTestCase/WithTypeParam/0.testTwo', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_typeParameterized_new(self): + raw_output = [ + 'TPTestCase/WithTypeParam/0. 
# TypeParam = TypeParam0', + ' testOne', + ' testTwo', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'TPTestCase/WithTypeParam/0.testOne', + 'TPTestCase/WithTypeParam/0.testTwo', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_valueParameterized_old(self): + raw_output = [ + 'VPTestCase.', + ' testWithValueParam/0', + ' testWithValueParam/1', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'VPTestCase.testWithValueParam/0', + 'VPTestCase.testWithValueParam/1', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_valueParameterized_new(self): + raw_output = [ + 'VPTestCase.', + ' testWithValueParam/0 # GetParam() = 0', + ' testWithValueParam/1 # GetParam() = 1', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'VPTestCase.testWithValueParam/0', + 'VPTestCase.testWithValueParam/1', + ] + self.assertEqual(expected, actual) + + def testParseGTestOutput_pass(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ OK ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output) + self.assertEquals(1, len(actual)) + self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEquals(1, actual[0].GetDuration()) + self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType()) + + def testParseGTestOutput_fail(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ FAILED ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output) + self.assertEquals(1, len(actual)) + self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEquals(1, actual[0].GetDuration()) + self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestOutput_crash(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ CRASHED ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output) + self.assertEquals(1, len(actual)) + self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEquals(1, actual[0].GetDuration()) + self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) + + def testParseGTestOutput_errorCrash(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ERROR:blah] Currently running: FooTest.Bar', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output) + self.assertEquals(1, len(actual)) + self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEquals(0, actual[0].GetDuration()) + self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) + + def testParseGTestOutput_unknown(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output) + self.assertEquals(1, len(actual)) + self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEquals(0, actual[0].GetDuration()) + self.assertEquals(base_test_result.ResultType.UNKNOWN, actual[0].GetType()) + + def testParseGTestOutput_nonterminalUnknown(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ RUN ] FooTest.Baz', + '[ OK ] FooTest.Baz (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output) + self.assertEquals(2, len(actual)) + + self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEquals(0, actual[0].GetDuration()) + self.assertEquals(base_test_result.ResultType.UNKNOWN, actual[0].GetType()) + + self.assertEquals('FooTest.Baz', actual[1].GetName()) + self.assertEquals(1, actual[1].GetDuration()) + 
self.assertEquals(base_test_result.ResultType.PASS, actual[1].GetType()) + + def testParseGTestOutput_deathTestCrashOk(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ CRASHED ]', + '[ OK ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output) + self.assertEquals(1, len(actual)) + + self.assertEquals('FooTest.Bar', actual[0].GetName()) + self.assertEquals(1, actual[0].GetDuration()) + self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType()) + + +if __name__ == '__main__': + unittest.main(verbosity=2) + diff --git a/build/android/pylib/instrumentation/__init__.py b/build/android/pylib/instrumentation/__init__.py new file mode 100644 index 00000000000..727e987e6b6 --- /dev/null +++ b/build/android/pylib/instrumentation/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + diff --git a/build/android/pylib/instrumentation/instrumentation_parser.py b/build/android/pylib/instrumentation/instrumentation_parser.py new file mode 100644 index 00000000000..efd5efb0108 --- /dev/null +++ b/build/android/pylib/instrumentation/instrumentation_parser.py @@ -0,0 +1,96 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import logging +import re + +# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html +STATUS_CODE_START = 1 +STATUS_CODE_OK = 0 +STATUS_CODE_ERROR = -1 +STATUS_CODE_FAILURE = -2 + +# http://developer.android.com/reference/android/app/Activity.html +RESULT_CODE_OK = -1 +RESULT_CODE_CANCELED = 0 + +_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$') + + +class InstrumentationParser(object): + + def __init__(self, stream): + """An incremental parser for the output of Android instrumentation tests. + + Example: + + stream = adb.IterShell('am instrument -r ...') + parser = InstrumentationParser(stream) + + for code, bundle in parser.IterStatus(): + # do something with each instrumentation status + print 'status:', code, bundle + + # do something with the final instrumentation result + code, bundle = parser.GetResult() + print 'result:', code, bundle + + Args: + stream: a sequence of lines as produced by the raw output of an + instrumentation test (e.g. by |am instrument -r|). + """ + self._stream = stream + self._code = None + self._bundle = None + + def IterStatus(self): + """Iterate over statuses as they are produced by the instrumentation test. + + Yields: + A tuple (code, bundle) for each instrumentation status found in the + output. 
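+
+      As a sketch with hypothetical values, the two lines
+        INSTRUMENTATION_STATUS: test=testMethod
+        INSTRUMENTATION_STATUS_CODE: 0
+      would yield the tuple (0, {'test': 'testMethod'}).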
+ """ + def join_bundle_values(bundle): + for key in bundle: + bundle[key] = '\n'.join(bundle[key]) + return bundle + + bundle = {'STATUS': {}, 'RESULT': {}} + header = None + key = None + for line in self._stream: + m = _INSTR_LINE_RE.match(line) + if m: + header, value = m.groups() + key = None + if header in ['STATUS', 'RESULT'] and '=' in value: + key, value = value.split('=', 1) + bundle[header][key] = [value] + elif header == 'STATUS_CODE': + yield int(value), join_bundle_values(bundle['STATUS']) + bundle['STATUS'] = {} + elif header == 'CODE': + self._code = int(value) + else: + logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value) + elif key is not None: + bundle[header][key].append(line) + + self._bundle = join_bundle_values(bundle['RESULT']) + + def GetResult(self): + """Return the final instrumentation result. + + Returns: + A pair (code, bundle) with the final instrumentation result. The |code| + may be None if no instrumentation result was found in the output. + + Raises: + AssertionError if attempting to get the instrumentation result before + exhausting |IterStatus| first. + """ + assert self._bundle is not None, ( + 'The IterStatus generator must be exhausted before reading the final' + ' instrumentation result.') + return self._code, self._bundle diff --git a/build/android/pylib/instrumentation/instrumentation_parser_test.py b/build/android/pylib/instrumentation/instrumentation_parser_test.py new file mode 100644 index 00000000000..092d10fc938 --- /dev/null +++ b/build/android/pylib/instrumentation/instrumentation_parser_test.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +"""Unit tests for instrumentation.InstrumentationParser.""" + +import unittest + +from pylib.instrumentation import instrumentation_parser + + +class InstrumentationParserTest(unittest.TestCase): + + def testInstrumentationParser_nothing(self): + parser = instrumentation_parser.InstrumentationParser(['']) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + self.assertEqual(None, code) + self.assertEqual({}, bundle) + self.assertEqual([], statuses) + + def testInstrumentationParser_noMatchingStarts(self): + raw_output = [ + '', + 'this.is.a.test.package.TestClass:.', + 'Test result for =.', + 'Time: 1.234', + '', + 'OK (1 test)', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + self.assertEqual(None, code) + self.assertEqual({}, bundle) + self.assertEqual([], statuses) + + def testInstrumentationParser_resultAndCode(self): + raw_output = [ + 'INSTRUMENTATION_RESULT: shortMsg=foo bar', + 'INSTRUMENTATION_RESULT: longMsg=a foo', + 'walked into', + 'a bar', + 'INSTRUMENTATION_CODE: -1', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + self.assertEqual(-1, code) + self.assertEqual( + {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle) + self.assertEqual([], statuses) + + def testInstrumentationParser_oneStatus(self): + raw_output = [ + 'INSTRUMENTATION_STATUS: foo=1', + 'INSTRUMENTATION_STATUS: bar=hello', + 'INSTRUMENTATION_STATUS: world=false', + 'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass', + 'INSTRUMENTATION_STATUS: test=testMethod', + 'INSTRUMENTATION_STATUS_CODE: 0', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + + expected = [ + (0, { + 'foo': '1', + 'bar': 'hello', + 'world': 'false', + 'class': 'this.is.a.test.package.TestClass', + 'test': 'testMethod', + }) + ] + self.assertEqual(expected, statuses) + + def testInstrumentationParser_multiStatus(self): + raw_output = [ + 'INSTRUMENTATION_STATUS: class=foo', + 'INSTRUMENTATION_STATUS: test=bar', + 'INSTRUMENTATION_STATUS_CODE: 1', + 'INSTRUMENTATION_STATUS: test_skipped=true', + 'INSTRUMENTATION_STATUS_CODE: 0', + 'INSTRUMENTATION_STATUS: class=hello', + 'INSTRUMENTATION_STATUS: test=world', + 'INSTRUMENTATION_STATUS: stack=', + 'foo/bar.py (27)', + 'hello/world.py (42)', + 'test/file.py (1)', + 'INSTRUMENTATION_STATUS_CODE: -1', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + + expected = [ + (1, {'class': 'foo', 'test': 'bar',}), + (0, {'test_skipped': 'true'}), + (-1, { + 'class': 'hello', + 'test': 'world', + 'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)', + }), + ] + self.assertEqual(expected, statuses) + + def testInstrumentationParser_statusResultAndCode(self): + raw_output = [ + 'INSTRUMENTATION_STATUS: class=foo', + 'INSTRUMENTATION_STATUS: test=bar', + 'INSTRUMENTATION_STATUS_CODE: 1', + 'INSTRUMENTATION_RESULT: result=hello', + 'world', + '', + '', + 'INSTRUMENTATION_CODE: 0', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + + self.assertEqual(0, code) + self.assertEqual({'result': 'hello\nworld\n\n'}, bundle) + self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses) + + +if __name__ == 
'__main__': + unittest.main(verbosity=2) diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance.py b/build/android/pylib/instrumentation/instrumentation_test_instance.py new file mode 100644 index 00000000000..37421d9b993 --- /dev/null +++ b/build/android/pylib/instrumentation/instrumentation_test_instance.py @@ -0,0 +1,699 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import copy +import logging +import os +import pickle +import re + +from devil.android import apk_helper +from devil.android import md5sum +from pylib import constants +from pylib.base import base_test_result +from pylib.base import test_instance +from pylib.constants import host_paths +from pylib.instrumentation import test_result +from pylib.instrumentation import instrumentation_parser +from pylib.utils import isolator +from pylib.utils import proguard + +with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): + import unittest_util # pylint: disable=import-error + +# Ref: http://developer.android.com/reference/android/app/Activity.html +_ACTIVITY_RESULT_CANCELED = 0 +_ACTIVITY_RESULT_OK = -1 + +_COMMAND_LINE_PARAMETER = 'cmdlinearg-parameter' +_DEFAULT_ANNOTATIONS = [ + 'Smoke', 'SmallTest', 'MediumTest', 'LargeTest', + 'EnormousTest', 'IntegrationTest'] +_EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS = [ + 'DisabledTest', 'FlakyTest'] +_VALID_ANNOTATIONS = set(['Manual', 'PerfTest'] + _DEFAULT_ANNOTATIONS + + _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS) +_EXTRA_DRIVER_TEST_LIST = ( + 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestList') +_EXTRA_DRIVER_TEST_LIST_FILE = ( + 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestListFile') +_EXTRA_DRIVER_TARGET_PACKAGE = ( + 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetPackage') +_EXTRA_DRIVER_TARGET_CLASS = ( + 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetClass') +_EXTRA_TIMEOUT_SCALE = ( + 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TimeoutScale') + +_PARAMETERIZED_TEST_ANNOTATION = 'ParameterizedTest' +_PARAMETERIZED_TEST_SET_ANNOTATION = 'ParameterizedTest$Set' +_NATIVE_CRASH_RE = re.compile('(process|native) crash', re.IGNORECASE) +_PICKLE_FORMAT_VERSION = 10 + + +class MissingSizeAnnotationError(Exception): + def __init__(self, class_name): + super(MissingSizeAnnotationError, self).__init__(class_name + + ': Test method is missing required size annotation. Add one of: ' + + ', '.join('@' + a for a in _VALID_ANNOTATIONS)) + + +class ProguardPickleException(Exception): + pass + + +# TODO(jbudorick): Make these private class methods of +# InstrumentationTestInstance once the instrumentation test_runner is +# deprecated. +def ParseAmInstrumentRawOutput(raw_output): + """Parses the output of an |am instrument -r| call. + + Args: + raw_output: the output of an |am instrument -r| call as a list of lines + Returns: + A 3-tuple containing: + - the instrumentation code as an integer + - the instrumentation result as a list of lines + - the instrumentation statuses received as a list of 2-tuples + containing: + - the status code as an integer + - the bundle dump as a dict mapping string keys to a list of + strings, one for each line. 
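+
+        For example (hypothetical values, matching the joined-string form
+        the parser tests expect), a run that reports only a final result
+        might parse to (-1, {'shortMsg': 'Process crashed.'}, []).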
+ """ + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + return (code, bundle, statuses) + + +def GenerateTestResults( + result_code, result_bundle, statuses, start_ms, duration_ms): + """Generate test results from |statuses|. + + Args: + result_code: The overall status code as an integer. + result_bundle: The summary bundle dump as a dict. + statuses: A list of 2-tuples containing: + - the status code as an integer + - the bundle dump as a dict mapping string keys to string values + Note that this is the same as the third item in the 3-tuple returned by + |_ParseAmInstrumentRawOutput|. + start_ms: The start time of the test in milliseconds. + duration_ms: The duration of the test in milliseconds. + + Returns: + A list containing an instance of InstrumentationTestResult for each test + parsed. + """ + + results = [] + + current_result = None + + for status_code, bundle in statuses: + test_class = bundle.get('class', '') + test_method = bundle.get('test', '') + if test_class and test_method: + test_name = '%s#%s' % (test_class, test_method) + else: + continue + + if status_code == instrumentation_parser.STATUS_CODE_START: + if current_result: + results.append(current_result) + current_result = test_result.InstrumentationTestResult( + test_name, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms) + else: + if status_code == instrumentation_parser.STATUS_CODE_OK: + if bundle.get('test_skipped', '').lower() in ('true', '1', 'yes'): + current_result.SetType(base_test_result.ResultType.SKIP) + elif current_result.GetType() == base_test_result.ResultType.UNKNOWN: + current_result.SetType(base_test_result.ResultType.PASS) + else: + if status_code not in (instrumentation_parser.STATUS_CODE_ERROR, + instrumentation_parser.STATUS_CODE_FAILURE): + logging.error('Unrecognized status code %d. Handling as an error.', + status_code) + current_result.SetType(base_test_result.ResultType.FAIL) + if 'stack' in bundle: + current_result.SetLog(bundle['stack']) + + if current_result: + if current_result.GetType() == base_test_result.ResultType.UNKNOWN: + crashed = (result_code == _ACTIVITY_RESULT_CANCELED + and any(_NATIVE_CRASH_RE.search(l) + for l in result_bundle.itervalues())) + if crashed: + current_result.SetType(base_test_result.ResultType.CRASH) + + results.append(current_result) + + return results + + +def ParseCommandLineFlagParameters(annotations): + """Determines whether the test is parameterized to be run with different + command-line flags. + + Args: + annotations: The annotations of the test. + + Returns: + If the test is parameterized, returns a list of named tuples + with lists of flags, e.g.: + + [(add=['--flag-to-add']), (remove=['--flag-to-remove']), ()] + + That means, the test must be run three times, the first time with + "--flag-to-add" added to command-line, the second time with + "--flag-to-remove" to be removed from command-line, and the third time + with default command-line args. If the same flag is listed both for adding + and for removing, it is left unchanged. + + If the test is not parametrized, returns None. 
+ + """ + ParamsTuple = collections.namedtuple('ParamsTuple', ['add', 'remove']) + parameterized_tests = [] + if _PARAMETERIZED_TEST_SET_ANNOTATION in annotations: + if annotations[_PARAMETERIZED_TEST_SET_ANNOTATION]: + parameterized_tests = annotations[ + _PARAMETERIZED_TEST_SET_ANNOTATION].get('tests', []) + elif _PARAMETERIZED_TEST_ANNOTATION in annotations: + parameterized_tests = [annotations[_PARAMETERIZED_TEST_ANNOTATION]] + else: + return None + + result = [] + for pt in parameterized_tests: + if not pt: + continue + for p in pt['parameters']: + if p['tag'] == _COMMAND_LINE_PARAMETER: + to_add = [] + to_remove = [] + for a in p.get('arguments', []): + if a['name'] == 'add': + to_add = ['--%s' % f for f in a['stringArray']] + elif a['name'] == 'remove': + to_remove = ['--%s' % f for f in a['stringArray']] + result.append(ParamsTuple(to_add, to_remove)) + return result if result else None + + +def FilterTests(tests, test_filter=None, annotations=None, + excluded_annotations=None): + """Filter a list of tests + + Args: + tests: a list of tests. e.g. [ + {'annotations": {}, 'class': 'com.example.TestA', 'methods':[]}, + {'annotations": {}, 'class': 'com.example.TestB', 'methods':[]}] + test_filter: googletest-style filter string. + annotations: a dict of wanted annotations for test methods. + exclude_annotations: a dict of annotations to exclude. + + Return: + A list of filtered tests + """ + def gtest_filter(c, m): + if not test_filter: + return True + # Allow fully-qualified name as well as an omitted package. + names = ['%s.%s' % (c['class'], m['method']), + '%s.%s' % (c['class'].split('.')[-1], m['method'])] + return unittest_util.FilterTestNames(names, test_filter) + + def annotation_filter(all_annotations): + if not annotations: + return True + return any_annotation_matches(annotations, all_annotations) + + def excluded_annotation_filter(all_annotations): + if not excluded_annotations: + return True + return not any_annotation_matches(excluded_annotations, + all_annotations) + + def any_annotation_matches(filter_annotations, all_annotations): + return any( + ak in all_annotations + and annotation_value_matches(av, all_annotations[ak]) + for ak, av in filter_annotations) + + def annotation_value_matches(filter_av, av): + if filter_av is None: + return True + elif isinstance(av, dict): + return filter_av in av['value'] + elif isinstance(av, list): + return filter_av in av + return filter_av == av + + filtered_classes = [] + for c in tests: + filtered_methods = [] + for m in c['methods']: + # Gtest filtering + if not gtest_filter(c, m): + continue + + all_annotations = dict(c['annotations']) + all_annotations.update(m['annotations']) + + # Enforce that all tests declare their size. 
+ if not any(a in _VALID_ANNOTATIONS for a in all_annotations): + raise MissingSizeAnnotationError('%s.%s' % (c['class'], m['method'])) + + if (not annotation_filter(all_annotations) + or not excluded_annotation_filter(all_annotations)): + continue + + filtered_methods.append(m) + + if filtered_methods: + filtered_class = dict(c) + filtered_class['methods'] = filtered_methods + filtered_classes.append(filtered_class) + + return filtered_classes + +def GetAllTests(test_jar): + pickle_path = '%s-proguard.pickle' % test_jar + try: + tests = _GetTestsFromPickle(pickle_path, test_jar) + except ProguardPickleException as e: + logging.info('Could not get tests from pickle: %s', e) + logging.info('Getting tests from JAR via proguard.') + tests = _GetTestsFromProguard(test_jar) + _SaveTestsToPickle(pickle_path, test_jar, tests) + return tests + + +def _GetTestsFromPickle(pickle_path, jar_path): + if not os.path.exists(pickle_path): + raise ProguardPickleException('%s does not exist.' % pickle_path) + if os.path.getmtime(pickle_path) <= os.path.getmtime(jar_path): + raise ProguardPickleException( + '%s newer than %s.' % (jar_path, pickle_path)) + + with open(pickle_path, 'r') as pickle_file: + pickle_data = pickle.loads(pickle_file.read()) + jar_md5 = md5sum.CalculateHostMd5Sums(jar_path)[jar_path] + + if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION: + raise ProguardPickleException('PICKLE_FORMAT_VERSION has changed.') + if pickle_data['JAR_MD5SUM'] != jar_md5: + raise ProguardPickleException('JAR file MD5 sum differs.') + return pickle_data['TEST_METHODS'] + + +def _GetTestsFromProguard(jar_path): + p = proguard.Dump(jar_path) + class_lookup = dict((c['class'], c) for c in p['classes']) + + def is_test_class(c): + return c['class'].endswith('Test') + + def is_test_method(m): + return m['method'].startswith('test') + + def recursive_class_annotations(c): + s = c['superclass'] + if s in class_lookup: + a = recursive_class_annotations(class_lookup[s]) + else: + a = {} + a.update(c['annotations']) + return a + + def stripped_test_class(c): + return { + 'class': c['class'], + 'annotations': recursive_class_annotations(c), + 'methods': [m for m in c['methods'] if is_test_method(m)], + } + + return [stripped_test_class(c) for c in p['classes'] + if is_test_class(c)] + + +def _SaveTestsToPickle(pickle_path, jar_path, tests): + jar_md5 = md5sum.CalculateHostMd5Sums(jar_path)[jar_path] + pickle_data = { + 'VERSION': _PICKLE_FORMAT_VERSION, + 'JAR_MD5SUM': jar_md5, + 'TEST_METHODS': tests, + } + with open(pickle_path, 'w') as pickle_file: + pickle.dump(pickle_data, pickle_file) + + +class InstrumentationTestInstance(test_instance.TestInstance): + + def __init__(self, args, isolate_delegate, error_func): + super(InstrumentationTestInstance, self).__init__() + + self._additional_apks = [] + self._apk_under_test = None + self._apk_under_test_incremental_install_script = None + self._package_info = None + self._suite = None + self._test_apk = None + self._test_apk_incremental_install_script = None + self._test_jar = None + self._test_package = None + self._test_runner = None + self._test_support_apk = None + self._initializeApkAttributes(args, error_func) + + self._data_deps = None + self._isolate_abs_path = None + self._isolate_delegate = None + self._isolated_abs_path = None + self._initializeDataDependencyAttributes(args, isolate_delegate) + + self._annotations = None + self._excluded_annotations = None + self._test_filter = None + self._initializeTestFilterAttributes(args) + + self._flags = None + 
self._initializeFlagAttributes(args) + + self._driver_apk = None + self._driver_package = None + self._driver_name = None + self._initializeDriverAttributes() + + self._timeout_scale = None + self._initializeTestControlAttributes(args) + + self._coverage_directory = None + self._initializeTestCoverageAttributes(args) + + self._store_tombstones = False + self._initializeTombstonesAttributes(args) + + def _initializeApkAttributes(self, args, error_func): + if args.apk_under_test: + apk_under_test_path = args.apk_under_test + if not args.apk_under_test.endswith('.apk'): + apk_under_test_path = os.path.join( + constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR, + '%s.apk' % args.apk_under_test) + + if not os.path.exists(apk_under_test_path): + error_func('Unable to find APK under test: %s' % apk_under_test_path) + + self._apk_under_test = apk_helper.ToHelper(apk_under_test_path) + + if args.test_apk.endswith('.apk'): + self._suite = os.path.splitext(os.path.basename(args.test_apk))[0] + self._test_apk = apk_helper.ToHelper(args.test_apk) + else: + self._suite = args.test_apk + self._test_apk = apk_helper.ToHelper(os.path.join( + constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR, + '%s.apk' % args.test_apk)) + + self._apk_under_test_incremental_install_script = ( + args.apk_under_test_incremental_install_script) + self._test_apk_incremental_install_script = ( + args.test_apk_incremental_install_script) + + if self._test_apk_incremental_install_script: + assert self._suite.endswith('_incremental') + self._suite = self._suite[:-len('_incremental')] + + self._test_jar = os.path.join( + constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR, + '%s.jar' % self._suite) + self._test_support_apk = apk_helper.ToHelper(os.path.join( + constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR, + '%sSupport.apk' % self._suite)) + + if not os.path.exists(self._test_apk.path): + error_func('Unable to find test APK: %s' % self._test_apk.path) + if not os.path.exists(self._test_jar): + error_func('Unable to find test JAR: %s' % self._test_jar) + + self._test_package = self._test_apk.GetPackageName() + self._test_runner = self._test_apk.GetInstrumentationName() + + self._package_info = None + if self._apk_under_test: + package_under_test = self._apk_under_test.GetPackageName() + for package_info in constants.PACKAGE_INFO.itervalues(): + if package_under_test == package_info.package: + self._package_info = package_info + break + if not self._package_info: + logging.warning('Unable to find package info for %s', self._test_package) + + for apk in args.additional_apks: + if not os.path.exists(apk): + error_func('Unable to find additional APK: %s' % apk) + self._additional_apks = ( + [apk_helper.ToHelper(x) for x in args.additional_apks]) + + def _initializeDataDependencyAttributes(self, args, isolate_delegate): + self._data_deps = [] + if (args.isolate_file_path and + not isolator.IsIsolateEmpty(args.isolate_file_path)): + if os.path.isabs(args.isolate_file_path): + self._isolate_abs_path = args.isolate_file_path + else: + self._isolate_abs_path = os.path.join( + constants.DIR_SOURCE_ROOT, args.isolate_file_path) + self._isolate_delegate = isolate_delegate + self._isolated_abs_path = os.path.join( + constants.GetOutDirectory(), '%s.isolated' % self._test_package) + else: + self._isolate_delegate = None + + if not self._isolate_delegate: + logging.warning('No data dependencies will be pushed.') + + def _initializeTestFilterAttributes(self, args): + if args.test_filter: + 
self._test_filter = args.test_filter.replace('#', '.') + + def annotation_element(a): + a = a.split('=', 1) + return (a[0], a[1] if len(a) == 2 else None) + + if args.annotation_str: + self._annotations = [ + annotation_element(a) for a in args.annotation_str.split(',')] + elif not self._test_filter: + self._annotations = [ + annotation_element(a) for a in _DEFAULT_ANNOTATIONS] + else: + self._annotations = [] + + if args.exclude_annotation_str: + self._excluded_annotations = [ + annotation_element(a) for a in args.exclude_annotation_str.split(',')] + else: + self._excluded_annotations = [] + + requested_annotations = set(a[0] for a in self._annotations) + self._excluded_annotations.extend( + annotation_element(a) for a in _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS + if a not in requested_annotations) + + def _initializeFlagAttributes(self, args): + self._flags = ['--enable-test-intents'] + # TODO(jbudorick): Transition "--device-flags" to "--device-flags-file" + if hasattr(args, 'device_flags') and args.device_flags: + with open(args.device_flags) as device_flags_file: + stripped_lines = (l.strip() for l in device_flags_file) + self._flags.extend([flag for flag in stripped_lines if flag]) + if hasattr(args, 'device_flags_file') and args.device_flags_file: + with open(args.device_flags_file) as device_flags_file: + stripped_lines = (l.strip() for l in device_flags_file) + self._flags.extend([flag for flag in stripped_lines if flag]) + if (hasattr(args, 'strict_mode') and + args.strict_mode and + args.strict_mode != 'off'): + self._flags.append('--strict-mode=' + args.strict_mode) + if hasattr(args, 'regenerate_goldens') and args.regenerate_goldens: + self._flags.append('--regenerate-goldens') + + def _initializeDriverAttributes(self): + self._driver_apk = os.path.join( + constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR, + 'OnDeviceInstrumentationDriver.apk') + if os.path.exists(self._driver_apk): + driver_apk = apk_helper.ApkHelper(self._driver_apk) + self._driver_package = driver_apk.GetPackageName() + self._driver_name = driver_apk.GetInstrumentationName() + else: + self._driver_apk = None + + def _initializeTestControlAttributes(self, args): + self._screenshot_dir = args.screenshot_dir + self._timeout_scale = args.timeout_scale or 1 + + def _initializeTestCoverageAttributes(self, args): + self._coverage_directory = args.coverage_dir + + def _initializeTombstonesAttributes(self, args): + self._store_tombstones = args.store_tombstones + + @property + def additional_apks(self): + return self._additional_apks + + @property + def apk_under_test(self): + return self._apk_under_test + + @property + def apk_under_test_incremental_install_script(self): + return self._apk_under_test_incremental_install_script + + @property + def coverage_directory(self): + return self._coverage_directory + + @property + def driver_apk(self): + return self._driver_apk + + @property + def driver_package(self): + return self._driver_package + + @property + def driver_name(self): + return self._driver_name + + @property + def flags(self): + return self._flags + + @property + def package_info(self): + return self._package_info + + @property + def screenshot_dir(self): + return self._screenshot_dir + + @property + def store_tombstones(self): + return self._store_tombstones + + @property + def suite(self): + return self._suite + + @property + def test_apk(self): + return self._test_apk + + @property + def test_apk_incremental_install_script(self): + return self._test_apk_incremental_install_script + + @property + 
def test_jar(self): + return self._test_jar + + @property + def test_support_apk(self): + return self._test_support_apk + + @property + def test_package(self): + return self._test_package + + @property + def test_runner(self): + return self._test_runner + + @property + def timeout_scale(self): + return self._timeout_scale + + #override + def TestType(self): + return 'instrumentation' + + #override + def SetUp(self): + if self._isolate_delegate: + self._isolate_delegate.Remap( + self._isolate_abs_path, self._isolated_abs_path) + self._isolate_delegate.MoveOutputDeps() + self._data_deps.extend([(self._isolate_delegate.isolate_deps_dir, None)]) + + def GetDataDependencies(self): + return self._data_deps + + def GetTests(self): + tests = GetAllTests(self.test_jar) + filtered_tests = FilterTests( + tests, self._test_filter, self._annotations, self._excluded_annotations) + return self._ParametrizeTestsWithFlags(self._InflateTests(filtered_tests)) + + # pylint: disable=no-self-use + def _InflateTests(self, tests): + inflated_tests = [] + for c in tests: + for m in c['methods']: + a = dict(c['annotations']) + a.update(m['annotations']) + inflated_tests.append({ + 'class': c['class'], + 'method': m['method'], + 'annotations': a, + }) + return inflated_tests + + def _ParametrizeTestsWithFlags(self, tests): + new_tests = [] + for t in tests: + parameters = ParseCommandLineFlagParameters(t['annotations']) + if parameters: + t['flags'] = parameters[0] + for p in parameters[1:]: + parameterized_t = copy.copy(t) + parameterized_t['flags'] = p + new_tests.append(parameterized_t) + return tests + new_tests + + def GetDriverEnvironmentVars( + self, test_list=None, test_list_file_path=None): + env = { + _EXTRA_DRIVER_TARGET_PACKAGE: self.test_package, + _EXTRA_DRIVER_TARGET_CLASS: self.test_runner, + _EXTRA_TIMEOUT_SCALE: self._timeout_scale, + } + + if test_list: + env[_EXTRA_DRIVER_TEST_LIST] = ','.join(test_list) + + if test_list_file_path: + env[_EXTRA_DRIVER_TEST_LIST_FILE] = ( + os.path.basename(test_list_file_path)) + + return env + + @staticmethod + def ParseAmInstrumentRawOutput(raw_output): + return ParseAmInstrumentRawOutput(raw_output) + + @staticmethod + def GenerateTestResults( + result_code, result_bundle, statuses, start_ms, duration_ms): + return GenerateTestResults(result_code, result_bundle, statuses, + start_ms, duration_ms) + + #override + def TearDown(self): + if self._isolate_delegate: + self._isolate_delegate.Clear() diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py new file mode 100644 index 00000000000..666e8923989 --- /dev/null +++ b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py @@ -0,0 +1,604 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Unit tests for instrumentation_test_instance.""" + +# pylint: disable=protected-access + +import unittest + +from pylib.base import base_test_result +from pylib.constants import host_paths +from pylib.instrumentation import instrumentation_test_instance + +with host_paths.SysPath(host_paths.PYMOCK_PATH): + import mock # pylint: disable=import-error + +_INSTRUMENTATION_TEST_INSTANCE_PATH = ( + 'pylib.instrumentation.instrumentation_test_instance.%s') + +class InstrumentationTestInstanceTest(unittest.TestCase): + + def setUp(self): + options = mock.Mock() + options.tool = '' + + @staticmethod + def createTestInstance(): + c = _INSTRUMENTATION_TEST_INSTANCE_PATH % 'InstrumentationTestInstance' + with mock.patch('%s._initializeApkAttributes' % c), ( + mock.patch('%s._initializeDataDependencyAttributes' % c)), ( + mock.patch('%s._initializeTestFilterAttributes' % c)), ( + mock.patch('%s._initializeFlagAttributes' % c)), ( + mock.patch('%s._initializeDriverAttributes' % c)), ( + mock.patch('%s._initializeTestControlAttributes' % c)), ( + mock.patch('%s._initializeTestCoverageAttributes' % c)): + return instrumentation_test_instance.InstrumentationTestInstance( + mock.MagicMock(), mock.MagicMock(), lambda s: None) + + def testGetTests_noFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod1', + }, + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod2', + }, + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'method': 'testMethod1', + }, + ] + + with mock.patch(_INSTRUMENTATION_TEST_INSTANCE_PATH % '_GetTestsFromPickle', + return_value=raw_tests): + actual_tests = o.GetTests() + + self.assertEquals(actual_tests, expected_tests) + + def testGetTests_simpleGtestFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod1', + }, + ] + + o._test_filter = 'org.chromium.test.SampleTest.testMethod1' + with mock.patch(_INSTRUMENTATION_TEST_INSTANCE_PATH % '_GetTestsFromPickle', + return_value=raw_tests): + actual_tests = o.GetTests() + + self.assertEquals(actual_tests, expected_tests) + + def testGetTests_wildcardGtestFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 
'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'method': 'testMethod1', + }, + ] + + o._test_filter = 'org.chromium.test.SampleTest2.*' + with mock.patch(_INSTRUMENTATION_TEST_INSTANCE_PATH % '_GetTestsFromPickle', + return_value=raw_tests): + actual_tests = o.GetTests() + + self.assertEquals(actual_tests, expected_tests) + + @unittest.skip('crbug.com/623047') + def testGetTests_negativeGtestFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + o._GetTestsFromPickle = mock.MagicMock(return_value=raw_tests) + o._test_filter = '*-org.chromium.test.SampleTest.testMethod1' + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod2', + }, + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'method': 'testMethod1', + }, + ] + + actual_tests = o.GetTests() + self.assertEquals(actual_tests, expected_tests) + + def testGetTests_annotationFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod1', + }, + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'method': 'testMethod1', + }, + ] + + o._annotations = [('SmallTest', None)] + with mock.patch(_INSTRUMENTATION_TEST_INSTANCE_PATH % '_GetTestsFromPickle', + return_value=raw_tests): + actual_tests = o.GetTests() + + self.assertEquals(actual_tests, expected_tests) + + def testGetTests_excludedAnnotationFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'methods': [ + { + 'annotations': {'SmallTest': 
None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod2', + }, + ] + + o._excluded_annotations = [('SmallTest', None)] + with mock.patch(_INSTRUMENTATION_TEST_INSTANCE_PATH % '_GetTestsFromPickle', + return_value=raw_tests): + actual_tests = o.GetTests() + + self.assertEquals(actual_tests, expected_tests) + + def testGetTests_annotationSimpleValueFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': { + 'SmallTest': None, + 'TestValue': '1', + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'MediumTest': None, + 'TestValue': '2', + }, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'methods': [ + { + 'annotations': { + 'SmallTest': None, + 'TestValue': '3', + }, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + 'TestValue': '1', + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod1', + }, + ] + + o._annotations = [('TestValue', '1')] + with mock.patch(_INSTRUMENTATION_TEST_INSTANCE_PATH % '_GetTestsFromPickle', + return_value=raw_tests): + actual_tests = o.GetTests() + + self.assertEquals(actual_tests, expected_tests) + + def testGetTests_annotationDictValueFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'method': 'testMethod1', + }, + ] + + o._annotations = [('Feature', 'Bar')] + with mock.patch(_INSTRUMENTATION_TEST_INSTANCE_PATH % '_GetTestsFromPickle', + return_value=raw_tests): + actual_tests = o.GetTests() + + self.assertEquals(actual_tests, expected_tests) + + def testGetTests_multipleAnnotationValuesRequested(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'Feature': {'value': ['Baz']}, + 'MediumTest': None, + }, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Baz']}, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod2', + }, + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'method': 'testMethod1', + }, + ] + + o._annotations = [('Feature', 
'Bar'), ('Feature', 'Baz')] + with mock.patch(_INSTRUMENTATION_TEST_INSTANCE_PATH % '_GetTestsFromPickle', + return_value=raw_tests): + actual_tests = o.GetTests() + + self.assertEquals(actual_tests, expected_tests) + + def testGenerateTestResults_noStatus(self): + results = instrumentation_test_instance.GenerateTestResults( + None, None, [], 0, 1000) + self.assertEqual([], results) + + def testGenerateTestResults_testPassed(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (0, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 0, 1000) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType()) + + def testGenerateTestResults_testSkipped_true(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (0, { + 'test_skipped': 'true', + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (0, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 0, 1000) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType()) + + def testGenerateTestResults_testSkipped_false(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (0, { + 'test_skipped': 'false', + }), + (0, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 0, 1000) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType()) + + def testGenerateTestResults_testFailed(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (-2, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 0, 1000) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType()) + + def testGenerateTestResults_testUnknownException(self): + stacktrace = 'long\nstacktrace' + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (-1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + 'stack': stacktrace, + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 0, 1000) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType()) + self.assertEqual(stacktrace, results[0].GetLog()) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/build/android/pylib/instrumentation/json_perf_parser.py b/build/android/pylib/instrumentation/json_perf_parser.py new file mode 100644 index 00000000000..c647890ba3d --- /dev/null +++ b/build/android/pylib/instrumentation/json_perf_parser.py @@ -0,0 +1,161 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +"""A helper module for parsing JSON objects from perf tests results.""" + +import json + + +def GetAverageRunInfo(json_data, name): + """Summarizes TraceEvent JSON data for performance metrics. 
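+
+  For instance (worked from the duration example below), the two 'TestTrace'
+  events produce a single 4.0 ms data point, since 'ts' values are divided
+  by 1000 to convert to milliseconds.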
+ + Example JSON Inputs (More tags can be added but these are required): + Measuring Duration: + [ + { "cat": "Java", + "ts": 10000000000, + "ph": "S", + "name": "TestTrace" + }, + { "cat": "Java", + "ts": 10000004000, + "ph": "F", + "name": "TestTrace" + }, + ... + ] + + Measuring Call Frequency (FPS): + [ + { "cat": "Java", + "ts": 10000000000, + "ph": "I", + "name": "TestTraceFPS" + }, + { "cat": "Java", + "ts": 10000004000, + "ph": "I", + "name": "TestTraceFPS" + }, + ... + ] + + Args: + json_data: A list of dictonaries each representing a JSON object. + name: The 'name' tag to filter on in the JSON file. + + Returns: + A dictionary of result data with the following tags: + min: The minimum value tracked. + max: The maximum value tracked. + average: The average of all the values tracked. + count: The number of times the category/name pair was tracked. + type: The type of tracking ('Instant' for instant tags and 'Span' for + begin/end tags. + category: The passed in category filter. + name: The passed in name filter. + data_points: A list of all of the times used to generate this data. + units: The units for the values being reported. + + Raises: + Exception: if entry contains invalid data. + """ + + def EntryFilter(entry): + return entry['cat'] == 'Java' and entry['name'] == name + filtered_entries = [j for j in json_data if EntryFilter(j)] + + result = {} + + result['min'] = -1 + result['max'] = -1 + result['average'] = 0 + result['count'] = 0 + result['type'] = 'Unknown' + result['category'] = 'Java' + result['name'] = name + result['data_points'] = [] + result['units'] = '' + + total_sum = 0 + + last_val = 0 + val_type = None + for entry in filtered_entries: + if not val_type: + if 'mem' in entry: + val_type = 'mem' + + def GetVal(entry): + return entry['mem'] + + result['units'] = 'kb' + elif 'ts' in entry: + val_type = 'ts' + + def GetVal(entry): + return float(entry['ts']) / 1000.0 + + result['units'] = 'ms' + else: + raise Exception('Entry did not contain valid value info: %s' % entry) + + if not val_type in entry: + raise Exception('Entry did not contain expected value type "%s" ' + 'information: %s' % (val_type, entry)) + val = GetVal(entry) + if (entry['ph'] == 'S' and + (result['type'] == 'Unknown' or result['type'] == 'Span')): + result['type'] = 'Span' + last_val = val + elif ((entry['ph'] == 'F' and result['type'] == 'Span') or + (entry['ph'] == 'I' and (result['type'] == 'Unknown' or + result['type'] == 'Instant'))): + if last_val > 0: + delta = val - last_val + if result['min'] == -1 or result['min'] > delta: + result['min'] = delta + if result['max'] == -1 or result['max'] < delta: + result['max'] = delta + total_sum += delta + result['count'] += 1 + result['data_points'].append(delta) + if entry['ph'] == 'I': + result['type'] = 'Instant' + last_val = val + if result['count'] > 0: + result['average'] = total_sum / result['count'] + + return result + + +def GetAverageRunInfoFromJSONString(json_string, name): + """Returns the results from GetAverageRunInfo using a JSON string. + + Args: + json_string: The string containing JSON. + name: The 'name' tag to filter on in the JSON file. + + Returns: + See GetAverageRunInfo Returns section. + """ + return GetAverageRunInfo(json.loads(json_string), name) + + +def GetAverageRunInfoFromFile(json_file, name): + """Returns the results from GetAverageRunInfo using a JSON file. + + Args: + json_file: The path to a JSON file. + name: The 'name' tag to filter on in the JSON file. + + Returns: + See GetAverageRunInfo Returns section. 
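+
+    Illustrative usage (path and trace name are hypothetical):
+      info = GetAverageRunInfoFromFile('perf_results.json', 'TestTrace')
+      print info['average']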
+ """ + with open(json_file, 'r') as f: + data = f.read() + perf = json.loads(data) + + return GetAverageRunInfo(perf, name) diff --git a/build/android/pylib/instrumentation/test_result.py b/build/android/pylib/instrumentation/test_result.py new file mode 100644 index 00000000000..24e80a8e5fb --- /dev/null +++ b/build/android/pylib/instrumentation/test_result.py @@ -0,0 +1,30 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from pylib.base import base_test_result + + +class InstrumentationTestResult(base_test_result.BaseTestResult): + """Result information for a single instrumentation test.""" + + def __init__(self, full_name, test_type, start_date, dur, log=''): + """Construct an InstrumentationTestResult object. + + Args: + full_name: Full name of the test. + test_type: Type of the test result as defined in ResultType. + start_date: Date in milliseconds when the test began running. + dur: Duration of the test run in milliseconds. + log: A string listing any errors. + """ + super(InstrumentationTestResult, self).__init__( + full_name, test_type, dur, log) + name_pieces = full_name.rsplit('#') + if len(name_pieces) > 1: + self._test_name = name_pieces[1] + self._class_name = name_pieces[0] + else: + self._class_name = full_name + self._test_name = full_name + self._start_date = start_date diff --git a/build/android/pylib/junit/__init__.py b/build/android/pylib/junit/__init__.py new file mode 100644 index 00000000000..5cac026153c --- /dev/null +++ b/build/android/pylib/junit/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + diff --git a/build/android/pylib/junit/setup.py b/build/android/pylib/junit/setup.py new file mode 100644 index 00000000000..94d4277d8e6 --- /dev/null +++ b/build/android/pylib/junit/setup.py @@ -0,0 +1,20 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from pylib.junit import test_runner + +def Setup(args): + """Creates a test runner factory for junit tests. + + Args: + args: an argparse.Namespace object. + Return: + A (runner_factory, tests) tuple. + """ + + def TestRunnerFactory(_unused_device, _unused_shard_index): + return test_runner.JavaTestRunner(args) + + return (TestRunnerFactory, ['JUnit tests']) + diff --git a/build/android/pylib/junit/test_dispatcher.py b/build/android/pylib/junit/test_dispatcher.py new file mode 100644 index 00000000000..51253d4cc07 --- /dev/null +++ b/build/android/pylib/junit/test_dispatcher.py @@ -0,0 +1,29 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from pylib import constants +from pylib.base import base_test_result + +def RunTests(tests, runner_factory): + """Runs a set of java tests on the host. + + Return: + A tuple containing the results & the exit code. 
+ """ + def run(t): + runner = runner_factory(None, None) + runner.SetUp() + results_list, return_code = runner.RunTest(t) + runner.TearDown() + return (results_list, return_code == 0) + + test_run_results = base_test_result.TestRunResults() + exit_code = 0 + for t in tests: + results_list, passed = run(t) + test_run_results.AddResults(results_list) + if not passed: + exit_code = constants.ERROR_EXIT_CODE + return (test_run_results, exit_code) + diff --git a/build/android/pylib/junit/test_runner.py b/build/android/pylib/junit/test_runner.py new file mode 100644 index 00000000000..5066c204123 --- /dev/null +++ b/build/android/pylib/junit/test_runner.py @@ -0,0 +1,72 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import os +import tempfile + +from devil.utils import cmd_helper +from pylib import constants +from pylib.results import json_results + +class JavaTestRunner(object): + """Runs java tests on the host.""" + + def __init__(self, args): + self._coverage_dir = args.coverage_dir + self._package_filter = args.package_filter + self._runner_filter = args.runner_filter + self._sdk_version = args.sdk_version + self._test_filter = args.test_filter + self._test_suite = args.test_suite + + def SetUp(self): + pass + + def RunTest(self, _test): + """Runs junit tests from |self._test_suite|.""" + with tempfile.NamedTemporaryFile() as json_file: + java_script = os.path.join( + constants.GetOutDirectory(), 'bin', 'helper', self._test_suite) + command = [java_script] + + # Add Jar arguments. + jar_args = ['-test-jars', self._test_suite + '.jar', + '-json-results-file', json_file.name] + if self._test_filter: + jar_args.extend(['-gtest-filter', self._test_filter]) + if self._package_filter: + jar_args.extend(['-package-filter', self._package_filter]) + if self._runner_filter: + jar_args.extend(['-runner-filter', self._runner_filter]) + if self._sdk_version: + jar_args.extend(['-sdk-version', self._sdk_version]) + command.extend(['--jar-args', '"%s"' % ' '.join(jar_args)]) + + # Add JVM arguments. + jvm_args = [] + # TODO(mikecase): Add a --robolectric-dep-dir arg to test runner. + # Have this arg set by GN in the generated test runner scripts. + jvm_args += [ + '-Drobolectric.dependency.dir=%s' % + os.path.join(constants.GetOutDirectory(), + 'lib.java', 'third_party', 'robolectric')] + if self._coverage_dir: + if not os.path.exists(self._coverage_dir): + os.makedirs(self._coverage_dir) + elif not os.path.isdir(self._coverage_dir): + raise Exception('--coverage-dir takes a directory, not file path.') + jvm_args.append('-Demma.coverage.out.file=%s' % os.path.join( + self._coverage_dir, '%s.ec' % self._test_suite)) + if jvm_args: + command.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)]) + + return_code = cmd_helper.RunCmd(command) + results_list = json_results.ParseResultsFromJson( + json.loads(json_file.read())) + return (results_list, return_code) + + def TearDown(self): + pass + diff --git a/build/android/pylib/linker/__init__.py b/build/android/pylib/linker/__init__.py new file mode 100644 index 00000000000..af994371233 --- /dev/null +++ b/build/android/pylib/linker/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ diff --git a/build/android/pylib/linker/setup.py b/build/android/pylib/linker/setup.py new file mode 100644 index 00000000000..3f380ead80d --- /dev/null +++ b/build/android/pylib/linker/setup.py @@ -0,0 +1,60 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Setup for linker tests.""" + +import logging + +from pylib.constants import host_paths +from pylib.linker import test_case +from pylib.linker import test_runner + +with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): + import unittest_util # pylint: disable=import-error + +# ModernLinker requires Android M (API level 23) or later. +_VERSION_SDK_PROPERTY = 'ro.build.version.sdk' +_MODERN_LINKER_MINIMUM_SDK_INT = 23 + +def Setup(args, devices): + """Creates a list of test cases and a runner factory. + + Args: + args: an argparse.Namespace object. + devices: an iterable of available devices. + Returns: + A tuple of (TestRunnerFactory, tests). + """ + legacy_linker_tests = [ + test_case.LinkerSharedRelroTest(is_modern_linker=False, + is_low_memory=False), + test_case.LinkerSharedRelroTest(is_modern_linker=False, + is_low_memory=True), + ] + modern_linker_tests = [ + test_case.LinkerSharedRelroTest(is_modern_linker=True), + ] + + min_sdk_int = 1 << 31 + for device in devices: + min_sdk_int = min(min_sdk_int, device.build_version_sdk) + + if min_sdk_int >= _MODERN_LINKER_MINIMUM_SDK_INT: + all_tests = legacy_linker_tests + modern_linker_tests + else: + all_tests = legacy_linker_tests + logging.warn('Not running LinkerModern tests (requires API %d, found %d)', + _MODERN_LINKER_MINIMUM_SDK_INT, min_sdk_int) + + if args.test_filter: + all_test_names = [test.qualified_name for test in all_tests] + filtered_test_names = unittest_util.FilterTestNames(all_test_names, + args.test_filter) + all_tests = [t for t in all_tests \ + if t.qualified_name in filtered_test_names] + + def TestRunnerFactory(device, _shard_index): + return test_runner.LinkerTestRunner(device, args.tool) + + return (TestRunnerFactory, all_tests) diff --git a/build/android/pylib/linker/test_case.py b/build/android/pylib/linker/test_case.py new file mode 100644 index 00000000000..475b730c4a1 --- /dev/null +++ b/build/android/pylib/linker/test_case.py @@ -0,0 +1,227 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Base class for linker-specific test cases. + + The custom dynamic linker can only be tested through a custom test case + for various technical reasons: + + - It's an 'invisible feature', i.e. it doesn't expose a new API or + behaviour, all it does is save RAM when loading native libraries. + + - Checking that it works correctly requires several things that do not + fit the existing GTest-based and instrumentation-based tests: + + - Native test code needs to be run in both the browser and renderer + process at the same time just after loading native libraries, in + a completely asynchronous way. + + - Each test case requires restarting a whole new application process + with a different command-line. + + - Enabling test support in the Linker code requires building a special + APK with a flag to activate special test-only support code in the + Linker code itself. + + Host-driven tests have also been tried, but since they're really + sub-classes of instrumentation tests, they didn't work well either. 
+
+  To build and run the linker tests, do the following:
+
+    ninja -C out/Debug chromium_linker_test_apk
+    build/android/test_runner.py linker
+
+"""
+# pylint: disable=R0201
+
+import logging
+import re
+
+from devil.android import device_errors
+from devil.android.sdk import intent
+from pylib.base import base_test_result
+
+
+ResultType = base_test_result.ResultType
+
+_PACKAGE_NAME = 'org.chromium.chromium_linker_test_apk'
+_ACTIVITY_NAME = '.ChromiumLinkerTestActivity'
+_COMMAND_LINE_FILE = '/data/local/tmp/chromium-linker-test-command-line'
+
+# Logcat filters used during each test. Only the 'chromium' one is really
+# needed, but the logs are added to the TestResult in case of error, and
+# it is handy to have others as well when troubleshooting.
+_LOGCAT_FILTERS = ['*:s', 'chromium:v', 'cr_chromium:v',
+                   'cr_ChromiumAndroidLinker:v', 'cr_LibraryLoader:v',
+                   'cr_LinkerTest:v']
+#_LOGCAT_FILTERS = ['*:v']  ## DEBUG
+
+# Regular expression used to match status lines in logcat.
+_RE_BROWSER_STATUS_LINE = re.compile(r' BROWSER_LINKER_TEST: (FAIL|SUCCESS)$')
+_RE_RENDERER_STATUS_LINE = re.compile(r' RENDERER_LINKER_TEST: (FAIL|SUCCESS)$')
+
+def _StartActivityAndWaitForLinkerTestStatus(device, timeout):
+  """Force-start an activity and wait up to |timeout| seconds until the full
+     linker test status lines appear in the logcat, recorded through |device|.
+  Args:
+    device: A DeviceUtils instance.
+    timeout: Timeout in seconds.
+  Returns:
+    A (status, logs) tuple, where status is a ResultType constant, and logs
+    is the final logcat output as a string.
+  """
+
+  # 1. Start recording logcat with appropriate filters.
+  with device.GetLogcatMonitor(filter_specs=_LOGCAT_FILTERS) as logmon:
+
+    # 2. Force-start activity.
+    device.StartActivity(
+        intent.Intent(package=_PACKAGE_NAME, activity=_ACTIVITY_NAME),
+        force_stop=True)
+
+    # 3. Wait up to |timeout| seconds until the test status is in the logcat.
+    result = ResultType.PASS
+    try:
+      browser_match = logmon.WaitFor(_RE_BROWSER_STATUS_LINE, timeout=timeout)
+      logging.debug('Found browser match: %s', browser_match.group(0))
+      renderer_match = logmon.WaitFor(_RE_RENDERER_STATUS_LINE,
+                                      timeout=timeout)
+      logging.debug('Found renderer match: %s', renderer_match.group(0))
+      if (browser_match.group(1) != 'SUCCESS'
+          or renderer_match.group(1) != 'SUCCESS'):
+        result = ResultType.FAIL
+    except device_errors.CommandTimeoutError:
+      result = ResultType.TIMEOUT
+
+  return result, '\n'.join(device.adb.Logcat(dump=True))
+
+
+class LibraryLoadMap(dict):
+  """A helper class to pretty-print a map of library names to load addresses."""
+  def __str__(self):
+    items = ['\'%s\': 0x%x' % (name, address) for \
+             (name, address) in self.iteritems()]
+    return '{%s}' % (', '.join(items))
+
+  def __repr__(self):
+    return 'LibraryLoadMap(%s)' % self.__str__()
+
+
+class AddressList(list):
+  """A helper class to pretty-print a list of load addresses."""
+  def __str__(self):
+    items = ['0x%x' % address for address in self]
+    return '[%s]' % (', '.join(items))
+
+  def __repr__(self):
+    return 'AddressList(%s)' % self.__str__()
+
+
+class LinkerTestCaseBase(object):
+  """Base class for linker test cases."""
+
+  def __init__(self, is_modern_linker=False, is_low_memory=False):
+    """Create a test case.
+    Args:
+      is_modern_linker: True to test ModernLinker, False to test LegacyLinker.
+      is_low_memory: True to simulate a low-memory device, False otherwise.
+ """ + self.is_modern_linker = is_modern_linker + if is_modern_linker: + test_suffix = 'ForModernLinker' + else: + test_suffix = 'ForLegacyLinker' + self.is_low_memory = is_low_memory + if is_low_memory: + test_suffix += 'LowMemoryDevice' + else: + test_suffix += 'RegularDevice' + class_name = self.__class__.__name__ + self.qualified_name = '%s.%s' % (class_name, test_suffix) + self.tagged_name = self.qualified_name + + def _RunTest(self, _device): + """Run the test, must be overriden. + Args: + _device: A DeviceUtils interface. + Returns: + A (status, log) tuple, where is a ResultType constant, and + is the logcat output captured during the test in case of error, or None + in case of success. + """ + return ResultType.FAIL, 'Unimplemented _RunTest() method!' + + def Run(self, device): + """Run the test on a given device. + Args: + device: Name of target device where to run the test. + Returns: + A base_test_result.TestRunResult() instance. + """ + margin = 8 + print '[ %-*s ] %s' % (margin, 'RUN', self.tagged_name) + logging.info('Running linker test: %s', self.tagged_name) + + # Create command-line file on device. + if self.is_modern_linker: + command_line_flags = '--use-linker=modern' + else: + command_line_flags = '--use-linker=legacy' + if self.is_low_memory: + command_line_flags += ' --low-memory-device' + device.WriteFile(_COMMAND_LINE_FILE, command_line_flags) + + # Run the test. + status, logs = self._RunTest(device) + + result_text = 'OK' + if status == ResultType.FAIL: + result_text = 'FAILED' + elif status == ResultType.TIMEOUT: + result_text = 'TIMEOUT' + print '[ %*s ] %s' % (margin, result_text, self.tagged_name) + + results = base_test_result.TestRunResults() + results.AddResult( + base_test_result.BaseTestResult( + self.tagged_name, + status, + log=logs)) + + return results + + def __str__(self): + return self.tagged_name + + def __repr__(self): + return self.tagged_name + + +class LinkerSharedRelroTest(LinkerTestCaseBase): + """A linker test case to check the status of shared RELRO sections. + + The core of the checks performed here are pretty simple: + + - Clear the logcat and start recording with an appropriate set of filters. + - Create the command-line appropriate for the test-case. + - Start the activity (always forcing a cold start). + - Every second, look at the current content of the filtered logcat lines + and look for instances of the following: + + BROWSER_LINKER_TEST: + RENDERER_LINKER_TEST: + + where can be either FAIL or SUCCESS. These lines can appear + in any order in the logcat. Once both browser and renderer status are + found, stop the loop. Otherwise timeout after 30 seconds. + + Note that there can be other lines beginning with BROWSER_LINKER_TEST: + and RENDERER_LINKER_TEST:, but are not followed by a code. + + - The test case passes if the for both the browser and renderer + process are SUCCESS. Otherwise its a fail. + """ + def _RunTest(self, device): + # Wait up to 30 seconds until the linker test status is in the logcat. + return _StartActivityAndWaitForLinkerTestStatus(device, timeout=30) diff --git a/build/android/pylib/linker/test_runner.py b/build/android/pylib/linker/test_runner.py new file mode 100644 index 00000000000..d3459520b9e --- /dev/null +++ b/build/android/pylib/linker/test_runner.py @@ -0,0 +1,97 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Runs linker tests on a particular device.""" + +import logging +import os.path +import sys +import traceback + +from pylib import constants +from pylib.base import base_test_result +from pylib.base import base_test_runner +from pylib.linker import test_case + + +# Name of the Android package to install for this to work. +_PACKAGE_NAME = 'ChromiumLinkerTest' + + +class LinkerExceptionTestResult(base_test_result.BaseTestResult): + """Test result corresponding to a python exception in a host-custom test.""" + + def __init__(self, test_name, exc_info): + """Constructs a LinkerExceptionTestResult object. + + Args: + test_name: name of the test which raised an exception. + exc_info: exception info, ostensibly from sys.exc_info(). + """ + exc_type, exc_value, exc_traceback = exc_info + trace_info = ''.join(traceback.format_exception(exc_type, exc_value, + exc_traceback)) + log_msg = 'Exception:\n' + trace_info + + super(LinkerExceptionTestResult, self).__init__( + test_name, + base_test_result.ResultType.FAIL, + log="%s %s" % (exc_type, log_msg)) + + +class LinkerTestRunner(base_test_runner.BaseTestRunner): + """Orchestrates running a set of linker tests. + + Any Python exceptions in the tests are caught and translated into a failed + result, rather than being re-raised on the main thread. + """ + + #override + def __init__(self, device, tool): + """Creates a new LinkerTestRunner. + + Args: + device: Attached android device. + tool: Name of the Valgrind tool. + """ + super(LinkerTestRunner, self).__init__(device, tool) + + #override + def InstallTestPackage(self): + apk_path = os.path.join( + constants.GetOutDirectory(), 'apks', '%s.apk' % _PACKAGE_NAME) + + if not os.path.exists(apk_path): + raise Exception('%s not found, please build it' % apk_path) + + self.device.Install(apk_path) + + #override + def RunTest(self, test): + """Sets up and runs a test case. + + Args: + test: An object which is ostensibly a subclass of LinkerTestCaseBase. + + Returns: + A TestRunResults object which contains the result produced by the test + and, in the case of a failure, the test that should be retried. + """ + + assert isinstance(test, test_case.LinkerTestCaseBase) + + try: + results = test.Run(self.device) + except Exception: # pylint: disable=broad-except + logging.exception('Caught exception while trying to run test: ' + + test.tagged_name) + exc_info = sys.exc_info() + results = base_test_result.TestRunResults() + results.AddResult(LinkerExceptionTestResult( + test.tagged_name, exc_info)) + + if not results.DidRunPass(): + return results, test + else: + return results, None diff --git a/build/android/pylib/local/__init__.py b/build/android/pylib/local/__init__.py new file mode 100644 index 00000000000..4d6aabb953d --- /dev/null +++ b/build/android/pylib/local/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/build/android/pylib/local/device/__init__.py b/build/android/pylib/local/device/__init__.py new file mode 100644 index 00000000000..4d6aabb953d --- /dev/null +++ b/build/android/pylib/local/device/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
diff --git a/build/android/pylib/local/device/local_device_environment.py b/build/android/pylib/local/device/local_device_environment.py new file mode 100644 index 00000000000..9f9c78745b3 --- /dev/null +++ b/build/android/pylib/local/device/local_device_environment.py @@ -0,0 +1,217 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import datetime +import functools +import logging +import os +import shutil +import tempfile +import threading + +from devil import base_error +from devil.android import device_blacklist +from devil.android import device_errors +from devil.android import device_list +from devil.android import device_utils +from devil.android import logcat_monitor +from devil.utils import file_utils +from devil.utils import parallelizer +from pylib import constants +from pylib.base import environment + + +def _DeviceCachePath(device): + file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial() + return os.path.join(constants.GetOutDirectory(), file_name) + + +def handle_shard_failures(f): + """A decorator that handles device failures for per-device functions. + + Args: + f: the function being decorated. The function must take at least one + argument, and that argument must be the device. + """ + return handle_shard_failures_with(None)(f) + + +# TODO(jbudorick): Refactor this to work as a decorator or context manager. +def handle_shard_failures_with(on_failure): + """A decorator that handles device failures for per-device functions. + + This calls on_failure in the event of a failure. + + Args: + f: the function being decorated. The function must take at least one + argument, and that argument must be the device. + on_failure: A binary function to call on failure. + """ + def decorator(f): + @functools.wraps(f) + def wrapper(dev, *args, **kwargs): + try: + return f(dev, *args, **kwargs) + except device_errors.CommandTimeoutError: + logging.exception('Shard timed out: %s(%s)', f.__name__, str(dev)) + except device_errors.DeviceUnreachableError: + logging.exception('Shard died: %s(%s)', f.__name__, str(dev)) + except base_error.BaseError: + logging.exception('Shard failed: %s(%s)', f.__name__, str(dev)) + except SystemExit: + logging.exception('Shard killed: %s(%s)', f.__name__, str(dev)) + raise + if on_failure: + on_failure(dev, f.__name__) + return None + + return wrapper + + return decorator + + +class LocalDeviceEnvironment(environment.Environment): + + def __init__(self, args, _error_func): + super(LocalDeviceEnvironment, self).__init__() + self._blacklist = (device_blacklist.Blacklist(args.blacklist_file) + if args.blacklist_file + else None) + self._device_serial = args.test_device + self._devices_lock = threading.Lock() + self._devices = [] + self._concurrent_adb = args.enable_concurrent_adb + self._enable_device_cache = args.enable_device_cache + self._logcat_monitors = [] + self._logcat_output_dir = args.logcat_output_dir + self._logcat_output_file = args.logcat_output_file + self._max_tries = 1 + args.num_retries + self._skip_clear_data = args.skip_clear_data + self._target_devices_file = args.target_devices_file + self._tool_name = args.tool + + #override + def SetUp(self): + device_arg = 'default' + if self._target_devices_file: + device_arg = device_list.GetPersistentDeviceList( + self._target_devices_file) + if not device_arg: + logging.warning('No target devices specified. 
Falling back to ' + 'running on all available devices.') + device_arg = 'default' + else: + logging.info( + 'Read device list %s from target devices file.', str(device_arg)) + elif self._device_serial: + device_arg = self._device_serial + + self._devices = device_utils.DeviceUtils.HealthyDevices( + self._blacklist, enable_device_files_cache=self._enable_device_cache, + default_retries=self._max_tries - 1, device_arg=device_arg) + if not self._devices: + raise device_errors.NoDevicesError + + if self._logcat_output_file: + self._logcat_output_dir = tempfile.mkdtemp() + + @handle_shard_failures_with(on_failure=self.BlacklistDevice) + def prepare_device(d): + if self._enable_device_cache: + cache_path = _DeviceCachePath(d) + if os.path.exists(cache_path): + logging.info('Using device cache: %s', cache_path) + with open(cache_path) as f: + d.LoadCacheData(f.read()) + # Delete cached file so that any exceptions cause it to be cleared. + os.unlink(cache_path) + + if self._logcat_output_dir: + logcat_file = os.path.join( + self._logcat_output_dir, + '%s_%s' % (d.adb.GetDeviceSerial(), + datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S'))) + monitor = logcat_monitor.LogcatMonitor( + d.adb, clear=True, output_file=logcat_file) + self._logcat_monitors.append(monitor) + monitor.Start() + + self.parallel_devices.pMap(prepare_device) + + @property + def blacklist(self): + return self._blacklist + + @property + def concurrent_adb(self): + return self._concurrent_adb + + @property + def devices(self): + if not self._devices: + raise device_errors.NoDevicesError() + return self._devices + + @property + def max_tries(self): + return self._max_tries + + @property + def parallel_devices(self): + return parallelizer.SyncParallelizer(self.devices) + + @property + def skip_clear_data(self): + return self._skip_clear_data + + @property + def tool(self): + return self._tool_name + + #override + def TearDown(self): + @handle_shard_failures_with(on_failure=self.BlacklistDevice) + def tear_down_device(d): + # Write the cache even when not using it so that it will be ready the + # first time that it is enabled. Writing it every time is also necessary + # so that an invalid cache can be flushed just by disabling it for one + # run. 
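+      # For a device with hypothetical serial 0123456789abcdef, this writes
+      # device_cache_0123456789abcdef.json under the out directory, per
+      # _DeviceCachePath above.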
+ cache_path = _DeviceCachePath(d) + with open(cache_path, 'w') as f: + f.write(d.DumpCacheData()) + logging.info('Wrote device cache: %s', cache_path) + + self.parallel_devices.pMap(tear_down_device) + + for m in self._logcat_monitors: + try: + m.Stop() + m.Close() + _, temp_path = tempfile.mkstemp() + with open(m.output_file, 'r') as infile: + with open(temp_path, 'w') as outfile: + for line in infile: + outfile.write('Device(%s) %s' % (m.adb.GetDeviceSerial(), line)) + shutil.move(temp_path, m.output_file) + except base_error.BaseError: + logging.exception('Failed to stop logcat monitor for %s', + m.adb.GetDeviceSerial()) + except IOError: + logging.exception('Failed to locate logcat for device %s', + m.adb.GetDeviceSerial()) + + if self._logcat_output_file: + file_utils.MergeFiles( + self._logcat_output_file, + [m.output_file for m in self._logcat_monitors + if os.path.exists(m.output_file)]) + shutil.rmtree(self._logcat_output_dir) + + def BlacklistDevice(self, device, reason='local_device_failure'): + device_serial = device.adb.GetDeviceSerial() + if self._blacklist: + self._blacklist.Extend([device_serial], reason=reason) + with self._devices_lock: + self._devices = [d for d in self._devices if str(d) != device_serial] + diff --git a/build/android/pylib/local/device/local_device_gtest_run.py b/build/android/pylib/local/device/local_device_gtest_run.py new file mode 100644 index 00000000000..72961708288 --- /dev/null +++ b/build/android/pylib/local/device/local_device_gtest_run.py @@ -0,0 +1,374 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import itertools +import logging +import os +import posixpath + +from devil.android import device_errors +from devil.android import device_temp_file +from devil.android import ports +from devil.utils import reraiser_thread +from pylib import constants +from pylib.base import base_test_result +from pylib.gtest import gtest_test_instance +from pylib.local import local_test_server_spawner +from pylib.local.device import local_device_environment +from pylib.local.device import local_device_test_run + +_COMMAND_LINE_FLAGS_SUPPORTED = True + +_MAX_INLINE_FLAGS_LENGTH = 50 # Arbitrarily chosen. +_EXTRA_COMMAND_LINE_FILE = ( + 'org.chromium.native_test.NativeTest.CommandLineFile') +_EXTRA_COMMAND_LINE_FLAGS = ( + 'org.chromium.native_test.NativeTest.CommandLineFlags') +_EXTRA_TEST_LIST = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner' + '.TestList') +_EXTRA_TEST = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner' + '.Test') + +_MAX_SHARD_SIZE = 256 +_SECONDS_TO_NANOS = int(1e9) + +# The amount of time a test executable may run before it gets killed. +_TEST_TIMEOUT_SECONDS = 30*60 + +# TODO(jbudorick): Move this up to the test instance if the net test server is +# handled outside of the APK for the remote_device environment. +_SUITE_REQUIRES_TEST_SERVER_SPAWNER = [ + 'components_browsertests', 'content_unittests', 'content_browsertests', + 'net_unittests', 'unit_tests' +] + +# No-op context manager. If we used Python 3, we could change this to +# contextlib.ExitStack() +class _NullContextManager(object): + def __enter__(self): + pass + def __exit__(self, *args): + pass + + +# TODO(jbudorick): Move this inside _ApkDelegate once TestPackageApk is gone. 
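
The _NullContextManager above exists so that _ApkDelegate.Run() further down can always enter a single with statement whether or not a device temp file was created; Python 3.7's contextlib.nullcontext plays the same role. A condensed sketch of the pattern as Run() uses it (names from this file; the flags value is hypothetical):

# Either a real device temp file or the no-op manager, handled identically.
flags = '--enable-logging'  # hypothetical
command_line_file = _NullContextManager()
if flags and len(flags) > _MAX_INLINE_FLAGS_LENGTH:
  command_line_file = device_temp_file.DeviceTempFile(device.adb)
with command_line_file:
  pass  # Start the instrumentation; the temp file (if any) is cleaned up here.
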
+def PullAppFilesImpl(device, package, files, directory): + device_dir = device.GetApplicationDataDirectory(package) + host_dir = os.path.join(directory, str(device)) + for f in files: + device_file = posixpath.join(device_dir, f) + host_file = os.path.join(host_dir, *f.split(posixpath.sep)) + host_file_base, ext = os.path.splitext(host_file) + for i in itertools.count(): + host_file = '%s_%d%s' % (host_file_base, i, ext) + if not os.path.exists(host_file): + break + device.PullFile(device_file, host_file) + + +def _ExtractTestsFromFilter(gtest_filter): + """Returns the list of tests specified by the given filter. + + Returns: + None if the device should be queried for the test list instead. + """ + # Empty means all tests, - means exclude filter. + if not gtest_filter or '-' in gtest_filter: + return None + + patterns = gtest_filter.split(':') + # For a single pattern, allow it even if it has a wildcard so long as the + # wildcard comes at the end and there is at least one . to prove the scope is + # not too large. + # This heuristic is not necessarily faster, but normally is. + if len(patterns) == 1 and patterns[0].endswith('*'): + no_suffix = patterns[0].rstrip('*') + if '*' not in no_suffix and '.' in no_suffix: + return patterns + + if '*' in gtest_filter: + return None + return patterns + + +class _ApkDelegate(object): + def __init__(self, test_instance): + self._activity = test_instance.activity + self._apk_helper = test_instance.apk_helper + self._test_apk_incremental_install_script = ( + test_instance.test_apk_incremental_install_script) + self._package = test_instance.package + self._runner = test_instance.runner + self._permissions = test_instance.permissions + self._suite = test_instance.suite + self._component = '%s/%s' % (self._package, self._runner) + self._extras = test_instance.extras + + def Install(self, device): + if self._test_apk_incremental_install_script: + local_device_test_run.IncrementalInstall(device, self._apk_helper, + self._test_apk_incremental_install_script) + else: + device.Install(self._apk_helper, reinstall=True, + permissions=self._permissions) + + def Run(self, test, device, flags=None, **kwargs): + extras = dict(self._extras) + + if ('timeout' in kwargs + and gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in extras): + # Make sure the instrumentation doesn't kill the test before the + # scripts do. The provided timeout value is in seconds, but the + # instrumentation deals with nanoseconds because that's how Android + # handles time. 
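+      # E.g. a 30 * 60 second shard timeout becomes 1800 * 10**9 ns here
+      # (illustrative arithmetic; the multiplier is _SECONDS_TO_NANOS above).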
+ extras[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int( + kwargs['timeout'] * _SECONDS_TO_NANOS) + + command_line_file = _NullContextManager() + if flags: + if len(flags) > _MAX_INLINE_FLAGS_LENGTH: + command_line_file = device_temp_file.DeviceTempFile(device.adb) + device.WriteFile(command_line_file.name, '_ %s' % flags) + extras[_EXTRA_COMMAND_LINE_FILE] = command_line_file.name + else: + extras[_EXTRA_COMMAND_LINE_FLAGS] = flags + + test_list_file = _NullContextManager() + if test: + if len(test) > 1: + test_list_file = device_temp_file.DeviceTempFile(device.adb) + device.WriteFile(test_list_file.name, '\n'.join(test)) + extras[_EXTRA_TEST_LIST] = test_list_file.name + else: + extras[_EXTRA_TEST] = test[0] + + with command_line_file, test_list_file: + try: + return device.StartInstrumentation( + self._component, extras=extras, raw=False, **kwargs) + except Exception: + device.ForceStop(self._package) + raise + + def PullAppFiles(self, device, files, directory): + PullAppFilesImpl(device, self._package, files, directory) + + def Clear(self, device): + device.ClearApplicationState(self._package, permissions=self._permissions) + + +class _ExeDelegate(object): + def __init__(self, tr, dist_dir): + self._host_dist_dir = dist_dir + self._exe_file_name = os.path.basename(dist_dir)[:-len('__dist')] + self._device_dist_dir = posixpath.join( + constants.TEST_EXECUTABLE_DIR, os.path.basename(dist_dir)) + self._test_run = tr + + def Install(self, device): + # TODO(jbudorick): Look into merging this with normal data deps pushing if + # executables become supported on nonlocal environments. + device.PushChangedFiles([(self._host_dist_dir, self._device_dist_dir)], + delete_device_stale=True) + + def Run(self, test, device, flags=None, **kwargs): + tool = self._test_run.GetTool(device).GetTestWrapper() + if tool: + cmd = [tool] + else: + cmd = [] + cmd.append(posixpath.join(self._device_dist_dir, self._exe_file_name)) + + if test: + cmd.append('--gtest_filter=%s' % ':'.join(test)) + if flags: + # TODO(agrieve): This won't work if multiple flags are passed. 
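+      # One possible fix, sketched as an assumption (not in this change):
+      #   cmd.extend(shlex.split(flags))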
+ cmd.append(flags) + cwd = constants.TEST_EXECUTABLE_DIR + + env = { + 'LD_LIBRARY_PATH': self._device_dist_dir + } + try: + gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP'] + external = device.GetExternalStoragePath() + env['GCOV_PREFIX'] = '%s/gcov' % external + env['GCOV_PREFIX_STRIP'] = gcov_strip_depth + except (device_errors.CommandFailedError, KeyError): + pass + + output = device.RunShellCommand( + cmd, cwd=cwd, env=env, check_return=False, large_output=True, **kwargs) + return output + + def PullAppFiles(self, device, files, directory): + pass + + def Clear(self, device): + device.KillAll(self._exe_file_name, blocking=True, timeout=30, quiet=True) + + +class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun): + + def __init__(self, env, test_instance): + assert isinstance(env, local_device_environment.LocalDeviceEnvironment) + assert isinstance(test_instance, gtest_test_instance.GtestTestInstance) + super(LocalDeviceGtestRun, self).__init__(env, test_instance) + + if self._test_instance.apk: + self._delegate = _ApkDelegate(self._test_instance) + elif self._test_instance.exe_dist_dir: + self._delegate = _ExeDelegate(self, self._test_instance.exe_dist_dir) + self._crashes = set() + self._servers = collections.defaultdict(list) + + #override + def TestPackage(self): + return self._test_instance.suite + + #override + def SetUp(self): + @local_device_environment.handle_shard_failures_with( + on_failure=self._env.BlacklistDevice) + def individual_device_set_up(dev): + def install_apk(): + # Install test APK. + self._delegate.Install(dev) + + def push_test_data(): + # Push data dependencies. + device_root = posixpath.join(dev.GetExternalStoragePath(), + 'chromium_tests_root') + data_deps = self._test_instance.GetDataDependencies() + host_device_tuples = [ + (h, d if d is not None else device_root) + for h, d in data_deps] + dev.PushChangedFiles(host_device_tuples, delete_device_stale=True) + if not host_device_tuples: + dev.RunShellCommand(['rm', '-rf', device_root], check_return=True) + dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True) + + def init_tool_and_start_servers(): + tool = self.GetTool(dev) + tool.CopyFiles(dev) + tool.SetupEnvironment() + + self._servers[str(dev)] = [] + if self.TestPackage() in _SUITE_REQUIRES_TEST_SERVER_SPAWNER: + self._servers[str(dev)].append( + local_test_server_spawner.LocalTestServerSpawner( + ports.AllocateTestServerPort(), dev, tool)) + + for s in self._servers[str(dev)]: + s.SetUp() + + steps = (install_apk, push_test_data, init_tool_and_start_servers) + if self._env.concurrent_adb: + reraiser_thread.RunAsync(steps) + else: + for step in steps: + step() + + self._env.parallel_devices.pMap(individual_device_set_up) + + #override + def _ShouldShard(self): + return True + + #override + def _CreateShards(self, tests): + # _crashes are tests that might crash and make the tests in the same shard + # following the crashed testcase not run. + # Thus we need to create separate shards for each crashed testcase, + # so that other tests can be run. + device_count = len(self._env.devices) + shards = [] + + # Add shards with only one suspect testcase. + shards += [[crash] for crash in self._crashes if crash in tests] + + # Delete suspect testcase from tests. 
+ tests = [test for test in tests if not test in self._crashes] + + for i in xrange(0, device_count): + unbounded_shard = tests[i::device_count] + shards += [unbounded_shard[j:j+_MAX_SHARD_SIZE] + for j in xrange(0, len(unbounded_shard), _MAX_SHARD_SIZE)] + return shards + + #override + def _GetTests(self): + if self._test_instance.extract_test_list_from_filter: + # When the exact list of tests to run is given via command-line (e.g. when + # locally iterating on a specific test), skip querying the device (which + # takes ~3 seconds). + tests = _ExtractTestsFromFilter(self._test_instance.gtest_filter) + if tests: + return tests + + # Even when there's only one device, it still makes sense to retrieve the + # test list so that tests can be split up and run in batches rather than all + # at once (since test output is not streamed). + @local_device_environment.handle_shard_failures_with( + on_failure=self._env.BlacklistDevice) + def list_tests(dev): + raw_test_list = self._delegate.Run( + None, dev, flags='--gtest_list_tests', timeout=30) + tests = gtest_test_instance.ParseGTestListTests(raw_test_list) + if not tests: + logging.info('No tests found. Output:') + for l in raw_test_list: + logging.info(' %s', l) + tests = self._test_instance.FilterTests(tests) + return tests + + # Query all devices in case one fails. + test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None) + + # If all devices failed to list tests, raise an exception. + # Check that tl is not None and is not empty. + if all(not tl for tl in test_lists): + raise device_errors.CommandFailedError( + 'Failed to list tests on any device') + return list(sorted(set().union(*[set(tl) for tl in test_lists if tl]))) + + #override + def _RunTest(self, device, test): + # Run the test. + timeout = (self._test_instance.shard_timeout + * self.GetTool(device).GetTimeoutScale()) + output = self._delegate.Run( + test, device, flags=self._test_instance.test_arguments, + timeout=timeout, retries=0) + for s in self._servers[str(device)]: + s.Reset() + if self._test_instance.app_files: + self._delegate.PullAppFiles(device, self._test_instance.app_files, + self._test_instance.app_file_dir) + if not self._env.skip_clear_data: + self._delegate.Clear(device) + + # Parse the output. + # TODO(jbudorick): Transition test scripts away from parsing stdout. + results = gtest_test_instance.ParseGTestOutput(output) + + # Check whether there are any crashed testcases. + self._crashes.update(r.GetName() for r in results + if r.GetType() == base_test_result.ResultType.CRASH) + return results + + #override + def TearDown(self): + @local_device_environment.handle_shard_failures + def individual_device_tear_down(dev): + for s in self._servers.get(str(dev), []): + s.TearDown() + + tool = self.GetTool(dev) + tool.CleanUpEnvironment() + + self._env.parallel_devices.pMap(individual_device_tear_down) diff --git a/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/build/android/pylib/local/device/local_device_instrumentation_test_run.py new file mode 100644 index 00000000000..9f743353f99 --- /dev/null +++ b/build/android/pylib/local/device/local_device_instrumentation_test_run.py @@ -0,0 +1,381 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
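
Before the instrumentation runner begins, note what _CreateShards above does for gtests: previously-crashed tests get singleton shards, the remaining tests are dealt round-robin across devices, and each per-device slice is capped at _MAX_SHARD_SIZE. A worked sketch under assumed numbers (3 devices, 600 tests):

# Illustrative shard math only; not part of this patch.
device_count = 3
tests = ['Suite.Test%d' % i for i in xrange(600)]
shards = []
for i in xrange(0, device_count):
  unbounded_shard = tests[i::device_count]  # 200 tests per device here
  shards += [unbounded_shard[j:j+256]       # under the 256 cap: one chunk each
             for j in xrange(0, len(unbounded_shard), 256)]
assert len(shards) == 3  # with 900 tests this would be 6 (256 + 44 per device)
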
+
+import logging
+import os
+import posixpath
+import re
+import time
+
+from devil.android import device_errors
+from devil.android import flag_changer
+from devil.utils import reraiser_thread
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+import tombstones
+
+TIMEOUT_ANNOTATIONS = [
+  ('Manual', 10 * 60 * 60),
+  ('IntegrationTest', 30 * 60),
+  ('External', 10 * 60),
+  ('EnormousTest', 10 * 60),
+  ('LargeTest', 5 * 60),
+  ('MediumTest', 3 * 60),
+  ('SmallTest', 1 * 60),
+]
+
+
+# TODO(jbudorick): Make this private once the instrumentation test_runner is
+# deprecated.
+def DidPackageCrashOnDevice(package_name, device):
+  # Dismiss any error dialogs. Limit the number in case we have an error
+  # loop or we are failing to dismiss.
+  try:
+    for _ in xrange(10):
+      package = device.DismissCrashDialogIfNeeded()
+      if not package:
+        return False
+      # Assume test package convention of ".test" suffix
+      if package in package_name:
+        return True
+  except device_errors.CommandFailedError:
+    logging.exception('Error while attempting to dismiss crash dialog.')
+  return False
+
+
+_CURRENT_FOCUS_CRASH_RE = re.compile(
+    r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+
+class LocalDeviceInstrumentationTestRun(
+    local_device_test_run.LocalDeviceTestRun):
+  def __init__(self, env, test_instance):
+    super(LocalDeviceInstrumentationTestRun, self).__init__(env, test_instance)
+    self._flag_changers = {}
+
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  def SetUp(self):
+    def substitute_device_root(d, device_root):
+      if not d:
+        return device_root
+      elif isinstance(d, list):
+        return posixpath.join(*(p if p else device_root for p in d))
+      else:
+        return d
+
+    @local_device_environment.handle_shard_failures_with(
+        self._env.BlacklistDevice)
+    def individual_device_set_up(dev, host_device_tuples):
+      def install_apk():
+        if self._test_instance.apk_under_test:
+          if self._test_instance.apk_under_test_incremental_install_script:
+            local_device_test_run.IncrementalInstall(
+                dev,
+                self._test_instance.apk_under_test,
+                self._test_instance.apk_under_test_incremental_install_script)
+          else:
+            permissions = self._test_instance.apk_under_test.GetPermissions()
+            dev.Install(self._test_instance.apk_under_test,
+                        permissions=permissions)
+
+        if self._test_instance.test_apk_incremental_install_script:
+          local_device_test_run.IncrementalInstall(
+              dev,
+              self._test_instance.test_apk,
+              self._test_instance.test_apk_incremental_install_script)
+        else:
+          permissions = self._test_instance.test_apk.GetPermissions()
+          dev.Install(self._test_instance.test_apk, permissions=permissions)
+
+        for apk in self._test_instance.additional_apks:
+          dev.Install(apk)
+
+        # Set debug app in order to enable reading command line flags on user
+        # builds.
+        if self._test_instance.flags:
+          if not self._test_instance.package_info:
+            logging.error("Couldn't set debug app: no package info")
+          elif not self._test_instance.package_info.package:
+            logging.error("Couldn't set debug app: no package defined")
+          else:
+            dev.RunShellCommand(['am', 'set-debug-app', '--persistent',
+                                 self._test_instance.package_info.package],
+                                check_return=True)
+
+      def push_test_data():
+        device_root = posixpath.join(dev.GetExternalStoragePath(),
+                                     'chromium_tests_root')
+        host_device_tuples_substituted = [
+            (h, substitute_device_root(d, device_root))
+            for h, d in host_device_tuples]
+
logging.info('instrumentation data deps:') + for h, d in host_device_tuples_substituted: + logging.info('%r -> %r', h, d) + dev.PushChangedFiles(host_device_tuples_substituted, + delete_device_stale=True) + if not host_device_tuples_substituted: + dev.RunShellCommand(['rm', '-rf', device_root], check_return=True) + dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True) + + def create_flag_changer(): + if self._test_instance.flags: + if not self._test_instance.package_info: + logging.error("Couldn't set flags: no package info") + elif not self._test_instance.package_info.cmdline_file: + logging.error("Couldn't set flags: no cmdline_file") + else: + self._CreateFlagChangerIfNeeded(dev) + logging.debug('Attempting to set flags: %r', + self._test_instance.flags) + self._flag_changers[str(dev)].AddFlags(self._test_instance.flags) + + valgrind_tools.SetChromeTimeoutScale( + dev, self._test_instance.timeout_scale) + + steps = (install_apk, push_test_data, create_flag_changer) + if self._env.concurrent_adb: + reraiser_thread.RunAsync(steps) + else: + for step in steps: + step() + if self._test_instance.store_tombstones: + tombstones.ClearAllTombstones(dev) + + self._env.parallel_devices.pMap( + individual_device_set_up, + self._test_instance.GetDataDependencies()) + + def TearDown(self): + @local_device_environment.handle_shard_failures_with( + self._env.BlacklistDevice) + def individual_device_tear_down(dev): + if str(dev) in self._flag_changers: + self._flag_changers[str(dev)].Restore() + + # Remove package-specific configuration + dev.RunShellCommand(['am', 'clear-debug-app'], check_return=True) + + valgrind_tools.SetChromeTimeoutScale(dev, None) + + self._env.parallel_devices.pMap(individual_device_tear_down) + + def _CreateFlagChangerIfNeeded(self, device): + if not str(device) in self._flag_changers: + self._flag_changers[str(device)] = flag_changer.FlagChanger( + device, self._test_instance.package_info.cmdline_file) + + #override + def _CreateShards(self, tests): + return tests + + #override + def _GetTests(self): + return self._test_instance.GetTests() + + def _GetTestName(self, test): + # pylint: disable=no-self-use + return '%s#%s' % (test['class'], test['method']) + + #override + def _GetUniqueTestName(self, test): + display_name = self._GetTestName(test) + if 'flags' in test: + flags = test['flags'] + if flags.add: + display_name = '%s with {%s}' % (display_name, ' '.join(flags.add)) + if flags.remove: + display_name = '%s without {%s}' % ( + display_name, ' '.join(flags.remove)) + return display_name + + #override + def _RunTest(self, device, test): + extras = {} + + flags = None + test_timeout_scale = None + if self._test_instance.coverage_directory: + coverage_basename = '%s.ec' % ('%s_group' % test[0]['method'] + if isinstance(test, list) else test['method']) + extras['coverage'] = 'true' + coverage_directory = os.path.join( + device.GetExternalStoragePath(), 'chrome', 'test', 'coverage') + coverage_device_file = os.path.join( + coverage_directory, coverage_basename) + extras['coverageFile'] = coverage_device_file + + if isinstance(test, list): + if not self._test_instance.driver_apk: + raise Exception('driver_apk does not exist. 
' + 'Please build it and try again.') + + def name_and_timeout(t): + n = self._GetTestName(t) + i = self._GetTimeoutFromAnnotations(t['annotations'], n) + return (n, i) + + test_names, timeouts = zip(*(name_and_timeout(t) for t in test)) + + test_name = ','.join(test_names) + test_display_name = test_name + target = '%s/%s' % ( + self._test_instance.driver_package, + self._test_instance.driver_name) + extras.update( + self._test_instance.GetDriverEnvironmentVars( + test_list=test_names)) + timeout = sum(timeouts) + else: + test_name = self._GetTestName(test) + test_display_name = self._GetUniqueTestName(test) + target = '%s/%s' % ( + self._test_instance.test_package, self._test_instance.test_runner) + extras['class'] = test_name + if 'flags' in test: + flags = test['flags'] + timeout = self._GetTimeoutFromAnnotations( + test['annotations'], test_display_name) + + test_timeout_scale = self._GetTimeoutScaleFromAnnotations( + test['annotations']) + if test_timeout_scale and test_timeout_scale != 1: + valgrind_tools.SetChromeTimeoutScale( + device, test_timeout_scale * self._test_instance.timeout_scale) + + logging.info('preparing to run %s: %s', test_display_name, test) + + if flags: + self._CreateFlagChangerIfNeeded(device) + self._flag_changers[str(device)].PushFlags( + add=flags.add, remove=flags.remove) + + try: + time_ms = lambda: int(time.time() * 1e3) + start_ms = time_ms() + output = device.StartInstrumentation( + target, raw=True, extras=extras, timeout=timeout, retries=0) + duration_ms = time_ms() - start_ms + finally: + if flags: + self._flag_changers[str(device)].Restore() + if test_timeout_scale: + valgrind_tools.SetChromeTimeoutScale( + device, self._test_instance.timeout_scale) + + # TODO(jbudorick): Make instrumentation tests output a JSON so this + # doesn't have to parse the output. + result_code, result_bundle, statuses = ( + self._test_instance.ParseAmInstrumentRawOutput(output)) + results = self._test_instance.GenerateTestResults( + result_code, result_bundle, statuses, start_ms, duration_ms) + + # Update the result name if the test used flags. + if flags: + for r in results: + if r.GetName() == test_name: + r.SetName(test_display_name) + + # Add UNKNOWN results for any missing tests. + iterable_test = test if isinstance(test, list) else [test] + test_names = set(self._GetUniqueTestName(t) for t in iterable_test) + results_names = set(r.GetName() for r in results) + results.extend( + base_test_result.BaseTestResult(u, base_test_result.ResultType.UNKNOWN) + for u in test_names.difference(results_names)) + + # Update the result type if we detect a crash. + if DidPackageCrashOnDevice(self._test_instance.test_package, device): + for r in results: + if r.GetType() == base_test_result.ResultType.UNKNOWN: + r.SetType(base_test_result.ResultType.CRASH) + + # Handle failures by: + # - optionally taking a screenshot + # - logging the raw output at INFO level + # - clearing the application state while persisting permissions + if any(r.GetType() not in (base_test_result.ResultType.PASS, + base_test_result.ResultType.SKIP) + for r in results): + if self._test_instance.screenshot_dir: + file_name = '%s-%s.png' % ( + test_display_name, + time.strftime('%Y%m%dT%H%M%S', time.localtime())) + saved_dir = device.TakeScreenshot( + os.path.join(self._test_instance.screenshot_dir, file_name)) + logging.info( + 'Saved screenshot for %s to %s.', + test_display_name, saved_dir) + logging.info('detected failure in %s. 
raw output:', test_display_name) + for l in output: + logging.info(' %s', l) + if (not self._env.skip_clear_data + and self._test_instance.package_info): + permissions = ( + self._test_instance.apk_under_test.GetPermissions() + if self._test_instance.apk_under_test + else None) + device.ClearApplicationState(self._test_instance.package_info.package, + permissions=permissions) + + else: + logging.debug('raw output from %s:', test_display_name) + for l in output: + logging.debug(' %s', l) + if self._test_instance.coverage_directory: + device.PullFile(coverage_directory, + self._test_instance.coverage_directory) + device.RunShellCommand('rm -f %s' % os.path.join(coverage_directory, + '*')) + if self._test_instance.store_tombstones: + for result in results: + if result.GetType() == base_test_result.ResultType.CRASH: + resolved_tombstones = tombstones.ResolveTombstones( + device, + resolve_all_tombstones=True, + include_stack_symbols=False, + wipe_tombstones=True) + result.SetTombstones('\n'.join(resolved_tombstones)) + return results + + #override + def _ShouldRetry(self, test): + if 'RetryOnFailure' in test.get('annotations', {}): + return True + + # TODO(jbudorick): Remove this log message and switch the return value to + # False after tests have been annotated with @RetryOnFailure. + # See crbug.com/619055 for more details. + logging.warning('Default retries are being phased out. crbug.com/619055') + return True + + #override + def _ShouldShard(self): + return True + + @classmethod + def _GetTimeoutScaleFromAnnotations(cls, annotations): + try: + return int(annotations.get('TimeoutScale', 1)) + except ValueError as e: + logging.warning("Non-integer value of TimeoutScale ignored. (%s)", str(e)) + return 1 + + @classmethod + def _GetTimeoutFromAnnotations(cls, annotations, test_name): + for k, v in TIMEOUT_ANNOTATIONS: + if k in annotations: + timeout = v + break + else: + logging.warning('Using default 1 minute timeout for %s', test_name) + timeout = 60 + + timeout *= cls._GetTimeoutScaleFromAnnotations(annotations) + + return timeout + diff --git a/build/android/pylib/local/device/local_device_perf_test_run.py b/build/android/pylib/local/device/local_device_perf_test_run.py new file mode 100644 index 00000000000..b78553e0417 --- /dev/null +++ b/build/android/pylib/local/device/local_device_perf_test_run.py @@ -0,0 +1,506 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import io +import json +import logging +import os +import pickle +import shutil +import tempfile +import threading +import time +import zipfile + +from devil.android import battery_utils +from devil.android import device_errors +from devil.android import device_list +from devil.android import device_utils +from devil.android import forwarder +from devil.android.tools import device_recovery +from devil.android.tools import device_status +from devil.utils import cmd_helper +from devil.utils import parallelizer +from pylib import constants +from pylib.base import base_test_result +from pylib.constants import host_paths +from pylib.local.device import local_device_environment +from pylib.local.device import local_device_test_run + + +class HeartBeat(object): + + def __init__(self, shard, wait_time=60*10): + """ HeartBeat Logger constructor. + + Args: + shard: A perf test runner device shard. + wait_time: time to wait between heartbeat messages. 
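+        Defaults to 600 seconds (10 minutes).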
+ """ + self._shard = shard + self._running = False + self._timer = None + self._wait_time = wait_time + + def Start(self): + if not self._running: + self._timer = threading.Timer(self._wait_time, self._LogMessage) + self._timer.start() + self._running = True + + def Stop(self): + if self._running: + self._timer.cancel() + self._running = False + + def _LogMessage(self): + logging.info('Currently working on test %s', self._shard.current_test) + self._timer = threading.Timer(self._wait_time, self._LogMessage) + self._timer.start() + + +class TestShard(object): + def __init__(self, env, test_instance, tests, retries=3, timeout=None): + logging.info('Create shard for the following tests:') + for t in tests: + logging.info(' %s', t) + self._current_test = None + self._env = env + self._heart_beat = HeartBeat(self) + self._index = None + self._output_dir = None + self._retries = retries + self._test_instance = test_instance + self._tests = tests + self._timeout = timeout + + def _TestSetUp(self, test): + if (self._test_instance.collect_chartjson_data + or self._tests[test].get('archive_output_dir')): + self._output_dir = tempfile.mkdtemp() + + self._current_test = test + self._heart_beat.Start() + + def _RunSingleTest(self, test): + self._test_instance.WriteBuildBotJson(self._output_dir) + + timeout = self._tests[test].get('timeout', self._timeout) + cmd = self._CreateCmd(test) + cwd = os.path.abspath(host_paths.DIR_SOURCE_ROOT) + + self._LogTest(test, cmd, timeout) + + try: + start_time = time.time() + exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout( + cmd, timeout, cwd=cwd, shell=True) + end_time = time.time() + json_output = self._test_instance.ReadChartjsonOutput(self._output_dir) + if exit_code == 0: + result_type = base_test_result.ResultType.PASS + else: + result_type = base_test_result.ResultType.FAIL + except cmd_helper.TimeoutError as e: + end_time = time.time() + exit_code = -1 + output = e.output + json_output = '' + result_type = base_test_result.ResultType.TIMEOUT + + return self._ProcessTestResult(test, cmd, start_time, end_time, exit_code, + output, json_output, result_type) + + def _CreateCmd(self, test): + cmd = [] + if self._test_instance.dry_run: + cmd.append('echo') + cmd.append(self._tests[test]['cmd']) + if self._output_dir: + cmd.append('--output-dir=%s' % self._output_dir) + return ' '.join(self._ExtendCmd(cmd)) + + def _ExtendCmd(self, cmd): # pylint: disable=no-self-use + return cmd + + def _LogTest(self, _test, _cmd, _timeout): + raise NotImplementedError + + def _LogTestExit(self, test, exit_code, duration): + # pylint: disable=no-self-use + logging.info('%s : exit_code=%d in %d secs.', test, exit_code, duration) + + def _ExtendPersistedResult(self, persisted_result): + raise NotImplementedError + + def _ProcessTestResult(self, test, cmd, start_time, end_time, exit_code, + output, json_output, result_type): + if exit_code is None: + exit_code = -1 + + self._LogTestExit(test, exit_code, end_time - start_time) + + actual_exit_code = exit_code + if (self._test_instance.flaky_steps + and test in self._test_instance.flaky_steps): + exit_code = 0 + archive_bytes = (self._ArchiveOutputDir() + if self._tests[test].get('archive_output_dir') + else None) + persisted_result = { + 'name': test, + 'output': [output], + 'chartjson': json_output, + 'archive_bytes': archive_bytes, + 'exit_code': exit_code, + 'actual_exit_code': actual_exit_code, + 'result_type': result_type, + 'start_time': start_time, + 'end_time': end_time, + 'total_time': end_time - start_time, + 
'cmd': cmd, + } + self._ExtendPersistedResult(persisted_result) + self._SaveResult(persisted_result) + return result_type + + def _ArchiveOutputDir(self): + """Archive all files in the output dir, and return as compressed bytes.""" + with io.BytesIO() as archive: + with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as contents: + num_files = 0 + for absdir, _, files in os.walk(self._output_dir): + reldir = os.path.relpath(absdir, self._output_dir) + for filename in files: + src_path = os.path.join(absdir, filename) + # We use normpath to turn './file.txt' into just 'file.txt'. + dst_path = os.path.normpath(os.path.join(reldir, filename)) + contents.write(src_path, dst_path) + num_files += 1 + if num_files: + logging.info('%d files in the output dir were archived.', num_files) + else: + logging.warning('No files in the output dir. Archive is empty.') + return archive.getvalue() + + @staticmethod + def _SaveResult(result): + pickled = os.path.join(constants.PERF_OUTPUT_DIR, result['name']) + if os.path.exists(pickled): + with file(pickled, 'r') as f: + previous = pickle.loads(f.read()) + result['output'] = previous['output'] + result['output'] + with file(pickled, 'w') as f: + f.write(pickle.dumps(result)) + + def _TestTearDown(self): + if self._output_dir: + shutil.rmtree(self._output_dir, ignore_errors=True) + self._output_dir = None + self._heart_beat.Stop() + self._current_test = None + + @property + def current_test(self): + return self._current_test + + +class DeviceTestShard(TestShard): + def __init__( + self, env, test_instance, device, index, tests, retries=3, timeout=None): + super(DeviceTestShard, self).__init__( + env, test_instance, tests, retries, timeout) + self._battery = battery_utils.BatteryUtils(device) if device else None + self._device = device + self._index = index + + @local_device_environment.handle_shard_failures + def RunTestsOnShard(self): + results = base_test_result.TestRunResults() + for test in self._tests: + tries_left = self._retries + result_type = None + while (result_type != base_test_result.ResultType.PASS + and tries_left > 0): + try: + self._TestSetUp(test) + result_type = self._RunSingleTest(test) + except device_errors.CommandTimeoutError: + result_type = base_test_result.ResultType.TIMEOUT + except device_errors.CommandFailedError: + logging.exception('Exception when executing %s.', test) + result_type = base_test_result.ResultType.FAIL + finally: + self._TestTearDown() + if result_type != base_test_result.ResultType.PASS: + try: + device_recovery.RecoverDevice(self._device, self._env.blacklist) + except device_errors.CommandTimeoutError: + logging.exception( + 'Device failed to recover after failing %s.', test) + tries_left = tries_left - 1 + + results.AddResult(base_test_result.BaseTestResult(test, result_type)) + return results + + def _LogTestExit(self, test, exit_code, duration): + logging.info('%s : exit_code=%d in %d secs on device %s', + test, exit_code, duration, str(self._device)) + + def _TestSetUp(self, test): + if not self._device.IsOnline(): + msg = 'Device %s is unresponsive.' 
% str(self._device)
+      raise device_errors.DeviceUnreachableError(msg)
+
+    logging.info('Charge level: %s%%',
+                 str(self._battery.GetBatteryInfo().get('level')))
+    if self._test_instance.min_battery_level:
+      self._battery.ChargeDeviceToLevel(self._test_instance.min_battery_level)
+
+    logging.info('Temperature: %s (0.1 C)',
+                 str(self._battery.GetBatteryInfo().get('temperature')))
+    if self._test_instance.max_battery_temp:
+      self._battery.LetBatteryCoolToTemperature(
+          self._test_instance.max_battery_temp)
+
+    if not self._device.IsScreenOn():
+      self._device.SetScreen(True)
+
+    super(DeviceTestShard, self)._TestSetUp(test)
+
+  def _LogTest(self, test, cmd, timeout):
+    logging.debug("Running %s with command '%s' on shard %s with timeout %d",
+                  test, cmd, str(self._index), timeout)
+
+  def _ExtendCmd(self, cmd):
+    cmd.extend(['--device=%s' % str(self._device)])
+    return cmd
+
+  def _ExtendPersistedResult(self, persisted_result):
+    persisted_result['host_test'] = False
+    persisted_result['device'] = str(self._device)
+
+  def _TestTearDown(self):
+    try:
+      logging.info('Unmapping device ports for %s.', self._device)
+      forwarder.Forwarder.UnmapAllDevicePorts(self._device)
+    except Exception:  # pylint: disable=broad-except
+      logging.exception('Exception when resetting ports.')
+    finally:
+      super(DeviceTestShard, self)._TestTearDown()
+
+
+class HostTestShard(TestShard):
+  def __init__(self, env, test_instance, tests, retries=3, timeout=None):
+    super(HostTestShard, self).__init__(
+        env, test_instance, tests, retries, timeout)
+
+  @local_device_environment.handle_shard_failures
+  def RunTestsOnShard(self):
+    results = base_test_result.TestRunResults()
+    for test in self._tests:
+      tries_left = self._retries
+      result_type = None
+      while (result_type != base_test_result.ResultType.PASS
+             and tries_left > 0):
+        try:
+          self._TestSetUp(test)
+          result_type = self._RunSingleTest(test)
+        finally:
+          self._TestTearDown()
+        # Decrement the retry budget on every attempt so a persistently
+        # failing host test cannot loop forever.
+        tries_left -= 1
+      results.AddResult(base_test_result.BaseTestResult(test, result_type))
+    return results
+
+  def _LogTest(self, test, cmd, timeout):
+    logging.debug("Running %s with command '%s' on host shard with timeout %d",
+                  test, cmd, timeout)
+
+  def _ExtendPersistedResult(self, persisted_result):
+    persisted_result['host_test'] = True
+
+
+class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun):
+
+  _DEFAULT_TIMEOUT = 60 * 60
+  _CONFIG_VERSION = 1
+
+  def __init__(self, env, test_instance):
+    super(LocalDevicePerfTestRun, self).__init__(env, test_instance)
+    self._devices = None
+    self._env = env
+    self._no_device_tests = {}
+    self._test_buckets = []
+    self._test_instance = test_instance
+    self._timeout = None if test_instance.no_timeout else self._DEFAULT_TIMEOUT
+
+  def SetUp(self):
+    self._devices = self._GetAllDevices(self._env.devices,
+                                        self._test_instance.known_devices_file)
+
+    if os.path.exists(constants.PERF_OUTPUT_DIR):
+      shutil.rmtree(constants.PERF_OUTPUT_DIR)
+    os.makedirs(constants.PERF_OUTPUT_DIR)
+
+  def TearDown(self):
+    pass
+
+  def _GetStepsFromDict(self):
+    # By the time this is called, exactly one of single_step or steps must be
+    # set on the test instance.
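+    # single_step holds an inline command string; steps names a JSON config.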
+    if self._test_instance.single_step:
+      return {
+          'version': self._CONFIG_VERSION,
+          'steps': {
+              'single_step': {
+                  'device_affinity': 0,
+                  'cmd': self._test_instance.single_step
+              },
+          }
+      }
+    if self._test_instance.steps:
+      with file(self._test_instance.steps, 'r') as f:
+        steps = json.load(f)
+      if steps['version'] != self._CONFIG_VERSION:
+        raise TestDictVersionError(
+            'Version is expected to be %d but was %d' % (self._CONFIG_VERSION,
+                                                         steps['version']))
+      return steps
+    raise PerfTestRunGetStepsError(
+        'Neither single_step nor steps set in test_instance.')
+
+  def _SplitTestsByAffinity(self):
+    # This splits tests by their device affinity so that the same tests always
+    # run on the same devices. This is important for perf tests, since
+    # different devices might yield slightly different performance results.
+    test_dict = self._GetStepsFromDict()
+    for test, test_config in test_dict['steps'].iteritems():
+      try:
+        affinity = test_config.get('device_affinity')
+        if affinity is None:
+          self._no_device_tests[test] = test_config
+        else:
+          while len(self._test_buckets) < affinity + 1:
+            self._test_buckets.append({})
+          self._test_buckets[affinity][test] = test_config
+      except KeyError:
+        logging.exception(
+            'Test config for %s is bad.\nConfig: %s', test, str(test_config))
+
+  @staticmethod
+  def _GetAllDevices(active_devices, devices_path):
+    try:
+      if devices_path:
+        devices = [device_utils.DeviceUtils(s)
+                   for s in device_list.GetPersistentDeviceList(devices_path)]
+        if not devices and active_devices:
+          logging.warning('%s is empty. Falling back to active devices.',
+                          devices_path)
+          devices = active_devices
+      else:
+        logging.warning('No known devices file was passed; device affinity '
+                        'will not work properly without one.')
+        devices = active_devices
+    except IOError as e:
+      logging.error('Unable to find %s [%s]', devices_path, e)
+      devices = active_devices
+    return sorted(devices)
+
+  #override
+  def RunTests(self):
+    # Affinitize the tests.
+    self._SplitTestsByAffinity()
+    if not self._test_buckets and not self._no_device_tests:
+      raise local_device_test_run.NoTestsError()
+
+    def run_perf_tests(shard_id):
+      if shard_id is None:
+        s = HostTestShard(self._env, self._test_instance,
+                          self._no_device_tests, retries=3,
+                          timeout=self._timeout)
+      else:
+        if device_status.IsBlacklisted(
+            str(self._devices[shard_id]), self._env.blacklist):
+          logging.warning('Device %s is not active. 
Will not create shard %s.', + str(self._devices[shard_id]), shard_id) + return None + s = DeviceTestShard(self._env, self._test_instance, + self._devices[shard_id], shard_id, + self._test_buckets[shard_id], + retries=self._env.max_tries, timeout=self._timeout) + return s.RunTestsOnShard() + + device_indices = range(min(len(self._devices), len(self._test_buckets))) + if self._no_device_tests: + device_indices.append(None) + shards = parallelizer.Parallelizer(device_indices).pMap(run_perf_tests) + return [x for x in shards.pGet(self._timeout) if x is not None] + + # override + def TestPackage(self): + return 'perf' + + # override + def _CreateShards(self, _tests): + raise NotImplementedError + + # override + def _GetTests(self): + return self._test_buckets + + # override + def _RunTest(self, _device, _test): + raise NotImplementedError + + # override + def _ShouldShard(self): + return False + + +class OutputJsonList(LocalDevicePerfTestRun): + # override + def SetUp(self): + pass + + # override + def RunTests(self): + result_type = self._test_instance.OutputJsonList() + result = base_test_result.TestRunResults() + result.AddResult( + base_test_result.BaseTestResult('OutputJsonList', result_type)) + return [result] + + # override + def _CreateShards(self, _tests): + raise NotImplementedError + + # override + def _RunTest(self, _device, _test): + raise NotImplementedError + + +class PrintStep(LocalDevicePerfTestRun): + # override + def SetUp(self): + pass + + # override + def RunTests(self): + result_type = self._test_instance.PrintTestOutput() + result = base_test_result.TestRunResults() + result.AddResult( + base_test_result.BaseTestResult('PrintStep', result_type)) + return [result] + + # override + def _CreateShards(self, _tests): + raise NotImplementedError + + # override + def _RunTest(self, _device, _test): + raise NotImplementedError + + +class TestDictVersionError(Exception): + pass + +class PerfTestRunGetStepsError(Exception): + pass diff --git a/build/android/pylib/local/device/local_device_test_run.py b/build/android/pylib/local/device/local_device_test_run.py new file mode 100644 index 00000000000..70946ac2ec1 --- /dev/null +++ b/build/android/pylib/local/device/local_device_test_run.py @@ -0,0 +1,183 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import fnmatch +import imp +import logging +import signal +import thread +import threading + +from devil.utils import signal_handler +from pylib import valgrind_tools +from pylib.base import base_test_result +from pylib.base import test_run +from pylib.base import test_collection +from pylib.local.device import local_device_environment + + +def IncrementalInstall(device, apk_helper, installer_script): + """Performs an incremental install. + + Args: + device: Device to install on. + apk_helper: ApkHelper instance for the _incremental.apk. + installer_script: Path to the installer script for the incremental apk. + """ + try: + install_wrapper = imp.load_source('install_wrapper', installer_script) + except IOError: + raise Exception('Incremental install script not found: %s\n' % + installer_script) + params = install_wrapper.GetInstallParameters() + + from incremental_install import installer + installer.Install(device, apk_helper, split_globs=params['splits'], + native_libs=params['native_libs'], + dex_files=params['dex_files'], + permissions=None) # Auto-grant permissions from manifest. 
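
IncrementalInstall() above only relies on the generated installer script
exposing a GetInstallParameters() entry point, loaded via imp.load_source().
A minimal sketch of what such a wrapper is assumed to look like; the globs
and paths below are hypothetical, not taken from the real generated script:

# Hypothetical installer wrapper consumed by IncrementalInstall() above.
def GetInstallParameters():
  # Keys mirror the params dict unpacked by IncrementalInstall(); the values
  # here are made-up examples.
  return {
      'splits': ['*_split.apk'],                      # split APK glob patterns
      'native_libs': ['libs/armeabi-v7a/libfoo.so'],  # device native libs
      'dex_files': ['classes.dex'],                   # dex files to push
  }
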
+ + +class LocalDeviceTestRun(test_run.TestRun): + + def __init__(self, env, test_instance): + super(LocalDeviceTestRun, self).__init__(env, test_instance) + self._tools = {} + + #override + def RunTests(self): + tests = self._GetTests() + + exit_now = threading.Event() + + @local_device_environment.handle_shard_failures + def run_tests_on_device(dev, tests, results): + for test in tests: + if exit_now.isSet(): + thread.exit() + + result = None + try: + result = self._RunTest(dev, test) + if isinstance(result, base_test_result.BaseTestResult): + results.AddResult(result) + elif isinstance(result, list): + results.AddResults(result) + else: + raise Exception( + 'Unexpected result type: %s' % type(result).__name__) + except: + if isinstance(tests, test_collection.TestCollection): + tests.add(test) + raise + finally: + if isinstance(tests, test_collection.TestCollection): + tests.test_completed() + + + logging.info('Finished running tests on this device.') + + class TestsTerminated(Exception): + pass + + def stop_tests(_signum, _frame): + logging.critical('Received SIGTERM. Stopping test execution.') + exit_now.set() + raise TestsTerminated() + + try: + with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests): + tries = 0 + results = [] + while tries < self._env.max_tries and tests: + logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries) + logging.info('Will run %d tests on %d devices: %s', + len(tests), len(self._env.devices), + ', '.join(str(d) for d in self._env.devices)) + for t in tests: + logging.debug(' %s', t) + + try_results = base_test_result.TestRunResults() + test_names = (self._GetUniqueTestName(t) for t in tests) + try_results.AddResults( + base_test_result.BaseTestResult( + t, base_test_result.ResultType.UNKNOWN) + for t in test_names if not t.endswith('*')) + + try: + if self._ShouldShard(): + tc = test_collection.TestCollection(self._CreateShards(tests)) + self._env.parallel_devices.pMap( + run_tests_on_device, tc, try_results).pGet(None) + else: + self._env.parallel_devices.pMap( + run_tests_on_device, tests, try_results).pGet(None) + finally: + results.append(try_results) + + tries += 1 + tests = self._GetTestsToRetry(tests, try_results) + + logging.info('FINISHED TRY #%d/%d', tries, self._env.max_tries) + if tests: + logging.info('%d failed tests remain.', len(tests)) + else: + logging.info('All tests completed.') + except TestsTerminated: + pass + + return results + + def _GetTestsToRetry(self, tests, try_results): + + def is_failure_result(test_result): + return ( + test_result is None + or test_result.GetType() not in ( + base_test_result.ResultType.PASS, + base_test_result.ResultType.SKIP)) + + all_test_results = {r.GetName(): r for r in try_results.GetAll()} + + def test_failed(name): + # When specifying a test filter, names can contain trailing wildcards. 
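+      # A trailing-wildcard name counts as failed when any recorded result
+      # matching it failed; an exact name counts when missing or not PASS/SKIP.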
+ # See local_device_gtest_run._ExtractTestsFromFilter() + if name.endswith('*'): + return any(fnmatch.fnmatch(n, name) and is_failure_result(t) + for n, t in all_test_results.iteritems()) + return is_failure_result(all_test_results.get(name)) + + failed_tests = (t for t in tests if test_failed(self._GetUniqueTestName(t))) + + return [t for t in failed_tests if self._ShouldRetry(t)] + + def GetTool(self, device): + if not str(device) in self._tools: + self._tools[str(device)] = valgrind_tools.CreateTool( + self._env.tool, device) + return self._tools[str(device)] + + def _CreateShards(self, tests): + raise NotImplementedError + + def _GetUniqueTestName(self, test): + # pylint: disable=no-self-use + return test + + def _ShouldRetry(self, test): + # pylint: disable=no-self-use,unused-argument + return True + + def _GetTests(self): + raise NotImplementedError + + def _RunTest(self, device, test): + raise NotImplementedError + + def _ShouldShard(self): + raise NotImplementedError + + +class NoTestsError(Exception): + """Error for when no tests are found.""" diff --git a/build/android/pylib/local/device/local_device_test_run_test.py b/build/android/pylib/local/device/local_device_test_run_test.py new file mode 100644 index 00000000000..83a0972e274 --- /dev/null +++ b/build/android/pylib/local/device/local_device_test_run_test.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=protected-access + +import unittest + +from pylib.base import base_test_result +from pylib.constants import host_paths +from pylib.local.device import local_device_test_run + +with host_paths.SysPath(host_paths.PYMOCK_PATH): + import mock # pylint: disable=import-error + + +class TestLocalDeviceTestRun(local_device_test_run.LocalDeviceTestRun): + + # pylint: disable=abstract-method + + def __init__(self): + super(TestLocalDeviceTestRun, self).__init__( + mock.MagicMock(), mock.MagicMock()) + + +class TestLocalDeviceNonStringTestRun( + local_device_test_run.LocalDeviceTestRun): + + # pylint: disable=abstract-method + + def __init__(self): + super(TestLocalDeviceNonStringTestRun, self).__init__( + mock.MagicMock(), mock.MagicMock()) + + def _GetUniqueTestName(self, test): + return test['name'] + + +class LocalDeviceTestRunTest(unittest.TestCase): + + def testGetTestsToRetry_allTestsPassed(self): + results = [ + base_test_result.BaseTestResult( + 'Test1', base_test_result.ResultType.PASS), + base_test_result.BaseTestResult( + 'Test2', base_test_result.ResultType.PASS), + ] + + tests = [r.GetName() for r in results] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEquals(0, len(tests_to_retry)) + + def testGetTestsToRetry_testFailed(self): + results = [ + base_test_result.BaseTestResult( + 'Test1', base_test_result.ResultType.FAIL), + base_test_result.BaseTestResult( + 'Test2', base_test_result.ResultType.PASS), + ] + + tests = [r.GetName() for r in results] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEquals(1, len(tests_to_retry)) + self.assertIn('Test1', tests_to_retry) + + def testGetTestsToRetry_testUnknown(self): + results = [ + 
base_test_result.BaseTestResult( + 'Test2', base_test_result.ResultType.PASS), + ] + + tests = ['Test1'] + [r.GetName() for r in results] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEquals(1, len(tests_to_retry)) + self.assertIn('Test1', tests_to_retry) + + def testGetTestsToRetry_wildcardFilter_allPass(self): + results = [ + base_test_result.BaseTestResult( + 'TestCase.Test1', base_test_result.ResultType.PASS), + base_test_result.BaseTestResult( + 'TestCase.Test2', base_test_result.ResultType.PASS), + ] + + tests = ['TestCase.*'] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEquals(0, len(tests_to_retry)) + + def testGetTestsToRetry_wildcardFilter_oneFails(self): + results = [ + base_test_result.BaseTestResult( + 'TestCase.Test1', base_test_result.ResultType.PASS), + base_test_result.BaseTestResult( + 'TestCase.Test2', base_test_result.ResultType.FAIL), + ] + + tests = ['TestCase.*'] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEquals(1, len(tests_to_retry)) + self.assertIn('TestCase.*', tests_to_retry) + + def testGetTestsToRetry_nonStringTests(self): + results = [ + base_test_result.BaseTestResult( + 'TestCase.Test1', base_test_result.ResultType.PASS), + base_test_result.BaseTestResult( + 'TestCase.Test2', base_test_result.ResultType.FAIL), + ] + + tests = [ + {'name': 'TestCase.Test1'}, + {'name': 'TestCase.Test2'}, + ] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceNonStringTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEquals(1, len(tests_to_retry)) + self.assertIsInstance(tests_to_retry[0], dict) + self.assertEquals(tests[1], tests_to_retry[0]) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/build/android/pylib/local/local_test_server_spawner.py b/build/android/pylib/local/local_test_server_spawner.py new file mode 100644 index 00000000000..db9fbfddc46 --- /dev/null +++ b/build/android/pylib/local/local_test_server_spawner.py @@ -0,0 +1,45 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
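
The retry selection exercised by the tests above reduces to fnmatch over the
recorded result names. A standalone sketch, simplified to two result types
(the real code also treats SKIP as a pass):

import fnmatch

# Recorded results by name; 'PASS'/'FAIL' stand in for ResultType values.
results = {'TestCase.Test1': 'PASS', 'TestCase.Test2': 'FAIL'}

def should_retry(name):
  # A trailing-wildcard name is retried if any matching result failed;
  # an exact name is retried when it is missing or failed.
  if name.endswith('*'):
    return any(fnmatch.fnmatch(n, name) and r != 'PASS'
               for n, r in results.items())
  return results.get(name) != 'PASS'

assert should_retry('TestCase.*')          # one matching test failed
assert not should_retry('TestCase.Test1')  # passed, no retry
assert should_retry('TestCase.Test3')      # never ran, so it is retried
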
+ +from devil.android import forwarder +from pylib import chrome_test_server_spawner +from pylib.base import test_server + + +class LocalTestServerSpawner(test_server.TestServer): + + def __init__(self, port, device, tool): + super(LocalTestServerSpawner, self).__init__() + self._device = device + self._spawning_server = chrome_test_server_spawner.SpawningServer( + port, device, tool) + self._tool = tool + + @property + def server_address(self): + return self._spawning_server.server.server_address + + @property + def port(self): + return self.server_address[1] + + #override + def SetUp(self): + self._device.WriteFile( + '%s/net-test-server-ports' % self._device.GetExternalStoragePath(), + '%s:0' % str(self.port)) + forwarder.Forwarder.Map( + [(self.port, self.port)], self._device, self._tool) + self._spawning_server.Start() + + #override + def Reset(self): + self._spawning_server.CleanupState() + + #override + def TearDown(self): + self.Reset() + self._spawning_server.Stop() + forwarder.Forwarder.UnmapDevicePort(self.port, self._device) + diff --git a/build/android/pylib/monkey/__init__.py b/build/android/pylib/monkey/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/build/android/pylib/monkey/setup.py b/build/android/pylib/monkey/setup.py new file mode 100644 index 00000000000..fe690a505e9 --- /dev/null +++ b/build/android/pylib/monkey/setup.py @@ -0,0 +1,27 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Generates test runner factory and tests for monkey tests.""" + +from pylib.monkey import test_runner + + +def Setup(test_options): + """Create and return the test runner factory and tests. + + Args: + test_options: A MonkeyOptions object. + + Returns: + A tuple of (TestRunnerFactory, tests). + """ + # Token to replicate across devices as the "test". The TestRunner does all of + # the work to run the test. + tests = ['MonkeyTest'] + + def TestRunnerFactory(device, shard_index): + return test_runner.TestRunner( + test_options, device, shard_index) + + return (TestRunnerFactory, tests) diff --git a/build/android/pylib/monkey/test_options.py b/build/android/pylib/monkey/test_options.py new file mode 100644 index 00000000000..54d3d0840e1 --- /dev/null +++ b/build/android/pylib/monkey/test_options.py @@ -0,0 +1,16 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Defines the MonkeyOptions named tuple.""" + +import collections + +MonkeyOptions = collections.namedtuple('MonkeyOptions', [ + 'verbose_count', + 'package', + 'event_count', + 'category', + 'throttle', + 'seed', + 'extra_args']) diff --git a/build/android/pylib/monkey/test_runner.py b/build/android/pylib/monkey/test_runner.py new file mode 100644 index 00000000000..ff4c9400fca --- /dev/null +++ b/build/android/pylib/monkey/test_runner.py @@ -0,0 +1,110 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
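
MonkeyOptions above is a plain namedtuple, so wiring it into Setup() is
straightforward. A sketch of the call; every field value below is a
hypothetical example, not a required default:

from pylib.monkey import setup
from pylib.monkey import test_options

options = test_options.MonkeyOptions(
    verbose_count=1,
    package='chrome',     # key into constants.PACKAGE_INFO
    event_count=10000,
    category=['android.intent.category.LAUNCHER'],
    throttle=100,         # ms between injected events
    seed=None,            # TestRunner picks a random seed when None
    extra_args='')

runner_factory, tests = setup.Setup(options)  # tests == ['MonkeyTest']
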
+ +"""Runs a monkey test on a single device.""" + +import logging +import random + +from devil.android import device_errors +from devil.android.sdk import intent +from pylib import constants +from pylib.base import base_test_result +from pylib.base import base_test_runner + +_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package + +class TestRunner(base_test_runner.BaseTestRunner): + """A TestRunner instance runs a monkey test on a single device.""" + + def __init__(self, test_options, device, _): + super(TestRunner, self).__init__(device, None) + self._options = test_options + self._package = constants.PACKAGE_INFO[self._options.package].package + self._activity = constants.PACKAGE_INFO[self._options.package].activity + + def _LaunchMonkeyTest(self): + """Runs monkey test for a given package. + + Returns: + Output from the monkey command on the device. + """ + + timeout_ms = self._options.event_count * self._options.throttle * 1.5 + + cmd = ['monkey', + '-p %s' % self._package, + ' '.join(['-c %s' % c for c in self._options.category]), + '--throttle %d' % self._options.throttle, + '-s %d' % (self._options.seed or random.randint(1, 100)), + '-v ' * self._options.verbose_count, + '--monitor-native-crashes', + '--kill-process-after-error', + self._options.extra_args, + '%d' % self._options.event_count] + return self.device.RunShellCommand(' '.join(cmd), timeout=timeout_ms) + + def RunTest(self, test_name): + """Run a Monkey test on the device. + + Args: + test_name: String to use for logging the test result. + + Returns: + A tuple of (TestRunResults, retry). + """ + self.device.StartActivity( + intent.Intent(package=self._package, activity=self._activity, + action='android.intent.action.MAIN'), + blocking=True, force_stop=True) + + # Chrome crashes are not always caught by Monkey test runner. + # Verify Chrome has the same PID before and after the test. + before_pids = self.device.GetPids(self._package) + + # Run the test. + output = '' + if before_pids: + if len(before_pids.get(self._package, [])) > 1: + raise Exception( + 'At most one instance of process %s expected but found pids: ' + '%s' % (self._package, before_pids)) + output = '\n'.join(self._LaunchMonkeyTest()) + after_pids = self.device.GetPids(self._package) + + crashed = True + if not self._package in before_pids: + logging.error('Failed to start the process.') + elif not self._package in after_pids: + logging.error('Process %s has died.', before_pids[self._package]) + elif before_pids[self._package] != after_pids[self._package]: + logging.error('Detected process restart %s -> %s', + before_pids[self._package], after_pids[self._package]) + else: + crashed = False + + results = base_test_result.TestRunResults() + success_pattern = 'Events injected: %d' % self._options.event_count + if success_pattern in output and not crashed: + result = base_test_result.BaseTestResult( + test_name, base_test_result.ResultType.PASS, log=output) + else: + result = base_test_result.BaseTestResult( + test_name, base_test_result.ResultType.FAIL, log=output) + if 'chrome' in self._options.package: + logging.warning('Starting MinidumpUploadService...') + # TODO(jbudorick): Update this after upstreaming. 
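+        # Ask Chrome's MinidumpUploadService (via its ACTION_FIND_ALL intent)
+        # to pick up any native crash dumps the monkey run produced.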
+ minidump_intent = intent.Intent( + action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE, + package=self._package, + activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE) + try: + self.device.RunShellCommand( + ['am', 'startservice'] + minidump_intent.am_args, + as_root=True, check_return=True) + except device_errors.CommandFailedError: + logging.exception('Failed to start MinidumpUploadService') + + results.AddResult(result) + return results, False diff --git a/build/android/pylib/perf/__init__.py b/build/android/pylib/perf/__init__.py new file mode 100644 index 00000000000..9228df89b0e --- /dev/null +++ b/build/android/pylib/perf/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/build/android/pylib/perf/perf_test_instance.py b/build/android/pylib/perf/perf_test_instance.py new file mode 100644 index 00000000000..426ffaf5503 --- /dev/null +++ b/build/android/pylib/perf/perf_test_instance.py @@ -0,0 +1,241 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import logging +import os +import pickle +import re + +from devil import base_error +from devil.utils import cmd_helper +from pylib import constants +from pylib.base import base_test_result +from pylib.base import test_instance +from pylib.constants import host_paths + + +_GIT_CR_POS_RE = re.compile(r'^Cr-Commit-Position: refs/heads/master@{#(\d+)}$') + + +def _GetPersistedResult(test_name): + file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name) + if not os.path.exists(file_name): + logging.error('File not found %s', file_name) + return None + + with file(file_name, 'r') as f: + return pickle.loads(f.read()) + + +def _GetChromiumRevision(): + # pylint: disable=line-too-long + """Get the git hash and commit position of the chromium master branch. + + See: + https://chromium.googlesource.com/chromium/tools/build/+/387e3cf3/scripts/slave/runtest.py#211 + + Returns: + A dictionary with 'revision' and 'commit_pos' keys. 
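+    Both values are None when the git call fails; commit_pos is also None
+    when no Cr-Commit-Position footer is found.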
+ """ + # pylint: enable=line-too-long + status, output = cmd_helper.GetCmdStatusAndOutput( + ['git', 'log', '-n', '1', '--pretty=format:%H%n%B', 'HEAD'], + cwd=host_paths.DIR_SOURCE_ROOT) + revision = None + commit_pos = None + if not status: + lines = output.splitlines() + revision = lines[0] + for line in reversed(lines): + m = _GIT_CR_POS_RE.match(line.strip()) + if m: + commit_pos = int(m.group(1)) + break + return {'revision': revision, 'commit_pos': commit_pos} + + +class PerfTestInstance(test_instance.TestInstance): + def __init__(self, args, _): + super(PerfTestInstance, self).__init__() + + self._collect_chartjson_data = args.collect_chartjson_data + self._dry_run = args.dry_run + self._flaky_steps = args.flaky_steps + self._output_dir_archive_path = args.output_dir_archive_path + # TODO(rnephew): Get rid of this when everything uses + # --output-dir-archive-path + if self._output_dir_archive_path is None and args.get_output_dir_archive: + self._output_dir_archive_path = args.get_output_dir_archive + self._known_devices_file = args.known_devices_file + self._max_battery_temp = args.max_battery_temp + self._min_battery_level = args.min_battery_level + self._no_timeout = args.no_timeout + self._output_chartjson_data = args.output_chartjson_data + self._output_json_list = args.output_json_list + self._print_step = args.print_step + self._single_step = ( + ' '.join(args.single_step_command) if args.single_step else None) + self._steps = args.steps + self._test_filter = args.test_filter + self._write_buildbot_json = args.write_buildbot_json + + def SetUp(self): + pass + + def TearDown(self): + pass + + def OutputJsonList(self): + try: + with file(self._steps, 'r') as i: + all_steps = json.load(i) + + step_values = [] + for k, v in all_steps['steps'].iteritems(): + data = {'test': k, 'device_affinity': v['device_affinity']} + + persisted_result = _GetPersistedResult(k) + if persisted_result: + data['start_time'] = persisted_result['start_time'] + data['end_time'] = persisted_result['end_time'] + data['total_time'] = persisted_result['total_time'] + data['has_archive'] = persisted_result['archive_bytes'] is not None + step_values.append(data) + + with file(self._output_json_list, 'w') as o: + o.write(json.dumps(step_values)) + return base_test_result.ResultType.PASS + except KeyError: + logging.exception('Persistent results file missing key.') + return base_test_result.ResultType.FAIL + + def PrintTestOutput(self): + """Helper method to print the output of previously executed test_name. + + Test_name is passed from the command line as print_step + + Returns: + exit code generated by the test step. + """ + persisted_result = _GetPersistedResult(self._print_step) + if not persisted_result: + raise PersistentDataError('No data for test %s found.' 
% self._print_step) + logging.info('*' * 80) + logging.info('Output from:') + logging.info(persisted_result['cmd']) + logging.info('*' * 80) + + output_formatted = '' + persisted_outputs = persisted_result['output'] + for i in xrange(len(persisted_outputs)): + output_formatted += '\n\nOutput from run #%d:\n\n%s' % ( + i, persisted_outputs[i]) + print output_formatted + + if self._output_chartjson_data: + with file(self._output_chartjson_data, 'w') as f: + f.write(persisted_result['chartjson']) + + if self._output_dir_archive_path: + if persisted_result['archive_bytes'] is not None: + with file(self._output_dir_archive_path, 'wb') as f: + f.write(persisted_result['archive_bytes']) + else: + logging.error('The output dir was not archived.') + if persisted_result['exit_code'] == 0: + return base_test_result.ResultType.PASS + return base_test_result.ResultType.FAIL + + #override + def TestType(self): + return 'perf' + + @staticmethod + def ReadChartjsonOutput(output_dir): + if not output_dir: + return '' + json_output_path = os.path.join(output_dir, 'results-chart.json') + try: + with open(json_output_path) as f: + return f.read() + except IOError: + logging.exception('Exception when reading chartjson.') + logging.error('This usually means that telemetry did not run, so it could' + ' not generate the file. Please check the device running' + ' the test.') + return '' + + def WriteBuildBotJson(self, output_dir): + """Write metadata about the buildbot environment to the output dir.""" + if not output_dir or not self._write_buildbot_json: + return + data = { + 'chromium': _GetChromiumRevision(), + 'environment': dict(os.environ) + } + with open(os.path.join(output_dir, 'buildbot.json'), 'w') as f: + json.dump(data, f, sort_keys=True, separators=(',', ': ')) + + @property + def collect_chartjson_data(self): + return self._collect_chartjson_data + + @property + def dry_run(self): + return self._dry_run + + @property + def flaky_steps(self): + return self._flaky_steps + + @property + def known_devices_file(self): + return self._known_devices_file + + @property + def max_battery_temp(self): + return self._max_battery_temp + + @property + def min_battery_level(self): + return self._min_battery_level + + @property + def no_timeout(self): + return self._no_timeout + + @property + def output_chartjson_data(self): + return self._output_chartjson_data + + @property + def output_dir_archive_path(self): + return self._output_dir_archive_path + + @property + def output_json_list(self): + return self._output_json_list + + @property + def print_step(self): + return self._print_step + + @property + def single_step(self): + return self._single_step + + @property + def steps(self): + return self._steps + + @property + def test_filter(self): + return self._test_filter + + +class PersistentDataError(base_error.BaseError): + def __init__(self, message): + super(PersistentDataError, self).__init__(message) + self._is_infra_error = True diff --git a/build/android/pylib/pexpect.py b/build/android/pylib/pexpect.py new file mode 100644 index 00000000000..cf59fb0f6d3 --- /dev/null +++ b/build/android/pylib/pexpect.py @@ -0,0 +1,21 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
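
The persisted result written by TestShard._SaveResult() and read back by
_GetPersistedResult() above is a plain pickled dict. A sketch of inspecting
one offline; the step name 'sunspider' is hypothetical:

import os
import pickle

from pylib import constants

# Each step's result lives under PERF_OUTPUT_DIR, keyed by step name.
with open(os.path.join(constants.PERF_OUTPUT_DIR, 'sunspider'), 'rb') as f:
  result = pickle.load(f)

print result['name'], result['exit_code'], result['total_time']
for run_output in result['output']:  # one entry per (re)run of the step
  print run_output[:200]
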
+from __future__ import absolute_import + +import os +import sys + +_CHROME_SRC = os.path.join( + os.path.abspath(os.path.dirname(__file__)), '..', '..', '..') + +_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect') +if _PEXPECT_PATH not in sys.path: + sys.path.append(_PEXPECT_PATH) + +# pexpect is not available on all platforms. We allow this file to be imported +# on platforms without pexpect and only fail when pexpect is actually used. +try: + from pexpect import * # pylint: disable=W0401,W0614 +except ImportError: + pass diff --git a/build/android/pylib/remote/__init__.py b/build/android/pylib/remote/__init__.py new file mode 100644 index 00000000000..4d6aabb953d --- /dev/null +++ b/build/android/pylib/remote/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/build/android/pylib/remote/device/__init__.py b/build/android/pylib/remote/device/__init__.py new file mode 100644 index 00000000000..4d6aabb953d --- /dev/null +++ b/build/android/pylib/remote/device/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/build/android/pylib/remote/device/appurify_constants.py b/build/android/pylib/remote/device/appurify_constants.py new file mode 100644 index 00000000000..cf99bb65107 --- /dev/null +++ b/build/android/pylib/remote/device/appurify_constants.py @@ -0,0 +1,58 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Defines a set of constants specific to appurify.""" + +# Appurify network config constants. +class NETWORK(object): + WIFI_1_BAR = 1 + SPRINT_4G_LTE_4_BARS = 2 + SPRINT_3G_5_BARS = 3 + SPRINT_3G_4_BARS = 4 + SPRINT_3G_3_BARS = 5 + SPRINT_3G_2_BARS = 6 + SPRINT_3G_1_BAR = 7 + SPRING_4G_1_BAR = 8 + VERIZON_3G_5_BARS = 9 + VERIZON_3G_4_BARS = 10 + VERIZON_3G_3_BARS = 11 + VERIZON_3G_2_BARS = 12 + VERIZON_3G_1_BAR = 13 + VERIZON_4G_1_BAR = 14 + ATANDT_3G_5_BARS = 15 + ATANDT_3G_4_BARS = 16 + ATANDT_3G_3_BARS = 17 + ATANDT_3G_2_BARS = 18 + ATANDT_3G_1_BAR = 19 + GENERIC_2G_4_BARS = 20 + GENERIC_2G_3_BARS = 21 + GENERIC_EVOLVED_EDGE = 22 + GENERIC_GPRS = 23 + GENERIC_ENHANCED_GPRS = 24 + GENERIC_LTE = 25 + GENERIC_HIGH_LATENCY_DNS = 26 + GENERIC_100_PERCENT_PACKET_LOSS = 27 + ATANDT_HSPA_PLUS = 28 + ATANDT_4G_LTE_4_BARS = 29 + VERIZON_4G_LTE_4_BARS = 30 + GENERIC_DIGITAL_SUBSCRIBE_LINE = 31 + WIFI_STARBUCKS_3_BARS = 32 + WIFI_STARBUCKS_4_BARS = 33 + WIFI_STARBUCKS_HIGH_TRAFFIC = 34 + WIFI_TARGET_1_BAR = 35 + WIFI_TARGET_3_BARS = 36 + WIFI_TARGET_4_BARS = 37 + PUBLIC_WIFI_MCDONALDS_5_BARS = 38 + PUBLIC_WIFI_MCDONALDS_4_BARS = 39 + PUBLIC_WIFI_MCDONALDS_2_BARS = 40 + PUBLIC_WIFI_MCDONALDS_1_BAR = 41 + PUBLIC_WIFI_KOHLS_5_BARS = 42 + PUBLIC_WIFI_KOHLS_4_BARS = 43 + PUBLIC_WIFI_KOHLS_2_BARS = 44 + PUBLIC_WIFI_ATANDT_5_BARS = 45 + PUBLIC_WIFI_ATANDT_4_BARS = 46 + PUBLIC_WIFI_ATANDT_2_BARS = 47 + PUBLIC_WIFI_ATANDT_1_BAR = 48 + BOINGO = 49 + diff --git a/build/android/pylib/remote/device/appurify_sanitized.py b/build/android/pylib/remote/device/appurify_sanitized.py new file mode 100644 index 00000000000..48736d5617f --- /dev/null +++ b/build/android/pylib/remote/device/appurify_sanitized.py @@ -0,0 +1,43 @@ +# Copyright 2014 The Chromium Authors. 
All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import contextlib +import logging +import os + +from pylib.constants import host_paths + +_REQUESTS_PATH = os.path.join( + host_paths.DIR_SOURCE_ROOT, 'third_party', 'requests', 'src') +_APPURIFY_PYTHON_PATH = os.path.join( + host_paths.DIR_SOURCE_ROOT, 'third_party', 'appurify-python', 'src') + +with host_paths.SysPath(_REQUESTS_PATH), ( + host_paths.SysPath(_APPURIFY_PYTHON_PATH)): + + handlers_before = list(logging.getLogger().handlers) + + import appurify.api # pylint: disable=import-error + import appurify.utils # pylint: disable=import-error + + handlers_after = list(logging.getLogger().handlers) + new_handler = list(set(handlers_after) - set(handlers_before)) + while new_handler: + logging.info("Removing logging handler.") + logging.getLogger().removeHandler(new_handler.pop()) + + api = appurify.api + utils = appurify.utils + +# This is not thread safe. If multiple threads are ever supported with appurify +# this may cause logging messages to go missing. +@contextlib.contextmanager +def SanitizeLogging(verbose_count, level): + if verbose_count < 2: + logging.disable(level) + yield True + logging.disable(logging.NOTSET) + else: + yield False + diff --git a/build/android/pylib/remote/device/dummy/BUILD.gn b/build/android/pylib/remote/device/dummy/BUILD.gn new file mode 100644 index 00000000000..54ca275ea34 --- /dev/null +++ b/build/android/pylib/remote/device/dummy/BUILD.gn @@ -0,0 +1,14 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/config.gni") +import("//build/config/android/rules.gni") + +# GYP: //build/android/pylib/remote/device/dummy/dummy.gyp:remote_device_dummy_apk +android_apk("remote_device_dummy_apk") { + android_manifest = "//build/android/AndroidManifest.xml" + java_files = [ "src/org/chromium/dummy/Dummy.java" ] + apk_name = "remote_device_dummy" + testonly = true +} diff --git a/build/android/pylib/remote/device/dummy/dummy.gyp b/build/android/pylib/remote/device/dummy/dummy.gyp new file mode 100644 index 00000000000..a7c451fe5fd --- /dev/null +++ b/build/android/pylib/remote/device/dummy/dummy.gyp @@ -0,0 +1,48 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Running gtests on a remote device via am instrument requires both an "app" +# APK and a "test" APK with different package names. Our gtests only use one +# APK, so we build a dummy APK to upload as the app. 
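
Callers use SanitizeLogging (defined above) to mute appurify's logging unless
enough -v flags were passed. A sketch of the pattern; the verbosity value and
token are hypothetical:

import logging

from pylib.remote.device import appurify_sanitized

verbose_count = 1  # hypothetical; normally parsed from -v command-line flags
with appurify_sanitized.SanitizeLogging(verbose_count, logging.WARNING):
  # Calls in here run with logging at WARNING and below disabled.
  res = appurify_sanitized.api.devices_list('hypothetical-access-token')
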
+ +{ + 'variables': { + 'remote_device_dummy_apk_name': 'remote_device_dummy', + 'remote_device_dummy_apk_path': '<(PRODUCT_DIR)/apks/<(remote_device_dummy_apk_name).apk', + }, + 'targets': [ + { + # GN: //build/android/pylib/remote/device/dummy:remote_device_dummy_apk + 'target_name': 'remote_device_dummy_apk', + 'type': 'none', + 'variables': { + 'apk_name': '<(remote_device_dummy_apk_name)', + 'final_apk_path': '<(remote_device_dummy_apk_path)', + 'java_in_dir': '.', + 'never_lint': 1, + 'android_manifest_path': '../../../../../../build/android/AndroidManifest.xml', + }, + 'includes': [ + '../../../../../../build/java_apk.gypi', + ] + }, + { + 'target_name': 'require_remote_device_dummy_apk', + 'message': 'Making sure <(remote_device_dummy_apk_path) has been built.', + 'type': 'none', + 'variables': { + 'required_file': '<(PRODUCT_DIR)/remote_device_dummy_apk/<(remote_device_dummy_apk_name).apk.required', + }, + 'inputs': [ + '<(remote_device_dummy_apk_path)', + ], + 'outputs': [ + '<(required_file)', + ], + 'action': [ + 'python', '../../build/android/gyp/touch.py', '<(required_file)', + ], + } + ] +} diff --git a/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java b/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java new file mode 100644 index 00000000000..1281b39586e --- /dev/null +++ b/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java @@ -0,0 +1,9 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.dummy; + +/** Does nothing. */ +class Dummy {} + diff --git a/build/android/pylib/remote/device/remote_device_environment.py b/build/android/pylib/remote/device/remote_device_environment.py new file mode 100644 index 00000000000..7923f3ade13 --- /dev/null +++ b/build/android/pylib/remote/device/remote_device_environment.py @@ -0,0 +1,364 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Environment setup and teardown for remote devices.""" + +import distutils.version +import json +import logging +import os +import random +import sys + +from devil.utils import reraiser_thread +from devil.utils import timeout_retry +from pylib.base import environment +from pylib.remote.device import appurify_sanitized +from pylib.remote.device import remote_device_helper + +class RemoteDeviceEnvironment(environment.Environment): + """An environment for running on remote devices.""" + + _ENV_KEY = 'env' + _DEVICE_KEY = 'device' + _DEFAULT_RETRIES = 0 + + def __init__(self, args, error_func): + """Constructor. + + Args: + args: Command line arguments. + error_func: error to show when using bad command line arguments. 
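+        Typically an argparse parser's error method, so it is not expected
+        to return.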
+ """ + super(RemoteDeviceEnvironment, self).__init__() + self._access_token = None + self._device = None + self._device_type = args.device_type + self._verbose_count = args.verbose_count + self._timeouts = { + 'queueing': 60 * 10, + 'installing': 60 * 10, + 'in-progress': 60 * 30, + 'unknown': 60 * 5 + } + # Example config file: + # { + # "remote_device": ["Galaxy S4", "Galaxy S3"], + # "remote_device_os": ["4.4.2", "4.4.4"], + # "remote_device_minimum_os": "4.4.2", + # "api_address": "www.example.com", + # "api_port": "80", + # "api_protocol": "http", + # "api_secret": "apisecret", + # "api_key": "apikey", + # "timeouts": { + # "queueing": 600, + # "installing": 600, + # "in-progress": 1800, + # "unknown": 300 + # } + # } + if args.remote_device_file: + with open(args.remote_device_file) as device_file: + device_json = json.load(device_file) + else: + device_json = {} + + self._api_address = device_json.get('api_address', None) + self._api_key = device_json.get('api_key', None) + self._api_port = device_json.get('api_port', None) + self._api_protocol = device_json.get('api_protocol', None) + self._api_secret = device_json.get('api_secret', None) + self._device_oem = device_json.get('device_oem', None) + self._device_type = device_json.get('device_type', 'Android') + self._network_config = device_json.get('network_config', None) + self._remote_device = device_json.get('remote_device', None) + self._remote_device_minimum_os = device_json.get( + 'remote_device_minimum_os', None) + self._remote_device_os = device_json.get('remote_device_os', None) + self._remote_device_timeout = device_json.get( + 'remote_device_timeout', None) + self._results_path = device_json.get('results_path', None) + self._runner_package = device_json.get('runner_package', None) + self._runner_type = device_json.get('runner_type', None) + self._timeouts.update(device_json.get('timeouts', {})) + + def command_line_override( + file_value, cmd_line_value, desc, print_value=True): + if cmd_line_value: + if file_value and file_value != cmd_line_value: + if print_value: + logging.info('Overriding %s from %s to %s', + desc, file_value, cmd_line_value) + else: + logging.info('overriding %s', desc) + return cmd_line_value + return file_value + + self._api_address = command_line_override( + self._api_address, args.api_address, 'api_address') + self._api_port = command_line_override( + self._api_port, args.api_port, 'api_port') + self._api_protocol = command_line_override( + self._api_protocol, args.api_protocol, 'api_protocol') + self._device_oem = command_line_override( + self._device_oem, args.device_oem, 'device_oem') + self._device_type = command_line_override( + self._device_type, args.device_type, 'device_type') + self._network_config = command_line_override( + self._network_config, args.network_config, 'network_config') + self._remote_device = command_line_override( + self._remote_device, args.remote_device, 'remote_device') + self._remote_device_minimum_os = command_line_override( + self._remote_device_minimum_os, args.remote_device_minimum_os, + 'remote_device_minimum_os') + self._remote_device_os = command_line_override( + self._remote_device_os, args.remote_device_os, 'remote_device_os') + self._remote_device_timeout = command_line_override( + self._remote_device_timeout, args.remote_device_timeout, + 'remote_device_timeout') + self._results_path = command_line_override( + self._results_path, args.results_path, 'results_path') + self._runner_package = command_line_override( + self._runner_package, args.runner_package, 
'runner_package')
+    self._runner_type = command_line_override(
+        self._runner_type, args.runner_type, 'runner_type')
+    self._timeouts['in-progress'] = command_line_override(
+        self._timeouts['in-progress'], args.test_timeout, 'test_timeout')
+
+    if args.api_key_file:
+      with open(args.api_key_file) as api_key_file:
+        temp_key = api_key_file.read().strip()
+        self._api_key = command_line_override(
+            self._api_key, temp_key, 'api_key', print_value=False)
+    self._api_key = command_line_override(
+        self._api_key, args.api_key, 'api_key', print_value=False)
+
+    if args.api_secret_file:
+      with open(args.api_secret_file) as api_secret_file:
+        temp_secret = api_secret_file.read().strip()
+        self._api_secret = command_line_override(
+            self._api_secret, temp_secret, 'api_secret', print_value=False)
+    self._api_secret = command_line_override(
+        self._api_secret, args.api_secret, 'api_secret', print_value=False)
+
+    if not self._api_address:
+      error_func('Must set api address with --api-address'
+                 ' or in --remote-device-file.')
+    if not self._api_key:
+      error_func('Must set api key with --api-key, --api-key-file,'
+                 ' or in --remote-device-file.')
+    if not self._api_port:
+      error_func('Must set api port with --api-port'
+                 ' or in --remote-device-file.')
+    if not self._api_protocol:
+      error_func('Must set api protocol with --api-protocol'
+                 ' or in --remote-device-file. Example: http')
+    if not self._api_secret:
+      error_func('Must set api secret with --api-secret, --api-secret-file,'
+                 ' or in --remote-device-file.')
+
+    logging.info('Api address: %s', self._api_address)
+    logging.info('Api port: %s', self._api_port)
+    logging.info('Api protocol: %s', self._api_protocol)
+    logging.info('Remote device: %s', self._remote_device)
+    logging.info('Remote device minimum OS: %s',
+                 self._remote_device_minimum_os)
+    logging.info('Remote device OS: %s', self._remote_device_os)
+    logging.info('Remote device OEM: %s', self._device_oem)
+    logging.info('Remote device type: %s', self._device_type)
+    logging.info('Remote device timeout: %s', self._remote_device_timeout)
+    logging.info('Results path: %s', self._results_path)
+    logging.info('Runner package: %s', self._runner_package)
+    logging.info('Runner type: %s', self._runner_type)
+    logging.info('Timeouts: %s', self._timeouts)
+
+    if not args.trigger and not args.collect:
+      self._trigger = True
+      self._collect = True
+    else:
+      self._trigger = args.trigger
+      self._collect = args.collect
+
+  def SetUp(self):
+    """Set up the test environment."""
+    os.environ['APPURIFY_API_PROTO'] = self._api_protocol
+    os.environ['APPURIFY_API_HOST'] = self._api_address
+    os.environ['APPURIFY_API_PORT'] = self._api_port
+    os.environ['APPURIFY_STATUS_BASE_URL'] = 'none'
+    self._GetAccessToken()
+    if self._trigger:
+      self._SelectDevice()
+
+  def TearDown(self):
+    """Tear down the test environment."""
+    self._RevokeAccessToken()
+
+  def __enter__(self):
+    """Set up the test run when used as a context manager."""
+    try:
+      self.SetUp()
+      return self
+    except:
+      self.__exit__(*sys.exc_info())
+      raise
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    """Tears down the test run when used as a context manager."""
+    self.TearDown()
+
+  def DumpTo(self, persisted_data):
+    env_data = {
+        self._DEVICE_KEY: self._device,
+    }
+    persisted_data[self._ENV_KEY] = env_data
+
+  def LoadFrom(self, persisted_data):
+    env_data = persisted_data[self._ENV_KEY]
+    self._device = env_data[self._DEVICE_KEY]
+
+  def _GetAccessToken(self):
+    """Generates access token for remote device service."""
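+    # The token authorizes every subsequent Appurify API call in this run and
+    # is revoked again in TearDown via _RevokeAccessToken().
+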
logging.info('Generating remote service access token') + with appurify_sanitized.SanitizeLogging(self._verbose_count, + logging.WARNING): + access_token_results = appurify_sanitized.api.access_token_generate( + self._api_key, self._api_secret) + remote_device_helper.TestHttpResponse(access_token_results, + 'Unable to generate access token.') + self._access_token = access_token_results.json()['response']['access_token'] + + def _RevokeAccessToken(self): + """Destroys access token for remote device service.""" + logging.info('Revoking remote service access token') + with appurify_sanitized.SanitizeLogging(self._verbose_count, + logging.WARNING): + revoke_token_results = appurify_sanitized.api.access_token_revoke( + self._access_token) + remote_device_helper.TestHttpResponse(revoke_token_results, + 'Unable to revoke access token.') + + def _SelectDevice(self): + if self._remote_device_timeout: + try: + timeout_retry.Run(self._FindDeviceWithTimeout, + self._remote_device_timeout, self._DEFAULT_RETRIES) + except reraiser_thread.TimeoutError: + self._NoDeviceFound() + else: + if not self._FindDevice(): + self._NoDeviceFound() + + def _FindDevice(self): + """Find which device to use.""" + logging.info('Finding device to run tests on.') + device_list = self._GetDeviceList() + random.shuffle(device_list) + for device in device_list: + if device['os_name'] != self._device_type: + continue + if self._remote_device and device['name'] not in self._remote_device: + continue + if (self._remote_device_os + and device['os_version'] not in self._remote_device_os): + continue + if self._device_oem and device['brand'] not in self._device_oem: + continue + if (self._remote_device_minimum_os + and distutils.version.LooseVersion(device['os_version']) + < distutils.version.LooseVersion(self._remote_device_minimum_os)): + continue + if device['has_available_device']: + logging.info('Found device: %s %s', + device['name'], device['os_version']) + self._device = device + return True + return False + + def _FindDeviceWithTimeout(self): + """Find which device to use with timeout.""" + timeout_retry.WaitFor(self._FindDevice, wait_period=1) + + def _PrintAvailableDevices(self, device_list): + def compare_devices(a, b): + for key in ('os_version', 'name'): + c = cmp(a[key], b[key]) + if c: + return c + return 0 + + logging.critical('Available %s Devices:', self._device_type) + logging.critical( + ' %s %s %s %s %s', + 'OS'.ljust(10), + 'Device Name'.ljust(30), + 'Available'.ljust(10), + 'Busy'.ljust(10), + 'All'.ljust(10)) + devices = (d for d in device_list if d['os_name'] == self._device_type) + for d in sorted(devices, compare_devices): + logging.critical( + ' %s %s %s %s %s', + d['os_version'].ljust(10), + d['name'].ljust(30), + str(d['available_devices_count']).ljust(10), + str(d['busy_devices_count']).ljust(10), + str(d['all_devices_count']).ljust(10)) + + def _GetDeviceList(self): + with appurify_sanitized.SanitizeLogging(self._verbose_count, + logging.WARNING): + dev_list_res = appurify_sanitized.api.devices_list(self._access_token) + remote_device_helper.TestHttpResponse(dev_list_res, + 'Unable to generate access token.') + return dev_list_res.json()['response'] + + def _NoDeviceFound(self): + self._PrintAvailableDevices(self._GetDeviceList()) + raise remote_device_helper.RemoteDeviceError( + 'No device found.', is_infra_error=True) + + @property + def collect(self): + return self._collect + + @property + def device_type_id(self): + return self._device['device_type_id'] + + @property + def network_config(self): + 
return self._network_config + + @property + def results_path(self): + return self._results_path + + @property + def runner_package(self): + return self._runner_package + + @property + def runner_type(self): + return self._runner_type + + @property + def timeouts(self): + return self._timeouts + + @property + def token(self): + return self._access_token + + @property + def trigger(self): + return self._trigger + + @property + def verbose_count(self): + return self._verbose_count + + @property + def device_type(self): + return self._device_type diff --git a/build/android/pylib/remote/device/remote_device_gtest_run.py b/build/android/pylib/remote/device/remote_device_gtest_run.py new file mode 100644 index 00000000000..07a8108b5bd --- /dev/null +++ b/build/android/pylib/remote/device/remote_device_gtest_run.py @@ -0,0 +1,89 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Run specific test on specific environment.""" + +import logging +import os +import tempfile + +from pylib import constants +from pylib.base import base_test_result +from pylib.gtest import gtest_test_instance +from pylib.remote.device import remote_device_test_run + + +_EXTRA_COMMAND_LINE_FILE = ( + 'org.chromium.native_test.NativeTest.CommandLineFile') + + +class RemoteDeviceGtestTestRun(remote_device_test_run.RemoteDeviceTestRun): + """Run gtests and uirobot tests on a remote device.""" + + DEFAULT_RUNNER_PACKAGE = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner') + + #override + def TestPackage(self): + return self._test_instance.suite + + #override + def _TriggerSetUp(self): + """Set up the triggering of a test run.""" + logging.info('Triggering test run.') + + if self._env.runner_type: + logging.warning('Ignoring configured runner_type "%s"', + self._env.runner_type) + + if not self._env.runner_package: + runner_package = self.DEFAULT_RUNNER_PACKAGE + logging.info('Using default runner package: %s', + self.DEFAULT_RUNNER_PACKAGE) + else: + runner_package = self._env.runner_package + + dummy_app_path = os.path.join( + constants.GetOutDirectory(), 'apks', 'remote_device_dummy.apk') + + # pylint: disable=protected-access + with tempfile.NamedTemporaryFile(suffix='.flags.txt') as flag_file: + env_vars = dict(self._test_instance.extras) + if gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in env_vars: + env_vars[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int( + self._test_instance.shard_timeout * 1e9) + + flags = [] + + filter_string = self._test_instance._GenerateDisabledFilterString(None) + if filter_string: + flags.append('--gtest_filter=%s' % filter_string) + + if self._test_instance.test_arguments: + flags.append(self._test_instance.test_arguments) + + if flags: + flag_file.write('_ ' + ' '.join(flags)) + flag_file.flush() + env_vars[_EXTRA_COMMAND_LINE_FILE] = os.path.basename(flag_file.name) + self._test_instance._data_deps.append( + (os.path.abspath(flag_file.name), None)) + self._AmInstrumentTestSetup( + dummy_app_path, self._test_instance.apk, runner_package, + environment_variables=env_vars) + + _INSTRUMENTATION_STREAM_LEADER = 'INSTRUMENTATION_STATUS: stream=' + + #override + def _ParseTestResults(self): + logging.info('Parsing results from stdout.') + results = base_test_result.TestRunResults() + output = self._results['results']['output'].splitlines() + output = (l[len(self._INSTRUMENTATION_STREAM_LEADER):] for l in output + if 
l.startswith(self._INSTRUMENTATION_STREAM_LEADER)) + results_list = gtest_test_instance.ParseGTestOutput(output) + results.AddResults(results_list) + + self._DetectPlatformErrors(results) + return results diff --git a/build/android/pylib/remote/device/remote_device_helper.py b/build/android/pylib/remote/device/remote_device_helper.py new file mode 100644 index 00000000000..1b02207a027 --- /dev/null +++ b/build/android/pylib/remote/device/remote_device_helper.py @@ -0,0 +1,24 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Common functions and Exceptions for remote_device_*""" + +from devil import base_error + + +class RemoteDeviceError(base_error.BaseError): + """Exception to throw when problems occur with remote device service.""" + pass + + +def TestHttpResponse(response, error_msg): + """Checks the Http response from remote device service. + + Args: + response: response dict from the remote device service. + error_msg: Error message to display if bad response is seen. + """ + if response.status_code != 200: + raise RemoteDeviceError( + '%s (%d: %s)' % (error_msg, response.status_code, response.reason)) diff --git a/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py b/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py new file mode 100644 index 00000000000..ee0185794d1 --- /dev/null +++ b/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py @@ -0,0 +1,74 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Run specific test on specific environment.""" + +import logging +import os +import tempfile + +from pylib.base import base_test_result +from pylib.remote.device import remote_device_test_run + + +class RemoteDeviceInstrumentationTestRun( + remote_device_test_run.RemoteDeviceTestRun): + """Run instrumentation tests on a remote device.""" + + #override + def TestPackage(self): + return self._test_instance.test_package + + #override + def _TriggerSetUp(self): + """Set up the triggering of a test run.""" + logging.info('Triggering test run.') + + # pylint: disable=protected-access + with tempfile.NamedTemporaryFile(suffix='.txt') as test_list_file: + tests = self._test_instance.GetTests() + logging.debug('preparing to run %d instrumentation tests remotely:', + len(tests)) + for t in tests: + test_name = '%s#%s' % (t['class'], t['method']) + logging.debug(' %s', test_name) + test_list_file.write('%s\n' % test_name) + test_list_file.flush() + self._test_instance._data_deps.append( + (os.path.abspath(test_list_file.name), None)) + + env_vars = self._test_instance.GetDriverEnvironmentVars( + test_list_file_path=test_list_file.name) + + logging.debug('extras:') + for k, v in env_vars.iteritems(): + logging.debug(' %s: %s', k, v) + + self._AmInstrumentTestSetup( + self._test_instance.apk_under_test, + self._test_instance.driver_apk, + self._test_instance.driver_name, + environment_variables=env_vars, + extra_apks=([self._test_instance.test_apk] + + self._test_instance.additional_apks)) + + #override + def _ParseTestResults(self): + logging.info('Parsing results from stdout.') + r = base_test_result.TestRunResults() + result_code, result_bundle, statuses = ( + self._test_instance.ParseAmInstrumentRawOutput( + self._results['results']['output'].splitlines())) + result = 
self._test_instance.GenerateTestResults( + result_code, result_bundle, statuses, 0, 0) + + if isinstance(result, base_test_result.BaseTestResult): + r.AddResult(result) + elif isinstance(result, list): + r.AddResults(result) + else: + raise Exception('Unexpected result type: %s' % type(result).__name__) + + self._DetectPlatformErrors(r) + return r diff --git a/build/android/pylib/remote/device/remote_device_test_run.py b/build/android/pylib/remote/device/remote_device_test_run.py new file mode 100644 index 00000000000..c974b09ae38 --- /dev/null +++ b/build/android/pylib/remote/device/remote_device_test_run.py @@ -0,0 +1,392 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Run specific test on specific environment.""" + +import json +import logging +import os +import re +import shutil +import string +import tempfile +import time +import zipfile + +from devil.utils import zip_utils +from pylib.base import base_test_result +from pylib.base import test_run +from pylib.remote.device import appurify_constants +from pylib.remote.device import appurify_sanitized +from pylib.remote.device import remote_device_helper + +_DEVICE_OFFLINE_RE = re.compile('error: device not found') +_LONG_MSG_RE = re.compile('longMsg=(.*)$') +_SHORT_MSG_RE = re.compile('shortMsg=(.*)$') + +class RemoteDeviceTestRun(test_run.TestRun): + """Run tests on a remote device.""" + + _TEST_RUN_KEY = 'test_run' + _TEST_RUN_ID_KEY = 'test_run_id' + + WAIT_TIME = 5 + COMPLETE = 'complete' + HEARTBEAT_INTERVAL = 300 + + def __init__(self, env, test_instance): + """Constructor. + + Args: + env: Environment the tests will run in. + test_instance: The test that will be run. 
+ """ + super(RemoteDeviceTestRun, self).__init__(env, test_instance) + self._env = env + self._test_instance = test_instance + self._app_id = '' + self._test_id = '' + self._results = '' + self._test_run_id = '' + self._results_temp_dir = None + + #override + def SetUp(self): + """Set up a test run.""" + if self._env.trigger: + self._TriggerSetUp() + elif self._env.collect: + assert isinstance(self._env.collect, basestring), ( + 'File for storing test_run_id must be a string.') + with open(self._env.collect, 'r') as persisted_data_file: + persisted_data = json.loads(persisted_data_file.read()) + self._env.LoadFrom(persisted_data) + self.LoadFrom(persisted_data) + + def _TriggerSetUp(self): + """Set up the triggering of a test run.""" + raise NotImplementedError + + #override + def RunTests(self): + """Run the test.""" + if self._env.trigger: + with appurify_sanitized.SanitizeLogging(self._env.verbose_count, + logging.WARNING): + test_start_res = appurify_sanitized.api.tests_run( + self._env.token, self._env.device_type_id, self._app_id, + self._test_id) + remote_device_helper.TestHttpResponse( + test_start_res, 'Unable to run test.') + self._test_run_id = test_start_res.json()['response']['test_run_id'] + logging.info('Test run id: %s', self._test_run_id) + + if self._env.collect: + current_status = '' + timeout_counter = 0 + heartbeat_counter = 0 + while self._GetTestStatus(self._test_run_id) != self.COMPLETE: + if self._results['detailed_status'] != current_status: + logging.info('Test status: %s', self._results['detailed_status']) + current_status = self._results['detailed_status'] + timeout_counter = 0 + heartbeat_counter = 0 + if heartbeat_counter > self.HEARTBEAT_INTERVAL: + logging.info('Test status: %s', self._results['detailed_status']) + heartbeat_counter = 0 + + timeout = self._env.timeouts.get( + current_status, self._env.timeouts['unknown']) + if timeout_counter > timeout: + raise remote_device_helper.RemoteDeviceError( + 'Timeout while in %s state for %s seconds' + % (current_status, timeout), + is_infra_error=True) + time.sleep(self.WAIT_TIME) + timeout_counter += self.WAIT_TIME + heartbeat_counter += self.WAIT_TIME + self._DownloadTestResults(self._env.results_path) + + if self._results['results']['exception']: + raise remote_device_helper.RemoteDeviceError( + self._results['results']['exception'], is_infra_error=True) + + return [self._ParseTestResults()] + + #override + def TearDown(self): + """Tear down the test run.""" + if self._env.collect: + self._CollectTearDown() + elif self._env.trigger: + assert isinstance(self._env.trigger, basestring), ( + 'File for storing test_run_id must be a string.') + with open(self._env.trigger, 'w') as persisted_data_file: + persisted_data = {} + self.DumpTo(persisted_data) + self._env.DumpTo(persisted_data) + persisted_data_file.write(json.dumps(persisted_data)) + + def _CollectTearDown(self): + if self._GetTestStatus(self._test_run_id) != self.COMPLETE: + with appurify_sanitized.SanitizeLogging(self._env.verbose_count, + logging.WARNING): + test_abort_res = appurify_sanitized.api.tests_abort( + self._env.token, self._test_run_id, reason='Test runner exiting.') + remote_device_helper.TestHttpResponse(test_abort_res, + 'Unable to abort test.') + if self._results_temp_dir: + shutil.rmtree(self._results_temp_dir) + + def __enter__(self): + """Set up the test run when used as a context manager.""" + self.SetUp() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Tear down the test run when used as a context manager.""" 
+    self.TearDown()
+
+  def DumpTo(self, persisted_data):
+    test_run_data = {
+        self._TEST_RUN_ID_KEY: self._test_run_id,
+    }
+    persisted_data[self._TEST_RUN_KEY] = test_run_data
+
+  def LoadFrom(self, persisted_data):
+    test_run_data = persisted_data[self._TEST_RUN_KEY]
+    self._test_run_id = test_run_data[self._TEST_RUN_ID_KEY]
+
+  def _ParseTestResults(self):
+    raise NotImplementedError
+
+  def _GetTestByName(self, test_name):
+    """Gets test_id for a specific test.
+
+    Args:
+      test_name: Test to find the ID of.
+    """
+    with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                            logging.WARNING):
+      test_list_res = appurify_sanitized.api.tests_list(self._env.token)
+    remote_device_helper.TestHttpResponse(test_list_res,
+                                          'Unable to get tests list.')
+    for test in test_list_res.json()['response']:
+      if test['test_type'] == test_name:
+        return test['test_id']
+    raise remote_device_helper.RemoteDeviceError(
+        'No test found with name %s' % (test_name))
+
+  def _DownloadTestResults(self, results_path):
+    """Downloads the test results from the remote device service.
+
+    Downloads results to a temporary location, and then copies them to
+    results_path if results_path is not None.
+
+    Args:
+      results_path: Path to download appurify results zipfile.
+
+    Returns:
+      Path to downloaded file.
+    """
+
+    if self._results_temp_dir is None:
+      self._results_temp_dir = tempfile.mkdtemp()
+      logging.info('Downloading results to %s.', self._results_temp_dir)
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        appurify_sanitized.utils.wget(self._results['results']['url'],
+                                      self._results_temp_dir + '/results')
+    if results_path:
+      logging.info('Copying results to %s', results_path)
+      if not os.path.exists(os.path.dirname(results_path)):
+        os.makedirs(os.path.dirname(results_path))
+      shutil.copy(self._results_temp_dir + '/results', results_path)
+    return self._results_temp_dir + '/results'
+
+  def _GetTestStatus(self, test_run_id):
+    """Checks the state of the test and sets self._results.
+
+    Args:
+      test_run_id: Id of test on remote service.
+    """
+
+    with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                            logging.WARNING):
+      test_check_res = appurify_sanitized.api.tests_check_result(
+          self._env.token, test_run_id)
+    remote_device_helper.TestHttpResponse(test_check_res,
+                                          'Unable to get test status.')
+    self._results = test_check_res.json()['response']
+    return self._results['status']
+
+  def _AmInstrumentTestSetup(self, app_path, test_path, runner_package,
+                             environment_variables, extra_apks=None):
+    config = {'runner': runner_package}
+    if environment_variables:
+      config['environment_vars'] = ','.join(
+          '%s=%s' % (k, v) for k, v in environment_variables.iteritems())
+
+    self._app_id = self._UploadAppToDevice(app_path)
+
+    # TODO(agrieve): If AMP is ever resurrected, this needs to be changed to
+    # put test files under /sdcard/gtestdata. http://crbug.com/607169
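+    # The test binary and its data dependencies are bundled into one zip;
+    # entries listed in config['sdcard_files'] below are what the remote
+    # service extracts onto the device's sdcard.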
+    data_deps = self._test_instance.GetDataDependencies()
+    if data_deps:
+      with tempfile.NamedTemporaryFile(suffix='.zip') as test_with_deps:
+        sdcard_files = []
+        additional_apks = []
+        host_test = os.path.basename(test_path)
+        with zipfile.ZipFile(test_with_deps.name, 'w') as zip_file:
+          zip_file.write(test_path, host_test, zipfile.ZIP_DEFLATED)
+          for h, _ in data_deps:
+            if os.path.isdir(h):
+              zip_utils.WriteToZipFile(zip_file, h, '.')
+              sdcard_files.extend(os.listdir(h))
+            else:
+              zip_utils.WriteToZipFile(zip_file, h, os.path.basename(h))
+              sdcard_files.append(os.path.basename(h))
+          for a in extra_apks or ():
+            zip_utils.WriteToZipFile(zip_file, a, os.path.basename(a))
+            additional_apks.append(os.path.basename(a))
+
+        config['sdcard_files'] = ','.join(sdcard_files)
+        config['host_test'] = host_test
+        if additional_apks:
+          config['additional_apks'] = ','.join(additional_apks)
+        self._test_id = self._UploadTestToDevice(
+            'robotium', test_with_deps.name, app_id=self._app_id)
+    else:
+      self._test_id = self._UploadTestToDevice('robotium', test_path)
+
+    logging.info('Setting config: %s', config)
+    appurify_configs = {}
+    if self._env.network_config:
+      appurify_configs['network'] = self._env.network_config
+    self._SetTestConfig('robotium', config, **appurify_configs)
+
+  def _UploadAppToDevice(self, app_path):
+    """Upload app to device."""
+    logging.info('Uploading %s to remote service as %s.', app_path,
+                 self._test_instance.suite)
+    with open(app_path, 'rb') as apk_src:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        upload_results = appurify_sanitized.api.apps_upload(
+            self._env.token, apk_src, 'raw', name=self._test_instance.suite)
+      remote_device_helper.TestHttpResponse(
+          upload_results, 'Unable to upload %s.' % app_path)
+      return upload_results.json()['response']['app_id']
+
+  def _UploadTestToDevice(self, test_type, test_path, app_id=None):
+    """Upload test to device.
+
+    Args:
+      test_type: Type of test that is being uploaded, e.g. uirobot or gtest.
+      test_path: Path to the test file to upload.
+      app_id: Id of the app to associate the test with, if any.
+    """
+    logging.info('Uploading %s to remote service.', test_path)
+    with open(test_path, 'rb') as test_src:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        upload_results = appurify_sanitized.api.tests_upload(
+            self._env.token, test_src, 'raw', test_type, app_id=app_id)
+      remote_device_helper.TestHttpResponse(upload_results,
+          'Unable to upload %s.' % test_path)
+      return upload_results.json()['response']['test_id']
+
+  def _SetTestConfig(self, runner_type, runner_configs,
+                     network=appurify_constants.NETWORK.WIFI_1_BAR,
+                     pcap=0, profiler=0, videocapture=0):
+    """Generates and uploads the config file for a test.
+
+    Args:
+      runner_configs: Configs specific to the runner you are using.
+      network: Config to specify the network environment the devices running
+          the tests will be in.
+      pcap: Option to set the recording of network traffic from the device.
+      profiler: Option to set the recording of CPU, memory, and network
+          transfer usage in the tests.
+      videocapture: Option to set video capture during the tests.
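+      runner_type: Name of the config section the runner_configs entries are
+          written under, e.g. 'robotium' or 'android_robot'.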
+ + """ + logging.info('Generating config file for test.') + with tempfile.TemporaryFile() as config: + config_data = [ + '[appurify]', + 'network=%s' % network, + 'pcap=%s' % pcap, + 'profiler=%s' % profiler, + 'videocapture=%s' % videocapture, + '[%s]' % runner_type + ] + config_data.extend( + '%s=%s' % (k, v) for k, v in runner_configs.iteritems()) + config.write(''.join('%s\n' % l for l in config_data)) + config.flush() + config.seek(0) + with appurify_sanitized.SanitizeLogging(self._env.verbose_count, + logging.WARNING): + config_response = appurify_sanitized.api.config_upload( + self._env.token, config, self._test_id) + remote_device_helper.TestHttpResponse( + config_response, 'Unable to upload test config.') + + def _LogLogcat(self, level=logging.CRITICAL): + """Prints out logcat downloaded from remote service. + Args: + level: logging level to print at. + + Raises: + KeyError: If appurify_results/logcat.txt file cannot be found in + downloaded zip. + """ + zip_file = self._DownloadTestResults(None) + with zipfile.ZipFile(zip_file) as z: + try: + logcat = z.read('appurify_results/logcat.txt') + printable_logcat = ''.join(c for c in logcat if c in string.printable) + for line in printable_logcat.splitlines(): + logging.log(level, line) + except KeyError: + logging.error('No logcat found.') + + def _LogAdbTraceLog(self): + zip_file = self._DownloadTestResults(None) + with zipfile.ZipFile(zip_file) as z: + adb_trace_log = z.read('adb_trace.log') + for line in adb_trace_log.splitlines(): + logging.critical(line) + + def _DidDeviceGoOffline(self): + zip_file = self._DownloadTestResults(None) + with zipfile.ZipFile(zip_file) as z: + adb_trace_log = z.read('adb_trace.log') + if any(_DEVICE_OFFLINE_RE.search(l) for l in adb_trace_log.splitlines()): + return True + return False + + def _DetectPlatformErrors(self, results): + if not self._results['results']['pass']: + crash_msg = None + for line in self._results['results']['output'].splitlines(): + m = _LONG_MSG_RE.search(line) + if m: + crash_msg = m.group(1) + break + m = _SHORT_MSG_RE.search(line) + if m: + crash_msg = m.group(1) + if crash_msg: + self._LogLogcat() + results.AddResult(base_test_result.BaseTestResult( + crash_msg, base_test_result.ResultType.CRASH)) + elif self._DidDeviceGoOffline(): + self._LogLogcat() + self._LogAdbTraceLog() + raise remote_device_helper.RemoteDeviceError( + 'Remote service unable to reach device.', is_infra_error=True) + else: + # Remote service is reporting a failure, but no failure in results obj. + if results.DidRunPass(): + results.AddResult(base_test_result.BaseTestResult( + 'Remote service detected error.', + base_test_result.ResultType.UNKNOWN)) diff --git a/build/android/pylib/remote/device/remote_device_uirobot_test_run.py b/build/android/pylib/remote/device/remote_device_uirobot_test_run.py new file mode 100644 index 00000000000..f99e685084b --- /dev/null +++ b/build/android/pylib/remote/device/remote_device_uirobot_test_run.py @@ -0,0 +1,85 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Run specific test on specific environment.""" + +import logging + +from pylib.base import base_test_result +from pylib.remote.device import appurify_sanitized +from pylib.remote.device import remote_device_test_run +from pylib.remote.device import remote_device_helper + + +class RemoteDeviceUirobotTestRun(remote_device_test_run.RemoteDeviceTestRun): + """Run uirobot tests on a remote device.""" + + + def __init__(self, env, test_instance): + """Constructor. + + Args: + env: Environment the tests will run in. + test_instance: The test that will be run. + """ + super(RemoteDeviceUirobotTestRun, self).__init__(env, test_instance) + + #override + def TestPackage(self): + return self._test_instance.package_name + + #override + def _TriggerSetUp(self): + """Set up the triggering of a test run.""" + logging.info('Triggering test run.') + + if self._env.device_type == 'Android': + default_runner_type = 'android_robot' + elif self._env.device_type == 'iOS': + default_runner_type = 'ios_robot' + else: + raise remote_device_helper.RemoteDeviceError( + 'Unknown device type: %s' % self._env.device_type) + + self._app_id = self._UploadAppToDevice(self._test_instance.app_under_test) + if not self._env.runner_type: + runner_type = default_runner_type + logging.info('Using default runner type: %s', default_runner_type) + else: + runner_type = self._env.runner_type + + self._test_id = self._UploadTestToDevice( + 'android_robot', None, app_id=self._app_id) + config_body = {'duration': self._test_instance.minutes} + self._SetTestConfig(runner_type, config_body) + + + # TODO(rnephew): Switch to base class implementation when supported. + #override + def _UploadTestToDevice(self, test_type, test_path, app_id=None): + if test_path: + logging.info("Ignoring test path.") + data = { + 'access_token':self._env.token, + 'test_type':test_type, + 'app_id':app_id, + } + with appurify_sanitized.SanitizeLogging(self._env.verbose_count, + logging.WARNING): + test_upload_res = appurify_sanitized.utils.post('tests/upload', + data, None) + remote_device_helper.TestHttpResponse( + test_upload_res, 'Unable to get UiRobot test id.') + return test_upload_res.json()['response']['test_id'] + + #override + def _ParseTestResults(self): + logging.info('Parsing results from remote service.') + results = base_test_result.TestRunResults() + if self._results['results']['pass']: + result_type = base_test_result.ResultType.PASS + else: + result_type = base_test_result.ResultType.FAIL + results.AddResult(base_test_result.BaseTestResult('uirobot', result_type)) + return results diff --git a/build/android/pylib/restart_adbd.sh b/build/android/pylib/restart_adbd.sh new file mode 100644 index 00000000000..393b2ebac04 --- /dev/null +++ b/build/android/pylib/restart_adbd.sh @@ -0,0 +1,20 @@ +#!/system/bin/sh + +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Android shell script to restart adbd on the device. This has to be run +# atomically as a shell script because stopping adbd prevents further commands +# from running (even if called in the same adb shell). 
+ +trap '' HUP +trap '' TERM +trap '' PIPE + +function restart() { + stop adbd + start adbd +} + +restart & diff --git a/build/android/pylib/results/__init__.py b/build/android/pylib/results/__init__.py new file mode 100644 index 00000000000..4d6aabb953d --- /dev/null +++ b/build/android/pylib/results/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/build/android/pylib/results/flakiness_dashboard/__init__.py b/build/android/pylib/results/flakiness_dashboard/__init__.py new file mode 100644 index 00000000000..4d6aabb953d --- /dev/null +++ b/build/android/pylib/results/flakiness_dashboard/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/build/android/pylib/results/flakiness_dashboard/json_results_generator.py new file mode 100644 index 00000000000..7f849f9c051 --- /dev/null +++ b/build/android/pylib/results/flakiness_dashboard/json_results_generator.py @@ -0,0 +1,696 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# +# Most of this file was ported over from Blink's +# Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py +# Tools/Scripts/webkitpy/common/net/file_uploader.py +# + +import json +import logging +import mimetypes +import os +import time +import urllib2 + +_log = logging.getLogger(__name__) + +_JSON_PREFIX = 'ADD_RESULTS(' +_JSON_SUFFIX = ');' + + +def HasJSONWrapper(string): + return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX) + + +def StripJSONWrapper(json_content): + # FIXME: Kill this code once the server returns json instead of jsonp. + if HasJSONWrapper(json_content): + return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)] + return json_content + + +def WriteJSON(json_object, file_path, callback=None): + # Specify separators in order to get compact encoding. 
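+  # A non-None callback wraps the output JSONP-style as 'callback(<json>);',
+  # which is what HasJSONWrapper/StripJSONWrapper above detect and undo.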
+ json_string = json.dumps(json_object, separators=(',', ':')) + if callback: + json_string = callback + '(' + json_string + ');' + with open(file_path, 'w') as fp: + fp.write(json_string) + + +def ConvertTrieToFlatPaths(trie, prefix=None): + """Flattens the trie of paths, prepending a prefix to each.""" + result = {} + for name, data in trie.iteritems(): + if prefix: + name = prefix + '/' + name + + if len(data) and not 'results' in data: + result.update(ConvertTrieToFlatPaths(data, name)) + else: + result[name] = data + + return result + + +def AddPathToTrie(path, value, trie): + """Inserts a single path and value into a directory trie structure.""" + if not '/' in path: + trie[path] = value + return + + directory, _, rest = path.partition('/') + if not directory in trie: + trie[directory] = {} + AddPathToTrie(rest, value, trie[directory]) + + +def TestTimingsTrie(individual_test_timings): + """Breaks a test name into dicts by directory + + foo/bar/baz.html: 1ms + foo/bar/baz1.html: 3ms + + becomes + foo: { + bar: { + baz.html: 1, + baz1.html: 3 + } + } + """ + trie = {} + for test_result in individual_test_timings: + test = test_result.test_name + + AddPathToTrie(test, int(1000 * test_result.test_run_time), trie) + + return trie + + +class TestResult(object): + """A simple class that represents a single test result.""" + + # Test modifier constants. + (NONE, FAILS, FLAKY, DISABLED) = range(4) + + def __init__(self, test, failed=False, elapsed_time=0): + self.test_name = test + self.failed = failed + self.test_run_time = elapsed_time + + test_name = test + try: + test_name = test.split('.')[1] + except IndexError: + _log.warn('Invalid test name: %s.', test) + + if test_name.startswith('FAILS_'): + self.modifier = self.FAILS + elif test_name.startswith('FLAKY_'): + self.modifier = self.FLAKY + elif test_name.startswith('DISABLED_'): + self.modifier = self.DISABLED + else: + self.modifier = self.NONE + + def Fixable(self): + return self.failed or self.modifier == self.DISABLED + + +class JSONResultsGeneratorBase(object): + """A JSON results generator for generic tests.""" + + MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750 + # Min time (seconds) that will be added to the JSON. + MIN_TIME = 1 + + # Note that in non-chromium tests those chars are used to indicate + # test modifiers (FAILS, FLAKY, etc) but not actual test results. + PASS_RESULT = 'P' + SKIP_RESULT = 'X' + FAIL_RESULT = 'F' + FLAKY_RESULT = 'L' + NO_DATA_RESULT = 'N' + + MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT, + TestResult.DISABLED: SKIP_RESULT, + TestResult.FAILS: FAIL_RESULT, + TestResult.FLAKY: FLAKY_RESULT} + + VERSION = 4 + VERSION_KEY = 'version' + RESULTS = 'results' + TIMES = 'times' + BUILD_NUMBERS = 'buildNumbers' + TIME = 'secondsSinceEpoch' + TESTS = 'tests' + + FIXABLE_COUNT = 'fixableCount' + FIXABLE = 'fixableCounts' + ALL_FIXABLE_COUNT = 'allFixableCount' + + RESULTS_FILENAME = 'results.json' + TIMES_MS_FILENAME = 'times_ms.json' + INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json' + + # line too long pylint: disable=line-too-long + URL_FOR_TEST_LIST_JSON = ( + 'http://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&master=%s') + # pylint: enable=line-too-long + + def __init__(self, builder_name, build_name, build_number, + results_file_base_path, builder_base_url, + test_results_map, svn_repositories=None, + test_results_server=None, + test_type='', + master_name=''): + """Modifies the results.json file. Grabs it off the archive directory + if it is not found locally. 
+ + Args + builder_name: the builder name (e.g. Webkit). + build_name: the build name (e.g. webkit-rel). + build_number: the build number. + results_file_base_path: Absolute path to the directory containing the + results json file. + builder_base_url: the URL where we have the archived test results. + If this is None no archived results will be retrieved. + test_results_map: A dictionary that maps test_name to TestResult. + svn_repositories: A (json_field_name, svn_path) pair for SVN + repositories that tests rely on. The SVN revision will be + included in the JSON with the given json_field_name. + test_results_server: server that hosts test results json. + test_type: test type string (e.g. 'layout-tests'). + master_name: the name of the buildbot master. + """ + self._builder_name = builder_name + self._build_name = build_name + self._build_number = build_number + self._builder_base_url = builder_base_url + self._results_directory = results_file_base_path + + self._test_results_map = test_results_map + self._test_results = test_results_map.values() + + self._svn_repositories = svn_repositories + if not self._svn_repositories: + self._svn_repositories = {} + + self._test_results_server = test_results_server + self._test_type = test_type + self._master_name = master_name + + self._archived_results = None + + def GenerateJSONOutput(self): + json_object = self.GetJSON() + if json_object: + file_path = ( + os.path.join( + self._results_directory, + self.INCREMENTAL_RESULTS_FILENAME)) + WriteJSON(json_object, file_path) + + def GenerateTimesMSFile(self): + times = TestTimingsTrie(self._test_results_map.values()) + file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME) + WriteJSON(times, file_path) + + def GetJSON(self): + """Gets the results for the results.json file.""" + results_json = {} + + if not results_json: + results_json, error = self._GetArchivedJSONResults() + if error: + # If there was an error don't write a results.json + # file at all as it would lose all the information on the + # bot. + _log.error('Archive directory is inaccessible. Not ' + 'modifying or clobbering the results.json ' + 'file: ' + str(error)) + return None + + builder_name = self._builder_name + if results_json and builder_name not in results_json: + _log.debug('Builder name (%s) is not in the results.json file.', + builder_name) + + self._ConvertJSONToCurrentVersion(results_json) + + if builder_name not in results_json: + results_json[builder_name] = ( + self._CreateResultsForBuilderJSON()) + + results_for_builder = results_json[builder_name] + + if builder_name: + self._InsertGenericMetaData(results_for_builder) + + self._InsertFailureSummaries(results_for_builder) + + # Update the all failing tests with result type and time. + tests = results_for_builder[self.TESTS] + all_failing_tests = self._GetFailedTestNames() + all_failing_tests.update(ConvertTrieToFlatPaths(tests)) + + for test in all_failing_tests: + self._InsertTestTimeAndResult(test, tests) + + return results_json + + def SetArchivedResults(self, archived_results): + self._archived_results = archived_results + + def UploadJSONFiles(self, json_files): + """Uploads the given json_files to the test_results_server (if the + test_results_server is given).""" + if not self._test_results_server: + return + + if not self._master_name: + _log.error( + '--test-results-server was set, but --master-name was not. 
Not ' + 'uploading JSON files.') + return + + _log.info('Uploading JSON files for builder: %s', self._builder_name) + attrs = [('builder', self._builder_name), + ('testtype', self._test_type), + ('master', self._master_name)] + + files = [(json_file, os.path.join(self._results_directory, json_file)) + for json_file in json_files] + + url = 'http://%s/testfile/upload' % self._test_results_server + # Set uploading timeout in case appengine server is having problems. + # 120 seconds are more than enough to upload test results. + uploader = _FileUploader(url, 120) + try: + response = uploader.UploadAsMultipartFormData(files, attrs) + if response: + if response.code == 200: + _log.info('JSON uploaded.') + else: + _log.debug( + "JSON upload failed, %d: '%s'", response.code, response.read()) + else: + _log.error('JSON upload failed; no response returned') + except Exception, err: # pylint: disable=broad-except + _log.error('Upload failed: %s', err) + return + + def _GetTestTiming(self, test_name): + """Returns test timing data (elapsed time) in second + for the given test_name.""" + if test_name in self._test_results_map: + # Floor for now to get time in seconds. + return int(self._test_results_map[test_name].test_run_time) + return 0 + + def _GetFailedTestNames(self): + """Returns a set of failed test names.""" + return set([r.test_name for r in self._test_results if r.failed]) + + def _GetModifierChar(self, test_name): + """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT, + PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier + for the given test_name. + """ + if test_name not in self._test_results_map: + return self.__class__.NO_DATA_RESULT + + test_result = self._test_results_map[test_name] + if test_result.modifier in self.MODIFIER_TO_CHAR.keys(): + return self.MODIFIER_TO_CHAR[test_result.modifier] + + return self.__class__.PASS_RESULT + + def _get_result_char(self, test_name): + """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT, + PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result + for the given test_name. + """ + if test_name not in self._test_results_map: + return self.__class__.NO_DATA_RESULT + + test_result = self._test_results_map[test_name] + if test_result.modifier == TestResult.DISABLED: + return self.__class__.SKIP_RESULT + + if test_result.failed: + return self.__class__.FAIL_RESULT + + return self.__class__.PASS_RESULT + + def _GetSVNRevision(self, in_directory): + """Returns the svn revision for the given directory. + + Args: + in_directory: The directory where svn is to be run. + """ + # This is overridden in flakiness_dashboard_results_uploader.py. + raise NotImplementedError() + + def _GetArchivedJSONResults(self): + """Download JSON file that only contains test + name list from test-results server. This is for generating incremental + JSON so the file generated has info for tests that failed before but + pass or are skipped from current run. + + Returns (archived_results, error) tuple where error is None if results + were successfully read. + """ + results_json = {} + old_results = None + error = None + + if not self._test_results_server: + return {}, None + + results_file_url = (self.URL_FOR_TEST_LIST_JSON % + (urllib2.quote(self._test_results_server), + urllib2.quote(self._builder_name), + self.RESULTS_FILENAME, + urllib2.quote(self._test_type), + urllib2.quote(self._master_name))) + + try: + # FIXME: We should talk to the network via a Host object. 
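+      # A 4xx response (e.g. no results.json uploaded yet for this builder)
+      # is not treated as an error; the code below then starts fresh.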
+      results_file = urllib2.urlopen(results_file_url)
+      old_results = results_file.read()
+    except urllib2.HTTPError, http_error:
+      # A non-4xx status code means the bot is hosed for some reason
+      # and we can't grab the results.json file off of it.
+      if http_error.code < 400 or http_error.code >= 500:
+        error = http_error
+    except urllib2.URLError, url_error:
+      error = url_error
+
+    if old_results:
+      # Strip the prefix and suffix so we can get the actual JSON object.
+      old_results = StripJSONWrapper(old_results)
+
+      try:
+        results_json = json.loads(old_results)
+      except Exception:  # pylint: disable=broad-except
+        _log.debug('results.json was not valid JSON. Clobbering.')
+        # The JSON file is not valid JSON. Just clobber the results.
+        results_json = {}
+    else:
+      _log.debug('Old JSON results do not exist. Starting fresh.')
+      results_json = {}
+
+    return results_json, error
+
+  def _InsertFailureSummaries(self, results_for_builder):
+    """Inserts aggregate pass/failure statistics into the JSON.
+
+    This method reads self._test_results and generates
+    FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+    """
+    # Insert the number of tests that failed or skipped.
+    fixable_count = len([r for r in self._test_results if r.Fixable()])
+    self._InsertItemIntoRawList(results_for_builder,
+                                fixable_count, self.FIXABLE_COUNT)
+
+    # Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
+    entry = {}
+    for test_name in self._test_results_map.iterkeys():
+      result_char = self._GetModifierChar(test_name)
+      entry[result_char] = entry.get(result_char, 0) + 1
+
+    # Insert the pass/skip/failure summary dictionary.
+    self._InsertItemIntoRawList(results_for_builder, entry,
+                                self.FIXABLE)
+
+    # Insert the number of all the tests that are supposed to pass.
+    all_test_count = len(self._test_results)
+    self._InsertItemIntoRawList(results_for_builder,
+                                all_test_count, self.ALL_FIXABLE_COUNT)
+
+  def _InsertItemIntoRawList(self, results_for_builder, item, key):
+    """Inserts the item into the list with the given key in the results for
+    this builder. Creates the list if no such list exists.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+      item: Number or string to insert into the list.
+      key: Key in results_for_builder for the list to insert into.
+    """
+    if key in results_for_builder:
+      raw_list = results_for_builder[key]
+    else:
+      raw_list = []
+
+    raw_list.insert(0, item)
+    raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
+    results_for_builder[key] = raw_list
+
+  def _InsertItemRunLengthEncoded(self, item, encoded_results):
+    """Inserts the item into the run-length encoded results.
+
+    Args:
+      item: String or number to insert.
+      encoded_results: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    if len(encoded_results) and item == encoded_results[0][1]:
+      num_results = encoded_results[0][0]
+      if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        encoded_results[0][0] = num_results + 1
+    else:
+      # Use a list instead of a class for the run-length encoding since
+      # we want the serialized form to be concise.
+      encoded_results.insert(0, [1, item])
+
+  def _InsertGenericMetaData(self, results_for_builder):
+    """Inserts generic metadata (such as version number, current time etc)
+    into the JSON.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for
+          a single builder.
+ """ + self._InsertItemIntoRawList(results_for_builder, + self._build_number, self.BUILD_NUMBERS) + + # Include SVN revisions for the given repositories. + for (name, path) in self._svn_repositories: + # Note: for JSON file's backward-compatibility we use 'chrome' rather + # than 'chromium' here. + lowercase_name = name.lower() + if lowercase_name == 'chromium': + lowercase_name = 'chrome' + self._InsertItemIntoRawList(results_for_builder, + self._GetSVNRevision(path), + lowercase_name + 'Revision') + + self._InsertItemIntoRawList(results_for_builder, + int(time.time()), + self.TIME) + + def _InsertTestTimeAndResult(self, test_name, tests): + """ Insert a test item with its results to the given tests dictionary. + + Args: + tests: Dictionary containing test result entries. + """ + + result = self._get_result_char(test_name) + test_time = self._GetTestTiming(test_name) + + this_test = tests + for segment in test_name.split('/'): + if segment not in this_test: + this_test[segment] = {} + this_test = this_test[segment] + + if not len(this_test): + self._PopulateResultsAndTimesJSON(this_test) + + if self.RESULTS in this_test: + self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS]) + else: + this_test[self.RESULTS] = [[1, result]] + + if self.TIMES in this_test: + self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES]) + else: + this_test[self.TIMES] = [[1, test_time]] + + def _ConvertJSONToCurrentVersion(self, results_json): + """If the JSON does not match the current version, converts it to the + current version and adds in the new version number. + """ + if self.VERSION_KEY in results_json: + archive_version = results_json[self.VERSION_KEY] + if archive_version == self.VERSION: + return + else: + archive_version = 3 + + # version 3->4 + if archive_version == 3: + for results in results_json.values(): + self._ConvertTestsToTrie(results) + + results_json[self.VERSION_KEY] = self.VERSION + + def _ConvertTestsToTrie(self, results): + if not self.TESTS in results: + return + + test_results = results[self.TESTS] + test_results_trie = {} + for test in test_results.iterkeys(): + single_test_result = test_results[test] + AddPathToTrie(test, single_test_result, test_results_trie) + + results[self.TESTS] = test_results_trie + + def _PopulateResultsAndTimesJSON(self, results_and_times): + results_and_times[self.RESULTS] = [] + results_and_times[self.TIMES] = [] + return results_and_times + + def _CreateResultsForBuilderJSON(self): + results_for_builder = {} + results_for_builder[self.TESTS] = {} + return results_for_builder + + def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list): + """Removes items from the run-length encoded list after the final + item that exceeds the max number of builds to track. + + Args: + encoded_results: run-length encoded results. An array of arrays, e.g. + [[3,'A'],[1,'Q']] encodes AAAQ. + """ + num_builds = 0 + index = 0 + for result in encoded_list: + num_builds = num_builds + result[0] + index = index + 1 + if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG: + return encoded_list[:index] + return encoded_list + + def _NormalizeResultsJSON(self, test, test_name, tests): + """ Prune tests where all runs pass or tests that no longer exist and + truncate all results to maxNumberOfBuilds. + + Args: + test: ResultsAndTimes object for this test. + test_name: Name of the test. + tests: The JSON object with all the test results for this builder. 
+ """ + test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds( + test[self.RESULTS]) + test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds( + test[self.TIMES]) + + is_all_pass = self._IsResultsAllOfType(test[self.RESULTS], + self.PASS_RESULT) + is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS], + self.NO_DATA_RESULT) + max_time = max([test_time[1] for test_time in test[self.TIMES]]) + + # Remove all passes/no-data from the results to reduce noise and + # filesize. If a test passes every run, but takes > MIN_TIME to run, + # don't throw away the data. + if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME): + del tests[test_name] + + # method could be a function pylint: disable=R0201 + def _IsResultsAllOfType(self, results, result_type): + """Returns whether all the results are of the given type + (e.g. all passes).""" + return len(results) == 1 and results[0][1] == result_type + + +class _FileUploader(object): + + def __init__(self, url, timeout_seconds): + self._url = url + self._timeout_seconds = timeout_seconds + + def UploadAsMultipartFormData(self, files, attrs): + file_objs = [] + for filename, path in files: + with file(path, 'rb') as fp: + file_objs.append(('file', filename, fp.read())) + + # FIXME: We should use the same variable names for the formal and actual + # parameters. + content_type, data = _EncodeMultipartFormData(attrs, file_objs) + return self._UploadData(content_type, data) + + def _UploadData(self, content_type, data): + start = time.time() + end = start + self._timeout_seconds + while time.time() < end: + try: + request = urllib2.Request(self._url, data, + {'Content-Type': content_type}) + return urllib2.urlopen(request) + except urllib2.HTTPError as e: + _log.warn("Received HTTP status %s loading \"%s\". " + 'Retrying in 10 seconds...', e.code, e.filename) + time.sleep(10) + + +def _GetMIMEType(filename): + return mimetypes.guess_type(filename)[0] or 'application/octet-stream' + + +# FIXME: Rather than taking tuples, this function should take more +# structured data. +def _EncodeMultipartFormData(fields, files): + """Encode form fields for multipart/form-data. + + Args: + fields: A sequence of (name, value) elements for regular form fields. + files: A sequence of (name, filename, value) elements for data to be + uploaded as files. + Returns: + (content_type, body) ready for httplib.HTTP instance. 
+ + Source: + http://code.google.com/p/rietveld/source/browse/trunk/upload.py + """ + BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-' + CRLF = '\r\n' + lines = [] + + for key, value in fields: + lines.append('--' + BOUNDARY) + lines.append('Content-Disposition: form-data; name="%s"' % key) + lines.append('') + if isinstance(value, unicode): + value = value.encode('utf-8') + lines.append(value) + + for key, filename, value in files: + lines.append('--' + BOUNDARY) + lines.append('Content-Disposition: form-data; name="%s"; ' + 'filename="%s"' % (key, filename)) + lines.append('Content-Type: %s' % _GetMIMEType(filename)) + lines.append('') + if isinstance(value, unicode): + value = value.encode('utf-8') + lines.append(value) + + lines.append('--' + BOUNDARY + '--') + lines.append('') + body = CRLF.join(lines) + content_type = 'multipart/form-data; boundary=%s' % BOUNDARY + return content_type, body diff --git a/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py b/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py new file mode 100644 index 00000000000..d6aee057bfb --- /dev/null +++ b/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py @@ -0,0 +1,213 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# +# Most of this file was ported over from Blink's +# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py +# + +import unittest +import json + +from pylib.results.flakiness_dashboard import json_results_generator + + +class JSONGeneratorTest(unittest.TestCase): + + def setUp(self): + self.builder_name = 'DUMMY_BUILDER_NAME' + self.build_name = 'DUMMY_BUILD_NAME' + self.build_number = 'DUMMY_BUILDER_NUMBER' + + # For archived results. + self._json = None + self._num_runs = 0 + self._tests_set = set([]) + self._test_timings = {} + self._failed_count_map = {} + + self._PASS_count = 0 + self._DISABLED_count = 0 + self._FLAKY_count = 0 + self._FAILS_count = 0 + self._fixable_count = 0 + + self._orig_write_json = json_results_generator.WriteJSON + + # unused arguments ... 
pylint: disable=W0613 + def _WriteJSONStub(json_object, file_path, callback=None): + pass + + json_results_generator.WriteJSON = _WriteJSONStub + + def tearDown(self): + json_results_generator.WriteJSON = self._orig_write_json + + def _TestJSONGeneration(self, passed_tests_list, failed_tests_list): + tests_set = set(passed_tests_list) | set(failed_tests_list) + + DISABLED_tests = set([t for t in tests_set + if t.startswith('DISABLED_')]) + FLAKY_tests = set([t for t in tests_set + if t.startswith('FLAKY_')]) + FAILS_tests = set([t for t in tests_set + if t.startswith('FAILS_')]) + PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests) + + failed_tests = set(failed_tests_list) - DISABLED_tests + failed_count_map = dict([(t, 1) for t in failed_tests]) + + test_timings = {} + i = 0 + for test in tests_set: + test_timings[test] = float(self._num_runs * 100 + i) + i += 1 + + test_results_map = dict() + for test in tests_set: + test_results_map[test] = json_results_generator.TestResult( + test, failed=(test in failed_tests), + elapsed_time=test_timings[test]) + + generator = json_results_generator.JSONResultsGeneratorBase( + self.builder_name, self.build_name, self.build_number, + '', + None, # don't fetch past json results archive + test_results_map) + + failed_count_map = dict([(t, 1) for t in failed_tests]) + + # Test incremental json results + incremental_json = generator.GetJSON() + self._VerifyJSONResults( + tests_set, + test_timings, + failed_count_map, + len(PASS_tests), + len(DISABLED_tests), + len(FLAKY_tests), + len(DISABLED_tests | failed_tests), + incremental_json, + 1) + + # We don't verify the results here, but at least we make sure the code + # runs without errors. + generator.GenerateJSONOutput() + generator.GenerateTimesMSFile() + + def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map, + PASS_count, DISABLED_count, FLAKY_count, + fixable_count, json_obj, num_runs): + # Aliasing to a short name for better access to its constants. 
+ JRG = json_results_generator.JSONResultsGeneratorBase + + self.assertIn(JRG.VERSION_KEY, json_obj) + self.assertIn(self.builder_name, json_obj) + + buildinfo = json_obj[self.builder_name] + self.assertIn(JRG.FIXABLE, buildinfo) + self.assertIn(JRG.TESTS, buildinfo) + self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs) + self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number) + + if tests_set or DISABLED_count: + fixable = {} + for fixable_items in buildinfo[JRG.FIXABLE]: + for (result_type, count) in fixable_items.iteritems(): + if result_type in fixable: + fixable[result_type] = fixable[result_type] + count + else: + fixable[result_type] = count + + if PASS_count: + self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count) + else: + self.assertTrue(JRG.PASS_RESULT not in fixable or + fixable[JRG.PASS_RESULT] == 0) + if DISABLED_count: + self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count) + else: + self.assertTrue(JRG.SKIP_RESULT not in fixable or + fixable[JRG.SKIP_RESULT] == 0) + if FLAKY_count: + self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count) + else: + self.assertTrue(JRG.FLAKY_RESULT not in fixable or + fixable[JRG.FLAKY_RESULT] == 0) + + if failed_count_map: + tests = buildinfo[JRG.TESTS] + for test_name in failed_count_map.iterkeys(): + test = self._FindTestInTrie(test_name, tests) + + failed = 0 + for result in test[JRG.RESULTS]: + if result[1] == JRG.FAIL_RESULT: + failed += result[0] + self.assertEqual(failed_count_map[test_name], failed) + + timing_count = 0 + for timings in test[JRG.TIMES]: + if timings[1] == test_timings[test_name]: + timing_count = timings[0] + self.assertEqual(1, timing_count) + + if fixable_count: + self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count) + + def _FindTestInTrie(self, path, trie): + nodes = path.split('/') + sub_trie = trie + for node in nodes: + self.assertIn(node, sub_trie) + sub_trie = sub_trie[node] + return sub_trie + + def testJSONGeneration(self): + self._TestJSONGeneration([], []) + self._TestJSONGeneration(['A1', 'B1'], []) + self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2']) + self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], []) + self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4']) + self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5']) + self._TestJSONGeneration( + ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'], + ['FAILS_D6']) + + # Generate JSON with the same test sets. (Both incremental results and + # archived results must be updated appropriately.) + self._TestJSONGeneration( + ['A', 'FLAKY_B', 'DISABLED_C'], + ['FAILS_D', 'FLAKY_E']) + self._TestJSONGeneration( + ['A', 'DISABLED_C', 'FLAKY_E'], + ['FLAKY_B', 'FAILS_D']) + self._TestJSONGeneration( + ['FLAKY_B', 'DISABLED_C', 'FAILS_D'], + ['A', 'FLAKY_E']) + + def testHierarchicalJSNGeneration(self): + # FIXME: Re-work tests to be more comprehensible and comprehensive. 
+ self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C']) + + def testTestTimingsTrie(self): + individual_test_timings = [] + individual_test_timings.append( + json_results_generator.TestResult( + 'foo/bar/baz.html', + elapsed_time=1.2)) + individual_test_timings.append( + json_results_generator.TestResult('bar.html', elapsed_time=0.0001)) + trie = json_results_generator.TestTimingsTrie(individual_test_timings) + + expected_trie = { + 'bar.html': 0, + 'foo': { + 'bar': { + 'baz.html': 1200, + } + } + } + + self.assertEqual(json.dumps(trie), json.dumps(expected_trie)) diff --git a/build/android/pylib/results/flakiness_dashboard/results_uploader.py b/build/android/pylib/results/flakiness_dashboard/results_uploader.py new file mode 100644 index 00000000000..b68a898b7da --- /dev/null +++ b/build/android/pylib/results/flakiness_dashboard/results_uploader.py @@ -0,0 +1,176 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Uploads the results to the flakiness dashboard server.""" +# pylint: disable=E1002,R0201 + +import logging +import os +import shutil +import tempfile +import xml + + +from devil.utils import cmd_helper +from pylib.constants import host_paths +from pylib.results.flakiness_dashboard import json_results_generator +from pylib.utils import repo_utils + + + +class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase): + """Writes test results to a JSON file and handles uploading that file to + the test results server. + """ + def __init__(self, builder_name, build_name, build_number, tmp_folder, + test_results_map, test_results_server, test_type, master_name): + super(JSONResultsGenerator, self).__init__( + builder_name=builder_name, + build_name=build_name, + build_number=build_number, + results_file_base_path=tmp_folder, + builder_base_url=None, + test_results_map=test_results_map, + svn_repositories=(('webkit', 'third_party/WebKit'), + ('chrome', '.')), + test_results_server=test_results_server, + test_type=test_type, + master_name=master_name) + + #override + def _GetModifierChar(self, test_name): + if test_name not in self._test_results_map: + return self.__class__.NO_DATA_RESULT + + return self._test_results_map[test_name].modifier + + #override + def _GetSVNRevision(self, in_directory): + """Returns the git/svn revision for the given directory. + + Args: + in_directory: The directory relative to src. + """ + def _is_git_directory(in_directory): + """Returns true if the given directory is in a git repository. + + Args: + in_directory: The directory path to be tested. 
+ """ + if os.path.exists(os.path.join(in_directory, '.git')): + return True + parent = os.path.dirname(in_directory) + if parent == host_paths.DIR_SOURCE_ROOT or parent == in_directory: + return False + return _is_git_directory(parent) + + in_directory = os.path.join(host_paths.DIR_SOURCE_ROOT, in_directory) + + if not os.path.exists(os.path.join(in_directory, '.svn')): + if _is_git_directory(in_directory): + return repo_utils.GetGitHeadSHA1(in_directory) + else: + return '' + + output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory) + try: + dom = xml.dom.minidom.parseString(output) + return dom.getElementsByTagName('entry')[0].getAttribute('revision') + except xml.parsers.expat.ExpatError: + return '' + return '' + + +class ResultsUploader(object): + """Handles uploading buildbot tests results to the flakiness dashboard.""" + def __init__(self, tests_type): + self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER') + self._master_name = os.environ.get('BUILDBOT_MASTERNAME') + self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME') + self._tests_type = tests_type + self._build_name = None + + if not self._build_number or not self._builder_name: + raise Exception('You should not be uploading tests results to the server' + 'from your local machine.') + + upstream = (tests_type != 'Chromium_Android_Instrumentation') + if not upstream: + self._build_name = 'chromium-android' + buildbot_branch = os.environ.get('BUILDBOT_BRANCH') + if not buildbot_branch: + buildbot_branch = 'master' + else: + # Ensure there's no leading "origin/" + buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:] + self._master_name = '%s-%s' % (self._build_name, buildbot_branch) + + self._test_results_map = {} + + def AddResults(self, test_results): + # TODO(frankf): Differentiate between fail/crash/timeouts. + conversion_map = [ + (test_results.GetPass(), False, + json_results_generator.JSONResultsGeneratorBase.PASS_RESULT), + (test_results.GetFail(), True, + json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT), + (test_results.GetCrash(), True, + json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT), + (test_results.GetTimeout(), True, + json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT), + (test_results.GetUnknown(), True, + json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT), + ] + + for results_list, failed, modifier in conversion_map: + for single_test_result in results_list: + test_result = json_results_generator.TestResult( + test=single_test_result.GetName(), + failed=failed, + elapsed_time=single_test_result.GetDuration() / 1000) + # The WebKit TestResult object sets the modifier it based on test name. + # Since we don't use the same test naming convention as WebKit the + # modifier will be wrong, so we need to overwrite it. 
+ test_result.modifier = modifier + + self._test_results_map[single_test_result.GetName()] = test_result + + def Upload(self, test_results_server): + if not self._test_results_map: + return + + tmp_folder = tempfile.mkdtemp() + + try: + results_generator = JSONResultsGenerator( + builder_name=self._builder_name, + build_name=self._build_name, + build_number=self._build_number, + tmp_folder=tmp_folder, + test_results_map=self._test_results_map, + test_results_server=test_results_server, + test_type=self._tests_type, + master_name=self._master_name) + + json_files = ["incremental_results.json", "times_ms.json"] + results_generator.GenerateJSONOutput() + results_generator.GenerateTimesMSFile() + results_generator.UploadJSONFiles(json_files) + except Exception as e: # pylint: disable=broad-except + logging.error("Uploading results to test server failed: %s.", e) + finally: + shutil.rmtree(tmp_folder) + + +def Upload(results, flakiness_dashboard_server, test_type): + """Reports test results to the flakiness dashboard for Chrome for Android. + + Args: + results: test results. + flakiness_dashboard_server: the server to upload the results to. + test_type: the type of the tests (as displayed by the flakiness dashboard). + """ + uploader = ResultsUploader(test_type) + uploader.AddResults(results) + uploader.Upload(flakiness_dashboard_server) diff --git a/build/android/pylib/results/json_results.py b/build/android/pylib/results/json_results.py new file mode 100644 index 00000000000..593c67a6c3f --- /dev/null +++ b/build/android/pylib/results/json_results.py @@ -0,0 +1,166 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import itertools +import json + +from pylib.base import base_test_result + +def GenerateResultsDict(test_run_results): + """Create a results dict from |test_run_results| suitable for writing to JSON. + Args: + test_run_results: a list of base_test_result.TestRunResults objects. + Returns: + A results dict that mirrors the one generated by + base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON. + """ + # Example json output. + # { + # "global_tags": [], + # "all_tests": [ + # "test1", + # "test2", + # ], + # "disabled_tests": [], + # "per_iteration_data": [ + # { + # "test1": [ + # { + # "status": "SUCCESS", + # "elapsed_time_ms": 1, + # "output_snippet": "", + # "output_snippet_base64": "", + # "losless_snippet": "", + # }, + # ... + # ], + # "test2": [ + # { + # "status": "FAILURE", + # "elapsed_time_ms": 12, + # "output_snippet": "", + # "output_snippet_base64": "", + # "losless_snippet": "", + # }, + # ... + # ], + # }, + # { + # "test1": [ + # { + # "status": "SUCCESS", + # "elapsed_time_ms": 1, + # "output_snippet": "", + # "output_snippet_base64": "", + # "losless_snippet": "", + # }, + # ], + # "test2": [ + # { + # "status": "FAILURE", + # "elapsed_time_ms": 12, + # "output_snippet": "", + # "output_snippet_base64": "", + # "losless_snippet": "", + # }, + # ], + # }, + # ... 
+  #   ],
+  # }
+
+  def status_as_string(s):
+    if s == base_test_result.ResultType.PASS:
+      return 'SUCCESS'
+    elif s == base_test_result.ResultType.SKIP:
+      return 'SKIPPED'
+    elif s == base_test_result.ResultType.FAIL:
+      return 'FAILURE'
+    elif s == base_test_result.ResultType.CRASH:
+      return 'CRASH'
+    elif s == base_test_result.ResultType.TIMEOUT:
+      return 'TIMEOUT'
+    elif s == base_test_result.ResultType.UNKNOWN:
+      return 'UNKNOWN'
+
+  all_tests = set()
+  per_iteration_data = []
+  for test_run_result in test_run_results:
+    iteration_data = collections.defaultdict(list)
+    if isinstance(test_run_result, list):
+      results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
+    else:
+      results_iterable = test_run_result.GetAll()
+
+    for r in results_iterable:
+      # Field names (including the 'losless_snippet' spelling) must match
+      # base/test/launcher/test_results_tracker.cc.
+      result_dict = {
+          'status': status_as_string(r.GetType()),
+          'elapsed_time_ms': r.GetDuration(),
+          'output_snippet': r.GetLog(),
+          'losless_snippet': '',
+          'output_snippet_base64': '',
+          'tombstones': r.GetTombstones() or '',
+      }
+      iteration_data[r.GetName()].append(result_dict)
+
+    all_tests = all_tests.union(set(iteration_data.iterkeys()))
+    per_iteration_data.append(iteration_data)
+
+  return {
+      'global_tags': [],
+      'all_tests': sorted(list(all_tests)),
+      # TODO(jbudorick): Add support for disabled tests within base_test_result.
+      'disabled_tests': [],
+      'per_iteration_data': per_iteration_data,
+  }
+
+
+def GenerateJsonResultsFile(test_run_result, file_path):
+  """Write |test_run_result| to JSON.
+
+  This emulates the format of the JSON emitted by
+  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    file_path: The path to the JSON file to write.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(json.dumps(GenerateResultsDict(test_run_result)))
+
+
+def ParseResultsFromJson(json_results):
+  """Creates a list of BaseTestResult objects from JSON.
+
+  Args:
+    json_results: A JSON dict in the format created by
+        GenerateJsonResultsFile.
+  """
+
+  def string_as_status(s):
+    if s == 'SUCCESS':
+      return base_test_result.ResultType.PASS
+    elif s == 'SKIPPED':
+      return base_test_result.ResultType.SKIP
+    elif s == 'FAILURE':
+      return base_test_result.ResultType.FAIL
+    elif s == 'CRASH':
+      return base_test_result.ResultType.CRASH
+    elif s == 'TIMEOUT':
+      return base_test_result.ResultType.TIMEOUT
+    else:
+      return base_test_result.ResultType.UNKNOWN
+
+  results_list = []
+  testsuite_runs = json_results['per_iteration_data']
+  for testsuite_run in testsuite_runs:
+    for test, test_runs in testsuite_run.iteritems():
+      results_list.extend(
+          [base_test_result.BaseTestResult(test,
+                                           string_as_status(tr['status']),
+                                           duration=tr['elapsed_time_ms'])
+           for tr in test_runs])
+  return results_list
+
diff --git a/build/android/pylib/results/json_results_test.py b/build/android/pylib/results/json_results_test.py
new file mode 100644
index 00000000000..c6239964fb4
--- /dev/null
+++ b/build/android/pylib/results/json_results_test.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
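+
+# These tests pin down the exact JSON shape GenerateResultsDict produces,
+# which must stay compatible with base/test/launcher's summary format.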
+ +import unittest + +from pylib.base import base_test_result +from pylib.results import json_results + + +class JsonResultsTest(unittest.TestCase): + + def testGenerateResultsDict_passedResult(self): + result = base_test_result.BaseTestResult( + 'test.package.TestName', base_test_result.ResultType.PASS) + + all_results = base_test_result.TestRunResults() + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) + self.assertEquals( + ['test.package.TestName'], + results_dict['all_tests']) + self.assertEquals(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in iteration_result) + self.assertEquals(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('status' in test_iteration_result) + self.assertEquals('SUCCESS', test_iteration_result['status']) + + def testGenerateResultsDict_skippedResult(self): + result = base_test_result.BaseTestResult( + 'test.package.TestName', base_test_result.ResultType.SKIP) + + all_results = base_test_result.TestRunResults() + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) + self.assertEquals( + ['test.package.TestName'], + results_dict['all_tests']) + self.assertEquals(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in iteration_result) + self.assertEquals(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('status' in test_iteration_result) + self.assertEquals('SKIPPED', test_iteration_result['status']) + + def testGenerateResultsDict_failedResult(self): + result = base_test_result.BaseTestResult( + 'test.package.TestName', base_test_result.ResultType.FAIL) + + all_results = base_test_result.TestRunResults() + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) + self.assertEquals( + ['test.package.TestName'], + results_dict['all_tests']) + self.assertEquals(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in iteration_result) + self.assertEquals(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('status' in test_iteration_result) + self.assertEquals('FAILURE', test_iteration_result['status']) + + def testGenerateResultsDict_duration(self): + result = base_test_result.BaseTestResult( + 'test.package.TestName', base_test_result.ResultType.PASS, duration=123) + + all_results = base_test_result.TestRunResults() + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) + self.assertEquals( + ['test.package.TestName'], + results_dict['all_tests']) + self.assertEquals(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in iteration_result) + self.assertEquals(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('elapsed_time_ms' in test_iteration_result) + self.assertEquals(123, test_iteration_result['elapsed_time_ms']) + + def 
testGenerateResultsDict_multipleResults(self): + result1 = base_test_result.BaseTestResult( + 'test.package.TestName1', base_test_result.ResultType.PASS) + result2 = base_test_result.BaseTestResult( + 'test.package.TestName2', base_test_result.ResultType.PASS) + + all_results = base_test_result.TestRunResults() + all_results.AddResult(result1) + all_results.AddResult(result2) + + results_dict = json_results.GenerateResultsDict([all_results]) + self.assertEquals( + ['test.package.TestName1', 'test.package.TestName2'], + results_dict['all_tests']) + + self.assertTrue('per_iteration_data' in results_dict) + iterations = results_dict['per_iteration_data'] + self.assertEquals(1, len(iterations)) + + expected_tests = set([ + 'test.package.TestName1', + 'test.package.TestName2', + ]) + + for test_name, iteration_result in iterations[0].iteritems(): + self.assertTrue(test_name in expected_tests) + expected_tests.remove(test_name) + self.assertEquals(1, len(iteration_result)) + + test_iteration_result = iteration_result[0] + self.assertTrue('status' in test_iteration_result) + self.assertEquals('SUCCESS', test_iteration_result['status']) + + def testGenerateResultsDict_passOnRetry(self): + raw_results = [] + + result1 = base_test_result.BaseTestResult( + 'test.package.TestName1', base_test_result.ResultType.FAIL) + run_results1 = base_test_result.TestRunResults() + run_results1.AddResult(result1) + raw_results.append(run_results1) + + result2 = base_test_result.BaseTestResult( + 'test.package.TestName1', base_test_result.ResultType.PASS) + run_results2 = base_test_result.TestRunResults() + run_results2.AddResult(result2) + raw_results.append(run_results2) + + results_dict = json_results.GenerateResultsDict([raw_results]) + self.assertEquals(['test.package.TestName1'], results_dict['all_tests']) + + # Check that there's only one iteration. + self.assertIn('per_iteration_data', results_dict) + iterations = results_dict['per_iteration_data'] + self.assertEquals(1, len(iterations)) + + # Check that test.package.TestName1 is the only test in the iteration. + self.assertEquals(1, len(iterations[0])) + self.assertIn('test.package.TestName1', iterations[0]) + + # Check that there are two results for test.package.TestName1. + actual_test_results = iterations[0]['test.package.TestName1'] + self.assertEquals(2, len(actual_test_results)) + + # Check that the first result is a failure. + self.assertIn('status', actual_test_results[0]) + self.assertEquals('FAILURE', actual_test_results[0]['status']) + + # Check that the second result is a success. + self.assertIn('status', actual_test_results[1]) + self.assertEquals('SUCCESS', actual_test_results[1]['status']) + + +if __name__ == '__main__': + unittest.main(verbosity=2) + diff --git a/build/android/pylib/results/report_results.py b/build/android/pylib/results/report_results.py new file mode 100644 index 00000000000..d39acd0d380 --- /dev/null +++ b/build/android/pylib/results/report_results.py @@ -0,0 +1,116 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+"""Module containing utility functions for reporting results."""
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib.results.flakiness_dashboard import results_uploader
+
+
+def _LogToFile(results, test_type, suite_name):
+ """Log results to local files which can be used for aggregation later."""
+ log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
+ if not os.path.exists(log_file_path):
+ os.mkdir(log_file_path)
+ full_file_name = os.path.join(
+ log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
+ if not os.path.exists(full_file_name):
+ with open(full_file_name, 'w') as log_file:
+ print >> log_file, '\n%s results for %s build %s:' % (
+ test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
+ os.environ.get('BUILDBOT_BUILDNUMBER'))
+
+ logging.info('Writing results to %s.', full_file_name)
+ with open(full_file_name, 'a') as log_file:
+ shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
+ print >> log_file, '%s%s' % (shortened_suite_name.ljust(30),
+ results.GetShortForm())
+
+
+def _LogToFlakinessDashboard(results, test_type, test_package,
+ flakiness_server):
+ """Uploads results to the flakiness dashboard."""
+ logging.info('Upload results for test type "%s", test package "%s" to %s',
+ test_type, test_package, flakiness_server)
+
+ try:
+ # TODO(jbudorick): remove Instrumentation once instrumentation tests
+ # switch to platform mode.
+ if test_type in ('instrumentation', 'Instrumentation'):
+ if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
+ assert test_package in ['ContentShellTest',
+ 'ChromePublicTest',
+ 'ChromeSyncShellTest',
+ 'AndroidWebViewTest',
+ 'SystemWebViewShellLayoutTest']
+ dashboard_test_type = ('%s_instrumentation_tests' %
+ test_package.lower().rstrip('test'))
+ # Downstream server.
+ else:
+ dashboard_test_type = 'Chromium_Android_Instrumentation'
+
+ elif test_type == 'gtest':
+ dashboard_test_type = test_package
+
+ else:
+ logging.warning('Invalid test type: %s', test_type)
+ return
+
+ results_uploader.Upload(
+ results, flakiness_server, dashboard_test_type)
+
+ except Exception: # pylint: disable=broad-except
+ logging.exception('Failure while logging to %s', flakiness_server)
+
+
+def LogFull(results, test_type, test_package, annotation=None,
+ flakiness_server=None):
+ """Logs the test results for the test suite.
+
+ The results will be logged three different ways:
+ 1. Log to stdout.
+ 2. Log to local files for aggregating multiple test steps
+ (on buildbots only).
+ 3. Log to flakiness dashboard (on buildbots only).
+
+ Args:
+ results: An instance of TestRunResults object.
+ test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
+ test_package: Test package name (e.g. 'ipc_tests' for gtests,
+ 'ContentShellTest' for instrumentation tests).
+ annotation: For instrumentation tests, a list of annotations
+ (e.g. ['Smoke', 'SmallTest']).
+ flakiness_server: If provided, upload the results to the flakiness
+ dashboard at this URL.
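+
+ A minimal usage sketch for a gtest suite (the server URL is hypothetical):
+ LogFull(results, 'gtest', 'ipc_tests',
+ flakiness_server='http://flakiness.example.com')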
+ """ + if not results.DidRunPass(): + logging.critical('*' * 80) + logging.critical('Detailed Logs') + logging.critical('*' * 80) + for line in results.GetLogs().splitlines(): + logging.critical(line) + logging.critical('*' * 80) + logging.critical('Summary') + logging.critical('*' * 80) + for line in results.GetGtestForm().splitlines(): + logging.critical(line) + logging.critical('*' * 80) + + if os.environ.get('BUILDBOT_BUILDERNAME'): + # It is possible to have multiple buildbot steps for the same + # instrumenation test package using different annotations. + if annotation and len(annotation) == 1: + suite_name = annotation[0] + else: + suite_name = test_package + _LogToFile(results, test_type, suite_name) + + if flakiness_server: + _LogToFlakinessDashboard(results, test_type, test_package, + flakiness_server) diff --git a/build/android/pylib/symbols/__init__.py b/build/android/pylib/symbols/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/build/android/pylib/symbols/elf_symbolizer.py b/build/android/pylib/symbols/elf_symbolizer.py new file mode 100644 index 00000000000..ed3fe5c738a --- /dev/null +++ b/build/android/pylib/symbols/elf_symbolizer.py @@ -0,0 +1,468 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import datetime +import logging +import multiprocessing +import os +import posixpath +import Queue +import re +import subprocess +import sys +import threading +import time + + +# addr2line builds a possibly infinite memory cache that can exhaust +# the computer's memory if allowed to grow for too long. This constant +# controls how many lookups we do before restarting the process. 4000 +# gives near peak performance without extreme memory usage. +ADDR2LINE_RECYCLE_LIMIT = 4000 + + +class ELFSymbolizer(object): + """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer. + + This class is a frontend for addr2line (part of GNU binutils), designed to + symbolize batches of large numbers of symbols for a given ELF file. It + supports sharding symbolization against many addr2line instances and + pipelining of multiple requests per each instance (in order to hide addr2line + internals and OS pipe latencies). + + The interface exhibited by this class is a very simple asynchronous interface, + which is based on the following three methods: + - SymbolizeAsync(): used to request (enqueue) resolution of a given address. + - The |callback| method: used to communicated back the symbol information. + - Join(): called to conclude the batch to gather the last outstanding results. + In essence, before the Join method returns, this class will have issued as + many callbacks as the number of SymbolizeAsync() calls. In this regard, note + that due to multiprocess sharding, callbacks can be delivered out of order. + + Some background about addr2line: + - it is invoked passing the elf path in the cmdline, piping the addresses in + its stdin and getting results on its stdout. + - it has pretty large response times for the first requests, but it + works very well in streaming mode once it has been warmed up. + - it doesn't scale by itself (on more cores). However, spawning multiple + instances at the same time on the same file is pretty efficient as they + keep hitting the pagecache and become mostly CPU bound. + - it might hang or crash, mostly for OOM. This class deals with both of these + problems. 
+
+ Despite the "scary" imports and the multi* words above, (almost) no multi-
+ threading/processing is involved from the Python viewpoint. Concurrency
+ here is achieved by spawning several addr2line subprocesses and handling their
+ output pipes asynchronously. Therefore, all the code here (with the exception
+ of the Queue instance in Addr2Line) should be free from mind-blowing
+ thread-safety concerns.
+
+ The multiprocess sharding works as follows:
+ The symbolizer tries to use as few addr2line instances as possible (bounded
+ by |max_concurrent_jobs|) and to enqueue all the requests in a single
+ addr2line instance. For a small number of symbols (i.e. dozens), sharding
+ isn't worth the startup cost.
+ The multiprocess logic kicks in as soon as the queues for the existing
+ instances grow. Specifically, once all the existing instances reach the
+ |max_queue_size| bound, a new addr2line instance is spawned.
+ In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances
+ have a backlog of |max_queue_size|), back-pressure is applied on the caller by
+ blocking the SymbolizeAsync method.
+
+ This module has been deliberately designed to be dependency free (w.r.t.
+ other modules in this project), to allow easy reuse in external projects.
+ """
+
+ def __init__(self, elf_file_path, addr2line_path, callback, inlines=False,
+ max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50,
+ source_root_path=None, strip_base_path=None):
+ """Args:
+ elf_file_path: path of the elf file to be symbolized.
+ addr2line_path: path of the toolchain's addr2line binary.
+ callback: a callback which will be invoked for each resolved symbol with
+ the two args (sym_info, callback_arg). The former is an instance of
+ |ELFSymbolInfo| and contains the symbol information. The latter is an
+ embedder-provided argument which is passed to SymbolizeAsync().
+ inlines: when True, the ELFSymbolInfo will also contain the details about
+ the outer inlining functions. When False, only the innermost function
+ will be provided.
+ max_concurrent_jobs: Max number of addr2line instances spawned.
+ Parallelize responsibly; addr2line is a memory and I/O monster.
+ max_queue_size: Max number of outstanding requests per addr2line instance.
+ addr2line_timeout: Max time (in seconds) to wait for an addr2line
+ response. After the timeout, the instance will be considered hung and
+ respawned.
+ source_root_path: In some toolchains only the name of the source file is
+ output, without any path information; disambiguation searches
+ through the source directory specified by the |source_root_path|
+ argument for files whose name matches, adding the full path
+ information to the output. For example, if the toolchain outputs
+ "unicode.cc" and there is a file called "unicode.cc" located under
+ |source_root_path|/foo, the tool will replace "unicode.cc" with
+ "|source_root_path|/foo/unicode.cc". If there are multiple files with
+ the same name, disambiguation will fail because the tool cannot
+ determine which of the files was the source of the symbol.
+ strip_base_path: Rebases the symbols source paths onto |source_root_path|
+ (i.e. replaces |strip_base_path| with |source_root_path|).
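+ For example (hypothetical paths), with |strip_base_path|='/bots/src'
+ and |source_root_path|='/local/src', a symbol path
+ '/bots/src/foo/bar.cc' is rewritten as '/local/src/foo/bar.cc'.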
+ """ + assert(os.path.isfile(addr2line_path)), 'Cannot find ' + addr2line_path + self.elf_file_path = elf_file_path + self.addr2line_path = addr2line_path + self.callback = callback + self.inlines = inlines + self.max_concurrent_jobs = (max_concurrent_jobs or + min(multiprocessing.cpu_count(), 4)) + self.max_queue_size = max_queue_size + self.addr2line_timeout = addr2line_timeout + self.requests_counter = 0 # For generating monotonic request IDs. + self._a2l_instances = [] # Up to |max_concurrent_jobs| _Addr2Line inst. + + # If necessary, create disambiguation lookup table + self.disambiguate = source_root_path is not None + self.disambiguation_table = {} + self.strip_base_path = strip_base_path + if self.disambiguate: + self.source_root_path = os.path.abspath(source_root_path) + self._CreateDisambiguationTable() + + # Create one addr2line instance. More instances will be created on demand + # (up to |max_concurrent_jobs|) depending on the rate of the requests. + self._CreateNewA2LInstance() + + def SymbolizeAsync(self, addr, callback_arg=None): + """Requests symbolization of a given address. + + This method is not guaranteed to return immediately. It generally does, but + in some scenarios (e.g. all addr2line instances have full queues) it can + block to create back-pressure. + + Args: + addr: address to symbolize. + callback_arg: optional argument which will be passed to the |callback|.""" + assert isinstance(addr, int) + + # Process all the symbols that have been resolved in the meanwhile. + # Essentially, this drains all the addr2line(s) out queues. + for a2l_to_purge in self._a2l_instances: + a2l_to_purge.ProcessAllResolvedSymbolsInQueue() + a2l_to_purge.RecycleIfNecessary() + + # Find the best instance according to this logic: + # 1. Find an existing instance with the shortest queue. + # 2. If all of instances' queues are full, but there is room in the pool, + # (i.e. < |max_concurrent_jobs|) create a new instance. + # 3. If there were already |max_concurrent_jobs| instances and all of them + # had full queues, make back-pressure. + + # 1. + def _SortByQueueSizeAndReqID(a2l): + return (a2l.queue_size, a2l.first_request_id) + a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID) + + # 2. + if (a2l.queue_size >= self.max_queue_size and + len(self._a2l_instances) < self.max_concurrent_jobs): + a2l = self._CreateNewA2LInstance() + + # 3. + if a2l.queue_size >= self.max_queue_size: + a2l.WaitForNextSymbolInQueue() + + a2l.EnqueueRequest(addr, callback_arg) + + def Join(self): + """Waits for all the outstanding requests to complete and terminates.""" + for a2l in self._a2l_instances: + a2l.WaitForIdle() + a2l.Terminate() + + def _CreateNewA2LInstance(self): + assert len(self._a2l_instances) < self.max_concurrent_jobs + a2l = ELFSymbolizer.Addr2Line(self) + self._a2l_instances.append(a2l) + return a2l + + def _CreateDisambiguationTable(self): + """ Non-unique file names will result in None entries""" + start_time = time.time() + logging.info('Collecting information about available source files...') + self.disambiguation_table = {} + + for root, _, filenames in os.walk(self.source_root_path): + for f in filenames: + self.disambiguation_table[f] = os.path.join(root, f) if (f not in + self.disambiguation_table) else None + logging.info('Finished collecting information about ' + 'possible files (took %.1f s).', + (time.time() - start_time)) + + + class Addr2Line(object): + """A python wrapper around an addr2line instance. 
+
+ The communication with the addr2line process looks as follows:
+ [STDIN] [STDOUT] (from addr2line's viewpoint)
+ > f001111
+ > f002222
+ < Symbol::Name(foo, bar) for f001111
+ < /path/to/source/file.c:line_number
+ > f003333
+ < Symbol::Name2() for f002222
+ < /path/to/source/file.c:line_number
+ < Symbol::Name3() for f003333
+ < /path/to/source/file.c:line_number
+ """
+
+ SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*')
+
+ def __init__(self, symbolizer):
+ self._symbolizer = symbolizer
+ self._lib_file_name = posixpath.basename(symbolizer.elf_file_path)
+
+ # The request queue (i.e. addresses pushed to addr2line's stdin and not
+ # yet retrieved on stdout).
+ self._request_queue = collections.deque()
+
+ # This is essentially len(self._request_queue). It has been optimized to a
+ # separate field because it turned out to be a perf hot-spot.
+ self.queue_size = 0
+
+ # Keep track of the number of symbols a process has processed to
+ # avoid a single process growing too big and using all the memory.
+ self._processed_symbols_count = 0
+
+ # Objects required to handle the addr2line subprocess.
+ self._proc = None # subprocess.Popen(...) instance.
+ self._thread = None # threading.Thread instance.
+ self._out_queue = None # Queue.Queue instance (for buffering a2l stdout).
+ self._RestartAddr2LineProcess()
+
+ def EnqueueRequest(self, addr, callback_arg):
+ """Pushes an address to addr2line's stdin (and keeps track of it)."""
+ self._symbolizer.requests_counter += 1 # For global "age" of requests.
+ req_idx = self._symbolizer.requests_counter
+ self._request_queue.append((addr, callback_arg, req_idx))
+ self.queue_size += 1
+ self._WriteToA2lStdin(addr)
+
+ def WaitForIdle(self):
+ """Waits until all the pending requests have been symbolized."""
+ while self.queue_size > 0:
+ self.WaitForNextSymbolInQueue()
+
+ def WaitForNextSymbolInQueue(self):
+ """Waits for the next pending request to be symbolized."""
+ if not self.queue_size:
+ return
+
+ # This outer loop guards against a2l hanging (detecting stdout timeout).
+ while True:
+ start_time = datetime.datetime.now()
+ timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout)
+
+ # The inner loop guards against a2l crashing (checking if it exited).
+ while datetime.datetime.now() - start_time < timeout:
+ # poll() returns the exit code (possibly 0) once the process has
+ # exited; addr2line should never exit on its own.
+ if self._proc.poll() is not None:
+ logging.warning('addr2line crashed, respawning (lib: %s).',
+ self._lib_file_name)
+ self._RestartAddr2LineProcess()
+ # TODO(primiano): the best thing to do in this case would be
+ # shrinking the pool size as, very likely, addr2line crashed
+ # due to low memory (and the respawned one will die again soon).
+
+ try:
+ lines = self._out_queue.get(block=True, timeout=0.25)
+ except Queue.Empty:
+ # On timeout (1/4 s.) repeat the inner loop and check whether the
+ # addr2line process crashed or we have been waiting for its output
+ # for too long.
+ continue
+
+ # In nominal conditions, we get straight to this point.
+ self._ProcessSymbolOutput(lines)
+ return
+
+ # If this point is reached, we waited more than |addr2line_timeout|.
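+ # Respawning is safe: _RestartAddr2LineProcess() re-sends every address
+ # still in |_request_queue| to the new process, so no request is lost.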
+ logging.warning('Hung addr2line process, respawning (lib: %s).', + self._lib_file_name) + self._RestartAddr2LineProcess() + + def ProcessAllResolvedSymbolsInQueue(self): + """Consumes all the addr2line output lines produced (without blocking).""" + if not self.queue_size: + return + while True: + try: + lines = self._out_queue.get_nowait() + except Queue.Empty: + break + self._ProcessSymbolOutput(lines) + + def RecycleIfNecessary(self): + """Restarts the process if it has been used for too long. + + A long running addr2line process will consume excessive amounts + of memory without any gain in performance.""" + if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT: + self._RestartAddr2LineProcess() + + + def Terminate(self): + """Kills the underlying addr2line process. + + The poller |_thread| will terminate as well due to the broken pipe.""" + try: + self._proc.kill() + self._proc.communicate() # Essentially wait() without risking deadlock. + except Exception: # pylint: disable=broad-except + # An exception while terminating? How interesting. + pass + self._proc = None + + def _WriteToA2lStdin(self, addr): + self._proc.stdin.write('%s\n' % hex(addr)) + if self._symbolizer.inlines: + # In the case of inlines we output an extra blank line, which causes + # addr2line to emit a (??,??:0) tuple that we use as a boundary marker. + self._proc.stdin.write('\n') + self._proc.stdin.flush() + + def _ProcessSymbolOutput(self, lines): + """Parses an addr2line symbol output and triggers the client callback.""" + (_, callback_arg, _) = self._request_queue.popleft() + self.queue_size -= 1 + + innermost_sym_info = None + sym_info = None + for (line1, line2) in lines: + prev_sym_info = sym_info + name = line1 if not line1.startswith('?') else None + source_path = None + source_line = None + m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2) + if m: + if not m.group(1).startswith('?'): + source_path = m.group(1) + if not m.group(2).startswith('?'): + source_line = int(m.group(2)) + else: + logging.warning('Got invalid symbol path from addr2line: %s', line2) + + # In case disambiguation is on, and needed + was_ambiguous = False + disambiguated = False + if self._symbolizer.disambiguate: + if source_path and not posixpath.isabs(source_path): + path = self._symbolizer.disambiguation_table.get(source_path) + was_ambiguous = True + disambiguated = path is not None + source_path = path if disambiguated else source_path + + # Use absolute paths (so that paths are consistent, as disambiguation + # uses absolute paths) + if source_path and not was_ambiguous: + source_path = os.path.abspath(source_path) + + if source_path and self._symbolizer.strip_base_path: + # Strip the base path + source_path = re.sub('^' + self._symbolizer.strip_base_path, + self._symbolizer.source_root_path or '', source_path) + + sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous, + disambiguated) + if prev_sym_info: + prev_sym_info.inlined_by = sym_info + if not innermost_sym_info: + innermost_sym_info = sym_info + + self._processed_symbols_count += 1 + self._symbolizer.callback(innermost_sym_info, callback_arg) + + def _RestartAddr2LineProcess(self): + if self._proc: + self.Terminate() + + # The only reason of existence of this Queue (and the corresponding + # Thread below) is the lack of a subprocess.stdout.poll_avail_lines(). + # Essentially this is a pipe able to extract a couple of lines atomically. + self._out_queue = Queue.Queue() + + # Start the underlying addr2line process in line buffered mode. 
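+ # The spawned command line will look like (path is hypothetical):
+ # addr2line --functions --demangle --exe=/path/to/libfoo.so [--inlines]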
+ cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle',
+ '--exe=' + self._symbolizer.elf_file_path]
+ if self._symbolizer.inlines:
+ cmd += ['--inlines']
+ self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
+ stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True)
+
+ # Start the poller thread, which simply moves atomically the lines read
+ # from the addr2line's stdout to the |_out_queue|.
+ self._thread = threading.Thread(
+ target=ELFSymbolizer.Addr2Line.StdoutReaderThread,
+ args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines))
+ self._thread.daemon = True # Don't prevent early process exit.
+ self._thread.start()
+
+ self._processed_symbols_count = 0
+
+ # Replay the pending requests on the new process (this only happens when
+ # a hung addr2line instance timed out and was respawned).
+ for (addr, _, _) in self._request_queue:
+ self._WriteToA2lStdin(addr)
+
+ @staticmethod
+ def StdoutReaderThread(process_pipe, queue, inlines):
+ """The poller thread function, which moves addr2line stdout to the |queue|.
+
+ This is the only piece of code not running on the main thread. It merely
+ writes to a Queue, which is thread-safe. In the case of inlines, it
+ detects the ??,??:0 marker and sends the lines atomically, such that the
+ main thread always receives all the lines corresponding to one symbol in
+ one shot."""
+ try:
+ lines_for_one_symbol = []
+ while True:
+ line1 = process_pipe.readline().rstrip('\r\n')
+ line2 = process_pipe.readline().rstrip('\r\n')
+ if not line1 or not line2:
+ break
+ inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or
+ (line1 != '??' and line2 != '??:0'))
+ if not inlines or inline_has_more_lines:
+ lines_for_one_symbol += [(line1, line2)]
+ if inline_has_more_lines:
+ continue
+ queue.put(lines_for_one_symbol)
+ lines_for_one_symbol = []
+ process_pipe.close()
+
+ # Every addr2line process will die at some point; let it die silently.
+ except (IOError, OSError):
+ pass
+
+ @property
+ def first_request_id(self):
+ """Returns the request_id of the oldest pending request in the queue."""
+ return self._request_queue[0][2] if self._request_queue else 0
+
+
+class ELFSymbolInfo(object):
+ """The result of a symbolization, passed as the first arg of each callback."""
+
+ def __init__(self, name, source_path, source_line, was_ambiguous=False,
+ disambiguated=False):
+ """All the fields here can be None (if addr2line replies with '??')."""
+ self.name = name
+ self.source_path = source_path
+ self.source_line = source_line
+ # In the case of |inlines|=True, |inlined_by| points to the outer
+ # function inlining the current one (and so on, to form a chain).
+ self.inlined_by = None
+ self.disambiguated = disambiguated
+ self.was_ambiguous = was_ambiguous
+
+ def __str__(self):
+ return '%s [%s:%d]' % (
+ self.name or '??', self.source_path or '??', self.source_line or 0)
diff --git a/build/android/pylib/symbols/elf_symbolizer_unittest.py b/build/android/pylib/symbols/elf_symbolizer_unittest.py
new file mode 100644
index 00000000000..1d95b15ca91
--- /dev/null
+++ b/build/android/pylib/symbols/elf_symbolizer_unittest.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import unittest
+
+from pylib.symbols import elf_symbolizer
+from pylib.symbols import mock_addr2line
+
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+ 'mock_addr2line')
+_INCOMPLETE_MOCK_ADDR = 1024 * 1024
+_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024
+_INLINE_MOCK_ADDR = 3 * 1024 * 1024
+
+
+class ELFSymbolizerTest(unittest.TestCase):
+ def setUp(self):
+ self._callback = functools.partial(
+ ELFSymbolizerTest._SymbolizeCallback, self)
+ self._resolved_addresses = set()
+ # Mute warnings; we expect them due to the crash/hang tests.
+ logging.getLogger().setLevel(logging.ERROR)
+
+ def testParallelism1(self):
+ self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+
+ def testParallelism4(self):
+ self._RunTest(max_concurrent_jobs=4, num_symbols=100)
+
+ def testParallelism8(self):
+ self._RunTest(max_concurrent_jobs=8, num_symbols=100)
+
+ def testCrash(self):
+ os.environ['MOCK_A2L_CRASH_EVERY'] = '99'
+ self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+ os.environ['MOCK_A2L_CRASH_EVERY'] = '0'
+
+ def testHang(self):
+ os.environ['MOCK_A2L_HANG_EVERY'] = '99'
+ self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+ os.environ['MOCK_A2L_HANG_EVERY'] = '0'
+
+ def testInlines(self):
+ """Exercises the inline processing logic."""
+ symbolizer = elf_symbolizer.ELFSymbolizer(
+ elf_file_path='/path/doesnt/matter/mock_lib1.so',
+ addr2line_path=_MOCK_A2L_PATH,
+ callback=self._callback,
+ inlines=True,
+ max_concurrent_jobs=4)
+
+ for addr in xrange(1000):
+ exp_inline = False
+ exp_unknown = False
+
+ # First 100 addresses with inlines.
+ if addr < 100:
+ addr += _INLINE_MOCK_ADDR
+ exp_inline = True
+
+ # Followed by 100 without inlines.
+ elif addr < 200:
+ pass
+
+ # Followed by 100 interleaved inlines and non-inlines.
+ elif addr < 300:
+ if addr & 1:
+ addr += _INLINE_MOCK_ADDR
+ exp_inline = True
+
+ # Followed by 100 interleaved inlines and unknown.
+ elif addr < 400:
+ if addr & 1:
+ addr += _INLINE_MOCK_ADDR
+ exp_inline = True
+ else:
+ addr += _UNKNOWN_MOCK_ADDR
+ exp_unknown = True
+
+ exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None
+ exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None
+ exp_source_line = addr if not exp_unknown else None
+ cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline)
+ symbolizer.SymbolizeAsync(addr, cb_arg)
+
+ symbolizer.Join()
+
+ def testIncompleteSyminfo(self):
+ """Exercises the symbol-not-resolved logic."""
+ symbolizer = elf_symbolizer.ELFSymbolizer(
+ elf_file_path='/path/doesnt/matter/mock_lib1.so',
+ addr2line_path=_MOCK_A2L_PATH,
+ callback=self._callback,
+ max_concurrent_jobs=1)
+
+ # Test symbols with valid name but incomplete path.
+ addr = _INCOMPLETE_MOCK_ADDR
+ exp_name = 'mock_sym_for_addr_%d' % addr
+ exp_source_path = None
+ exp_source_line = None
+ cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+ symbolizer.SymbolizeAsync(addr, cb_arg)
+
+ # Test symbols with no name or sym info.
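+ # (For addresses at or above _UNKNOWN_MOCK_ADDR, mock_addr2line replies
+ # with '??' for both the symbol name and the source location.)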
+ addr = _UNKNOWN_MOCK_ADDR + exp_name = None + exp_source_path = None + exp_source_line = None + cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False) + symbolizer.SymbolizeAsync(addr, cb_arg) + + symbolizer.Join() + + def _RunTest(self, max_concurrent_jobs, num_symbols): + symbolizer = elf_symbolizer.ELFSymbolizer( + elf_file_path='/path/doesnt/matter/mock_lib1.so', + addr2line_path=_MOCK_A2L_PATH, + callback=self._callback, + max_concurrent_jobs=max_concurrent_jobs, + addr2line_timeout=0.5) + + for addr in xrange(num_symbols): + exp_name = 'mock_sym_for_addr_%d' % addr + exp_source_path = 'mock_src/mock_lib1.so.c' + exp_source_line = addr + cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False) + symbolizer.SymbolizeAsync(addr, cb_arg) + + symbolizer.Join() + + # Check that all the expected callbacks have been received. + for addr in xrange(num_symbols): + self.assertIn(addr, self._resolved_addresses) + self._resolved_addresses.remove(addr) + + # Check for unexpected callbacks. + self.assertEqual(len(self._resolved_addresses), 0) + + def _SymbolizeCallback(self, sym_info, cb_arg): + self.assertTrue(isinstance(sym_info, elf_symbolizer.ELFSymbolInfo)) + self.assertTrue(isinstance(cb_arg, tuple)) + self.assertEqual(len(cb_arg), 5) + + # Unpack expectations from the callback extra argument. + (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg + if exp_name is None: + self.assertIsNone(sym_info.name) + else: + self.assertTrue(sym_info.name.startswith(exp_name)) + self.assertEqual(sym_info.source_path, exp_source_path) + self.assertEqual(sym_info.source_line, exp_source_line) + + if exp_inlines: + self.assertEqual(sym_info.name, exp_name + '_inner') + self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle') + self.assertEqual(sym_info.inlined_by.inlined_by.name, + exp_name + '_outer') + + # Check against duplicate callbacks. + self.assertNotIn(addr, self._resolved_addresses) + self._resolved_addresses.add(addr) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/android/pylib/symbols/mock_addr2line/__init__.py b/build/android/pylib/symbols/mock_addr2line/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/build/android/pylib/symbols/mock_addr2line/mock_addr2line new file mode 100644 index 00000000000..cd58f56d576 --- /dev/null +++ b/build/android/pylib/symbols/mock_addr2line/mock_addr2line @@ -0,0 +1,79 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Simple mock for addr2line. + +Outputs mock symbol information, with each symbol being a function of the +original address (so it is easy to double-check consistency in unittests). +""" + +import optparse +import os +import posixpath +import sys +import time + + +def main(argv): + parser = optparse.OptionParser() + parser.add_option('-e', '--exe', dest='exe') # Path of the debug-library.so. + # Silently swallow the other unnecessary arguments. 
+ parser.add_option('-C', '--demangle', action='store_true') + parser.add_option('-f', '--functions', action='store_true') + parser.add_option('-i', '--inlines', action='store_true') + options, _ = parser.parse_args(argv[1:]) + lib_file_name = posixpath.basename(options.exe) + processed_sym_count = 0 + crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0)) + hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0)) + + while(True): + line = sys.stdin.readline().rstrip('\r') + if not line: + break + + # An empty line should generate '??,??:0' (is used as marker for inlines). + if line == '\n': + print '??' + print '??:0' + sys.stdout.flush() + continue + + addr = int(line, 16) + processed_sym_count += 1 + if crash_every and processed_sym_count % crash_every == 0: + sys.exit(1) + if hang_every and processed_sym_count % hang_every == 0: + time.sleep(1) + + # Addresses < 1M will return good mock symbol information. + if addr < 1024 * 1024: + print 'mock_sym_for_addr_%d' % addr + print 'mock_src/%s.c:%d' % (lib_file_name, addr) + + # Addresses 1M <= x < 2M will return symbols with a name but a missing path. + elif addr < 2 * 1024 * 1024: + print 'mock_sym_for_addr_%d' % addr + print '??:0' + + # Addresses 2M <= x < 3M will return unknown symbol information. + elif addr < 3 * 1024 * 1024: + print '??' + print '??' + + # Addresses 3M <= x < 4M will return inlines. + elif addr < 4 * 1024 * 1024: + print 'mock_sym_for_addr_%d_inner' % addr + print 'mock_src/%s.c:%d' % (lib_file_name, addr) + print 'mock_sym_for_addr_%d_middle' % addr + print 'mock_src/%s.c:%d' % (lib_file_name, addr) + print 'mock_sym_for_addr_%d_outer' % addr + print 'mock_src/%s.c:%d' % (lib_file_name, addr) + + sys.stdout.flush() + + +if __name__ == '__main__': + main(sys.argv) \ No newline at end of file diff --git a/build/android/pylib/uirobot/__init__.py b/build/android/pylib/uirobot/__init__.py new file mode 100644 index 00000000000..5cac026153c --- /dev/null +++ b/build/android/pylib/uirobot/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + diff --git a/build/android/pylib/uirobot/uirobot_test_instance.py b/build/android/pylib/uirobot/uirobot_test_instance.py new file mode 100644 index 00000000000..1891ab7782b --- /dev/null +++ b/build/android/pylib/uirobot/uirobot_test_instance.py @@ -0,0 +1,77 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import logging + +from devil.android import apk_helper +from pylib.base import test_instance + +class UirobotTestInstance(test_instance.TestInstance): + + def __init__(self, args, error_func): + """Constructor. + + Args: + args: Command line arguments. 
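+ error_func: A callable invoked with an error message when a required
+ argument is missing.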
+ """ + super(UirobotTestInstance, self).__init__() + if not args.app_under_test: + error_func('Must set --app-under-test.') + self._app_under_test = args.app_under_test + self._minutes = args.minutes + + if args.remote_device_file: + with open(args.remote_device_file) as remote_device_file: + device_json = json.load(remote_device_file) + else: + device_json = {} + device_type = device_json.get('device_type', 'Android') + if args.device_type: + if device_type and device_type != args.device_type: + logging.info('Overriding device_type from %s to %s', + device_type, args.device_type) + device_type = args.device_type + + if device_type == 'Android': + self._suite = 'Android Uirobot' + self._package_name = apk_helper.GetPackageName(self._app_under_test) + elif device_type == 'iOS': + self._suite = 'iOS Uirobot' + self._package_name = self._app_under_test + + + #override + def TestType(self): + """Returns type of test.""" + return 'uirobot' + + #override + def SetUp(self): + """Setup for test.""" + pass + + #override + def TearDown(self): + """Teardown for test.""" + pass + + @property + def app_under_test(self): + """Returns the app to run the test on.""" + return self._app_under_test + + @property + def minutes(self): + """Returns the number of minutes to run the uirobot for.""" + return self._minutes + + @property + def package_name(self): + """Returns the name of the package in the APK.""" + return self._package_name + + @property + def suite(self): + return self._suite diff --git a/build/android/pylib/utils/__init__.py b/build/android/pylib/utils/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/build/android/pylib/utils/argparse_utils.py b/build/android/pylib/utils/argparse_utils.py new file mode 100644 index 00000000000..e456d9ddab0 --- /dev/null +++ b/build/android/pylib/utils/argparse_utils.py @@ -0,0 +1,50 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse + + +class CustomHelpAction(argparse.Action): + '''Allows defining custom help actions. + + Help actions can run even when the parser would otherwise fail on missing + arguments. The first help or custom help command mentioned on the command + line will have its help text displayed. + + Usage: + parser = argparse.ArgumentParser(...) 
+ CustomHelpAction.EnableFor(parser)
+ parser.add_argument('--foo-help',
+ action='custom_help',
+ custom_help_text='this is the help message',
+ help='What this helps with')
+ '''
+ # Derived from argparse._HelpAction from
+ # https://github.com/python/cpython/blob/master/Lib/argparse.py
+
+ # pylint: disable=redefined-builtin
+ # (complains about 'help' being redefined)
+ def __init__(self,
+ option_strings,
+ dest=argparse.SUPPRESS,
+ default=argparse.SUPPRESS,
+ custom_help_text=None,
+ help=None):
+ super(CustomHelpAction, self).__init__(option_strings=option_strings,
+ dest=dest,
+ default=default,
+ nargs=0,
+ help=help)
+
+ if not custom_help_text:
+ raise ValueError('custom_help_text is required')
+ self._help_text = custom_help_text
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ print self._help_text
+ parser.exit()
+
+ @staticmethod
+ def EnableFor(parser):
+ parser.register('action', 'custom_help', CustomHelpAction)
diff --git a/build/android/pylib/utils/command_option_parser.py b/build/android/pylib/utils/command_option_parser.py
new file mode 100644
index 00000000000..cf501d09df5
--- /dev/null
+++ b/build/android/pylib/utils/command_option_parser.py
@@ -0,0 +1,75 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An option parser which handles the first arg as a command.
+
+Adds other nice functionality, such as printing a list of commands
+and an example in the usage message.
+"""
+
+import optparse
+import sys
+
+
+class CommandOptionParser(optparse.OptionParser):
+ """Wrapper class for OptionParser to help with listing commands."""
+
+ def __init__(self, *args, **kwargs):
+ """Creates a CommandOptionParser.
+
+ Args:
+ commands_dict: A dictionary mapping command strings to an object defining
+ - add_options_func: Adds options to the option parser
+ - run_command_func: Runs the command itself.
+ example: An example command.
+ everything else: Passed to the optparse.OptionParser constructor.
+ """
+ self.commands_dict = kwargs.pop('commands_dict', {})
+ self.example = kwargs.pop('example', '')
+ if 'usage' not in kwargs:
+ kwargs['usage'] = 'Usage: %prog [options]'
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ #override
+ def get_usage(self):
+ normal_usage = optparse.OptionParser.get_usage(self)
+ command_list = self.get_command_list()
+ example = self.get_example()
+ return self.expand_prog_name(normal_usage + example + command_list)
+
+ def get_command_list(self):
+ if self.commands_dict.keys():
+ return '\nCommands:\n %s\n' % '\n '.join(
+ sorted(self.commands_dict.keys()))
+ return ''
+
+ def get_example(self):
+ if self.example:
+ return '\nExample:\n %s\n' % self.example
+ return ''
+
+
+def ParseAndExecute(option_parser, argv=None):
+ """Parses options/args from argv and runs the specified command.
+
+ Args:
+ option_parser: A CommandOptionParser object.
+ argv: Command line arguments. If None, automatically drawn from sys.argv.
+
+ Returns:
+ An exit code.
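+
+ Example (run_cmd is a hypothetical command object providing
+ add_options_func and run_command_func):
+ parser = CommandOptionParser(commands_dict={'run': run_cmd},
+ example='%prog run --verbose')
+ sys.exit(ParseAndExecute(parser))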
+ """ + if not argv: + argv = sys.argv + + if len(argv) < 2 or argv[1] not in option_parser.commands_dict: + # Parse args first, if this is '--help', optparse will print help and exit + option_parser.parse_args(argv) + option_parser.error('Invalid command.') + + cmd = option_parser.commands_dict[argv[1]] + cmd.add_options_func(option_parser) + options, args = option_parser.parse_args(argv) + return cmd.run_command_func(argv[1], options, args, option_parser) diff --git a/build/android/pylib/utils/emulator.py b/build/android/pylib/utils/emulator.py new file mode 100644 index 00000000000..e2a5fea35df --- /dev/null +++ b/build/android/pylib/utils/emulator.py @@ -0,0 +1,520 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Provides an interface to start and stop Android emulator. + + Emulator: The class provides the methods to launch/shutdown the emulator with + the android virtual device named 'avd_armeabi' . +""" + +import logging +import os +import signal +import subprocess +import time + +from devil.android import device_errors +from devil.android import device_utils +from devil.android.sdk import adb_wrapper +from devil.utils import cmd_helper +from pylib import constants +from pylib import pexpect +from pylib.utils import time_profile + +# Default sdcard size in the format of [amount][unit] +DEFAULT_SDCARD_SIZE = '512M' +# Default internal storage (MB) of emulator image +DEFAULT_STORAGE_SIZE = '1024M' + +# Each emulator has 60 secs of wait time for launching +_BOOT_WAIT_INTERVALS = 6 +_BOOT_WAIT_INTERVAL_TIME = 10 + +# Path for avd files and avd dir +_BASE_AVD_DIR = os.path.expanduser(os.path.join('~', '.android', 'avd')) +_TOOLS_ANDROID_PATH = os.path.join(constants.ANDROID_SDK_ROOT, + 'tools', 'android') + +# Template used to generate config.ini files for the emulator +CONFIG_TEMPLATE = """avd.ini.encoding=ISO-8859-1 +hw.dPad=no +hw.lcd.density=320 +sdcard.size={sdcard.size} +hw.cpu.arch={hw.cpu.arch} +hw.device.hash=-708107041 +hw.camera.back=none +disk.dataPartition.size=800M +hw.gpu.enabled={gpu} +skin.path=720x1280 +skin.dynamic=yes +hw.keyboard=yes +hw.ramSize=1024 +hw.device.manufacturer=Google +hw.sdCard=yes +hw.mainKeys=no +hw.accelerometer=yes +skin.name=720x1280 +abi.type={abi.type} +hw.trackBall=no +hw.device.name=Galaxy Nexus +hw.battery=yes +hw.sensors.proximity=yes +image.sysdir.1=system-images/android-{api.level}/default/{abi.type}/ +hw.sensors.orientation=yes +hw.audioInput=yes +hw.camera.front=none +hw.gps=yes +vm.heapSize=128 +{extras}""" + +CONFIG_REPLACEMENTS = { + 'x86': { + '{hw.cpu.arch}': 'x86', + '{abi.type}': 'x86', + '{extras}': '' + }, + 'arm': { + '{hw.cpu.arch}': 'arm', + '{abi.type}': 'armeabi-v7a', + '{extras}': 'hw.cpu.model=cortex-a8\n' + }, + 'mips': { + '{hw.cpu.arch}': 'mips', + '{abi.type}': 'mips', + '{extras}': '' + } +} + +class EmulatorLaunchException(Exception): + """Emulator failed to launch.""" + pass + +def WaitForEmulatorLaunch(num): + """Wait for emulators to finish booting + + Emulators on bots are launch with a separate background process, to avoid + running tests before the emulators are fully booted, this function waits for + a number of emulators to finish booting + + Arg: + num: the amount of emulators to wait. 
+ """ + for _ in range(num*_BOOT_WAIT_INTERVALS): + emulators = [device_utils.DeviceUtils(a) + for a in adb_wrapper.AdbWrapper.Devices() + if a.is_emulator] + if len(emulators) >= num: + logging.info('All %d emulators launched', num) + return + logging.info( + 'Waiting for %d emulators, %d of them already launched', num, + len(emulators)) + time.sleep(_BOOT_WAIT_INTERVAL_TIME) + raise Exception("Expected %d emulators, %d launched within time limit" % + (num, len(emulators))) + +def KillAllEmulators(): + """Kill all running emulators that look like ones we started. + + There are odd 'sticky' cases where there can be no emulator process + running but a device slot is taken. A little bot trouble and we're out of + room forever. + """ + logging.info('Killing all existing emulators and existing the program') + emulators = [device_utils.DeviceUtils(a) + for a in adb_wrapper.AdbWrapper.Devices() + if a.is_emulator] + if not emulators: + return + for e in emulators: + e.adb.Emu(['kill']) + logging.info('Emulator killing is async; give a few seconds for all to die.') + for _ in range(10): + if not any(a.is_emulator for a in adb_wrapper.AdbWrapper.Devices()): + return + time.sleep(1) + + +def DeleteAllTempAVDs(): + """Delete all temporary AVDs which are created for tests. + + If the test exits abnormally and some temporary AVDs created when testing may + be left in the system. Clean these AVDs. + """ + logging.info('Deleting all the avd files') + avds = device_utils.GetAVDs() + if not avds: + return + for avd_name in avds: + if 'run_tests_avd' in avd_name: + cmd = [_TOOLS_ANDROID_PATH, '-s', 'delete', 'avd', '--name', avd_name] + cmd_helper.RunCmd(cmd) + logging.info('Delete AVD %s', avd_name) + + +class PortPool(object): + """Pool for emulator port starting position that changes over time.""" + _port_min = 5554 + _port_max = 5585 + _port_current_index = 0 + + @classmethod + def port_range(cls): + """Return a range of valid ports for emulator use. + + The port must be an even number between 5554 and 5584. Sometimes + a killed emulator "hangs on" to a port long enough to prevent + relaunch. This is especially true on slow machines (like a bot). + Cycling through a port start position helps make us resilient.""" + ports = range(cls._port_min, cls._port_max, 2) + n = cls._port_current_index + cls._port_current_index = (n + 1) % len(ports) + return ports[n:] + ports[:n] + + +def _GetAvailablePort(): + """Returns an available TCP port for the console.""" + used_ports = [] + emulators = [device_utils.DeviceUtils(a) + for a in adb_wrapper.AdbWrapper.Devices() + if a.is_emulator] + for emulator in emulators: + used_ports.append(emulator.adb.GetDeviceSerial().split('-')[1]) + for port in PortPool.port_range(): + if str(port) not in used_ports: + return port + + +def LaunchTempEmulators(emulator_count, abi, api_level, enable_kvm=False, + kill_and_launch=True, sdcard_size=DEFAULT_SDCARD_SIZE, + storage_size=DEFAULT_STORAGE_SIZE, wait_for_boot=True, + headless=False): + """Create and launch temporary emulators and wait for them to boot. + + Args: + emulator_count: number of emulators to launch. + abi: the emulator target platform + api_level: the api level (e.g., 19 for Android v4.4 - KitKat release) + wait_for_boot: whether or not to wait for emulators to boot up + headless: running emulator with no ui + + Returns: + List of emulators. + """ + emulators = [] + for n in xrange(emulator_count): + t = time_profile.TimeProfile('Emulator launch %d' % n) + # Creates a temporary AVD. 
+ avd_name = 'run_tests_avd_%d' % n
+ logging.info('Emulator launch %d with avd_name=%s and api=%d',
+ n, avd_name, api_level)
+ emulator = Emulator(avd_name, abi, enable_kvm=enable_kvm,
+ sdcard_size=sdcard_size, storage_size=storage_size,
+ headless=headless)
+ emulator.CreateAVD(api_level)
+ emulator.Launch(kill_all_emulators=(n == 0 and kill_and_launch))
+ t.Stop()
+ emulators.append(emulator)
+ # Wait for all emulators to finish booting.
+ if wait_for_boot:
+ for emulator in emulators:
+ emulator.ConfirmLaunch(True)
+ logging.info('All emulators are fully booted')
+ return emulators
+
+
+def LaunchEmulator(avd_name, abi, kill_and_launch=True, enable_kvm=False,
+ sdcard_size=DEFAULT_SDCARD_SIZE,
+ storage_size=DEFAULT_STORAGE_SIZE, headless=False):
+ """Launch an existing emulator with the given avd_name.
+
+ Args:
+ avd_name: name of the existing emulator.
+ abi: the emulator target platform.
+ kill_and_launch: whether to kill existing emulators before launching.
+ enable_kvm: whether to enable KVM acceleration (x86 only).
+ sdcard_size: the sdcard size, in the format of [amount][unit].
+ storage_size: the internal storage size, in the format of [amount][unit].
+ headless: run the emulator with no UI.
+
+ Returns:
+ emulator object.
+ """
+ logging.info('Launching specified emulator named avd_name=%s', avd_name)
+ emulator = Emulator(avd_name, abi, enable_kvm=enable_kvm,
+ sdcard_size=sdcard_size, storage_size=storage_size,
+ headless=headless)
+ emulator.Launch(kill_all_emulators=kill_and_launch)
+ emulator.ConfirmLaunch(True)
+ return emulator
+
+
+class Emulator(object):
+ """Provides the methods to launch/shutdown the emulator.
+
+ The emulator has the Android virtual device named 'avd_armeabi'.
+
+ The emulator uses any even TCP port between 5554 and 5584 for the console
+ communication; this port becomes part of the device name, e.g.
+ 'emulator-5554', which serves as the id of the emulator managed by this
+ class.
+
+ Attributes:
+ emulator: Path of Android's emulator tool.
+ popen: Popen object of the running emulator process.
+ device: Device name of this emulator.
+ """
+
+ # Signals on which we kill the emulator.
+ _SIGNALS = (signal.SIGINT, signal.SIGHUP)
+
+ # Time to wait for an emulator launch, in seconds. This includes
+ # the time to launch the emulator and a wait-for-device command.
+ _LAUNCH_TIMEOUT = 120
+
+ # Timeout interval of wait-for-device command before bouncing to a
+ # process life check.
+ _WAITFORDEVICE_TIMEOUT = 5
+
+ # Time to wait for a 'wait for boot complete' (property set on device).
+ _WAITFORBOOT_TIMEOUT = 300
+
+ def __init__(self, avd_name, abi, enable_kvm=False,
+ sdcard_size=DEFAULT_SDCARD_SIZE,
+ storage_size=DEFAULT_STORAGE_SIZE, headless=False):
+ """Init an Emulator.
+
+ Args:
+ avd_name: name of the AVD to create.
+ abi: target platform for the emulator being created; anything other
+ than 'arm' or 'mips' is treated as x86.
+ """
+ android_sdk_root = constants.ANDROID_SDK_ROOT
+ self.emulator = os.path.join(android_sdk_root, 'tools', 'emulator')
+ self.android = _TOOLS_ANDROID_PATH
+ self.popen = None
+ self.device_serial = None
+ self.abi = abi
+ self.avd_name = avd_name
+ self.sdcard_size = sdcard_size
+ self.storage_size = storage_size
+ self.enable_kvm = enable_kvm
+ self.headless = headless
+
+ @staticmethod
+ def _DeviceName():
+ """Return our device name."""
+ port = _GetAvailablePort()
+ return ('emulator-%d' % port, port)
+
+ def CreateAVD(self, api_level):
+ """Creates an AVD with the given name.
+
+ Args:
+ api_level: the api level of the image.
+
+ Returns:
+ avd_name.
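+
+ Note: the AVD is created with --force, so an existing AVD with the
+ same name is overwritten.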
+ """ + + if self.abi == 'arm': + abi_option = 'armeabi-v7a' + elif self.abi == 'mips': + abi_option = 'mips' + else: + abi_option = 'x86' + + api_target = 'android-%s' % api_level + + avd_command = [ + self.android, + '--silent', + 'create', 'avd', + '--name', self.avd_name, + '--abi', abi_option, + '--target', api_target, + '--sdcard', self.sdcard_size, + '--force', + ] + avd_cmd_str = ' '.join(avd_command) + logging.info('Create AVD command: %s', avd_cmd_str) + avd_process = pexpect.spawn(avd_cmd_str) + + # Instead of creating a custom profile, we overwrite config files. + avd_process.expect('Do you wish to create a custom hardware profile') + avd_process.sendline('no\n') + avd_process.expect('Created AVD \'%s\'' % self.avd_name) + + # Replace current configuration with default Galaxy Nexus config. + ini_file = os.path.join(_BASE_AVD_DIR, '%s.ini' % self.avd_name) + new_config_ini = os.path.join(_BASE_AVD_DIR, '%s.avd' % self.avd_name, + 'config.ini') + + # Remove config files with defaults to replace with Google's GN settings. + os.unlink(ini_file) + os.unlink(new_config_ini) + + # Create new configuration files with Galaxy Nexus by Google settings. + with open(ini_file, 'w') as new_ini: + new_ini.write('avd.ini.encoding=ISO-8859-1\n') + new_ini.write('target=%s\n' % api_target) + new_ini.write('path=%s/%s.avd\n' % (_BASE_AVD_DIR, self.avd_name)) + new_ini.write('path.rel=avd/%s.avd\n' % self.avd_name) + + custom_config = CONFIG_TEMPLATE + replacements = CONFIG_REPLACEMENTS[self.abi] + for key in replacements: + custom_config = custom_config.replace(key, replacements[key]) + custom_config = custom_config.replace('{api.level}', str(api_level)) + custom_config = custom_config.replace('{sdcard.size}', self.sdcard_size) + custom_config.replace('{gpu}', 'no' if self.headless else 'yes') + + with open(new_config_ini, 'w') as new_config_ini: + new_config_ini.write(custom_config) + + return self.avd_name + + + def _DeleteAVD(self): + """Delete the AVD of this emulator.""" + avd_command = [ + self.android, + '--silent', + 'delete', + 'avd', + '--name', self.avd_name, + ] + logging.info('Delete AVD command: %s', ' '.join(avd_command)) + cmd_helper.RunCmd(avd_command) + + def ResizeAndWipeAvd(self, storage_size): + """Wipes old AVD and creates new AVD of size |storage_size|. + + This serves as a work around for '-partition-size' and '-wipe-data' + """ + userdata_img = os.path.join(_BASE_AVD_DIR, '%s.avd' % self.avd_name, + 'userdata.img') + userdata_qemu_img = os.path.join(_BASE_AVD_DIR, '%s.avd' % self.avd_name, + 'userdata-qemu.img') + resize_cmd = ['resize2fs', userdata_img, '%s' % storage_size] + logging.info('Resizing userdata.img to ideal size') + cmd_helper.RunCmd(resize_cmd) + wipe_cmd = ['cp', userdata_img, userdata_qemu_img] + logging.info('Replacing userdata-qemu.img with the new userdata.img') + cmd_helper.RunCmd(wipe_cmd) + + def Launch(self, kill_all_emulators): + """Launches the emulator asynchronously. Call ConfirmLaunch() to ensure the + emulator is ready for use. + + If fails, an exception will be raised. + """ + if kill_all_emulators: + KillAllEmulators() # just to be sure + self._AggressiveImageCleanup() + (self.device_serial, port) = self._DeviceName() + self.ResizeAndWipeAvd(storage_size=self.storage_size) + emulator_command = [ + self.emulator, + # Speed up emulator launch by 40%. Really. 
+        '-no-boot-anim',
+    ]
+    if self.headless:
+      emulator_command.extend([
+          '-no-skin',
+          '-no-audio',
+          '-no-window'
+      ])
+    else:
+      emulator_command.extend([
+          '-gpu', 'on'
+      ])
+    emulator_command.extend([
+        # Use a familiar name and port.
+        '-avd', self.avd_name,
+        '-port', str(port),
+        # All arguments after '-qemu' are sub-arguments for qemu.
+        '-qemu', '-m', '1024',
+    ])
+    if self.abi == 'x86' and self.enable_kvm:
+      emulator_command.extend([
+          # For x86 emulator --enable-kvm will fail early, avoiding accidental
+          # runs in a slow mode (i.e. without hardware virtualization support).
+          '--enable-kvm',
+      ])
+
+    logging.info('Emulator launch command: %s', ' '.join(emulator_command))
+    self.popen = subprocess.Popen(args=emulator_command,
+                                  stderr=subprocess.STDOUT)
+    self._InstallKillHandler()
+
+  @staticmethod
+  def _AggressiveImageCleanup():
+    """Aggressive cleanup of emulator images.
+
+    Experimentally it looks like our current emulator use on the bot
+    leaves image files around in /tmp/android-$USER. If a "random"
+    name gets reused, we choke with a 'File exists' error.
+    TODO(jrg): is there a less hacky way to accomplish the same goal?
+    """
+    logging.info('Aggressive Image Cleanup')
+    emulator_imagedir = '/tmp/android-%s' % os.environ['USER']
+    if not os.path.exists(emulator_imagedir):
+      return
+    for image in os.listdir(emulator_imagedir):
+      full_name = os.path.join(emulator_imagedir, image)
+      if 'emulator' in full_name:
+        logging.info('Deleting emulator image %s', full_name)
+        os.unlink(full_name)
+
+  def ConfirmLaunch(self, wait_for_boot=False):
+    """Confirms that the emulator launched properly.
+
+    Loop on a wait-for-device with a very small timeout. On each
+    timeout, check that the emulator process is still alive. After
+    confirming that a wait-for-device can succeed, make sure it
+    returns the right answer.
+    """
+    seconds_waited = 0
+    number_of_waits = 2  # Make sure we can wait-for-device twice.
+
+    device = device_utils.DeviceUtils(self.device_serial)
+    while seconds_waited < self._LAUNCH_TIMEOUT:
+      try:
+        device.adb.WaitForDevice(
+            timeout=self._WAITFORDEVICE_TIMEOUT, retries=1)
+        number_of_waits -= 1
+        if not number_of_waits:
+          break
+      except device_errors.CommandTimeoutError:
+        seconds_waited += self._WAITFORDEVICE_TIMEOUT
+        device.adb.KillServer()
+      self.popen.poll()
+      if self.popen.returncode is not None:
+        raise EmulatorLaunchException('EMULATOR DIED')
+
+    if seconds_waited >= self._LAUNCH_TIMEOUT:
+      raise EmulatorLaunchException('TIMEOUT with wait-for-device')
+
+    logging.info('Seconds waited on wait-for-device: %d', seconds_waited)
+    if wait_for_boot:
+      # Now that we checked for obvious problems, wait for a boot complete.
+      # Waiting for the package manager is sometimes problematic.
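+      # WaitUntilFullyBooted (from devil) polls until the boot-completed
+      # property is set and the package manager responds, raising
+      # CommandTimeoutError if the device is not ready in time.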
+ device.WaitUntilFullyBooted(timeout=self._WAITFORBOOT_TIMEOUT) + logging.info('%s is now fully booted', self.avd_name) + + def Shutdown(self): + """Shuts down the process started by launch.""" + self._DeleteAVD() + if self.popen: + self.popen.poll() + if self.popen.returncode == None: + self.popen.kill() + self.popen = None + + def _ShutdownOnSignal(self, _signum, _frame): + logging.critical('emulator _ShutdownOnSignal') + for sig in self._SIGNALS: + signal.signal(sig, signal.SIG_DFL) + self.Shutdown() + raise KeyboardInterrupt # print a stack + + def _InstallKillHandler(self): + """Install a handler to kill the emulator when we exit unexpectedly.""" + for sig in self._SIGNALS: + signal.signal(sig, self._ShutdownOnSignal) diff --git a/build/android/pylib/utils/findbugs.py b/build/android/pylib/utils/findbugs.py new file mode 100644 index 00000000000..04568938ee3 --- /dev/null +++ b/build/android/pylib/utils/findbugs.py @@ -0,0 +1,155 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import logging +import os +import xml.dom.minidom + +from devil.utils import cmd_helper +from pylib import constants +from pylib.constants import host_paths + + +_FINDBUGS_HOME = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', + 'findbugs') +_FINDBUGS_JAR = os.path.join(_FINDBUGS_HOME, 'lib', 'findbugs.jar') +_FINDBUGS_MAX_HEAP = 768 +_FINDBUGS_PLUGIN_PATH = os.path.join( + host_paths.DIR_SOURCE_ROOT, 'tools', 'android', 'findbugs_plugin', 'lib', + 'chromiumPlugin.jar') + + +def _ParseXmlResults(results_doc): + warnings = set() + for en in (n for n in results_doc.documentElement.childNodes + if n.nodeType == xml.dom.Node.ELEMENT_NODE): + if en.tagName == 'BugInstance': + warnings.add(_ParseBugInstance(en)) + return warnings + + +def _GetMessage(node): + for c in (n for n in node.childNodes + if n.nodeType == xml.dom.Node.ELEMENT_NODE): + if c.tagName == 'Message': + if (len(c.childNodes) == 1 + and c.childNodes[0].nodeType == xml.dom.Node.TEXT_NODE): + return c.childNodes[0].data + return None + + +def _ParseBugInstance(node): + bug = FindBugsWarning(node.getAttribute('type')) + msg_parts = [] + for c in (n for n in node.childNodes + if n.nodeType == xml.dom.Node.ELEMENT_NODE): + if c.tagName == 'Class': + msg_parts.append(_GetMessage(c)) + elif c.tagName == 'Method': + msg_parts.append(_GetMessage(c)) + elif c.tagName == 'Field': + msg_parts.append(_GetMessage(c)) + elif c.tagName == 'SourceLine': + bug.file_name = c.getAttribute('sourcefile') + if c.hasAttribute('start'): + bug.start_line = int(c.getAttribute('start')) + if c.hasAttribute('end'): + bug.end_line = int(c.getAttribute('end')) + msg_parts.append(_GetMessage(c)) + elif (c.tagName == 'ShortMessage' and len(c.childNodes) == 1 + and c.childNodes[0].nodeType == xml.dom.Node.TEXT_NODE): + msg_parts.append(c.childNodes[0].data) + bug.message = tuple(m for m in msg_parts if m) + return bug + + +class FindBugsWarning(object): + + def __init__(self, bug_type='', end_line=0, file_name='', message=None, + start_line=0): + self.bug_type = bug_type + self.end_line = end_line + self.file_name = file_name + if message is None: + self.message = tuple() + else: + self.message = message + self.start_line = start_line + + def __cmp__(self, other): + return (cmp(self.file_name, other.file_name) + or cmp(self.start_line, other.start_line) + or cmp(self.end_line, other.end_line) + or cmp(self.bug_type, other.bug_type) + or 
cmp(self.message, other.message))
+
+  def __eq__(self, other):
+    return self.__dict__ == other.__dict__
+
+  def __hash__(self):
+    return hash((self.bug_type, self.end_line, self.file_name, self.message,
+                 self.start_line))
+
+  def __ne__(self, other):
+    return not self == other
+
+  def __str__(self):
+    return '%s: %s' % (self.bug_type, '\n  '.join(self.message))
+
+
+def Run(exclude, classes_to_analyze, auxiliary_classes, output_file,
+        findbug_args, jars):
+  """Runs FindBugs.
+
+  Args:
+    exclude: the exclude xml file; see FindBugs's -exclude command line option.
+    classes_to_analyze: the list of classes to analyze; see FindBugs's
+                        -onlyAnalyze command line option.
+    auxiliary_classes: classes that assist the analysis; see FindBugs's
+                       -auxclasspath command line option.
+    output_file: An optional path to dump XML results to.
+    findbug_args: A list of additional command line options to pass to
+                  FindBugs.
+    jars: The JAR files to analyze.
+  """
+  # TODO(jbudorick): Get this from the build system.
+  system_classes = [
+      os.path.join(constants.ANDROID_SDK_ROOT, 'platforms',
+                   'android-%s' % constants.ANDROID_SDK_VERSION, 'android.jar')
+  ]
+  system_classes.extend(os.path.abspath(classes)
+                        for classes in auxiliary_classes or [])
+
+  cmd = ['java',
+         '-classpath', '%s:' % _FINDBUGS_JAR,
+         '-Xmx%dm' % _FINDBUGS_MAX_HEAP,
+         '-Dfindbugs.home="%s"' % _FINDBUGS_HOME,
+         '-jar', _FINDBUGS_JAR,
+         '-textui', '-sortByClass',
+         '-pluginList', _FINDBUGS_PLUGIN_PATH, '-xml:withMessages']
+  if system_classes:
+    cmd.extend(['-auxclasspath', ':'.join(system_classes)])
+  if classes_to_analyze:
+    cmd.extend(['-onlyAnalyze', classes_to_analyze])
+  if exclude:
+    cmd.extend(['-exclude', os.path.abspath(exclude)])
+  if output_file:
+    cmd.extend(['-output', output_file])
+  if findbug_args:
+    cmd.extend(findbug_args)
+  cmd.extend(os.path.abspath(j) for j in jars or [])
+
+  if output_file:
+    _, _, stderr = cmd_helper.GetCmdStatusOutputAndError(cmd)
+
+    results_doc = xml.dom.minidom.parse(output_file)
+  else:
+    _, raw_out, stderr = cmd_helper.GetCmdStatusOutputAndError(cmd)
+    results_doc = xml.dom.minidom.parseString(raw_out)
+
+  for line in stderr.splitlines():
+    logging.debug('  %s', line)
+
+  current_warnings_set = _ParseXmlResults(results_doc)
+
+  return (' '.join(cmd), current_warnings_set)
+
diff --git a/build/android/pylib/utils/isolator.py b/build/android/pylib/utils/isolator.py
new file mode 100644
index 00000000000..f8177e00739
--- /dev/null
+++ b/build/android/pylib/utils/isolator.py
@@ -0,0 +1,192 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
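+
+# Typical usage (sketch; paths are hypothetical):
+#   i = Isolator()
+#   i.Remap('/abs/path/foo.isolate', '/abs/path/foo.isolated')
+#   i.MoveOutputDeps()
+#   ... run tests against i.isolate_deps_dir ...
+#   i.Clear()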
+ +import fnmatch +import glob +import os +import shutil +import sys +import tempfile + +from devil.utils import cmd_helper +from pylib import constants +from pylib.constants import host_paths + + +_ISOLATE_SCRIPT = os.path.join( + host_paths.DIR_SOURCE_ROOT, 'tools', 'swarming_client', 'isolate.py') + + +def DefaultPathVariables(): + return { + 'DEPTH': host_paths.DIR_SOURCE_ROOT, + 'PRODUCT_DIR': constants.GetOutDirectory(), + } + + +def DefaultConfigVariables(): + # Note: This list must match the --config-vars in build/isolate.gypi + return { + 'CONFIGURATION_NAME': constants.GetBuildType(), + 'OS': 'android', + 'asan': '0', + 'branding': 'Chromium', + 'chromeos': '0', + 'component': 'static_library', + 'enable_pepper_cdms': '0', + 'enable_plugins': '0', + 'fastbuild': '0', + 'icu_use_data_file_flag': '1', + 'kasko': '0', + 'lsan': '0', + 'msan': '0', + # TODO(maruel): This may not always be true. + 'target_arch': 'arm', + 'tsan': '0', + 'use_custom_libcxx': '0', + 'use_instrumented_libraries': '0', + 'use_prebuilt_instrumented_libraries': '0', + 'use_ozone': '0', + 'use_x11': '0', + 'v8_use_external_startup_data': '1', + 'msvs_version': '0', + } + + +def IsIsolateEmpty(isolate_path): + """Returns whether there are no files in the .isolate.""" + with open(isolate_path) as f: + return "'files': []" in f.read() + + +class Isolator(object): + """Manages calls to isolate.py for the android test runner scripts.""" + + def __init__(self): + self._isolate_deps_dir = tempfile.mkdtemp() + + @property + def isolate_deps_dir(self): + return self._isolate_deps_dir + + def Clear(self): + """Deletes the isolate dependency directory.""" + if os.path.exists(self._isolate_deps_dir): + shutil.rmtree(self._isolate_deps_dir) + + def Remap(self, isolate_abs_path, isolated_abs_path, + path_variables=None, config_variables=None): + """Remaps data dependencies into |self._isolate_deps_dir|. + + Args: + isolate_abs_path: The absolute path to the .isolate file, which specifies + data dependencies in the source tree. + isolated_abs_path: The absolute path to the .isolated file, which is + generated by isolate.py and specifies data dependencies in + |self._isolate_deps_dir| and their digests. + path_variables: A dict containing everything that should be passed + as a |--path-variable| to the isolate script. Defaults to the return + value of |DefaultPathVariables()|. + config_variables: A dict containing everything that should be passed + as a |--config-variable| to the isolate script. Defaults to the return + value of |DefaultConfigVariables()|. + Raises: + Exception if the isolate command fails for some reason. + """ + if not path_variables: + path_variables = DefaultPathVariables() + if not config_variables: + config_variables = DefaultConfigVariables() + + isolate_cmd = [ + sys.executable, _ISOLATE_SCRIPT, 'remap', + '--isolate', isolate_abs_path, + '--isolated', isolated_abs_path, + '--outdir', self._isolate_deps_dir, + ] + for k, v in path_variables.iteritems(): + isolate_cmd.extend(['--path-variable', k, v]) + for k, v in config_variables.iteritems(): + isolate_cmd.extend(['--config-variable', k, v]) + + exit_code, _ = cmd_helper.GetCmdStatusAndOutput(isolate_cmd) + if exit_code: + raise Exception('isolate command failed: %s' % ' '.join(isolate_cmd)) + + def VerifyHardlinks(self): + """Checks |isolate_deps_dir| for a hardlink. + + Returns: + True if a hardlink is found. + False if nothing is found. + Raises: + Exception if a non-hardlink is found. 
+    """
+    for root, _, filenames in os.walk(self._isolate_deps_dir):
+      if filenames:
+        linked_file = os.path.join(root, filenames[0])
+        orig_file = os.path.join(
+            self._isolate_deps_dir,
+            os.path.relpath(linked_file, self._isolate_deps_dir))
+        if os.stat(linked_file).st_ino == os.stat(orig_file).st_ino:
+          return True
+        else:
+          raise Exception('isolate remap command did not use hardlinks.')
+    return False
+
+  def PurgeExcluded(self, deps_exclusion_list):
+    """Deletes anything on |deps_exclusion_list| from |self._isolate_deps_dir|.
+
+    Args:
+      deps_exclusion_list: A list of globs to exclude from the isolate
+        dependency directory.
+    """
+    excluded_paths = (
+        x for y in deps_exclusion_list
+        for x in glob.glob(
+            os.path.abspath(os.path.join(self._isolate_deps_dir, y))))
+    for p in excluded_paths:
+      if os.path.isdir(p):
+        shutil.rmtree(p)
+      else:
+        os.remove(p)
+
+  @classmethod
+  def _DestructiveMerge(cls, src, dest):
+    if os.path.exists(dest) and os.path.isdir(dest):
+      for p in os.listdir(src):
+        cls._DestructiveMerge(os.path.join(src, p), os.path.join(dest, p))
+      os.rmdir(src)
+    else:
+      shutil.move(src, dest)
+
+  def MoveOutputDeps(self):
+    """Moves files from the output directory to the top level of
+    |self._isolate_deps_dir|.
+
+    Moves pak files from the output directory to |self._isolate_deps_dir|/paks.
+    Moves all other files from the product directory to the top level of
+    |self._isolate_deps_dir|.
+    """
+    # On Android, all pak files need to be in the top-level 'paks' directory.
+    paks_dir = os.path.join(self._isolate_deps_dir, 'paks')
+    os.mkdir(paks_dir)
+
+    deps_out_dir = os.path.join(
+        self._isolate_deps_dir,
+        os.path.relpath(os.path.join(constants.GetOutDirectory(), os.pardir),
+                        host_paths.DIR_SOURCE_ROOT))
+    for root, _, filenames in os.walk(deps_out_dir):
+      for filename in fnmatch.filter(filenames, '*.pak'):
+        shutil.move(os.path.join(root, filename), paks_dir)
+
+    # Move everything in PRODUCT_DIR to top level.
+    deps_product_dir = os.path.join(
+        deps_out_dir, os.path.basename(constants.GetOutDirectory()))
+    if os.path.isdir(deps_product_dir):
+      for p in os.listdir(deps_product_dir):
+        Isolator._DestructiveMerge(os.path.join(deps_product_dir, p),
+                                   os.path.join(self._isolate_deps_dir, p))
+      os.rmdir(deps_product_dir)
+      os.rmdir(deps_out_dir)
diff --git a/build/android/pylib/utils/logging_utils.py b/build/android/pylib/utils/logging_utils.py
new file mode 100644
index 00000000000..2c2eabf5b91
--- /dev/null
+++ b/build/android/pylib/utils/logging_utils.py
@@ -0,0 +1,98 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+
+from pylib.constants import host_paths
+
+_COLORAMA_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')
+
+with host_paths.SysPath(_COLORAMA_PATH):
+  import colorama
+
+class ColorStreamHandler(logging.StreamHandler):
+  """Handler that can be used to colorize logging output.
+
+  Example using a specific logger:
+
+    logger = logging.getLogger('my_logger')
+    logger.addHandler(ColorStreamHandler())
+    logger.info('message')
+
+  Example using the root logger:
+
+    ColorStreamHandler.MakeDefault()
+    logging.info('message')
+
+  """
+  # pylint does not see members added dynamically in the constructor.
+  # pylint: disable=no-member
+  color_map = {
+      logging.DEBUG: colorama.Fore.CYAN,
+      logging.WARNING: colorama.Fore.YELLOW,
+      logging.ERROR: colorama.Fore.RED,
+      logging.CRITICAL: colorama.Back.RED + colorama.Style.BRIGHT,
+  }
+
+  def __init__(self, force_color=False):
+    super(ColorStreamHandler, self).__init__()
+    self.force_color = force_color
+
+  @property
+  def is_tty(self):
+    isatty = getattr(self.stream, 'isatty', None)
+    return isatty and isatty()
+
+  #override
+  def format(self, record):
+    message = logging.StreamHandler.format(self, record)
+    if self.force_color or self.is_tty:
+      return self.Colorize(message, record.levelno)
+    return message
+
+  def Colorize(self, message, log_level):
+    try:
+      return self.color_map[log_level] + message + colorama.Style.RESET_ALL
+    except KeyError:
+      return message
+
+  @staticmethod
+  def MakeDefault(force_color=False):
+    """Replaces the default logging handlers with a coloring handler.
+
+    To use a colorizing handler at the same time as others, either
+    register them after this call, or add the ColorStreamHandler on the
+    logger using Logger.addHandler().
+
+    Args:
+      force_color: Set to True to bypass the tty check and always colorize.
+    """
+    # If the existing handlers aren't removed, messages are duplicated.
+    logging.getLogger().handlers = []
+    logging.getLogger().addHandler(ColorStreamHandler(force_color))
+
+
+@contextlib.contextmanager
+def SuppressLogging(level=logging.ERROR):
+  """Momentarily suppress logging events from all loggers.
+
+  TODO(jbudorick): This is not thread safe. Log events from other threads
+  might also inadvertently disappear.
+
+  Example:
+
+    with logging_utils.SuppressLogging():
+      # all but CRITICAL logging messages are suppressed
+      logging.info('just doing some thing') # not shown
+      logging.critical('something really bad happened') # still shown
+
+  Args:
+    level: logging events with this or lower levels are suppressed.
+  """
+  logging.disable(level)
+  yield
+  logging.disable(logging.NOTSET)
diff --git a/build/android/pylib/utils/proguard.py b/build/android/pylib/utils/proguard.py
new file mode 100644
index 00000000000..89dc4c79b33
--- /dev/null
+++ b/build/android/pylib/utils/proguard.py
@@ -0,0 +1,291 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+
+_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*? Superclass:\s*([\S]+)$')
+_PROGUARD_SECTION_RE = re.compile(
+    r'^(Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
+    r'\(count = \d+\):$')
+_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+_PROGUARD_ANNOTATION_RE = re.compile(r'^(\s*?)- Annotation \[L(\S*);\]:$')
+_ELEMENT_PRIMITIVE = 0
+_ELEMENT_ARRAY = 1
+_ELEMENT_ANNOTATION = 2
+_PROGUARD_ELEMENT_RES = [
+  (_ELEMENT_PRIMITIVE,
+   re.compile(r'^(\s*?)- Constant element value \[(\S*) .*\]$')),
+  (_ELEMENT_ARRAY,
+   re.compile(r'^(\s*?)- Array element value \[(\S*)\]:$')),
+  (_ELEMENT_ANNOTATION,
+   re.compile(r'^(\s*?)- Annotation element value \[(\S*)\]:$'))
+]
+_PROGUARD_INDENT_WIDTH = 2
+_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'^(\s*?)- \S+? 
\[(.*)\]$') + +_PROGUARD_PATH_SDK = os.path.join( + constants.PROGUARD_ROOT, 'lib', 'proguard.jar') +_PROGUARD_PATH_BUILT = ( + os.path.join(os.environ['ANDROID_BUILD_TOP'], 'external', 'proguard', + 'lib', 'proguard.jar') + if 'ANDROID_BUILD_TOP' in os.environ else None) +_PROGUARD_PATH = ( + _PROGUARD_PATH_SDK if os.path.exists(_PROGUARD_PATH_SDK) + else _PROGUARD_PATH_BUILT) + + +def Dump(jar_path): + """Dumps class and method information from a JAR into a dict via proguard. + + Args: + jar_path: An absolute path to the JAR file to dump. + Returns: + A dict in the following format: + { + 'classes': [ + { + 'class': '', + 'superclass': '', + 'annotations': {/* dict -- see below */}, + 'methods': [ + { + 'method': '', + 'annotations': {/* dict -- see below */}, + }, + ... + ], + }, + ... + ], + } + + Annotations dict format: + { + 'empty-annotation-class-name': None, + 'annotation-class-name': { + 'field': 'primitive-value', + 'field': [ 'array-item-1', 'array-item-2', ... ], + 'field': { + /* Object value */ + 'field': 'primitive-value', + 'field': [ 'array-item-1', 'array-item-2', ... ], + 'field': { /* Object value */ } + } + } + } + + Note that for top-level annotations their class names are used for + identification, whereas for any nested annotations the corresponding + field names are used. + + One drawback of this approach is that an array containing empty + annotation classes will be represented as an array of 'None' values, + thus it will not be possible to find out annotation class names. + On the other hand, storing both annotation class name and the field name + would produce a very complex JSON. + """ + + with tempfile.NamedTemporaryFile() as proguard_output: + cmd_helper.GetCmdStatusAndOutput([ + 'java', + '-jar', _PROGUARD_PATH, + '-injars', jar_path, + '-dontshrink', '-dontoptimize', '-dontobfuscate', '-dontpreverify', + '-dump', proguard_output.name]) + return Parse(proguard_output) + +class _AnnotationElement(object): + def __init__(self, name, ftype, depth): + self.ref = None + self.name = name + self.ftype = ftype + self.depth = depth + +class _ParseState(object): + _INITIAL_VALUES = (lambda: None, list, dict) + # Empty annotations are represented as 'None', not as an empty dictionary. + _LAZY_INITIAL_VALUES = (lambda: None, list, lambda: None) + + def __init__(self): + self._class_result = None + self._method_result = None + self._parse_annotations = False + self._annotation_stack = [] + + def ResetPerSection(self, section_name): + self.InitMethod(None) + self._parse_annotations = ( + section_name in ['Class file attributes', 'Methods']) + + def ParseAnnotations(self): + return self._parse_annotations + + def CreateAndInitClass(self, class_name): + self.InitMethod(None) + self._class_result = { + 'class': class_name, + 'superclass': '', + 'annotations': {}, + 'methods': [], + } + return self._class_result + + def HasCurrentClass(self): + return bool(self._class_result) + + def SetSuperClass(self, superclass): + assert self.HasCurrentClass() + self._class_result['superclass'] = superclass + + def InitMethod(self, method_name): + self._annotation_stack = [] + if method_name: + self._method_result = { + 'method': method_name, + 'annotations': {}, + } + self._class_result['methods'].append(self._method_result) + else: + self._method_result = None + + def InitAnnotation(self, annotation, depth): + if not self._annotation_stack: + # Add a fake parent element comprising 'annotations' dictionary, + # so we can work uniformly with both top-level and nested annotations. 
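+      # The fake element sits one level above |depth| so that the annotation
+      # parsed from this line nests under it on the stack.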
+      annotations = _AnnotationElement(
+          '<<annotations>>', _ELEMENT_ANNOTATION, depth - 1)
+      if self._method_result:
+        annotations.ref = self._method_result['annotations']
+      else:
+        annotations.ref = self._class_result['annotations']
+      self._annotation_stack = [annotations]
+    self._BacktrackAnnotationStack(depth)
+    if not self.HasCurrentAnnotation():
+      self._annotation_stack.append(
+          _AnnotationElement(annotation, _ELEMENT_ANNOTATION, depth))
+      self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+  def HasCurrentAnnotation(self):
+    return len(self._annotation_stack) > 1
+
+  def InitAnnotationField(self, field, field_type, depth):
+    self._BacktrackAnnotationStack(depth)
+    # Create the parent representation, if needed. E.g. annotations
+    # are represented with `None`, not with `{}` until they receive the first
+    # field.
+    self._CreateAnnotationPlaceHolder(self._INITIAL_VALUES)
+    if self._annotation_stack[-1].ftype == _ELEMENT_ARRAY:
+      # Nested arrays are not allowed in annotations.
+      assert not field_type == _ELEMENT_ARRAY
+      # Use array index instead of bogus field name.
+      field = len(self._annotation_stack[-1].ref)
+    self._annotation_stack.append(_AnnotationElement(field, field_type, depth))
+    self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+  def UpdateCurrentAnnotationFieldValue(self, value, depth):
+    self._BacktrackAnnotationStack(depth)
+    self._InitOrUpdateCurrentField(value)
+
+  def _CreateAnnotationPlaceHolder(self, constructors):
+    assert self.HasCurrentAnnotation()
+    field = self._annotation_stack[-1]
+    if field.ref is None:
+      field.ref = constructors[field.ftype]()
+      self._InitOrUpdateCurrentField(field.ref)
+
+  def _BacktrackAnnotationStack(self, depth):
+    stack = self._annotation_stack
+    while len(stack) > 0 and stack[-1].depth >= depth:
+      stack.pop()
+
+  def _InitOrUpdateCurrentField(self, value):
+    assert self.HasCurrentAnnotation()
+    parent = self._annotation_stack[-2]
+    assert parent.ref is not None
+    # There can be no nested constant element values.
+    assert parent.ftype in [_ELEMENT_ARRAY, _ELEMENT_ANNOTATION]
+    field = self._annotation_stack[-1]
+    if type(value) is str and not field.ftype == _ELEMENT_PRIMITIVE:
+      # The value comes from the output parser via
+      # UpdateCurrentAnnotationFieldValue, and should be a value of a constant
+      # element. If it isn't, just skip it.
+      return
+    if parent.ftype == _ELEMENT_ARRAY and field.name >= len(parent.ref):
+      parent.ref.append(value)
+    else:
+      parent.ref[field.name] = value
+
+
+def _GetDepth(prefix):
+  return len(prefix) // _PROGUARD_INDENT_WIDTH
+
+def Parse(proguard_output):
+  results = {
+    'classes': [],
+  }
+
+  state = _ParseState()
+
+  for line in proguard_output:
+    line = line.strip('\r\n')
+
+    m = _PROGUARD_CLASS_RE.match(line)
+    if m:
+      results['classes'].append(
+          state.CreateAndInitClass(m.group(1).replace('/', '.')))
+      continue
+
+    if not state.HasCurrentClass():
+      continue
+
+    m = _PROGUARD_SUPERCLASS_RE.match(line)
+    if m:
+      state.SetSuperClass(m.group(1).replace('/', '.'))
+      continue
+
+    m = _PROGUARD_SECTION_RE.match(line)
+    if m:
+      state.ResetPerSection(m.group(1))
+      continue
+
+    m = _PROGUARD_METHOD_RE.match(line)
+    if m:
+      state.InitMethod(m.group(1))
+      continue
+
+    if not state.ParseAnnotations():
+      continue
+
+    m = _PROGUARD_ANNOTATION_RE.match(line)
+    if m:
+      # Ignore the annotation package.
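+      # e.g. a line naming 'Lorg/example/Foo;' registers annotation 'Foo'.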
+      state.InitAnnotation(m.group(2).split('/')[-1], _GetDepth(m.group(1)))
+      continue
+
+    if state.HasCurrentAnnotation():
+      m = None
+      for (element_type, element_re) in _PROGUARD_ELEMENT_RES:
+        m = element_re.match(line)
+        if m:
+          state.InitAnnotationField(
+              m.group(2), element_type, _GetDepth(m.group(1)))
+          break
+      if m:
+        continue
+      m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
+      if m:
+        state.UpdateCurrentAnnotationFieldValue(
+            m.group(2), _GetDepth(m.group(1)))
+    else:
+      state.InitMethod(None)
+
+
+  return results
diff --git a/build/android/pylib/utils/proguard_test.py b/build/android/pylib/utils/proguard_test.py
new file mode 100644
index 00000000000..497e12d4c9c
--- /dev/null
+++ b/build/android/pylib/utils/proguard_test.py
@@ -0,0 +1,490 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.utils import proguard
+
+class TestParse(unittest.TestCase):
+
+  def setUp(self):
+    self.maxDiff = None
+
+  def testClass(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       '  Superclass: java/lang/Object'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': 'java.lang.Object',
+          'annotations': {},
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethod(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method: <init>()V'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': '<init>',
+              'annotations': {}
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testClassAnnotation(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 3):',
+       '  - Annotation [Lorg/example/Annotation;]:',
+       '  - Annotation [Lorg/example/AnnotationWithValue;]:',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+       '    - Constant element value [attr1 \'13\']',
+       '      - Utf8 [val1]',
+       '    - Constant element value [attr2 \'13\']',
+       '      - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'Annotation': None,
+            'AnnotationWithValue': {'attr': 'val'},
+            'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testClassAnnotationWithArrays(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 3):',
+       '  - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '  - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'AnnotationWithEmptyArray': {'arrayAttr': []},
+            'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+            'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, 
actual) + + def testNestedClassAnnotations(self): + actual = proguard.Parse( + ['- Program class: org/example/Test', + 'Class file attributes (count = 1):', + ' - Annotation [Lorg/example/OuterAnnotation;]:', + ' - Constant element value [outerAttr \'13\']', + ' - Utf8 [outerVal]', + ' - Array element value [outerArr]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [outerArrVal1]', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [outerArrVal2]', + ' - Annotation element value [emptyAnn]:', + ' - Annotation [Lorg/example/EmptyAnnotation;]:', + ' - Annotation element value [ann]:', + ' - Annotation [Lorg/example/InnerAnnotation;]:', + ' - Constant element value [innerAttr \'13\']', + ' - Utf8 [innerVal]', + ' - Array element value [innerArr]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [innerArrVal1]', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [innerArrVal2]', + ' - Annotation element value [emptyInnerAnn]:', + ' - Annotation [Lorg/example/EmptyAnnotation;]:']) + expected = { + 'classes': [ + { + 'class': 'org.example.Test', + 'superclass': '', + 'annotations': { + 'OuterAnnotation': { + 'outerAttr': 'outerVal', + 'outerArr': ['outerArrVal1', 'outerArrVal2'], + 'emptyAnn': None, + 'ann': { + 'innerAttr': 'innerVal', + 'innerArr': ['innerArrVal1', 'innerArrVal2'], + 'emptyInnerAnn': None + } + } + }, + 'methods': [] + } + ] + } + self.assertEquals(expected, actual) + + def testClassArraysOfAnnotations(self): + actual = proguard.Parse( + ['- Program class: org/example/Test', + 'Class file attributes (count = 1):', + ' - Annotation [Lorg/example/OuterAnnotation;]:', + ' - Array element value [arrayWithEmptyAnnotations]:', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/EmptyAnnotation;]:', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/EmptyAnnotation;]:', + ' - Array element value [outerArray]:', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/InnerAnnotation;]:', + ' - Constant element value [innerAttr \'115\']', + ' - Utf8 [innerVal]', + ' - Array element value [arguments]:', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:', + ' - Constant element value [arg1Attr \'115\']', + ' - Utf8 [arg1Val]', + ' - Array element value [arg1Array]:', + ' - Constant element value [(default) \'73\']', + ' - Integer [11]', + ' - Constant element value [(default) \'73\']', + ' - Integer [12]', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:', + ' - Constant element value [arg2Attr \'115\']', + ' - Utf8 [arg2Val]', + ' - Array element value [arg2Array]:', + ' - Constant element value [(default) \'73\']', + ' - Integer [21]', + ' - Constant element value [(default) \'73\']', + ' - Integer [22]']) + expected = { + 'classes': [ + { + 'class': 'org.example.Test', + 'superclass': '', + 'annotations': { + 'OuterAnnotation': { + 'arrayWithEmptyAnnotations': [None, None], + 'outerArray': [ + { + 'innerAttr': 'innerVal', + 'arguments': [ + {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']}, + {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']} + ] + } + ] + } + }, + 'methods': [] + } + ] + } + self.assertEquals(expected, actual) + + def testReadFullClassFileAttributes(self): + actual = proguard.Parse( + ['- Program class: org/example/Test', + 'Class file attributes (count = 3):', + ' - Source file attribute:', + ' - Utf8 [Class.java]', + ' - Runtime 
visible annotations attribute:', + ' - Annotation [Lorg/example/IntValueAnnotation;]:', + ' - Constant element value [value \'73\']', + ' - Integer [19]', + ' - Inner classes attribute (count = 1)', + ' - InnerClassesInfo:', + ' Access flags: 0x9 = public static', + ' - Class [org/example/Class1]', + ' - Class [org/example/Class2]', + ' - Utf8 [OnPageFinishedHelper]']) + expected = { + 'classes': [ + { + 'class': 'org.example.Test', + 'superclass': '', + 'annotations': { + 'IntValueAnnotation': { + 'value': '19', + } + }, + 'methods': [] + } + ] + } + self.assertEquals(expected, actual) + + def testMethodAnnotation(self): + actual = proguard.Parse( + ['- Program class: org/example/Test', + 'Methods (count = 1):', + '- Method: Test()V', + ' - Annotation [Lorg/example/Annotation;]:', + ' - Annotation [Lorg/example/AnnotationWithValue;]:', + ' - Constant element value [attr \'13\']', + ' - Utf8 [val]', + ' - Annotation [Lorg/example/AnnotationWithTwoValues;]:', + ' - Constant element value [attr1 \'13\']', + ' - Utf8 [val1]', + ' - Constant element value [attr2 \'13\']', + ' - Utf8 [val2]']) + expected = { + 'classes': [ + { + 'class': 'org.example.Test', + 'superclass': '', + 'annotations': {}, + 'methods': [ + { + 'method': 'Test', + 'annotations': { + 'Annotation': None, + 'AnnotationWithValue': {'attr': 'val'}, + 'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'} + }, + } + ] + } + ] + } + self.assertEquals(expected, actual) + + def testMethodAnnotationWithArrays(self): + actual = proguard.Parse( + ['- Program class: org/example/Test', + 'Methods (count = 1):', + '- Method: Test()V', + ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:', + ' - Array element value [arrayAttr]:', + ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:', + ' - Array element value [arrayAttr]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [val]', + ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:', + ' - Array element value [arrayAttr]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [val1]', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [val2]']) + expected = { + 'classes': [ + { + 'class': 'org.example.Test', + 'superclass': '', + 'annotations': {}, + 'methods': [ + { + 'method': 'Test', + 'annotations': { + 'AnnotationWithEmptyArray': {'arrayAttr': []}, + 'AnnotationWithOneElemArray': {'arrayAttr': ['val']}, + 'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']} + }, + } + ] + } + ] + } + self.assertEquals(expected, actual) + + def testMethodAnnotationWithPrimitivesAndArrays(self): + actual = proguard.Parse( + ['- Program class: org/example/Test', + 'Methods (count = 1):', + '- Method: Test()V', + ' - Annotation [Lorg/example/AnnotationPrimitiveThenArray;]:', + ' - Constant element value [attr \'13\']', + ' - Utf8 [val]', + ' - Array element value [arrayAttr]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [val]', + ' - Annotation [Lorg/example/AnnotationArrayThenPrimitive;]:', + ' - Array element value [arrayAttr]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [val]', + ' - Constant element value [attr \'13\']', + ' - Utf8 [val]', + ' - Annotation [Lorg/example/AnnotationTwoArrays;]:', + ' - Array element value [arrayAttr1]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [val1]', + ' - Array element value [arrayAttr2]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [val2]']) + expected = { + 'classes': [ + { + 'class': 'org.example.Test', + 'superclass': 
'', + 'annotations': {}, + 'methods': [ + { + 'method': 'Test', + 'annotations': { + 'AnnotationPrimitiveThenArray': {'attr': 'val', + 'arrayAttr': ['val']}, + 'AnnotationArrayThenPrimitive': {'arrayAttr': ['val'], + 'attr': 'val'}, + 'AnnotationTwoArrays': {'arrayAttr1': ['val1'], + 'arrayAttr2': ['val2']} + }, + } + ] + } + ] + } + self.assertEquals(expected, actual) + + def testNestedMethodAnnotations(self): + actual = proguard.Parse( + ['- Program class: org/example/Test', + 'Methods (count = 1):', + '- Method: Test()V', + ' - Annotation [Lorg/example/OuterAnnotation;]:', + ' - Constant element value [outerAttr \'13\']', + ' - Utf8 [outerVal]', + ' - Array element value [outerArr]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [outerArrVal1]', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [outerArrVal2]', + ' - Annotation element value [emptyAnn]:', + ' - Annotation [Lorg/example/EmptyAnnotation;]:', + ' - Annotation element value [ann]:', + ' - Annotation [Lorg/example/InnerAnnotation;]:', + ' - Constant element value [innerAttr \'13\']', + ' - Utf8 [innerVal]', + ' - Array element value [innerArr]:', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [innerArrVal1]', + ' - Constant element value [(default) \'13\']', + ' - Utf8 [innerArrVal2]', + ' - Annotation element value [emptyInnerAnn]:', + ' - Annotation [Lorg/example/EmptyAnnotation;]:']) + expected = { + 'classes': [ + { + 'class': 'org.example.Test', + 'superclass': '', + 'annotations': {}, + 'methods': [ + { + 'method': 'Test', + 'annotations': { + 'OuterAnnotation': { + 'outerAttr': 'outerVal', + 'outerArr': ['outerArrVal1', 'outerArrVal2'], + 'emptyAnn': None, + 'ann': { + 'innerAttr': 'innerVal', + 'innerArr': ['innerArrVal1', 'innerArrVal2'], + 'emptyInnerAnn': None + } + } + }, + } + ] + } + ] + } + self.assertEquals(expected, actual) + + def testMethodArraysOfAnnotations(self): + actual = proguard.Parse( + ['- Program class: org/example/Test', + 'Methods (count = 1):', + '- Method: Test()V', + ' - Annotation [Lorg/example/OuterAnnotation;]:', + ' - Array element value [arrayWithEmptyAnnotations]:', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/EmptyAnnotation;]:', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/EmptyAnnotation;]:', + ' - Array element value [outerArray]:', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/InnerAnnotation;]:', + ' - Constant element value [innerAttr \'115\']', + ' - Utf8 [innerVal]', + ' - Array element value [arguments]:', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:', + ' - Constant element value [arg1Attr \'115\']', + ' - Utf8 [arg1Val]', + ' - Array element value [arg1Array]:', + ' - Constant element value [(default) \'73\']', + ' - Integer [11]', + ' - Constant element value [(default) \'73\']', + ' - Integer [12]', + ' - Annotation element value [(default)]:', + ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:', + ' - Constant element value [arg2Attr \'115\']', + ' - Utf8 [arg2Val]', + ' - Array element value [arg2Array]:', + ' - Constant element value [(default) \'73\']', + ' - Integer [21]', + ' - Constant element value [(default) \'73\']', + ' - Integer [22]']) + expected = { + 'classes': [ + { + 'class': 'org.example.Test', + 'superclass': '', + 'annotations': {}, + 'methods': [ + { + 'method': 'Test', + 'annotations': { + 'OuterAnnotation': { + 'arrayWithEmptyAnnotations': [None, 
None], + 'outerArray': [ + { + 'innerAttr': 'innerVal', + 'arguments': [ + {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']}, + {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']} + ] + } + ] + } + } + } + ] + } + ] + } + self.assertEquals(expected, actual) diff --git a/build/android/pylib/utils/repo_utils.py b/build/android/pylib/utils/repo_utils.py new file mode 100644 index 00000000000..5a0efa8b6ee --- /dev/null +++ b/build/android/pylib/utils/repo_utils.py @@ -0,0 +1,16 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from devil.utils import cmd_helper + + +def GetGitHeadSHA1(in_directory): + """Returns the git hash tag for the given directory. + + Args: + in_directory: The directory where git is to be run. + """ + command_line = ['git', 'log', '-1', '--pretty=format:%H'] + output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory) + return output[0:40] diff --git a/build/android/pylib/utils/time_profile.py b/build/android/pylib/utils/time_profile.py new file mode 100644 index 00000000000..094799c4f2a --- /dev/null +++ b/build/android/pylib/utils/time_profile.py @@ -0,0 +1,45 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import logging +import time + + +class TimeProfile(object): + """Class for simple profiling of action, with logging of cost.""" + + def __init__(self, description='operation'): + self._starttime = None + self._endtime = None + self._description = description + self.Start() + + def Start(self): + self._starttime = time.time() + self._endtime = None + + def GetDelta(self): + """Returns the rounded delta. + + Also stops the timer if Stop() has not already been called. + """ + if self._endtime is None: + self.Stop(log=False) + delta = self._endtime - self._starttime + delta = round(delta, 2) if delta < 10 else round(delta, 1) + return delta + + def LogResult(self): + """Logs the result.""" + logging.info('%s seconds to perform %s', self.GetDelta(), self._description) + + def Stop(self, log=True): + """Stop profiling. + + Args: + log: Log the delta (defaults to true). + """ + self._endtime = time.time() + if log: + self.LogResult() diff --git a/build/android/pylib/utils/xvfb.py b/build/android/pylib/utils/xvfb.py new file mode 100644 index 00000000000..cb9d50e8fd9 --- /dev/null +++ b/build/android/pylib/utils/xvfb.py @@ -0,0 +1,58 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=W0702 + +import os +import signal +import subprocess +import sys +import time + + +def _IsLinux(): + """Return True if on Linux; else False.""" + return sys.platform.startswith('linux') + + +class Xvfb(object): + """Class to start and stop Xvfb if relevant. Nop if not Linux.""" + + def __init__(self): + self._pid = 0 + + def Start(self): + """Start Xvfb and set an appropriate DISPLAY environment. Linux only. + + Copied from tools/code_coverage/coverage_posix.py + """ + if not _IsLinux(): + return + proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24', + '-ac'], + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + self._pid = proc.pid + if not self._pid: + raise Exception('Could not start Xvfb') + os.environ['DISPLAY'] = ':9' + + # Now confirm, giving a chance for it to start if needed. 
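+    # xdpyinfo exits non-zero until it can actually connect to the X server,
+    # so poll it (up to ~2.5 seconds) rather than trusting the Xvfb pid alone.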
+ for _ in range(10): + proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True) + _, retcode = os.waitpid(proc.pid, 0) + if retcode == 0: + break + time.sleep(0.25) + if retcode != 0: + raise Exception('Could not confirm Xvfb happiness') + + def Stop(self): + """Stop Xvfb if needed. Linux only.""" + if self._pid: + try: + os.kill(self._pid, signal.SIGKILL) + except: + pass + del os.environ['DISPLAY'] + self._pid = 0 diff --git a/build/android/pylib/valgrind_tools.py b/build/android/pylib/valgrind_tools.py new file mode 100644 index 00000000000..81428939f52 --- /dev/null +++ b/build/android/pylib/valgrind_tools.py @@ -0,0 +1,235 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=R0201 + +import glob +import logging +import os.path +import subprocess +import sys + +from devil.android import device_errors +from devil.android.valgrind_tools import base_tool +from pylib.constants import DIR_SOURCE_ROOT + + +def SetChromeTimeoutScale(device, scale): + """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale.""" + path = '/data/local/tmp/chrome_timeout_scale' + if not scale or scale == 1.0: + # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0 + device.RunShellCommand('rm %s' % path) + else: + device.WriteFile(path, '%f' % scale, as_root=True) + + + +class AddressSanitizerTool(base_tool.BaseTool): + """AddressSanitizer tool.""" + + WRAPPER_NAME = '/system/bin/asanwrapper' + # Disable memcmp overlap check.There are blobs (gl drivers) + # on some android devices that use memcmp on overlapping regions, + # nothing we can do about that. + EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1' + + def __init__(self, device): + super(AddressSanitizerTool, self).__init__() + self._device = device + + @classmethod + def CopyFiles(cls, device): + """Copies ASan tools to the device.""" + libs = glob.glob(os.path.join(DIR_SOURCE_ROOT, + 'third_party/llvm-build/Release+Asserts/', + 'lib/clang/*/lib/linux/', + 'libclang_rt.asan-arm-android.so')) + assert len(libs) == 1 + subprocess.call( + [os.path.join( + DIR_SOURCE_ROOT, + 'tools/android/asan/third_party/asan_device_setup.sh'), + '--device', str(device), + '--lib', libs[0], + '--extra-options', AddressSanitizerTool.EXTRA_OPTIONS]) + device.WaitUntilFullyBooted() + + def GetTestWrapper(self): + return AddressSanitizerTool.WRAPPER_NAME + + def GetUtilWrapper(self): + """Returns the wrapper for utilities, such as forwarder. + + AddressSanitizer wrapper must be added to all instrumented binaries, + including forwarder and the like. This can be removed if such binaries + were built without instrumentation. """ + return self.GetTestWrapper() + + def SetupEnvironment(self): + try: + self._device.EnableRoot() + except device_errors.CommandFailedError as e: + # Try to set the timeout scale anyway. + # TODO(jbudorick) Handle this exception appropriately after interface + # conversions are finished. + logging.error(str(e)) + SetChromeTimeoutScale(self._device, self.GetTimeoutScale()) + + def CleanUpEnvironment(self): + SetChromeTimeoutScale(self._device, None) + + def GetTimeoutScale(self): + # Very slow startup. 
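+    # Scale all test timeouts by 20x; ASan-instrumented builds start and run
+    # much more slowly than regular builds.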
+ return 20.0 + + +class ValgrindTool(base_tool.BaseTool): + """Base abstract class for Valgrind tools.""" + + VG_DIR = '/data/local/tmp/valgrind' + VGLOGS_DIR = '/data/local/tmp/vglogs' + + def __init__(self, device): + super(ValgrindTool, self).__init__() + self._device = device + # exactly 31 chars, SystemProperties::PROP_NAME_MAX + self._wrap_properties = ['wrap.com.google.android.apps.ch', + 'wrap.org.chromium.native_test'] + + @classmethod + def CopyFiles(cls, device): + """Copies Valgrind tools to the device.""" + device.RunShellCommand( + 'rm -r %s; mkdir %s' % (ValgrindTool.VG_DIR, ValgrindTool.VG_DIR)) + device.RunShellCommand( + 'rm -r %s; mkdir %s' % (ValgrindTool.VGLOGS_DIR, + ValgrindTool.VGLOGS_DIR)) + files = cls.GetFilesForTool() + device.PushChangedFiles( + [((os.path.join(DIR_SOURCE_ROOT, f), + os.path.join(ValgrindTool.VG_DIR, os.path.basename(f))) + for f in files)]) + + def SetupEnvironment(self): + """Sets up device environment.""" + self._device.RunShellCommand('chmod 777 /data/local/tmp') + self._device.RunShellCommand('setenforce 0') + for prop in self._wrap_properties: + self._device.RunShellCommand( + 'setprop %s "logwrapper %s"' % (prop, self.GetTestWrapper())) + SetChromeTimeoutScale(self._device, self.GetTimeoutScale()) + + def CleanUpEnvironment(self): + """Cleans up device environment.""" + for prop in self._wrap_properties: + self._device.RunShellCommand('setprop %s ""' % (prop,)) + SetChromeTimeoutScale(self._device, None) + + @staticmethod + def GetFilesForTool(): + """Returns a list of file names for the tool.""" + raise NotImplementedError() + + def NeedsDebugInfo(self): + """Whether this tool requires debug info. + + Returns: + True if this tool can not work with stripped binaries. + """ + return True + + +class MemcheckTool(ValgrindTool): + """Memcheck tool.""" + + def __init__(self, device): + super(MemcheckTool, self).__init__(device) + + @staticmethod + def GetFilesForTool(): + """Returns a list of file names for the tool.""" + return ['tools/valgrind/android/vg-chrome-wrapper.sh', + 'tools/valgrind/memcheck/suppressions.txt', + 'tools/valgrind/memcheck/suppressions_android.txt'] + + def GetTestWrapper(self): + """Returns a string that is to be prepended to the test command line.""" + return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper.sh' + + def GetTimeoutScale(self): + """Returns a multiplier that should be applied to timeout values.""" + return 30 + + +class TSanTool(ValgrindTool): + """ThreadSanitizer tool. See http://code.google.com/p/data-race-test .""" + + def __init__(self, device): + super(TSanTool, self).__init__(device) + + @staticmethod + def GetFilesForTool(): + """Returns a list of file names for the tool.""" + return ['tools/valgrind/android/vg-chrome-wrapper-tsan.sh', + 'tools/valgrind/tsan/suppressions.txt', + 'tools/valgrind/tsan/suppressions_android.txt', + 'tools/valgrind/tsan/ignores.txt'] + + def GetTestWrapper(self): + """Returns a string that is to be prepended to the test command line.""" + return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper-tsan.sh' + + def GetTimeoutScale(self): + """Returns a multiplier that should be applied to timeout values.""" + return 30.0 + + +TOOL_REGISTRY = { + 'memcheck': MemcheckTool, + 'memcheck-renderer': MemcheckTool, + 'tsan': TSanTool, + 'tsan-renderer': TSanTool, + 'asan': AddressSanitizerTool, +} + + +def CreateTool(tool_name, device): + """Creates a tool with the specified tool name. + + Args: + tool_name: Name of the tool to create. + device: A DeviceUtils instance. 
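+
+  Example (hypothetical |device|):
+    CreateTool('asan', device)  # -> AddressSanitizerTool
+    CreateTool(None, device)    # -> no-op base_tool.BaseTool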
+ Returns: + A tool for the specified tool_name. + """ + if not tool_name: + return base_tool.BaseTool() + + ctor = TOOL_REGISTRY.get(tool_name) + if ctor: + return ctor(device) + else: + print 'Unknown tool %s, available tools: %s' % ( + tool_name, ', '.join(sorted(TOOL_REGISTRY.keys()))) + sys.exit(1) + +def PushFilesForTool(tool_name, device): + """Pushes the files required for |tool_name| to |device|. + + Args: + tool_name: Name of the tool to create. + device: A DeviceUtils instance. + """ + if not tool_name: + return + + clazz = TOOL_REGISTRY.get(tool_name) + if clazz: + clazz.CopyFiles(device) + else: + print 'Unknown tool %s, available tools: %s' % ( + tool_name, ', '.join(sorted(TOOL_REGISTRY.keys()))) + sys.exit(1) + diff --git a/build/android/pylintrc b/build/android/pylintrc new file mode 100644 index 00000000000..8005a5d276c --- /dev/null +++ b/build/android/pylintrc @@ -0,0 +1,15 @@ +[FORMAT] + +max-line-length=80 + +[MESSAGES CONTROL] + +disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements, + +[REPORTS] + +reports=no + +[VARIABLES] + +dummy-variables-rgx=^_.*$|dummy diff --git a/build/android/resource_sizes.py b/build/android/resource_sizes.py new file mode 100644 index 00000000000..b38d6475eaa --- /dev/null +++ b/build/android/resource_sizes.py @@ -0,0 +1,465 @@ +#!/usr/bin/python +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Prints the size of each given file and optionally computes the size of + libchrome.so without the dependencies added for building with android NDK. + Also breaks down the contents of the APK to determine the installed size + and assign size contributions to different classes of file. +""" + +import collections +import json +import logging +import operator +import optparse +import os +import re +import struct +import sys +import tempfile +import zipfile +import zlib + +import devil_chromium +from devil.utils import cmd_helper +from pylib import constants +from pylib.constants import host_paths + +_GRIT_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'tools', 'grit') + +# Prepend the grit module from the source tree so it takes precedence over other +# grit versions that might present in the search path. +with host_paths.SysPath(_GRIT_PATH, 1): + from grit.format import data_pack # pylint: disable=import-error + +with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): + import perf_tests_results_helper # pylint: disable=import-error + +# Python had a bug in zipinfo parsing that triggers on ChromeModern.apk +# https://bugs.python.org/issue14315 +def _PatchedDecodeExtra(self): + # Try to decode the extra field. 
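+  # The ZIP 'extra' field is a sequence of little-endian (tag, length, data)
+  # records; tag 1 is the ZIP64 record holding 64-bit sizes/offsets.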
+  extra = self.extra
+  unpack = struct.unpack
+  while len(extra) >= 4:
+    tp, ln = unpack('<HH', extra[:4])
+    if tp == 1:
+      if ln >= 24:
+        counts = unpack('<QQQ', extra[4:28])
+      elif ln == 16:
+        counts = unpack('<QQ', extra[4:20])
+      elif ln == 8:
+        counts = unpack('<Q', extra[4:12])
+      elif ln == 0:
+        counts = ()
+      else:
+        raise RuntimeError('Corrupt extra field %s' % (ln,))
+      idx = 0
+      # ZIP64 extension (large files and/or large archives).
+      if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
+        self.file_size = counts[idx]
+        idx += 1
+      if self.compress_size == 0xFFFFFFFFL:
+        self.compress_size = counts[idx]
+        idx += 1
+      if self.header_offset == 0xffffffffL:
+        self.header_offset = counts[idx]
+        idx += 1
+    extra = extra[ln + 4:]
+
+zipfile.ZipInfo._decodeExtra = (  # pylint: disable=protected-access
+    _PatchedDecodeExtra)
+
+_DUMP_STATIC_INITIALIZERS_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'tools', 'linux',
+    'dump-static-initializers.py')
+_RC_HEADER_RE = re.compile(
+    r'^#define (?P<name>\w+) (?:_Pragma\(.*?\) )?(?P<id>\d+)$')
+
+
+def CountStaticInitializers(so_path):
+  def get_elf_section_size(readelf_stdout, section_name):
+    # Matches: .ctors PROGBITS 000000000516add0 5169dd0 000010 00 WA 0 0 8
+    match = re.search(r'\.%s.*$' % re.escape(section_name),
+                      readelf_stdout, re.MULTILINE)
+    if not match:
+      return (False, -1)
+    size_str = re.split(r'\W+', match.group(0))[5]
+    return (True, int(size_str, 16))
+
+  # Find the number of files with at least one static initializer.
+  # First determine if we're 32 or 64 bit
+  stdout = cmd_helper.GetCmdOutput(['readelf', '-h', so_path])
+  elf_class_line = re.search('Class:.*$', stdout, re.MULTILINE).group(0)
+  elf_class = re.split(r'\W+', elf_class_line)[1]
+  if elf_class == 'ELF32':
+    word_size = 4
+  else:
+    word_size = 8
+
+  # Then find the number of files with global static initializers.
+  # NOTE: this is very implementation-specific and makes assumptions
+  # about how compiler and linker implement global static initializers.
+  si_count = 0
+  stdout = cmd_helper.GetCmdOutput(['readelf', '-SW', so_path])
+  has_init_array, init_array_size = get_elf_section_size(stdout, 'init_array')
+  if has_init_array:
+    si_count = init_array_size / word_size
+  si_count = max(si_count, 0)
+  return si_count
+
+
+def GetStaticInitializers(so_path):
+  output = cmd_helper.GetCmdOutput([_DUMP_STATIC_INITIALIZERS_PATH, '-d',
+                                    so_path])
+  return output.splitlines()
+
+
+def ReportPerfResult(chart_data, graph_title, trace_title, value, units,
+                     improvement_direction='down', important=True):
+  """Outputs test results in correct format.
+
+  If chart_data is None, it outputs data in old format. If chart_data is a
+  dictionary, formats in chartjson format. Any other value falls back to the
+  old format.
+  """
+  if chart_data and isinstance(chart_data, dict):
+    chart_data['charts'].setdefault(graph_title, {})
+    chart_data['charts'][graph_title][trace_title] = {
+        'type': 'scalar',
+        'value': value,
+        'units': units,
+        'improvement_direction': improvement_direction,
+        'important': important
+    }
+  else:
+    perf_tests_results_helper.PrintPerfResult(
+        graph_title, trace_title, [value], units)
+
+
+def PrintResourceSizes(files, chartjson=None):
+  """Prints the sizes of each given file.
+
+  Args:
+    files: List of files to print sizes for.
+  """
+  for f in files:
+    ReportPerfResult(chartjson, 'ResourceSizes', os.path.basename(f) + ' size',
+                     os.path.getsize(f), 'bytes')
+
+
+def PrintApkAnalysis(apk_filename, chartjson=None):
+  """Analyse APK to determine size contributions of different file classes."""
+  # Define a named tuple type for file grouping.
+  # name: Human readable name for this file group
+  # regex: Regular expression to match filename
+  # extracted: Function that takes a file name and returns whether the file is
+  #   extracted from the apk at install/runtime.
+  FileGroup = collections.namedtuple('FileGroup',
+                                     ['name', 'regex', 'extracted'])
+
+  # File groups are checked in sequence, so more specific regexes should be
+  # earlier in the list.
+  YES = lambda _: True
+  NO = lambda _: False
+  FILE_GROUPS = (
+      FileGroup('Native code', r'\.so$', lambda f: 'crazy' not in f),
+      FileGroup('Java code', r'\.dex$', YES),
+      FileGroup('Native resources (no l10n)', r'\.pak$', NO),
+      # For locale paks, assume only English paks are extracted.
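+      # e.g. a name containing 'en_' (such as 'en_US.lpak') is treated as
+      # extracted; other locales are not.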
+      FileGroup('Native resources (l10n)', r'\.lpak$', lambda f: 'en_' in f),
+      FileGroup('ICU (i18n library) data', r'assets/icudtl\.dat$', NO),
+      FileGroup('V8 Snapshots', r'\.bin$', NO),
+      FileGroup('PNG drawables', r'\.png$', NO),
+      FileGroup('Non-compiled Android resources', r'^res/', NO),
+      FileGroup('Compiled Android resources', r'\.arsc$', NO),
+      FileGroup('Package metadata', r'^(META-INF/|AndroidManifest\.xml$)', NO),
+      FileGroup('Unknown files', r'.', NO),
+  )
+
+  apk = zipfile.ZipFile(apk_filename, 'r')
+  try:
+    apk_contents = apk.infolist()
+  finally:
+    apk.close()
+
+  total_apk_size = os.path.getsize(apk_filename)
+  apk_basename = os.path.basename(apk_filename)
+
+  found_files = {}
+  for group in FILE_GROUPS:
+    found_files[group] = []
+
+  for member in apk_contents:
+    for group in FILE_GROUPS:
+      if re.search(group.regex, member.filename):
+        found_files[group].append(member)
+        break
+    else:
+      raise KeyError('No group found for file "%s"' % member.filename)
+
+  total_install_size = total_apk_size
+
+  for group in FILE_GROUPS:
+    uncompressed_size = sum(member.file_size for member in found_files[group])
+    packed_size = sum(member.compress_size for member in found_files[group])
+    install_size = packed_size
+    install_bytes = sum(member.file_size for member in found_files[group]
+                        if group.extracted(member.filename))
+    install_size += install_bytes
+    total_install_size += install_bytes
+
+    ReportPerfResult(chartjson, apk_basename + '_Breakdown',
+                     group.name + ' size', packed_size, 'bytes')
+    ReportPerfResult(chartjson, apk_basename + '_InstallBreakdown',
+                     group.name + ' size', install_size, 'bytes')
+    ReportPerfResult(chartjson, apk_basename + '_Uncompressed',
+                     group.name + ' size', uncompressed_size, 'bytes')
+
+  transfer_size = _CalculateCompressedSize(apk_filename)
+  ReportPerfResult(chartjson, apk_basename + '_InstallSize',
+                   'Estimated installed size', total_install_size, 'bytes')
+  ReportPerfResult(chartjson, apk_basename + '_InstallSize', 'APK size',
+                   total_apk_size, 'bytes')
+  ReportPerfResult(chartjson, apk_basename + '_TransferSize',
+                   'Transfer size (deflate)', transfer_size, 'bytes')
+
+
+def IsPakFileName(file_name):
+  """Returns whether the given file name ends with .pak or .lpak."""
+  return file_name.endswith('.pak') or file_name.endswith('.lpak')
+
+
+def PrintPakAnalysis(apk_filename, min_pak_resource_size):
+  """Prints the sizes of all resources in all pak files in |apk_filename|."""
+  print
+  print 'Analyzing pak files in %s...' % apk_filename
+
+  # A structure for holding details about a pak file.
+  Pak = collections.namedtuple(
+      'Pak', ['filename', 'compress_size', 'file_size', 'resources'])
+
+  # Build a list of Pak objects for each pak file.
+  paks = []
+  apk = zipfile.ZipFile(apk_filename, 'r')
+  try:
+    for i in (x for x in apk.infolist() if IsPakFileName(x.filename)):
+      with tempfile.NamedTemporaryFile() as f:
+        f.write(apk.read(i.filename))
+        f.flush()
+        paks.append(Pak(i.filename, i.compress_size, i.file_size,
+                        data_pack.DataPack.ReadDataPack(f.name).resources))
+  finally:
+    apk.close()
+
+  # Output the overall pak file summary.
+  total_files = len(paks)
+  total_compress_size = sum(pak.compress_size for pak in paks)
+  total_file_size = sum(pak.file_size for pak in paks)
+  print 'Total pak files: %d' % total_files
+  print 'Total compressed size: %s' % _FormatBytes(total_compress_size)
+  print 'Total uncompressed size: %s' % _FormatBytes(total_file_size)
+  print
+
+  # Output the table of details about all pak files.
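+  # Each row reports the pak's share of the compressed and uncompressed
+  # totals, e.g. (illustrative values only):
+  #   en-US.pak  542  310.20k 12.34%  612.48k 13.10%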
+  print '%25s%11s%21s%21s' % (
+      'FILENAME', 'RESOURCES', 'COMPRESSED SIZE', 'UNCOMPRESSED SIZE')
+  for pak in sorted(paks, key=operator.attrgetter('file_size'), reverse=True):
+    print '%25s %10s %12s %6.2f%% %12s %6.2f%%' % (
+        pak.filename,
+        len(pak.resources),
+        _FormatBytes(pak.compress_size),
+        100.0 * pak.compress_size / total_compress_size,
+        _FormatBytes(pak.file_size),
+        100.0 * pak.file_size / total_file_size)
+
+  print
+  print 'Analyzing pak resources in %s...' % apk_filename
+
+  # Calculate aggregate stats about resources across pak files.
+  resource_count_map = collections.defaultdict(int)
+  resource_size_map = collections.defaultdict(int)
+  resource_overhead_bytes = 6
+  for pak in paks:
+    for r in pak.resources:
+      resource_count_map[r] += 1
+      resource_size_map[r] += len(pak.resources[r]) + resource_overhead_bytes
+
+  # Output the overall resource summary.
+  total_resource_size = sum(resource_size_map.values())
+  total_resource_count = len(resource_count_map)
+  assert total_resource_size <= total_file_size
+  print 'Total pak resources: %s' % total_resource_count
+  print 'Total uncompressed resource size: %s' % _FormatBytes(
+      total_resource_size)
+  print
+
+  resource_id_name_map = _GetResourceIdNameMap()
+
+  # Output the table of details about all resources across pak files.
+  print
+  print '%56s %5s %17s' % ('RESOURCE', 'COUNT', 'UNCOMPRESSED SIZE')
+  for i in sorted(resource_size_map, key=resource_size_map.get,
+                  reverse=True):
+    if resource_size_map[i] >= min_pak_resource_size:
+      print '%56s %5s %9s %6.2f%%' % (
+          resource_id_name_map.get(i, i),
+          resource_count_map[i],
+          _FormatBytes(resource_size_map[i]),
+          100.0 * resource_size_map[i] / total_resource_size)
+
+
+def _GetResourceIdNameMap():
+  """Returns a map of {resource_id: resource_name}."""
+  out_dir = constants.GetOutDirectory()
+  assert os.path.isdir(out_dir), 'Failed to locate out dir at %s' % out_dir
+  print 'Looking at resources in: %s' % out_dir
+
+  grit_headers = []
+  for root, _, files in os.walk(out_dir):
+    if root.endswith('grit'):
+      grit_headers += [os.path.join(root, f) for f in files if f.endswith('.h')]
+  assert grit_headers, 'Failed to find grit headers in %s' % out_dir
+
+  id_name_map = {}
+  for header in grit_headers:
+    with open(header, 'r') as f:
+      for line in f:
+        m = _RC_HEADER_RE.match(line.strip())
+        if m:
+          i = int(m.group('id'))
+          name = m.group('name')
+          if i in id_name_map and name != id_name_map[i]:
+            print 'WARNING: Resource ID conflict %s (%s vs %s)' % (
+                i, id_name_map[i], name)
+          id_name_map[i] = name
+  return id_name_map
+
+
+def PrintStaticInitializersCount(so_with_symbols_path, chartjson=None):
+  """Emits the performance result for static initializers found in the
+  provided shared library. Additionally, files for which static initializers
+  were found are printed on the standard output.
+
+  Args:
+    so_with_symbols_path: Path to the unstripped libchrome.so file.
+  """
+  # GetStaticInitializers uses dump-static-initializers.py to get a list of
+  # all static initializers. This does not work on all archs (particularly
+  # arm). TODO(rnephew): Get rid of warning when crbug.com/585588 is fixed.
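+  # CountStaticInitializers infers the count from the size of the ELF
+  # .init_array section: size / word size. For example (illustrative), a
+  # 64-bit .so with a 0x40-byte .init_array holds 0x40 / 8 = 8 initializers.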
+  si_count = CountStaticInitializers(so_with_symbols_path)
+  static_initializers = GetStaticInitializers(so_with_symbols_path)
+  if si_count != len(static_initializers):
+    print ('There are %d files with static initializers, but '
+           'dump-static-initializers found %d:' %
+           (si_count, len(static_initializers)))
+  else:
+    print 'Found %d files with static initializers:' % si_count
+  print '\n'.join(static_initializers)
+
+  ReportPerfResult(chartjson, 'StaticInitializersCount', 'count',
+                   si_count, 'count')
+
+def _FormatBytes(byts):
+  """Pretty-print a number of bytes."""
+  if byts > 2**20.0:
+    byts /= 2**20.0
+    return '%.2fm' % byts
+  if byts > 2**10.0:
+    byts /= 2**10.0
+    return '%.2fk' % byts
+  return str(byts)
+
+
+def _CalculateCompressedSize(file_path):
+  CHUNK_SIZE = 256 * 1024
+  compressor = zlib.compressobj()
+  total_size = 0
+  with open(file_path, 'rb') as f:
+    for chunk in iter(lambda: f.read(CHUNK_SIZE), ''):
+      total_size += len(compressor.compress(chunk))
+  total_size += len(compressor.flush())
+  return total_size
+
+
+def main(argv):
+  usage = """Usage: %prog [options] file1 file2 ...
+
+Pass any number of files to graph their sizes. Any files with the extension
+'.apk' will be broken down into their components on a separate graph."""
+  option_parser = optparse.OptionParser(usage=usage)
+  option_parser.add_option('--so-path', help='Path to libchrome.so.')
+  option_parser.add_option('--so-with-symbols-path',
+                           help='Path to libchrome.so with symbols.')
+  option_parser.add_option('--min-pak-resource-size', type='int',
+                           default=20*1024,
+                           help='Minimum byte size of displayed pak resources.')
+  option_parser.add_option('--build_type', dest='build_type', default='Debug',
+                           help='Sets the build type, default is Debug.')
+  option_parser.add_option('--chromium-output-directory',
+                           help='Location of the build artifacts. '
+                                'Takes precedence over --build_type.')
+  option_parser.add_option('--chartjson', action="store_true",
+                           help='Sets output mode to chartjson.')
+  option_parser.add_option('--output-dir', default='.',
+                           help='Directory to save chartjson to.')
+  option_parser.add_option('-d', '--device',
+                           help='Dummy option for perf runner.')
+  options, args = option_parser.parse_args(argv)
+  files = args[1:]
+  chartjson = _BASE_CHART.copy() if options.chartjson else None
+
+  constants.SetBuildType(options.build_type)
+  if options.chromium_output_directory:
+    constants.SetOutputDirectory(options.chromium_output_directory)
+  constants.CheckOutputDirectory()
+
+  # For backward compatibility with buildbot scripts, treat --so-path as just
+  # another file to print the size of. We don't need it for anything special
+  # any more.
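+  # Example invocation (hypothetical paths):
+  #   build/android/resource_sizes.py --chartjson --output-dir /tmp/sizes \
+  #       out/Release/apks/ChromePublic.apk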
+ if options.so_path: + files.append(options.so_path) + + if not files: + option_parser.error('Must specify a file') + + devil_chromium.Initialize() + + if options.so_with_symbols_path: + PrintStaticInitializersCount( + options.so_with_symbols_path, chartjson=chartjson) + + PrintResourceSizes(files, chartjson=chartjson) + + for f in files: + if f.endswith('.apk'): + PrintApkAnalysis(f, chartjson=chartjson) + PrintPakAnalysis(f, options.min_pak_resource_size) + + if chartjson: + results_path = os.path.join(options.output_dir, 'results-chart.json') + logging.critical('Dumping json to %s', results_path) + with open(results_path, 'w') as json_file: + json.dump(chartjson, json_file) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/rezip.gyp b/build/android/rezip.gyp new file mode 100644 index 00000000000..dcb71a1cfda --- /dev/null +++ b/build/android/rezip.gyp @@ -0,0 +1,44 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Build the rezip build tool. +{ + 'targets': [ + { + # GN: //build/android/rezip:rezip + 'target_name': 'rezip_apk_jar', + 'type': 'none', + 'variables': { + 'java_in_dir': 'rezip', + 'compile_stamp': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)/compile.stamp', + 'javac_jar_path': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar', + }, + 'actions': [ + { + 'action_name': 'javac_<(_target_name)', + 'message': 'Compiling <(_target_name) java sources', + 'variables': { + 'java_sources': ['>!@(find >(java_in_dir) -name "*.java")'], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/javac.py', + '>@(java_sources)', + ], + 'outputs': [ + '<(compile_stamp)', + '<(javac_jar_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/javac.py', + '--classpath=', + '--jar-path=<(javac_jar_path)', + '--stamp=<(compile_stamp)', + '>@(java_sources)', + ] + }, + ], + } + ], +} diff --git a/build/android/rezip/BUILD.gn b/build/android/rezip/BUILD.gn new file mode 100644 index 00000000000..b9a39a6920d --- /dev/null +++ b/build/android/rezip/BUILD.gn @@ -0,0 +1,11 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +# GYP: //build/android/rezip.gyp:rezip_apk_jar +java_library("rezip") { + jar_path = "$root_build_dir/lib.java/rezip_apk.jar" + java_files = [ "RezipApk.java" ] +} diff --git a/build/android/rezip/RezipApk.java b/build/android/rezip/RezipApk.java new file mode 100644 index 00000000000..43d75447c8f --- /dev/null +++ b/build/android/rezip/RezipApk.java @@ -0,0 +1,448 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.Enumeration; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; +import java.util.jar.JarOutputStream; +import java.util.regex.Pattern; +import java.util.zip.CRC32; + +/** + * Command line tool used to build APKs which support loading the native code library + * directly from the APK file. 
To construct the APK we rename the native library by
+ * adding the prefix "crazy." to the filename. This is done to prevent the Android
+ * Package Manager from extracting the library. The native code must be page aligned
+ * and uncompressed. The page alignment is implemented by adding a zero-filled file
+ * in front of the native code library. This tool is designed so that running
+ * SignApk and/or zipalign on the resulting APK does not break the page alignment.
+ * This is achieved by outputting the filenames in the same canonical order used
+ * by SignApk and adding the same alignment fields added by zipalign.
+ */
+class RezipApk {
+    // Alignment to use for non-compressed files (must match zipalign).
+    private static final int ALIGNMENT = 4;
+
+    // Alignment to use for non-compressed *.so files
+    private static final int LIBRARY_ALIGNMENT = 4096;
+
+    // Files matching this pattern are not copied to the output when adding alignment.
+    // When reordering and verifying the APK they are copied to the end of the file.
+    private static Pattern sMetaFilePattern =
+            Pattern.compile("^(META-INF/((.*)[.](SF|RSA|DSA)|com/android/otacert))|("
+                    + Pattern.quote(JarFile.MANIFEST_NAME) + ")$");
+
+    // Pattern for matching a shared library in the APK
+    private static Pattern sLibraryPattern = Pattern.compile("^lib/[^/]*/lib.*[.]so$");
+    // Pattern for matching the crazy linker in the APK
+    private static Pattern sCrazyLinkerPattern =
+            Pattern.compile("^lib/[^/]*/libchromium_android_linker.so$");
+    // Pattern for matching a crazy-loaded shared library in the APK
+    private static Pattern sCrazyLibraryPattern = Pattern.compile("^lib/[^/]*/crazy.lib.*[.]so$");
+
+    private static boolean isLibraryFilename(String filename) {
+        return sLibraryPattern.matcher(filename).matches()
+                && !sCrazyLinkerPattern.matcher(filename).matches();
+    }
+
+    private static boolean isCrazyLibraryFilename(String filename) {
+        return sCrazyLibraryPattern.matcher(filename).matches();
+    }
+
+    private static String renameLibraryForCrazyLinker(String filename) {
+        int lastSlash = filename.lastIndexOf('/');
+        // We rename the library, so that the Android Package Manager
+        // no longer extracts the library.
+        return filename.substring(0, lastSlash + 1) + "crazy." + filename.substring(lastSlash + 1);
+    }
+
+    /**
+     * Wraps another output stream, counting the number of bytes written.
+     */
+    private static class CountingOutputStream extends OutputStream {
+        private long mCount = 0;
+        private OutputStream mOut;
+
+        public CountingOutputStream(OutputStream out) {
+            this.mOut = out;
+        }
+
+        /** Returns the number of bytes written. */
+        public long getCount() {
+            return mCount;
+        }
+
+        @Override public void write(byte[] b, int off, int len) throws IOException {
+            mOut.write(b, off, len);
+            mCount += len;
+        }
+
+        @Override public void write(int b) throws IOException {
+            mOut.write(b);
+            mCount++;
+        }
+
+        @Override public void close() throws IOException {
+            mOut.close();
+        }
+
+        @Override public void flush() throws IOException {
+            mOut.flush();
+        }
+    }
+
+    private static String outputName(JarEntry entry, boolean rename) {
+        String inName = entry.getName();
+        if (rename && entry.getSize() > 0 && isLibraryFilename(inName)) {
+            return renameLibraryForCrazyLinker(inName);
+        }
+        return inName;
+    }
+
+    /**
+     * Comparator used to sort jar entries from the input file.
+     * Sorting is done based on the output filename (which may be renamed).
+     * Filenames are in natural string order, except that filenames matching
+     * the meta-file pattern are always after other files. This is so the manifest
+     * and signature are at the end of the file after any alignment file.
+     */
+    private static class EntryComparator implements Comparator<JarEntry> {
+        private boolean mRename;
+
+        public EntryComparator(boolean rename) {
+            mRename = rename;
+        }
+
+        @Override
+        public int compare(JarEntry j1, JarEntry j2) {
+            String o1 = outputName(j1, mRename);
+            String o2 = outputName(j2, mRename);
+            boolean o1Matches = sMetaFilePattern.matcher(o1).matches();
+            boolean o2Matches = sMetaFilePattern.matcher(o2).matches();
+            if (o1Matches != o2Matches) {
+                return o1Matches ? 1 : -1;
+            } else {
+                return o1.compareTo(o2);
+            }
+        }
+    }
+
+    // Build an ordered list of jar entries. The jar entries from the input are
+    // sorted based on the output filenames (which may be renamed). If
+    // |omitMetaFiles| is true, do not include the jar entries for the META-INF
+    // files. Entries are ordered in the deterministic order used by SignApk.
+    private static List<JarEntry> getOutputFileOrderEntries(
+            JarFile jar, boolean omitMetaFiles, boolean rename) {
+        List<JarEntry> entries = new ArrayList<JarEntry>();
+        for (Enumeration<JarEntry> e = jar.entries(); e.hasMoreElements(); ) {
+            JarEntry entry = e.nextElement();
+            if (entry.isDirectory()) {
+                continue;
+            }
+            if (omitMetaFiles && sMetaFilePattern.matcher(entry.getName()).matches()) {
+                continue;
+            }
+            entries.add(entry);
+        }
+
+        // We sort the input entries by name. When present, META-INF files
+        // are sorted to the end.
+        Collections.sort(entries, new EntryComparator(rename));
+        return entries;
+    }
+
+    /**
+     * Adds a zero-filled alignment file at this point in the zip file.
+     * The added file will be added before |name| and after |prevName|.
+     * The size of the alignment file is such that the location of the
+     * file |name| will be on a LIBRARY_ALIGNMENT boundary.
+     *
+     * Note this arrangement is devised so that running SignApk and/or zipalign on the resulting
+     * file will not alter the alignment.
+     *
+     * @param offset number of bytes into the output file at this point.
+     * @param timestamp time in millis since the epoch to include in the header.
+     * @param name the name of the library filename.
+     * @param prevName the name of the previous file in the archive (or null).
+     * @param out jar output stream to write the alignment file to.
+     *
+     * @throws IOException if the output file can not be written.
+     */
+    private static void addAlignmentFile(
+            long offset, long timestamp, String name, String prevName,
+            JarOutputStream out) throws IOException {
+
+        // Compute the start and alignment of the library, as if it was next.
+        int headerSize = JarFile.LOCHDR + name.length();
+        long libOffset = offset + headerSize;
+        int libNeeded = LIBRARY_ALIGNMENT - (int) (libOffset % LIBRARY_ALIGNMENT);
+        if (libNeeded == LIBRARY_ALIGNMENT) {
+            // Already aligned, no need to add an alignment file.
+            return;
+        }
+
+        // Check that there is not another file between the library and the
+        // alignment file.
+        String alignName = name.substring(0, name.length() - 2) + "align";
+        if (prevName != null && prevName.compareTo(alignName) >= 0) {
+            throw new UnsupportedOperationException(
+                    "Unable to insert alignment file, because there is "
+                    + "another file in front of the file to be aligned. "
+                    + "Other file: " + prevName + " Alignment file: " + alignName
+                    + " file: " + name);
+        }
+
+        // Compute the size of the alignment file header.
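+        // (JarFile.LOCHDR is the fixed 30-byte part of a zip local file
+        // header; the entry name follows it, so LOCHDR + name length is
+        // where the entry's data begins.)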
+        headerSize = JarFile.LOCHDR + alignName.length();
+        // We are going to add an alignment file of type STORED. This file
+        // will itself induce a zipalign alignment adjustment.
+        int extraNeeded =
+                (ALIGNMENT - (int) ((offset + headerSize) % ALIGNMENT)) % ALIGNMENT;
+        headerSize += extraNeeded;
+
+        if (libNeeded < headerSize + 1) {
+            // The header was bigger than the alignment that we need; add another page.
+            libNeeded += LIBRARY_ALIGNMENT;
+        }
+        // Compute the size of the alignment file.
+        libNeeded -= headerSize;
+
+        // Build the header for the alignment file.
+        byte[] zeroBuffer = new byte[libNeeded];
+        JarEntry alignEntry = new JarEntry(alignName);
+        alignEntry.setMethod(JarEntry.STORED);
+        alignEntry.setSize(libNeeded);
+        alignEntry.setTime(timestamp);
+        CRC32 crc = new CRC32();
+        crc.update(zeroBuffer);
+        alignEntry.setCrc(crc.getValue());
+
+        if (extraNeeded != 0) {
+            alignEntry.setExtra(new byte[extraNeeded]);
+        }
+
+        // Output the alignment file.
+        out.putNextEntry(alignEntry);
+        out.write(zeroBuffer);
+        out.closeEntry();
+        out.flush();
+    }
+
+    // Make a JarEntry for the output file which corresponds to the input
+    // file. The output file will be called |name|. The output file will always
+    // be uncompressed (STORED). If the input is not STORED, it is necessary to
+    // inflate it to compute the CRC and size of the output entry.
+    private static JarEntry makeStoredEntry(String name, JarEntry inEntry, JarFile in)
+            throws IOException {
+        JarEntry outEntry = new JarEntry(name);
+        outEntry.setMethod(JarEntry.STORED);
+
+        if (inEntry.getMethod() == JarEntry.STORED) {
+            outEntry.setCrc(inEntry.getCrc());
+            outEntry.setSize(inEntry.getSize());
+        } else {
+            // We are inflating the file. We need to compute the CRC and size.
+            byte[] buffer = new byte[4096];
+            CRC32 crc = new CRC32();
+            int size = 0;
+            int num;
+            InputStream data = in.getInputStream(inEntry);
+            while ((num = data.read(buffer)) > 0) {
+                crc.update(buffer, 0, num);
+                size += num;
+            }
+            data.close();
+            outEntry.setCrc(crc.getValue());
+            outEntry.setSize(size);
+        }
+        return outEntry;
+    }
+
+    /**
+     * Copies the contents of the input APK file to the output APK file. If |rename| is
+     * true then non-empty libraries (*.so) in the input will be renamed by prefixing
+     * "crazy.". This is done to prevent the Android Package Manager from extracting
+     * the library. Note the crazy linker itself is not renamed, for bootstrapping reasons.
+     * Empty libraries are not renamed (they are in the APK to work around a bug where
+     * the Android Package Manager fails to delete old versions when upgrading).
+     * There must be exactly one "crazy" library in the output stream. The "crazy"
+     * library will be uncompressed and page aligned in the output stream. Page
+     * alignment is implemented by adding a zero-filled file, regular alignment is
+     * implemented by adding a zero-filled extra field to the zip file header. If
+     * |addAlignment| is true, a page alignment file is added; otherwise the "crazy"
+     * library must already be page aligned. Care is taken so that the output is generated
+     * in the same way as SignApk. This is important so that running SignApk and
+     * zipalign on the output does not break the page alignment. The archive may not
+     * contain a "*.apk" as SignApk has special nested signing logic that we do not
+     * support.
+     *
+     * @param in The input APK File.
+     * @param out The output APK stream.
+     * @param countOut Counting output stream (to measure the current offset).
+     * @param addAlignment Whether to add the alignment file or just check.
+     * @param rename Whether to rename libraries to be "crazy".
+     *
+     * @throws IOException if the output file can not be written.
+     */
+    private static void rezip(
+            JarFile in, JarOutputStream out, CountingOutputStream countOut,
+            boolean addAlignment, boolean rename) throws IOException {
+
+        List<JarEntry> entries = getOutputFileOrderEntries(in, addAlignment, rename);
+        long timestamp = System.currentTimeMillis();
+        byte[] buffer = new byte[4096];
+        boolean firstEntry = true;
+        String prevName = null;
+        int numCrazy = 0;
+        for (JarEntry inEntry : entries) {
+            // Rename files, if specified.
+            String name = outputName(inEntry, rename);
+            if (name.endsWith(".apk")) {
+                throw new UnsupportedOperationException(
+                        "Nested APKs are not supported: " + name);
+            }
+
+            // Build the header.
+            JarEntry outEntry = null;
+            boolean isCrazy = isCrazyLibraryFilename(name);
+            if (isCrazy) {
+                // "crazy" libraries are always output uncompressed (STORED).
+                outEntry = makeStoredEntry(name, inEntry, in);
+                numCrazy++;
+                if (numCrazy > 1) {
+                    throw new UnsupportedOperationException(
+                            "Found more than one library\n"
+                            + "Multiple libraries are not supported for APKs that use "
+                            + "'load_library_from_zip'.\n"
+                            + "See crbug/388223.\n"
+                            + "Note, check that your build is clean.\n"
+                            + "An unclean build can incorrectly incorporate old "
+                            + "libraries in the APK.");
+                }
+            } else if (inEntry.getMethod() == JarEntry.STORED) {
+                // Preserve the STORED method of the input entry.
+                outEntry = new JarEntry(inEntry);
+                outEntry.setExtra(null);
+            } else {
+                // Create a new entry so that the compressed len is recomputed.
+                outEntry = new JarEntry(name);
+            }
+            outEntry.setTime(timestamp);
+
+            // Compute and add alignment
+            long offset = countOut.getCount();
+            if (firstEntry) {
+                // The first entry in a jar file has an extra field of
+                // four bytes that you can't get rid of; any extra
+                // data you specify in the JarEntry is appended to
+                // these forced four bytes. This is JAR_MAGIC in
+                // JarOutputStream; the bytes are 0xfeca0000.
+                firstEntry = false;
+                offset += 4;
+            }
+            if (outEntry.getMethod() == JarEntry.STORED) {
+                if (isCrazy) {
+                    if (addAlignment) {
+                        addAlignmentFile(offset, timestamp, name, prevName, out);
+                    }
+                    // We check that we did indeed get to a page boundary.
+                    offset = countOut.getCount() + JarFile.LOCHDR + name.length();
+                    if ((offset % LIBRARY_ALIGNMENT) != 0) {
+                        throw new AssertionError(
+                                "Library was not page aligned when verifying page alignment. "
+                                + "Library name: " + name + " Expected alignment: "
+                                + LIBRARY_ALIGNMENT + " Offset: " + offset + " Error: "
+                                + (offset % LIBRARY_ALIGNMENT));
+                    }
+                } else {
+                    // This is equivalent to zipalign.
+                    offset += JarFile.LOCHDR + name.length();
+                    int needed = (ALIGNMENT - (int) (offset % ALIGNMENT)) % ALIGNMENT;
+                    if (needed != 0) {
+                        outEntry.setExtra(new byte[needed]);
+                    }
+                }
+            }
+            out.putNextEntry(outEntry);
+
+            // Copy the data from the input to the output
+            int num;
+            InputStream data = in.getInputStream(inEntry);
+            while ((num = data.read(buffer)) > 0) {
+                out.write(buffer, 0, num);
+            }
+            data.close();
+            out.closeEntry();
+            out.flush();
+            prevName = name;
+        }
+        if (numCrazy == 0) {
+            throw new AssertionError("There was no crazy library in the archive");
+        }
+    }
+
+    private static void usage() {
+        System.err.println("Usage: prealignapk (renamealign|align|reorder) input.apk output.apk");
+        System.err.println("\"crazy\" libraries are always inflated in the output");
+        System.err.println(
+                "  renamealign - rename libraries with \"crazy.\" prefix and add alignment file");
+        System.err.println("  align       - add alignment file");
+        System.err.println("  reorder     - re-creates canonical ordering and checks alignment");
+        System.exit(2);
+    }
+
+    public static void main(String[] args) throws IOException {
+        if (args.length != 3) usage();
+
+        boolean addAlignment = false;
+        boolean rename = false;
+        if (args[0].equals("renamealign")) {
+            // Normal case. Before signing we rename the library and add an alignment file.
+            addAlignment = true;
+            rename = true;
+        } else if (args[0].equals("align")) {
+            // LGPL compliance case. Before signing, we add an alignment file to a
+            // reconstructed APK which already contains the "crazy" library.
+            addAlignment = true;
+            rename = false;
+        } else if (args[0].equals("reorder")) {
+            // Normal case. After jarsigning we write the file in the canonical order and check.
+            addAlignment = false;
+        } else {
+            usage();
+        }
+
+        String inputFilename = args[1];
+        String outputFilename = args[2];
+
+        JarFile inputJar = null;
+        FileOutputStream outputFile = null;
+
+        try {
+            inputJar = new JarFile(new File(inputFilename), true);
+            outputFile = new FileOutputStream(outputFilename);
+
+            CountingOutputStream outCount = new CountingOutputStream(outputFile);
+            JarOutputStream outputJar = new JarOutputStream(outCount);
+
+            // Match the compression level used by SignApk.
+            outputJar.setLevel(9);
+
+            rezip(inputJar, outputJar, outCount, addAlignment, rename);
+            outputJar.close();
+        } finally {
+            if (inputJar != null) inputJar.close();
+            if (outputFile != null) outputFile.close();
+        }
+    }
+}
diff --git a/build/android/screenshot.py b/build/android/screenshot.py
new file mode 100644
index 00000000000..6ab906086d7
--- /dev/null
+++ b/build/android/screenshot.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import devil_chromium
+from devil.android.tools import screenshot
+
+if __name__ == '__main__':
+  devil_chromium.Initialize()
+  sys.exit(screenshot.main())
diff --git a/build/android/setup.gyp b/build/android/setup.gyp
new file mode 100644
index 00000000000..0ef05315561
--- /dev/null
+++ b/build/android/setup.gyp
@@ -0,0 +1,112 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'conditions': [
+    ['android_must_copy_system_libraries == 1', {
+      'targets': [
+        {
+          # These libraries from the Android NDK are required to be packaged
+          # with any APK that is built with them. build/java_apk.gypi expects
+          # any libraries that should be packaged with the apk to be in
+          # <(SHARED_LIB_DIR).
+          'target_name': 'copy_system_libraries',
+          'type': 'none',
+          'copies': [
+            {
+              'destination': '<(SHARED_LIB_DIR)/',
+              'files': [
+                '<(android_libcpp_libs_dir)/libc++_shared.so',
+              ],
+            },
+          ],
+        },
+      ],
+    }],
+  ],
+  'targets': [
+    {
+      'target_name': 'get_build_device_configurations',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'get configurations',
+          'inputs': [
+            'gyp/util/build_device.py',
+            'gyp/get_device_configuration.py',
+          ],
+          'outputs': [
+            '<(build_device_config_path)',
+            '<(build_device_config_path).fake',
+          ],
+          'action': [
+            'python', 'gyp/get_device_configuration.py',
+            '--output=<(build_device_config_path)',
+            '--output-directory=<(PRODUCT_DIR)',
+          ],
+        }
+      ],
+    },
+    {
+      # Target for creating common output build directories. Creating output
+      # dirs beforehand ensures that build scripts can assume these folders
+      # exist and there are no race conditions resulting from build scripts
+      # trying to create these directories.
+      # The build/java.gypi target depends on this target.
+      'target_name': 'build_output_dirs',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'create_java_output_dirs',
+          'variables' : {
+            'output_dirs' : [
+              '<(PRODUCT_DIR)/apks',
+              '<(PRODUCT_DIR)/lib.java',
+              '<(PRODUCT_DIR)/test.lib.java',
+            ]
+          },
+          'inputs' : [],
+          # By not specifying any outputs, we ensure that this command isn't
+          # re-run when the output directories are touched (i.e. apks are
+          # written to them).
+          'outputs': [''],
+          'action': [
+            'mkdir',
+            '-p',
+            '<@(output_dirs)',
+          ],
+        },
+      ],
+    }, # build_output_dirs
+    {
+      'target_name': 'sun_tools_java',
+      'type': 'none',
+      'variables': {
+        'found_jar_path': '<(PRODUCT_DIR)/sun_tools_java/tools.jar',
+        'jar_path': '<(found_jar_path)',
+      },
+      'includes': [
+        '../../build/host_prebuilt_jar.gypi',
+      ],
+      'actions': [
+        {
+          'action_name': 'find_sun_tools_jar',
+          'variables' : {
+          },
+          'inputs' : [
+            'gyp/find_sun_tools_jar.py',
+            'gyp/util/build_utils.py',
+          ],
+          'outputs': [
+            '<(found_jar_path)',
+          ],
+          'action': [
+            'python', 'gyp/find_sun_tools_jar.py',
+            '--output', '<(found_jar_path)',
+          ],
+        },
+      ],
+    }, # sun_tools_java
+  ]
+}
diff --git a/build/android/stacktrace/java_deobfuscate.py b/build/android/stacktrace/java_deobfuscate.py
new file mode 100644
index 00000000000..a468ec30556
--- /dev/null
+++ b/build/android/stacktrace/java_deobfuscate.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A tool to deobfuscate Java stack traces.
+
+Utility wrapper around ReTrace to deobfuscate stack traces that have been
+mangled by ProGuard. Takes stack traces from stdin (e.g. adb logcat |
+java_deobfuscate.py proguard.mapping) and files.
+"""
+
+# Running:
+#   java -jar third_party/proguard/lib/retrace.jar -regex \
+#       "(?:.*?\bat\s+%c\.%m\s*\(%s(?::%l)?\)\s*)|(?:(?:.*?[:\"]\s+)?%c(?::.*)?)" \
+#       ~/mapping
+# in a terminal achieves the same effect as this tool.
+
+import argparse
+import os
+import subprocess
+import sys
+
+_THIRD_PARTY_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                                os.pardir, os.pardir, os.pardir,
+                                                'third_party'))
+sys.path.append(os.path.join(_THIRD_PARTY_DIR, 'catapult', 'devil'))
+from devil.utils import cmd_helper
+
+
+# This regex is taken from
+# http://proguard.sourceforge.net/manual/retrace/usage.html.
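+# In the ReTrace regex language, %c matches a class name, %m a method name,
+# %s a source file name, and %l a line number.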
+_LINE_PARSE_REGEX = (
+    r'(?:.*?\bat\s+%c\.%m\s*\(%s(?::%l)?\)\s*)|(?:(?:.*?[:"]\s+)?%c(?::.*)?)')
+
+
+def main():
+  parser = argparse.ArgumentParser(description=(__doc__))
+  parser.add_argument(
+      'mapping_file',
+      help='ProGuard mapping file from the build that produced the '
+           'stack trace.')
+  parser.add_argument(
+      '--stacktrace',
+      help='Stacktrace file to be deobfuscated.')
+  args = parser.parse_args()
+
+  retrace_path = os.path.join(_THIRD_PARTY_DIR, 'proguard',
+                              'lib', 'retrace.jar')
+
+  base_args = ['java', '-jar', retrace_path, '-regex', _LINE_PARSE_REGEX,
+               args.mapping_file]
+  if args.stacktrace:
+    subprocess.call(base_args + [args.stacktrace])
+  else:
+    for line in cmd_helper.IterCmdOutputLines(base_args):
+      print line
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/strip_native_libraries.gypi b/build/android/strip_native_libraries.gypi
new file mode 100644
index 00000000000..be8a5cb0da0
--- /dev/null
+++ b/build/android/strip_native_libraries.gypi
@@ -0,0 +1,54 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# strips native libraries.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'action_name': 'strip_native_libraries',
+#    'actions': [
+#      'variables': {
+#        'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#        'input_paths': 'files to be added to the list of inputs',
+#        'stamp': 'file to touch when the action is complete',
+#        'stripped_libraries_dir': 'directory to store stripped libraries',
+#      },
+#      'includes': [ '../../build/android/strip_native_libraries.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'message': 'Stripping libraries for <(_target_name)',
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/strip_library_for_device.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'conditions': [
+    ['android_must_copy_system_libraries == 1', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/strip_library_for_device.py',
+    '--android-strip=<(android_strip)',
+    '--android-strip-arg=--strip-unneeded',
+    '--stripped-libraries-dir=<(stripped_libraries_dir)',
+    '--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--stamp=<(stamp)',
+  ],
+}
diff --git a/build/android/test_runner.gypi b/build/android/test_runner.gypi
new file mode 100644
index 00000000000..5127e2a54a7
--- /dev/null
+++ b/build/android/test_runner.gypi
@@ -0,0 +1,107 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a script in the output bin directory which runs the test
+# target using the test runner script in build/android/pylib/test_runner.py.
+#
+# To use this, include this file in a gtest or instrumentation test target.
+# { +# 'target_name': 'gtest', +# 'type': 'none', +# 'variables': { +# 'test_type': 'gtest', # string +# 'test_suite_name': 'gtest_suite' # string +# 'isolate_file': 'path/to/gtest.isolate' # string +# }, +# 'includes': ['path/to/this/gypi/file'], +# } +# +# { +# 'target_name': 'instrumentation_apk', +# 'type': 'none', +# 'variables': { +# 'test_type': 'instrumentation', # string +# 'apk_name': 'TestApk' # string +# 'isolate_file': 'path/to/instrumentation_test.isolate' # string +# }, +# 'includes': ['path/to/this/gypi/file'], +# } +# +# { +# 'target_name': 'junit_test', +# 'type': 'none', +# 'variables': { +# 'test_type': 'junit', # string +# }, +# 'includes': ['path/to/this/gypi/file'], +# } +# + +{ + 'variables': { + 'variables': { + 'additional_apks%': [], + 'isolate_file%': '', + 'shard_timeout%': '', + 'test_runner_path%': '', + }, + 'test_runner_args': ['--output-directory', '<(PRODUCT_DIR)'], + 'conditions': [ + ['test_type == "gtest"', { + 'test_runner_args': ['--suite', '<(test_suite_name)'], + 'script_name': 'run_<(test_suite_name)', + }], + ['test_type == "instrumentation"', { + 'test_runner_args': [ + '--apk-under-test', '>(tested_apk_path)', + '--test-apk', '>(final_apk_path)', + ], + 'script_name': 'run_<(_target_name)', + 'conditions': [ + ['emma_instrument != 0', { + 'test_runner_args': [ + '--coverage-dir', '<(PRODUCT_DIR)/coverage', + ], + }], + ], + }], + ['test_type == "junit"', { + 'test_runner_args': ['--test-suite', '<(_target_name)'], + 'script_name': 'run_<(_target_name)', + }], + ['additional_apks != []', { + 'test_runner_args': ['--additional-apk-list', '>(additional_apks)'], + }], + ['isolate_file != ""', { + 'test_runner_args': ['--isolate-file-path', '<(isolate_file)'] + }], + ['shard_timeout != ""', { + 'test_runner_args': ['--shard-timeout', '<(shard_timeout)'] + }], + ['test_runner_path != ""', { + 'test_runner_args': ['--test-runner-path', '<(test_runner_path)'] + }], + ], + }, + 'actions': [ + { + 'action_name': 'create_test_runner_script_<(script_name)', + 'message': 'Creating test runner script <(script_name)', + 'variables': { + 'script_output_path': '<(PRODUCT_DIR)/bin/<(script_name)', + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/create_test_runner_script.py', + ], + 'outputs': [ + '<(script_output_path)' + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/create_test_runner_script.py', + '--script-output-path=<(script_output_path)', + '<(test_type)', '<@(test_runner_args)', + ], + }, + ], +} diff --git a/build/android/test_runner.py b/build/android/test_runner.py new file mode 100644 index 00000000000..e4747aba5d9 --- /dev/null +++ b/build/android/test_runner.py @@ -0,0 +1,947 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Runs all types of tests from one unified interface.""" + +import argparse +import collections +import itertools +import logging +import os +import signal +import sys +import threading +import unittest + +import devil_chromium +from devil import base_error +from devil.android import device_blacklist +from devil.android import device_errors +from devil.android import device_utils +from devil.android import forwarder +from devil.android import ports +from devil.utils import reraiser_thread +from devil.utils import run_tests_helper + +from pylib import constants +from pylib.base import base_test_result +from pylib.base import environment_factory +from pylib.base import test_dispatcher +from pylib.base import test_instance_factory +from pylib.base import test_run_factory +from pylib.constants import host_paths +from pylib.linker import setup as linker_setup +from pylib.junit import setup as junit_setup +from pylib.junit import test_dispatcher as junit_dispatcher +from pylib.monkey import setup as monkey_setup +from pylib.monkey import test_options as monkey_test_options +from pylib.results import json_results +from pylib.results import report_results + + +_DEVIL_STATIC_CONFIG_FILE = os.path.abspath(os.path.join( + host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'devil_config.json')) + + +def AddCommonOptions(parser): + """Adds all common options to |parser|.""" + + group = parser.add_argument_group('Common Options') + + default_build_type = os.environ.get('BUILDTYPE', 'Debug') + + debug_or_release_group = group.add_mutually_exclusive_group() + debug_or_release_group.add_argument( + '--debug', action='store_const', const='Debug', dest='build_type', + default=default_build_type, + help=('If set, run test suites under out/Debug. ' + 'Default is env var BUILDTYPE or Debug.')) + debug_or_release_group.add_argument( + '--release', action='store_const', const='Release', dest='build_type', + help=('If set, run test suites under out/Release. ' + 'Default is env var BUILDTYPE or Debug.')) + + group.add_argument('--build-directory', dest='build_directory', + help=('Path to the directory in which build files are' + ' located (should not include build type)')) + group.add_argument('--output-directory', dest='output_directory', + help=('Path to the directory in which build files are' + ' located (must include build type). This will take' + ' precedence over --debug, --release and' + ' --build-directory')) + group.add_argument('--num_retries', '--num-retries', dest='num_retries', + type=int, default=2, + help=('Number of retries for a test before ' + 'giving up (default: %(default)s).')) + group.add_argument('-v', + '--verbose', + dest='verbose_count', + default=0, + action='count', + help='Verbose level (multiple times for more)') + group.add_argument('--flakiness-dashboard-server', + dest='flakiness_dashboard_server', + help=('Address of the server that is hosting the ' + 'Chrome for Android flakiness dashboard.')) + group.add_argument('--enable-platform-mode', action='store_true', + help=('Run the test scripts in platform mode, which ' + 'conceptually separates the test runner from the ' + '"device" (local or remote, real or emulated) on ' + 'which the tests are running. 
[experimental]')) + group.add_argument('-e', '--environment', default='local', + choices=constants.VALID_ENVIRONMENTS, + help='Test environment to run in (default: %(default)s).') + group.add_argument('--adb-path', type=os.path.abspath, + help=('Specify the absolute path of the adb binary that ' + 'should be used.')) + group.add_argument('--json-results-file', '--test-launcher-summary-output', + dest='json_results_file', + help='If set, will dump results in JSON form ' + 'to specified file.') + + logcat_output_group = group.add_mutually_exclusive_group() + logcat_output_group.add_argument( + '--logcat-output-dir', + help='If set, will dump logcats recorded during test run to directory. ' + 'File names will be the device ids with timestamps.') + logcat_output_group.add_argument( + '--logcat-output-file', + help='If set, will merge logcats recorded during test run and dump them ' + 'to the specified file.') + + class FastLocalDevAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + namespace.verbose_count = max(namespace.verbose_count, 1) + namespace.num_retries = 0 + namespace.enable_device_cache = True + namespace.enable_concurrent_adb = True + namespace.skip_clear_data = True + namespace.extract_test_list_from_filter = True + + group.add_argument('--fast-local-dev', type=bool, nargs=0, + action=FastLocalDevAction, + help='Alias for: --verbose --num-retries=0 ' + '--enable-device-cache --enable-concurrent-adb ' + '--skip-clear-data --extract-test-list-from-filter') + +def ProcessCommonOptions(args): + """Processes and handles all common options.""" + run_tests_helper.SetLogLevel(args.verbose_count) + constants.SetBuildType(args.build_type) + if args.build_directory: + constants.SetBuildDirectory(args.build_directory) + if args.output_directory: + constants.SetOutputDirectory(args.output_directory) + + devil_chromium.Initialize( + output_directory=constants.GetOutDirectory(), + adb_path=args.adb_path) + + # Some things such as Forwarder require ADB to be in the environment path. + adb_dir = os.path.dirname(constants.GetAdbPath()) + if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep): + os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH'] + + +def AddRemoteDeviceOptions(parser): + group = parser.add_argument_group('Remote Device Options') + + group.add_argument('--trigger', + help=('Only triggers the test if set. Stores test_run_id ' + 'in given file path. ')) + group.add_argument('--collect', + help=('Only collects the test results if set. ' + 'Gets test_run_id from given file path.')) + group.add_argument('--remote-device', action='append', + help='Device type to run test on.') + group.add_argument('--results-path', + help='File path to download results to.') + group.add_argument('--api-protocol', + help='HTTP protocol to use. (http or https)') + group.add_argument('--api-address', + help='Address to send HTTP requests.') + group.add_argument('--api-port', + help='Port to send HTTP requests to.') + group.add_argument('--runner-type', + help='Type of test to run as.') + group.add_argument('--runner-package', + help='Package name of test.') + group.add_argument('--device-type', + choices=constants.VALID_DEVICE_TYPES, + help=('Type of device to run on. iOS or android')) + group.add_argument('--device-oem', action='append', + help='Device OEM to run on.') + group.add_argument('--remote-device-file', + help=('File with JSON to select remote device. 
' + 'Overrides all other flags.')) + group.add_argument('--remote-device-timeout', type=int, + help='Times to retry finding remote device') + group.add_argument('--network-config', type=int, + help='Integer that specifies the network environment ' + 'that the tests will be run in.') + group.add_argument('--test-timeout', type=int, + help='Test run timeout in seconds.') + + device_os_group = group.add_mutually_exclusive_group() + device_os_group.add_argument('--remote-device-minimum-os', + help='Minimum OS on device.') + device_os_group.add_argument('--remote-device-os', action='append', + help='OS to have on the device.') + + api_secret_group = group.add_mutually_exclusive_group() + api_secret_group.add_argument('--api-secret', default='', + help='API secret for remote devices.') + api_secret_group.add_argument('--api-secret-file', default='', + help='Path to file that contains API secret.') + + api_key_group = group.add_mutually_exclusive_group() + api_key_group.add_argument('--api-key', default='', + help='API key for remote devices.') + api_key_group.add_argument('--api-key-file', default='', + help='Path to file that contains API key.') + + +def AddDeviceOptions(parser): + """Adds device options to |parser|.""" + group = parser.add_argument_group(title='Device Options') + group.add_argument('--tool', + dest='tool', + help=('Run the test under a tool ' + '(use --tool help to list them)')) + group.add_argument('-d', '--device', dest='test_device', + help=('Target device for the test suite ' + 'to run on.')) + group.add_argument('--blacklist-file', help='Device blacklist file.') + group.add_argument('--enable-device-cache', action='store_true', + help='Cache device state to disk between runs') + group.add_argument('--enable-concurrent-adb', action='store_true', + help='Run multiple adb commands at the same time, even ' + 'for the same device.') + group.add_argument('--skip-clear-data', action='store_true', + help='Do not wipe app data between tests. Use this to ' + 'speed up local development and never on bots ' + '(increases flakiness)') + group.add_argument('--target-devices-file', + help='Path to file with json list of device serials to ' + 'run tests on. 
When not specified, all available '
+                          'devices are used.')
+
+
+def AddGTestOptions(parser):
+  """Adds gtest options to |parser|."""
+
+  group = parser.add_argument_group('GTest Options')
+  group.add_argument('-s', '--suite', dest='suite_name',
+                     nargs='+', metavar='SUITE_NAME', required=True,
+                     help='Executable name of the test suite to run.')
+  group.add_argument('--executable-dist-dir',
+                     help="Path to executable's dist directory for native"
+                          " (non-apk) tests.")
+  group.add_argument('--test-apk-incremental-install-script',
+                     help='Path to install script for the test apk.')
+  group.add_argument('--gtest_also_run_disabled_tests',
+                     '--gtest-also-run-disabled-tests',
+                     dest='run_disabled', action='store_true',
+                     help='Also run disabled tests if applicable.')
+  group.add_argument('-a', '--test-arguments', dest='test_arguments',
+                     default='',
+                     help='Additional arguments to pass to the test.')
+  group.add_argument('-t', '--shard-timeout',
+                     dest='shard_timeout', type=int, default=120,
+                     help='Timeout to wait for each test '
+                          '(default: %(default)s).')
+  group.add_argument('--isolate_file_path',
+                     '--isolate-file-path',
+                     dest='isolate_file_path',
+                     help='.isolate file path to override the default '
+                          'path')
+  group.add_argument('--app-data-file', action='append', dest='app_data_files',
+                     help='A file path relative to the app data directory '
+                          'that should be saved to the host.')
+  group.add_argument('--app-data-file-dir',
+                     help='Host directory to which app data files will be'
+                          ' saved. Used with --app-data-file.')
+  group.add_argument('--delete-stale-data', dest='delete_stale_data',
+                     action='store_true',
+                     help='Delete stale test data on the device.')
+  group.add_argument('--repeat', '--gtest_repeat', '--gtest-repeat',
+                     dest='repeat', type=int, default=0,
+                     help='Number of times to repeat the specified set of '
+                          'tests.')
+  group.add_argument('--break-on-failure', '--break_on_failure',
+                     dest='break_on_failure', action='store_true',
+                     help='Whether to break on failure.')
+  group.add_argument('--extract-test-list-from-filter',
+                     action='store_true',
+                     help='When a test filter is specified, and the list of '
+                          'tests can be determined from it, skip querying the '
+                          'device for the list of all tests. Speeds up local '
+                          'development, but is not safe to use on bots '
+                          '(http://crbug.com/549214).')
+
+  filter_group = group.add_mutually_exclusive_group()
+  filter_group.add_argument('-f', '--gtest_filter', '--gtest-filter',
+                            dest='test_filter',
+                            help='googletest-style filter string.')
+  filter_group.add_argument('--gtest-filter-file', dest='test_filter_file',
+                            help='Path to file that contains googletest-style '
+                                 'filter strings. (Lines will be joined with '
+                                 '":" to create a single filter string.)')
+
+  AddDeviceOptions(parser)
+  AddCommonOptions(parser)
+  AddRemoteDeviceOptions(parser)
+
+
+def AddLinkerTestOptions(parser):
+  group = parser.add_argument_group('Linker Test Options')
+  group.add_argument('-f', '--gtest-filter', dest='test_filter',
+                     help='googletest-style filter string.')
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+
+def AddJavaTestOptions(argument_group):
+  """Adds the Java test options to |argument_group|."""
+
+  argument_group.add_argument(
+      '-f', '--test-filter', '--gtest_filter', '--gtest-filter',
+      dest='test_filter',
+      help='Test filter (if not fully qualified, will run all matches).')
+  argument_group.add_argument(
+      '--repeat', dest='repeat', type=int, default=0,
+      help='Number of times to repeat the specified set of tests.')
+  argument_group.add_argument(
+      '--break-on-failure', '--break_on_failure',
+      dest='break_on_failure', action='store_true',
+      help='Whether to break on failure.')
+  argument_group.add_argument(
+      '-A', '--annotation', dest='annotation_str',
+      help=('Comma-separated list of annotations. Run only tests with any of '
+            'the given annotations. An annotation can be either a key or a '
+            'key-values pair. A test that has no annotation is considered '
+            '"SmallTest".'))
+  argument_group.add_argument(
+      '-E', '--exclude-annotation', dest='exclude_annotation_str',
+      help=('Comma-separated list of annotations. Exclude tests with these '
+            'annotations.'))
+  argument_group.add_argument(
+      '--screenshot-directory', dest='screenshot_dir',
+      help='Capture screenshots of test failures.')
+  argument_group.add_argument(
+      '--save-perf-json', action='store_true',
+      help='Saves the JSON file for each UI Perf test.')
+  argument_group.add_argument(
+      '--official-build', action='store_true', help='Run official build tests.')
+  argument_group.add_argument(
+      '--disable-dalvik-asserts', dest='set_asserts', action='store_false',
+      default=True, help='Removes the dalvik.vm.enableassertions property')
+
+
+
+def ProcessJavaTestOptions(args):
+  """Processes options/arguments and populates |args| with defaults."""
+
+  # TODO(jbudorick): Handle most of this function in argparse.
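+  # For example, '-A Smoke,SmallTest' yields
+  # args.annotations == ['Smoke', 'SmallTest'].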
+ if args.annotation_str: + args.annotations = args.annotation_str.split(',') + elif args.test_filter: + args.annotations = [] + else: + args.annotations = ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest', + 'EnormousTest', 'IntegrationTest'] + + if args.exclude_annotation_str: + args.exclude_annotations = args.exclude_annotation_str.split(',') + else: + args.exclude_annotations = [] + + +def AddInstrumentationTestOptions(parser): + """Adds Instrumentation test options to |parser|.""" + + parser.usage = '%(prog)s [options]' + + group = parser.add_argument_group('Instrumentation Test Options') + AddJavaTestOptions(group) + + java_or_python_group = group.add_mutually_exclusive_group() + java_or_python_group.add_argument( + '-j', '--java-only', action='store_false', + dest='run_python_tests', default=True, help='Run only the Java tests.') + java_or_python_group.add_argument( + '-p', '--python-only', action='store_false', + dest='run_java_tests', default=True, + help='DEPRECATED') + + group.add_argument('--host-driven-root', + help='DEPRECATED') + group.add_argument('-w', '--wait_debugger', dest='wait_for_debugger', + action='store_true', + help='Wait for debugger.') + group.add_argument('--apk-under-test', + help='Path or name of the apk under test.') + group.add_argument('--apk-under-test-incremental-install-script', + help='Path to install script for the --apk-under-test.') + group.add_argument('--test-apk', required=True, + help='Path or name of the apk containing the tests ' + '(name is without the .apk extension; ' + 'e.g. "ContentShellTest").') + group.add_argument('--test-apk-incremental-install-script', + help='Path to install script for the --test-apk.') + group.add_argument('--additional-apk', action='append', + dest='additional_apks', default=[], + help='Additional apk that must be installed on ' + 'the device when the tests are run') + group.add_argument('--coverage-dir', + help=('Directory in which to place all generated ' + 'EMMA coverage files.')) + group.add_argument('--device-flags', dest='device_flags', default='', + help='The relative filepath to a file containing ' + 'command-line flags to set on the device') + group.add_argument('--device-flags-file', default='', + help='The relative filepath to a file containing ' + 'command-line flags to set on the device') + group.add_argument('--isolate_file_path', + '--isolate-file-path', + dest='isolate_file_path', + help='.isolate file path to override the default ' + 'path') + group.add_argument('--delete-stale-data', dest='delete_stale_data', + action='store_true', + help='Delete stale test data on the device.') + group.add_argument('--timeout-scale', type=float, + help='Factor by which timeouts should be scaled.') + group.add_argument('--strict-mode', dest='strict_mode', default='testing', + help='StrictMode command-line flag set on the device, ' + 'death/testing to kill the process, off to stop ' + 'checking, flash to flash only. 
Default testing.')
+  group.add_argument('--regenerate-goldens', dest='regenerate_goldens',
+                     action='store_true',
+                     help='Causes the render tests to not fail when a check '
+                          'fails or the golden image is missing, but to render '
+                          'the view and carry on.')
+  group.add_argument('--store-tombstones', dest='store_tombstones',
+                     action='store_true',
+                     help='Add tombstones to results if a crash occurs.')
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+  AddRemoteDeviceOptions(parser)
+
+
+def AddJUnitTestOptions(parser):
+  """Adds junit test options to |parser|."""
+
+  group = parser.add_argument_group('JUnit Test Options')
+  group.add_argument(
+      '-s', '--test-suite', dest='test_suite', required=True,
+      help='JUnit test suite to run.')
+  group.add_argument(
+      '-f', '--test-filter', dest='test_filter',
+      help='Filters tests googletest-style.')
+  group.add_argument(
+      '--package-filter', dest='package_filter',
+      help='Filters tests by package.')
+  group.add_argument(
+      '--runner-filter', dest='runner_filter',
+      help='Filters tests by runner class. Must be fully qualified.')
+  group.add_argument(
+      '--sdk-version', dest='sdk_version', type=int,
+      help='The Android SDK version.')
+  group.add_argument(
+      '--coverage-dir', dest='coverage_dir',
+      help='Directory to store coverage info.')
+  AddCommonOptions(parser)
+
+
+def AddMonkeyTestOptions(parser):
+  """Adds monkey test options to |parser|."""
+
+  group = parser.add_argument_group('Monkey Test Options')
+  group.add_argument(
+      '--package', required=True, choices=constants.PACKAGE_INFO.keys(),
+      metavar='PACKAGE', help='Package under test.')
+  group.add_argument(
+      '--event-count', default=10000, type=int,
+      help='Number of events to generate (default: %(default)s).')
+  group.add_argument(
+      '--category', default='',
+      help='A comma-separated list of allowed categories.')
+  group.add_argument(
+      '--throttle', default=100, type=int,
+      help='Delay between events (ms) (default: %(default)s).')
+  group.add_argument(
+      '--seed', type=int,
+      help=('Seed value for the pseudo-random generator. The same seed value '
+            'generates the same sequence of events. Seed is randomized by '
+            'default.'))
+  group.add_argument(
+      '--extra-args', default='',
+      help='String of other args to pass to the command verbatim.')
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+def ProcessMonkeyTestOptions(args):
+  """Processes all monkey test options.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    A MonkeyOptions named tuple which contains all options relevant to
+    monkey tests.
+  """
+  # TODO(jbudorick): Handle this directly in argparse with nargs='+'.
+  category = args.category
+  if category:
+    category = args.category.split(',')
+
+  # TODO(jbudorick): Get rid of MonkeyOptions.
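+  # For example, '--category a,b' yields category == ['a', 'b'], which is
+  # passed through below.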
+ return monkey_test_options.MonkeyOptions( + args.verbose_count, + args.package, + args.event_count, + category, + args.throttle, + args.seed, + args.extra_args) + +def AddUirobotTestOptions(parser): + """Adds uirobot test options to |option_parser|.""" + group = parser.add_argument_group('Uirobot Test Options') + + group.add_argument('--app-under-test', required=True, + help='APK to run tests on.') + group.add_argument( + '--repeat', dest='repeat', type=int, default=0, + help='Number of times to repeat the uirobot test.') + group.add_argument( + '--minutes', default=5, type=int, + help='Number of minutes to run uirobot test [default: %(default)s].') + + AddCommonOptions(parser) + AddDeviceOptions(parser) + AddRemoteDeviceOptions(parser) + +def AddPerfTestOptions(parser): + """Adds perf test options to |parser|.""" + + group = parser.add_argument_group('Perf Test Options') + + class SingleStepAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + if values and not namespace.single_step: + parser.error('single step command provided, ' + 'but --single-step not specified.') + elif namespace.single_step and not values: + parser.error('--single-step specified, ' + 'but no single step command provided.') + setattr(namespace, self.dest, values) + + step_group = group.add_mutually_exclusive_group(required=True) + # TODO(jbudorick): Revise --single-step to use argparse.REMAINDER. + # This requires removing "--" from client calls. + step_group.add_argument( + '--single-step', action='store_true', + help='Execute the given command with retries, but only print the result ' + 'for the "most successful" round.') + step_group.add_argument( + '--steps', + help='JSON file containing the list of commands to run.') + step_group.add_argument( + '--print-step', + help='The name of a previously executed perf step to print.') + + group.add_argument( + '--output-json-list', + help='Write a simple list of names from --steps into the given file.') + group.add_argument( + '--collect-chartjson-data', + action='store_true', + help='Cache the chartjson output from each step for later use.') + group.add_argument( + '--output-chartjson-data', + default='', + help='Write out chartjson into the given file.') + # TODO(rnephew): Remove this when everything moves to new option in platform + # mode. + group.add_argument( + '--get-output-dir-archive', metavar='FILENAME', + help='Write the cached output directory archived by a step into the' + ' given ZIP file.') + group.add_argument( + '--output-dir-archive-path', metavar='FILENAME', + help='Write the cached output directory archived by a step into the' + ' given ZIP file.') + group.add_argument( + '--flaky-steps', + help=('A JSON file containing steps that are flaky ' + 'and will have its exit code ignored.')) + group.add_argument( + '--no-timeout', action='store_true', + help=('Do not impose a timeout. Each perf step is responsible for ' + 'implementing the timeout logic.')) + group.add_argument( + '-f', '--test-filter', + help=('Test filter (will match against the names listed in --steps).')) + group.add_argument( + '--dry-run', action='store_true', + help='Just print the steps without executing.') + # Uses 0.1 degrees C because that's what Android does. 
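+  # For example, a --max-battery-temp of 350 means 35.0 degrees C.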
+ group.add_argument( + '--max-battery-temp', type=int, + help='Only start tests when the battery is at or below the given ' + 'temperature (0.1 C)') + group.add_argument( + 'single_step_command', nargs='*', action=SingleStepAction, + help='If --single-step is specified, the command to run.') + group.add_argument( + '--min-battery-level', type=int, + help='Only starts tests when the battery is charged above ' + 'given level.') + group.add_argument('--known-devices-file', help='Path to known device list.') + group.add_argument( + '--repeat', dest='repeat', type=int, default=0, + help='Number of times to repeat the specified set of tests.') + group.add_argument( + '--break-on-failure', '--break_on_failure', dest='break_on_failure', + action='store_true', help='Whether to break on failure.') + group.add_argument( + '--write-buildbot-json', action='store_true', + help='Whether to output buildbot json.') + AddCommonOptions(parser) + AddDeviceOptions(parser) + + +def AddPythonTestOptions(parser): + group = parser.add_argument_group('Python Test Options') + group.add_argument( + '-s', '--suite', dest='suite_name', metavar='SUITE_NAME', + choices=constants.PYTHON_UNIT_TEST_SUITES.keys(), + help='Name of the test suite to run.') + AddCommonOptions(parser) + + +def _RunLinkerTests(args, devices): + """Subcommand of RunTestsCommands which runs linker tests.""" + runner_factory, tests = linker_setup.Setup(args, devices) + + results, exit_code = test_dispatcher.RunTests( + tests, runner_factory, devices, shard=True, test_timeout=60, + num_retries=args.num_retries) + + report_results.LogFull( + results=results, + test_type='Linker test', + test_package='ChromiumLinkerTest') + + if args.json_results_file: + json_results.GenerateJsonResultsFile([results], args.json_results_file) + + return exit_code + + +def _RunJUnitTests(args): + """Subcommand of RunTestsCommand which runs junit tests.""" + runner_factory, tests = junit_setup.Setup(args) + results, exit_code = junit_dispatcher.RunTests(tests, runner_factory) + + report_results.LogFull( + results=results, + test_type='JUnit', + test_package=args.test_suite) + + if args.json_results_file: + json_results.GenerateJsonResultsFile([results], args.json_results_file) + + return exit_code + + +def _RunMonkeyTests(args, devices): + """Subcommand of RunTestsCommands which runs monkey tests.""" + monkey_options = ProcessMonkeyTestOptions(args) + + runner_factory, tests = monkey_setup.Setup(monkey_options) + + results, exit_code = test_dispatcher.RunTests( + tests, runner_factory, devices, shard=False, test_timeout=None, + num_retries=args.num_retries) + + report_results.LogFull( + results=results, + test_type='Monkey', + test_package='Monkey') + + if args.json_results_file: + json_results.GenerateJsonResultsFile([results], args.json_results_file) + + return exit_code + + +def _RunPythonTests(args): + """Subcommand of RunTestsCommand which runs python unit tests.""" + suite_vars = constants.PYTHON_UNIT_TEST_SUITES[args.suite_name] + suite_path = suite_vars['path'] + suite_test_modules = suite_vars['test_modules'] + + sys.path = [suite_path] + sys.path + try: + suite = unittest.TestSuite() + suite.addTests(unittest.defaultTestLoader.loadTestsFromName(m) + for m in suite_test_modules) + runner = unittest.TextTestRunner(verbosity=1+args.verbose_count) + return 0 if runner.run(suite).wasSuccessful() else 1 + finally: + sys.path = sys.path[1:] + + +def _GetAttachedDevices(blacklist_file, test_device, enable_cache, num_retries): + """Get all attached devices. 
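+
+  When test_device is given, the result is filtered down to that one device;
+  otherwise every healthy attached device is returned, sorted by serial.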
+
+  Args:
+    blacklist_file: Path to device blacklist.
+    test_device: Name of a specific device to use.
+    enable_cache: Whether to enable checksum caching.
+    num_retries: Number of times to retry failed device operations.
+
+  Returns:
+    A list of attached devices.
+  """
+  blacklist = (device_blacklist.Blacklist(blacklist_file)
+               if blacklist_file
+               else None)
+
+  attached_devices = device_utils.DeviceUtils.HealthyDevices(
+      blacklist, enable_device_files_cache=enable_cache,
+      default_retries=num_retries)
+  if test_device:
+    test_device = [d for d in attached_devices if d == test_device]
+    if not test_device:
+      raise device_errors.DeviceUnreachableError(
+          'Did not find device %s among attached devices. Attached devices: %s'
+          % (test_device, ', '.join(attached_devices)))
+    return test_device
+  else:
+    if not attached_devices:
+      raise device_errors.NoDevicesError()
+    return sorted(attached_devices)
+
+
+_DEFAULT_PLATFORM_MODE_TESTS = ['gtest', 'instrumentation', 'perf']
+
+
+def RunTestsCommand(args):  # pylint: disable=too-many-return-statements
+  """Checks test type and dispatches to the appropriate function.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    Integer indicating exit code.
+
+  Raises:
+    Exception: Unknown command name passed in, or an exception from an
+      individual test runner.
+  """
+  command = args.command
+
+  ProcessCommonOptions(args)
+  logging.info('command: %s', ' '.join(sys.argv))
+  if args.enable_platform_mode or command in _DEFAULT_PLATFORM_MODE_TESTS:
+    return RunTestsInPlatformMode(args)
+
+  forwarder.Forwarder.RemoveHostLog()
+  if not ports.ResetTestServerPortAllocation():
+    raise Exception('Failed to reset test server port.')
+
+  # pylint: disable=protected-access
+  if os.path.exists(ports._TEST_SERVER_PORT_LOCKFILE):
+    os.unlink(ports._TEST_SERVER_PORT_LOCKFILE)
+  # pylint: enable=protected-access
+
+  def get_devices():
+    return _GetAttachedDevices(args.blacklist_file, args.test_device,
+                               args.enable_device_cache, args.num_retries)
+
+  if command == 'linker':
+    return _RunLinkerTests(args, get_devices())
+  elif command == 'junit':
+    return _RunJUnitTests(args)
+  elif command == 'monkey':
+    return _RunMonkeyTests(args, get_devices())
+  elif command == 'python':
+    return _RunPythonTests(args)
+  else:
+    raise Exception('Unknown test type.')
+
+
+_SUPPORTED_IN_PLATFORM_MODE = [
+  # TODO(jbudorick): Add support for more test types.
+  'gtest',
+  'instrumentation',
+  'perf',
+  'uirobot',
+]
+
+
+def RunTestsInPlatformMode(args):
+
+  def infra_error(message):
+    logging.fatal(message)
+    sys.exit(constants.INFRA_EXIT_CODE)
+
+  if args.command not in _SUPPORTED_IN_PLATFORM_MODE:
+    infra_error('%s is not yet supported in platform mode' % args.command)
+
+  with environment_factory.CreateEnvironment(args, infra_error) as env:
+    with test_instance_factory.CreateTestInstance(args, infra_error) as test:
+      with test_run_factory.CreateTestRun(
+          args, env, test, infra_error) as test_run:
+
+        # TODO(jbudorick): Rewrite results handling.
+
+        # all_raw_results is a list of lists of base_test_result.TestRunResults
+        # objects. Each instance of TestRunResults contains all test results
+        # produced by a single try, while each list of TestRunResults contains
+        # all tries in a single iteration.
+        all_raw_results = []
+        # all_iteration_results is a list of base_test_result.TestRunResults
+        # objects. Each instance of TestRunResults contains the last test
+        # result for each test run in that iteration.
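+        # For example, with one iteration that needed a retry and a second
+        # that passed outright, the nesting is (illustrative only):
+        #   all_raw_results:       [[try0_results, try1_results],
+        #                           [try0_results]]
+        #   all_iteration_results: [iteration0_results, iteration1_results]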
+ all_iteration_results = [] + + repetitions = (xrange(args.repeat + 1) if args.repeat >= 0 + else itertools.count()) + result_counts = collections.defaultdict( + lambda: collections.defaultdict(int)) + iteration_count = 0 + for _ in repetitions: + raw_results = test_run.RunTests() + if not raw_results: + continue + + all_raw_results.append(raw_results) + + iteration_results = base_test_result.TestRunResults() + for r in reversed(raw_results): + iteration_results.AddTestRunResults(r) + all_iteration_results.append(iteration_results) + + iteration_count += 1 + for r in iteration_results.GetAll(): + result_counts[r.GetName()][r.GetType()] += 1 + report_results.LogFull( + results=iteration_results, + test_type=test.TestType(), + test_package=test_run.TestPackage(), + annotation=getattr(args, 'annotations', None), + flakiness_server=getattr(args, 'flakiness_dashboard_server', + None)) + if args.break_on_failure and not iteration_results.DidRunPass(): + break + + if iteration_count > 1: + # display summary results + # only display results for a test if at least one test did not pass + all_pass = 0 + tot_tests = 0 + for test_name in result_counts: + tot_tests += 1 + if any(result_counts[test_name][x] for x in ( + base_test_result.ResultType.FAIL, + base_test_result.ResultType.CRASH, + base_test_result.ResultType.TIMEOUT, + base_test_result.ResultType.UNKNOWN)): + logging.critical( + '%s: %s', + test_name, + ', '.join('%s %s' % (str(result_counts[test_name][i]), i) + for i in base_test_result.ResultType.GetTypes())) + else: + all_pass += 1 + + logging.critical('%s of %s tests passed in all %s runs', + str(all_pass), + str(tot_tests), + str(iteration_count)) + + if args.json_results_file: + json_results.GenerateJsonResultsFile( + all_raw_results, args.json_results_file) + + if args.command == 'perf' and (args.steps or args.single_step): + return 0 + + return (0 if all(r.DidRunPass() for r in all_iteration_results) + else constants.ERROR_EXIT_CODE) + + +CommandConfigTuple = collections.namedtuple( + 'CommandConfigTuple', + ['add_options_func', 'help_txt']) +VALID_COMMANDS = { + 'gtest': CommandConfigTuple( + AddGTestOptions, + 'googletest-based C++ tests'), + 'instrumentation': CommandConfigTuple( + AddInstrumentationTestOptions, + 'InstrumentationTestCase-based Java tests'), + 'junit': CommandConfigTuple( + AddJUnitTestOptions, + 'JUnit4-based Java tests'), + 'monkey': CommandConfigTuple( + AddMonkeyTestOptions, + "Tests based on Android's monkey"), + 'perf': CommandConfigTuple( + AddPerfTestOptions, + 'Performance tests'), + 'python': CommandConfigTuple( + AddPythonTestOptions, + 'Python tests based on unittest.TestCase'), + 'linker': CommandConfigTuple( + AddLinkerTestOptions, + 'Linker tests'), + 'uirobot': CommandConfigTuple( + AddUirobotTestOptions, + 'Uirobot test'), +} + + +def DumpThreadStacks(_signal, _frame): + for thread in threading.enumerate(): + reraiser_thread.LogThreadStack(thread) + + +def main(): + signal.signal(signal.SIGUSR1, DumpThreadStacks) + + parser = argparse.ArgumentParser() + command_parsers = parser.add_subparsers(title='test types', + dest='command') + + for test_type, config in sorted(VALID_COMMANDS.iteritems(), + key=lambda x: x[0]): + subparser = command_parsers.add_parser( + test_type, usage='%(prog)s [options]', help=config.help_txt) + config.add_options_func(subparser) + + args = parser.parse_args() + + try: + return RunTestsCommand(args) + except base_error.BaseError as e: + logging.exception('Error occurred.') + if e.is_infra_error: + return 
constants.INFRA_EXIT_CODE + return constants.ERROR_EXIT_CODE + except: # pylint: disable=W0702 + logging.exception('Unrecognized error occurred.') + return constants.ERROR_EXIT_CODE + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/test_runner.pydeps b/build/android/test_runner.pydeps new file mode 100644 index 00000000000..bb3c86827ce --- /dev/null +++ b/build/android/test_runner.pydeps @@ -0,0 +1,143 @@ +# Generated by running: +# build/print_python_deps.py --root build/android --output build/android/test_runner.pydeps build/android/test_runner.py +../../third_party/appurify-python/src/appurify/__init__.py +../../third_party/appurify-python/src/appurify/api.py +../../third_party/appurify-python/src/appurify/constants.py +../../third_party/appurify-python/src/appurify/utils.py +../../third_party/catapult/catapult_base/catapult_base/__init__.py +../../third_party/catapult/catapult_base/catapult_base/cloud_storage.py +../../third_party/catapult/catapult_base/catapult_base/util.py +../../third_party/catapult/dependency_manager/dependency_manager/__init__.py +../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py +../../third_party/catapult/dependency_manager/dependency_manager/base_config.py +../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py +../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py +../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py +../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py +../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py +../../third_party/catapult/dependency_manager/dependency_manager/manager.py +../../third_party/catapult/dependency_manager/dependency_manager/uploader.py +../../third_party/catapult/devil/devil/__init__.py +../../third_party/catapult/devil/devil/android/__init__.py +../../third_party/catapult/devil/devil/android/apk_helper.py +../../third_party/catapult/devil/devil/android/battery_utils.py +../../third_party/catapult/devil/devil/android/constants/__init__.py +../../third_party/catapult/devil/devil/android/constants/chrome.py +../../third_party/catapult/devil/devil/android/constants/file_system.py +../../third_party/catapult/devil/devil/android/decorators.py +../../third_party/catapult/devil/devil/android/device_blacklist.py +../../third_party/catapult/devil/devil/android/device_errors.py +../../third_party/catapult/devil/devil/android/device_list.py +../../third_party/catapult/devil/devil/android/device_signal.py +../../third_party/catapult/devil/devil/android/device_temp_file.py +../../third_party/catapult/devil/devil/android/device_utils.py +../../third_party/catapult/devil/devil/android/flag_changer.py +../../third_party/catapult/devil/devil/android/forwarder.py +../../third_party/catapult/devil/devil/android/install_commands.py +../../third_party/catapult/devil/devil/android/logcat_monitor.py +../../third_party/catapult/devil/devil/android/md5sum.py +../../third_party/catapult/devil/devil/android/ports.py +../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../third_party/catapult/devil/devil/android/sdk/aapt.py +../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py +../../third_party/catapult/devil/devil/android/sdk/build_tools.py +../../third_party/catapult/devil/devil/android/sdk/gce_adb_wrapper.py +../../third_party/catapult/devil/devil/android/sdk/intent.py 
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../third_party/catapult/devil/devil/android/sdk/split_select.py +../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../third_party/catapult/devil/devil/android/tools/__init__.py +../../third_party/catapult/devil/devil/android/tools/device_recovery.py +../../third_party/catapult/devil/devil/android/tools/device_status.py +../../third_party/catapult/devil/devil/android/valgrind_tools/__init__.py +../../third_party/catapult/devil/devil/android/valgrind_tools/base_tool.py +../../third_party/catapult/devil/devil/base_error.py +../../third_party/catapult/devil/devil/constants/__init__.py +../../third_party/catapult/devil/devil/constants/exit_codes.py +../../third_party/catapult/devil/devil/devil_env.py +../../third_party/catapult/devil/devil/utils/__init__.py +../../third_party/catapult/devil/devil/utils/cmd_helper.py +../../third_party/catapult/devil/devil/utils/file_utils.py +../../third_party/catapult/devil/devil/utils/host_utils.py +../../third_party/catapult/devil/devil/utils/lazy/__init__.py +../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py +../../third_party/catapult/devil/devil/utils/lsusb.py +../../third_party/catapult/devil/devil/utils/parallelizer.py +../../third_party/catapult/devil/devil/utils/reraiser_thread.py +../../third_party/catapult/devil/devil/utils/reset_usb.py +../../third_party/catapult/devil/devil/utils/run_tests_helper.py +../../third_party/catapult/devil/devil/utils/signal_handler.py +../../third_party/catapult/devil/devil/utils/timeout_retry.py +../../third_party/catapult/devil/devil/utils/watchdog_timer.py +../../third_party/catapult/devil/devil/utils/zip_utils.py +../util/lib/common/unittest_util.py +devil_chromium.py +pylib/__init__.py +pylib/base/__init__.py +pylib/base/base_test_result.py +pylib/base/base_test_runner.py +pylib/base/environment.py +pylib/base/environment_factory.py +pylib/base/test_collection.py +pylib/base/test_dispatcher.py +pylib/base/test_instance.py +pylib/base/test_instance_factory.py +pylib/base/test_run.py +pylib/base/test_run_factory.py +pylib/base/test_server.py +pylib/chrome_test_server_spawner.py +pylib/constants/__init__.py +pylib/constants/host_paths.py +pylib/gtest/__init__.py +pylib/gtest/gtest_test_instance.py +pylib/instrumentation/__init__.py +pylib/instrumentation/instrumentation_parser.py +pylib/instrumentation/instrumentation_test_instance.py +pylib/instrumentation/test_result.py +pylib/junit/__init__.py +pylib/junit/setup.py +pylib/junit/test_dispatcher.py +pylib/junit/test_runner.py +pylib/linker/__init__.py +pylib/linker/setup.py +pylib/linker/test_case.py +pylib/linker/test_runner.py +pylib/local/__init__.py +pylib/local/device/__init__.py +pylib/local/device/local_device_environment.py +pylib/local/device/local_device_gtest_run.py +pylib/local/device/local_device_instrumentation_test_run.py +pylib/local/device/local_device_perf_test_run.py +pylib/local/device/local_device_test_run.py +pylib/local/local_test_server_spawner.py +pylib/monkey/__init__.py +pylib/monkey/setup.py +pylib/monkey/test_options.py +pylib/monkey/test_runner.py +pylib/perf/__init__.py +pylib/perf/perf_test_instance.py +pylib/remote/__init__.py +pylib/remote/device/__init__.py +pylib/remote/device/appurify_constants.py +pylib/remote/device/appurify_sanitized.py +pylib/remote/device/remote_device_environment.py +pylib/remote/device/remote_device_gtest_run.py +pylib/remote/device/remote_device_helper.py 
+pylib/remote/device/remote_device_instrumentation_test_run.py +pylib/remote/device/remote_device_test_run.py +pylib/remote/device/remote_device_uirobot_test_run.py +pylib/results/__init__.py +pylib/results/flakiness_dashboard/__init__.py +pylib/results/flakiness_dashboard/json_results_generator.py +pylib/results/flakiness_dashboard/results_uploader.py +pylib/results/json_results.py +pylib/results/report_results.py +pylib/uirobot/__init__.py +pylib/uirobot/uirobot_test_instance.py +pylib/utils/__init__.py +pylib/utils/isolator.py +pylib/utils/proguard.py +pylib/utils/repo_utils.py +pylib/valgrind_tools.py +test_runner.py +tombstones.py diff --git a/build/android/test_wrapper/logdog_wrapper.py b/build/android/test_wrapper/logdog_wrapper.py new file mode 100644 index 00000000000..6daea499d43 --- /dev/null +++ b/build/android/test_wrapper/logdog_wrapper.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Wrapper for adding logdog streaming support to swarming tasks.""" + +import argparse +import logging +import os +import subprocess +import sys +import urllib + + +def CommandParser(): + # Parses the command line arguments being passed in + parser = argparse.ArgumentParser() + parser.add_argument('--logdog-bin-cmd', required=True, + help='Command for running logdog butler binary') + parser.add_argument('--project', required=True, + help='Name of logdog project') + parser.add_argument('--logdog-server', + default='services-dot-luci-logdog.appspot.com', + help='URL of logdog server, https:// is assumed.') + parser.add_argument('--service-account-json', required=True, + help='Location of authentication json') + parser.add_argument('--prefix', required=True, + help='Prefix to be used for logdog stream') + parser.add_argument('--source', required=True, + help='Location of file for logdog to stream') + parser.add_argument('--name', required=True, + help='Name to be used for logdog stream') + return parser + + +def CreateUrl(server, project, prefix, name): + stream_name = '%s/%s/+/%s' % (project, prefix, name) + return 'https://%s/v/?s=%s' % (server, urllib.quote_plus(stream_name)) + + +def main(): + parser = CommandParser() + args, test_cmd = parser.parse_known_args(sys.argv[1:]) + logging.basicConfig(level=logging.INFO) + if not test_cmd: + parser.error('Must specify command to run after the logdog flags') + result = subprocess.call(test_cmd) + if '${SWARMING_TASK_ID}' in args.prefix: + args.prefix = args.prefix.replace('${SWARMING_TASK_ID}', + os.environ.get('SWARMING_TASK_ID')) + url = CreateUrl('luci-logdog.appspot.com', args.project, args.prefix, + args.name) + logdog_cmd = [args.logdog_bin_cmd, '-project', args.project, + '-output', 'logdog,host=%s' % args.logdog_server, + '-prefix', args.prefix, + '-service-account-json', args.service_account_json, + 'stream', '-source', args.source, + '-stream', '-name=%s' % args.name] + if os.path.exists(args.logdog_bin_cmd): + subprocess.call(logdog_cmd) + logging.info('Logcats are located at: %s', url) + return result + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/tests/symbolize/Makefile b/build/android/tests/symbolize/Makefile new file mode 100644 index 00000000000..5178a04bb74 --- /dev/null +++ b/build/android/tests/symbolize/Makefile @@ -0,0 +1,11 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +TOOLCHAIN=../../../../third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi- +CXX=$(TOOLCHAIN)g++ + +lib%.so: %.cc + $(CXX) -nostdlib -g -fPIC -shared $< -o $@ + +all: liba.so libb.so diff --git a/build/android/tests/symbolize/a.cc b/build/android/tests/symbolize/a.cc new file mode 100644 index 00000000000..f0c7ca4c67f --- /dev/null +++ b/build/android/tests/symbolize/a.cc @@ -0,0 +1,14 @@ +// Copyright 2013 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +class A { + public: + A(); + void Foo(int i); + void Bar(const char* c); +}; + +A::A() {} +void A::Foo(int i) {} +void A::Bar(const char* c) {} diff --git a/build/android/tests/symbolize/b.cc b/build/android/tests/symbolize/b.cc new file mode 100644 index 00000000000..db8752099aa --- /dev/null +++ b/build/android/tests/symbolize/b.cc @@ -0,0 +1,14 @@ +// Copyright 2013 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +class B { + public: + B(); + void Baz(float f); + void Qux(double d); +}; + +B::B() {} +void B::Baz(float f) {} +void B::Qux(double d) {} diff --git a/build/android/tests/symbolize/liba.so b/build/android/tests/symbolize/liba.so new file mode 100644 index 00000000000..79cb7391212 Binary files /dev/null and b/build/android/tests/symbolize/liba.so differ diff --git a/build/android/tests/symbolize/libb.so b/build/android/tests/symbolize/libb.so new file mode 100644 index 00000000000..7cf01d43c58 Binary files /dev/null and b/build/android/tests/symbolize/libb.so differ diff --git a/build/android/tombstones.py b/build/android/tombstones.py new file mode 100644 index 00000000000..cba1e5a9f51 --- /dev/null +++ b/build/android/tombstones.py @@ -0,0 +1,304 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Find the most recent tombstone file(s) on all connected devices +# and prints their stacks. +# +# Assumes tombstone file was created with current symbols. + +import argparse +import datetime +import logging +import multiprocessing +import os +import re +import subprocess +import sys + +import devil_chromium + +from devil.android import device_blacklist +from devil.android import device_errors +from devil.android import device_utils +from devil.utils import run_tests_helper +from pylib import constants + + +_TZ_UTC = {'TZ': 'UTC'} + + +def _ListTombstones(device): + """List the tombstone files on the device. + + Args: + device: An instance of DeviceUtils. + + Yields: + Tuples of (tombstone filename, date time of file on device). + """ + try: + if not device.PathExists('/data/tombstones', as_root=True): + return + entries = device.StatDirectory('/data/tombstones', as_root=True) + for entry in entries: + if 'tombstone' in entry['filename']: + yield (entry['filename'], + datetime.datetime.fromtimestamp(entry['st_mtime'])) + except device_errors.CommandFailedError: + logging.exception('Could not retrieve tombstones.') + except device_errors.CommandTimeoutError: + logging.exception('Timed out retrieving tombstones.') + + +def _GetDeviceDateTime(device): + """Determine the date time on the device. 
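+
+  The device clock is read by running `date` under TZ=UTC and parsing the
+  output with '%a %b %d %H:%M:%S %Z %Y', e.g. a line like
+  'Thu Jun 23 09:41:22 UTC 2016' (example value illustrative).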
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Returns:
+    A datetime instance.
+  """
+  device_now_string = device.RunShellCommand(
+      ['date'], check_return=True, env=_TZ_UTC)
+  return datetime.datetime.strptime(
+      device_now_string[0], '%a %b %d %H:%M:%S %Z %Y')
+
+
+def _GetTombstoneData(device, tombstone_file):
+  """Retrieves the tombstone data from the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: The tombstone to retrieve.
+
+  Returns:
+    A list of lines.
+  """
+  return device.ReadFile(
+      '/data/tombstones/' + tombstone_file, as_root=True).splitlines()
+
+
+def _EraseTombstone(device, tombstone_file):
+  """Deletes a tombstone from the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: The tombstone to delete.
+  """
+  return device.RunShellCommand(
+      ['rm', '/data/tombstones/' + tombstone_file],
+      as_root=True, check_return=True)
+
+
+def _DeviceAbiToArch(device_abi):
+  # The order of this list is significant: it finds the more specific match
+  # (e.g., arm64) before the less specific (e.g., arm).
+  arches = ['arm64', 'arm', 'x86_64', 'x86', 'mips']
+  for arch in arches:
+    if arch in device_abi:
+      return arch
+  raise RuntimeError('Unknown device ABI: %s' % device_abi)
+
+
+def _ResolveSymbols(tombstone_data, include_stack, device_abi):
+  """Runs the stack tool for the given tombstone input.
+
+  Args:
+    tombstone_data: A list of strings of tombstone data.
+    include_stack: Whether to include stack data in the output.
+    device_abi: The default ABI of the device which generated the tombstone.
+
+  Yields:
+    A string for each line of resolved stack output.
+  """
+  # If the tombstone data has an ABI listed, use it in preference to the
+  # device's default ABI.
+  for line in tombstone_data:
+    found_abi = re.search('ABI: \'(.+?)\'', line)
+    if found_abi:
+      device_abi = found_abi.group(1)
+  arch = _DeviceAbiToArch(device_abi)
+  if not arch:
+    return
+
+  stack_tool = os.path.join(os.path.dirname(__file__), '..', '..',
+                            'third_party', 'android_platform', 'development',
+                            'scripts', 'stack')
+  cmd = [stack_tool, '--arch', arch, '--output-directory',
+         constants.GetOutDirectory()]
+  proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+  output = proc.communicate(input='\n'.join(tombstone_data))[0]
+  for line in output.split('\n'):
+    if not include_stack and 'Stack Data:' in line:
+      break
+    yield line
+
+
+def _ResolveTombstone(tombstone):
+  lines = []
+  lines += [tombstone['file'] + ' created on ' + str(tombstone['time']) +
+            ', about this long ago: ' +
+            (str(tombstone['device_now'] - tombstone['time']) +
+             ' Device: ' + tombstone['serial'])]
+  logging.info('\n'.join(lines))
+  logging.info('Resolving...')
+  lines += _ResolveSymbols(tombstone['data'], tombstone['stack'],
+                           tombstone['device_abi'])
+  return lines
+
+
+def _ResolveTombstones(jobs, tombstones):
+  """Resolves a list of tombstones.
+
+  Args:
+    jobs: The number of jobs to use with multiprocessing.
+    tombstones: A list of tombstones.
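+
+  Returns:
+    A flat list of resolved stack output lines covering all the given
+    tombstones.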
+  """
+  if not tombstones:
+    logging.warning('No tombstones to resolve.')
+    return []
+  if len(tombstones) == 1:
+    data = [_ResolveTombstone(tombstones[0])]
+  else:
+    pool = multiprocessing.Pool(processes=jobs)
+    data = pool.map(_ResolveTombstone, tombstones)
+  resolved_tombstones = []
+  for tombstone in data:
+    resolved_tombstones.extend(tombstone)
+  return resolved_tombstones
+
+
+def _GetTombstonesForDevice(device, resolve_all_tombstones,
+                            include_stack_symbols,
+                            wipe_tombstones):
+  """Returns a list of tombstones on a given device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    resolve_all_tombstones: Whether to resolve every tombstone.
+    include_stack_symbols: Whether to include symbols for stack data.
+    wipe_tombstones: Whether to wipe tombstones.
+  """
+  ret = []
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    logging.warning('No tombstones.')
+    return ret
+
+  # Sort the tombstones in date order, descending.
+  all_tombstones.sort(cmp=lambda a, b: cmp(b[1], a[1]))
+
+  # Only resolve the most recent unless --all-tombstones is given.
+  tombstones = all_tombstones if resolve_all_tombstones else [all_tombstones[0]]
+
+  device_now = _GetDeviceDateTime(device)
+  try:
+    for tombstone_file, tombstone_time in tombstones:
+      ret += [{'serial': str(device),
+               'device_abi': device.product_cpu_abi,
+               'device_now': device_now,
+               'time': tombstone_time,
+               'file': tombstone_file,
+               'stack': include_stack_symbols,
+               'data': _GetTombstoneData(device, tombstone_file)}]
+  except device_errors.CommandFailedError:
+    for entry in device.StatDirectory(
+        '/data/tombstones', as_root=True, timeout=60):
+      logging.info('%s: %s', str(device), entry)
+    raise
+
+  # Erase all the tombstones if desired.
+  if wipe_tombstones:
+    for tombstone_file, _ in all_tombstones:
+      _EraseTombstone(device, tombstone_file)
+
+  return ret
+
+
+def ClearAllTombstones(device):
+  """Clears all tombstones on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+  """
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    logging.warning('No tombstones to clear.')
+
+  for tombstone_file, _ in all_tombstones:
+    _EraseTombstone(device, tombstone_file)
+
+
+def ResolveTombstones(device, resolve_all_tombstones, include_stack_symbols,
+                      wipe_tombstones, jobs=4):
+  """Resolves tombstones on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    resolve_all_tombstones: Whether to resolve every tombstone.
+    include_stack_symbols: Whether to include symbols for stack data.
+    wipe_tombstones: Whether to wipe tombstones.
+    jobs: Number of jobs to use when processing multiple crash stacks.
+  """
+  return _ResolveTombstones(jobs,
+                            _GetTombstonesForDevice(device,
+                                                    resolve_all_tombstones,
+                                                    include_stack_symbols,
+                                                    wipe_tombstones))
+
+
+def main():
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(custom_handler)
+  logging.getLogger().setLevel(logging.INFO)
+
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--device',
+                      help='The serial number of the device.
If not specified ' + 'will use all devices.') + parser.add_argument('--blacklist-file', help='Device blacklist JSON file.') + parser.add_argument('-a', '--all-tombstones', action='store_true', + help='Resolve symbols for all tombstones, rather than ' + 'just the most recent.') + parser.add_argument('-s', '--stack', action='store_true', + help='Also include symbols for stack data') + parser.add_argument('-w', '--wipe-tombstones', action='store_true', + help='Erase all tombstones from device after processing') + parser.add_argument('-j', '--jobs', type=int, + default=4, + help='Number of jobs to use when processing multiple ' + 'crash stacks.') + parser.add_argument('--output-directory', + help='Path to the root build directory.') + parser.add_argument('--adb-path', type=os.path.abspath, + help='Path to the adb binary.') + args = parser.parse_args() + + devil_chromium.Initialize(adb_path=args.adb_path) + + blacklist = (device_blacklist.Blacklist(args.blacklist_file) + if args.blacklist_file + else None) + + if args.output_directory: + constants.SetOutputDirectory(args.output_directory) + # Do an up-front test that the output directory is known. + constants.CheckOutputDirectory() + + if args.device: + devices = [device_utils.DeviceUtils(args.device)] + else: + devices = device_utils.DeviceUtils.HealthyDevices(blacklist) + + # This must be done serially because strptime can hit a race condition if + # used for the first time in a multithreaded environment. + # http://bugs.python.org/issue7980 + for device in devices: + resolved_tombstones = ResolveTombstones( + device, args.all_tombstones, + args.stack, args.wipe_tombstones, args.jobs) + for line in resolved_tombstones: + logging.info(line) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/update_deps/update_third_party_deps.py b/build/android/update_deps/update_third_party_deps.py new file mode 100644 index 00000000000..612be8943bf --- /dev/null +++ b/build/android/update_deps/update_third_party_deps.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Uploads or downloads third party libraries to or from google cloud storage. 
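+
+Example invocations (bucket and paths hypothetical):
+
+  update_third_party_deps.py upload -b bucket/subdir -l third_party/foo \
+      -f third_party/foo/bar.jar
+  update_third_party_deps.py download -b bucket/subdir -l third_party/foo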
+""" + +import argparse +import logging +import os +import sys + + +sys.path.append(os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir))) +from pylib import constants +from pylib.constants import host_paths + +sys.path.append(os.path.abspath( + os.path.join(host_paths.DIR_SOURCE_ROOT, 'build'))) +import find_depot_tools # pylint: disable=import-error,unused-import +import download_from_google_storage +import upload_to_google_storage + + +def _AddBasicArguments(parser): + parser.add_argument( + '--sdk-root', default=constants.ANDROID_SDK_ROOT, + help='base path to the Android SDK root') + parser.add_argument( + '-v', '--verbose', action='store_true', help='print debug information') + parser.add_argument( + '-b', '--bucket-path', required=True, + help='The path of the lib file in Google Cloud Storage.') + parser.add_argument( + '-l', '--local-path', required=True, + help='The base path of the third_party directory') + + +def _CheckPaths(bucket_path, local_path): + if bucket_path.startswith('gs://'): + bucket_url = bucket_path + else: + bucket_url = 'gs://%s' % bucket_path + local_path = os.path.join(host_paths.DIR_SOURCE_ROOT, local_path) + if not os.path.isdir(local_path): + raise IOError( + 'The library local path is not a valid directory: %s' % local_path) + return bucket_url, local_path + + +def _CheckFileList(local_path, file_list): + local_path = os.path.abspath(local_path) + abs_path_list = [os.path.abspath(f) for f in file_list] + for f in abs_path_list: + if os.path.commonprefix([f, local_path]) != local_path: + raise IOError( + '%s in the arguments are not decendants of the specified directory %s' + % (f, local_path)) + return abs_path_list + + +def Upload(arguments): + """Upload files in a third_party directory to google storage""" + bucket_url, local_path = _CheckPaths(arguments.bucket_path, + arguments.local_path) + file_list = _CheckFileList(local_path, arguments.file_list) + upload_to_google_storage.upload_to_google_storage( + input_filenames=file_list, + base_url=bucket_url, + gsutil=arguments.gsutil, + force=False, + use_md5=False, + num_threads=1, + skip_hashing=False, + gzip=None) + + +def Download(arguments): + """Download files based on sha1 files in a third_party dir from gcs""" + bucket_url, local_path = _CheckPaths(arguments.bucket_path, + arguments.local_path) + download_from_google_storage.download_from_google_storage( + local_path, + bucket_url, + gsutil=arguments.gsutil, + num_threads=1, + directory=True, + recursive=True, + force=False, + output=None, + ignore_errors=False, + sha1_file=None, + verbose=arguments.verbose, + auto_platform=False, + extract=False) + + +def main(argv): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(title='commands') + download_parser = subparsers.add_parser( + 'download', help='download the library from the cloud storage') + _AddBasicArguments(download_parser) + download_parser.set_defaults(func=Download) + + upload_parser = subparsers.add_parser( + 'upload', help='find all jar files in a third_party directory and ' + + 'upload them to cloud storage') + _AddBasicArguments(upload_parser) + upload_parser.set_defaults(func=Upload) + upload_parser.add_argument( + '-f', '--file-list', nargs='+', required=True, + help='A list of base paths for files in third_party to upload.') + + arguments = parser.parse_args(argv) + if not os.path.isdir(arguments.sdk_root): + logging.debug('Did not find the Android SDK root directory at "%s".', + arguments.sdk_root) + logging.info('Skipping, not on an android 
checkout.') + return 0 + + arguments.gsutil = download_from_google_storage.Gsutil( + download_from_google_storage.GSUTIL_DEFAULT_PATH) + return arguments.func(arguments) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/update_verification.py b/build/android/update_verification.py new file mode 100644 index 00000000000..40cb64ac5da --- /dev/null +++ b/build/android/update_verification.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Runs semi-automated update testing on a non-rooted device. + +This script will help verify that app data is preserved during an update. +To use this script first run it with the create_app_data option. + +./update_verification.py create_app_data --old-apk --app-data + +The script will then install the old apk, prompt you to create some app data +(bookmarks, etc.), and then save the app data in the path you gave it. + +Next, once you have some app data saved, run this script with the test_update +option. + +./update_verification.py test_update --old-apk --new-apk +--app-data + +This will install the old apk, load the saved app data, install the new apk, +and ask the user to verify that all of the app data was preserved. +""" + +import argparse +import logging +import sys + +import devil_chromium + +from devil.android import apk_helper +from devil.android import device_blacklist +from devil.android import device_errors +from devil.android import device_utils +from devil.utils import run_tests_helper + +def CreateAppData(device, old_apk, app_data, package_name): + device.Install(old_apk) + raw_input('Set the application state. Once ready, press enter and ' + 'select "Backup my data" on the device.') + device.adb.Backup(app_data, packages=[package_name]) + logging.critical('Application data saved to %s', app_data) + +def TestUpdate(device, old_apk, new_apk, app_data, package_name): + device.Install(old_apk) + device.adb.Restore(app_data) + # Restore command is not synchronous + raw_input('Select "Restore my data" on the device. Then press enter to ' + 'continue.') + device_path = device.GetApplicationPaths(package_name) + if not device_path: + raise Exception('Expected package %s to already be installed. ' + 'Package name might have changed!' % package_name) + + logging.info('Verifying that %s can be overinstalled.', new_apk) + device.adb.Install(new_apk, reinstall=True) + logging.critical('Successfully updated to the new apk. 
Please verify that ' + 'the application data is preserved.') + +def main(): + parser = argparse.ArgumentParser( + description="Script to do semi-automated upgrade testing.") + parser.add_argument('-v', '--verbose', action='count', + help='Print verbose log information.') + parser.add_argument('--blacklist-file', help='Device blacklist JSON file.') + command_parsers = parser.add_subparsers(dest='command') + + subparser = command_parsers.add_parser('create_app_data') + subparser.add_argument('--old-apk', required=True, + help='Path to apk to update from.') + subparser.add_argument('--app-data', required=True, + help='Path to where the app data backup should be ' + 'saved to.') + subparser.add_argument('--package-name', + help='Chrome apk package name.') + + subparser = command_parsers.add_parser('test_update') + subparser.add_argument('--old-apk', required=True, + help='Path to apk to update from.') + subparser.add_argument('--new-apk', required=True, + help='Path to apk to update to.') + subparser.add_argument('--app-data', required=True, + help='Path to where the app data backup is saved.') + subparser.add_argument('--package-name', + help='Chrome apk package name.') + + args = parser.parse_args() + run_tests_helper.SetLogLevel(args.verbose) + + devil_chromium.Initialize() + + blacklist = (device_blacklist.Blacklist(args.blacklist_file) + if args.blacklist_file + else None) + + devices = device_utils.DeviceUtils.HealthyDevices(blacklist) + if not devices: + raise device_errors.NoDevicesError() + device = devices[0] + logging.info('Using device %s for testing.', str(device)) + + package_name = (args.package_name if args.package_name + else apk_helper.GetPackageName(args.old_apk)) + if args.command == 'create_app_data': + CreateAppData(device, args.old_apk, args.app_data, package_name) + elif args.command == 'test_update': + TestUpdate( + device, args.old_apk, args.new_apk, args.app_data, package_name) + else: + raise Exception('Unknown test command: %s' % args.command) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/v8_external_startup_data_arch_suffix.gypi b/build/android/v8_external_startup_data_arch_suffix.gypi new file mode 100644 index 00000000000..7af2443fc1a --- /dev/null +++ b/build/android/v8_external_startup_data_arch_suffix.gypi @@ -0,0 +1,21 @@ +# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'variables': { + 'arch_suffix': '<(arch_suffix)', + 'variables': { + # This help to find out if target_arch is set to something else. + 'arch_suffix': '<(target_arch)', + 'conditions': [ + ['target_arch=="arm" or target_arch=="ia32" or target_arch=="mipsel"', { + 'arch_suffix': '32', + }], + ['target_arch=="arm64" or target_arch=="x64" or target_arch=="mips64el"', { + 'arch_suffix':'64' + }], + ], + } + } +} diff --git a/build/android/video_recorder.py b/build/android/video_recorder.py new file mode 100644 index 00000000000..b21759a35a9 --- /dev/null +++ b/build/android/video_recorder.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
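+
+# Thin wrapper: initializes devil's Chromium configuration, then hands
+# argument parsing and the actual screen recording off to devil's
+# video_recorder tool.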
+ +import sys + +import devil_chromium +from devil.android.tools import video_recorder + +if __name__ == '__main__': + devil_chromium.Initialize() + sys.exit(video_recorder.main()) diff --git a/build/android/write_ordered_libraries.gypi b/build/android/write_ordered_libraries.gypi new file mode 100644 index 00000000000..1b52e71e423 --- /dev/null +++ b/build/android/write_ordered_libraries.gypi @@ -0,0 +1,43 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide a rule that +# generates a json file with the list of dependent libraries needed for a given +# shared library or executable. +# +# To use this, create a gyp target with the following form: +# { +# 'actions': [ +# 'variables': { +# 'input_libraries': 'shared library or executable to process', +# 'ordered_libraries_file': 'file to generate' +# }, +# 'includes': [ '../../build/android/write_ordered_libraries.gypi' ], +# ], +# }, +# + +{ + 'action_name': 'ordered_libraries_<(_target_name)<(subtarget)', + 'message': 'Writing dependency ordered libraries for <(_target_name)', + 'variables': { + 'input_libraries%': [], + 'subtarget%': '', + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/write_ordered_libraries.py', + '<@(input_libraries)', + ], + 'outputs': [ + '<(ordered_libraries_file)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/write_ordered_libraries.py', + '--input-libraries=<(input_libraries)', + '--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)', + '--readelf=<(android_readelf)', + '--output=<(ordered_libraries_file)', + ], +} diff --git a/build/apk_browsertest.gypi b/build/apk_browsertest.gypi new file mode 100644 index 00000000000..52cb9e0ca68 --- /dev/null +++ b/build/apk_browsertest.gypi @@ -0,0 +1,44 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide a rule +# to build APK-based browser test suites. +# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'test_suite_name_apk', +# 'type': 'none', +# 'variables': { +# 'test_suite_name': 'test_suite_name', # string +# 'java_in_dir': 'path/to/java/dir', +# }, +# 'includes': ['path/to/this/gypi/file'], +# } +# + +{ + 'dependencies': [ + '<(DEPTH)/base/base.gyp:base_java', + '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands', + '<(DEPTH)/build/android/pylib/remote/device/dummy/dummy.gyp:require_remote_device_dummy_apk', + '<(DEPTH)/testing/android/appurify_support.gyp:appurify_support_java', + '<(DEPTH)/testing/android/native_test.gyp:native_test_java', + '<(DEPTH)/tools/android/android_tools.gyp:android_tools', + ], + 'conditions': [ + ['OS == "android"', { + 'variables': { + # These are used to configure java_apk.gypi included below. + 'test_type': 'gtest', + 'apk_name': '<(test_suite_name)', + 'intermediate_dir': '<(PRODUCT_DIR)/<(test_suite_name)_apk', + 'final_apk_path': '<(intermediate_dir)/<(test_suite_name)-debug.apk', + 'native_lib_target': 'lib<(test_suite_name)', + # TODO(yfriedman, cjhopman): Support managed installs for gtests. 
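+        # Until managed installs are supported here, incremental installs are
+        # explicitly disabled for these test APKs.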
+ 'gyp_managed_install': 0, + }, + 'includes': [ 'java_apk.gypi', 'android/test_runner.gypi' ], + }], # 'OS == "android" + ], # conditions +} diff --git a/build/apk_fake_jar.gypi b/build/apk_fake_jar.gypi new file mode 100644 index 00000000000..128b84cc2fa --- /dev/null +++ b/build/apk_fake_jar.gypi @@ -0,0 +1,15 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide a rule +# to build Java in a consistent manner. + +{ + 'all_dependent_settings': { + 'variables': { + 'input_jars_paths': ['>(apk_output_jar_path)'], + 'library_dexed_jars_paths': ['>(apk_output_jar_path)'], + }, + }, +} diff --git a/build/apk_test.gypi b/build/apk_test.gypi new file mode 100644 index 00000000000..872c5e38350 --- /dev/null +++ b/build/apk_test.gypi @@ -0,0 +1,83 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide a rule +# to build APK based test suites. +# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'test_suite_name_apk', +# 'type': 'none', +# 'variables': { +# 'test_suite_name': 'test_suite_name', # string +# 'input_jars_paths': ['/path/to/test_suite.jar', ... ], # list +# }, +# 'includes': ['path/to/this/gypi/file'], +# } +# + +{ + 'dependencies': [ + '<(DEPTH)/base/base.gyp:base_java', + '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands', + '<(DEPTH)/build/android/pylib/remote/device/dummy/dummy.gyp:require_remote_device_dummy_apk', + '<(DEPTH)/testing/android/appurify_support.gyp:appurify_support_java', + '<(DEPTH)/testing/android/on_device_instrumentation.gyp:reporter_java', + '<(DEPTH)/testing/android/native_test.gyp:native_test_java', + '<(DEPTH)/tools/android/android_tools.gyp:android_tools', + ], + 'conditions': [ + ['OS == "android"', { + 'variables': { + # These are used to configure java_apk.gypi included below. + 'test_type': 'gtest', + 'apk_name': '<(test_suite_name)', + 'intermediate_dir': '<(PRODUCT_DIR)/<(test_suite_name)_apk', + 'generated_src_dirs': [ '<(SHARED_INTERMEDIATE_DIR)/<(test_suite_name)_jinja', ], + 'final_apk_path': '<(intermediate_dir)/<(test_suite_name)-debug.apk', + 'java_in_dir': '<(DEPTH)/build/android/empty', + 'native_lib_target': 'lib<(test_suite_name)', + # TODO(yfriedman, cjhopman): Support managed installs for gtests. 
+ 'gyp_managed_install': 0, + 'variables': { + 'use_native_activity%': "false", + 'android_manifest_path%': '', + }, + 'use_native_activity%': '<(use_native_activity)', + 'jinja_variables': [ + 'native_library_name=<(test_suite_name)', + 'use_native_activity=<(use_native_activity)', + ], + 'conditions': [ + ['component == "shared_library"', { + 'jinja_variables': [ + 'is_component_build=true', + ], + }, { + 'jinja_variables': [ + 'is_component_build=false', + ], + }], + ['android_manifest_path == ""', { + 'android_manifest_path': '<(SHARED_INTERMEDIATE_DIR)/<(test_suite_name)_jinja/AndroidManifest.xml', + 'manifest_template': '<(DEPTH)/testing/android/native_test/java', + }, { + 'android_manifest_path%': '<(android_manifest_path)', + 'manifest_template': '', + }], + ], + }, + 'conditions': [ + ['manifest_template != ""', { + 'variables': { + 'jinja_inputs': '<(manifest_template)/AndroidManifest.xml.jinja2', + 'jinja_output': '<(android_manifest_path)', + }, + 'includes': ['android/jinja_template.gypi'], + }], + ], + 'includes': [ 'java_apk.gypi', 'android/test_runner.gypi' ], + }], # 'OS == "android" + ], # conditions +} diff --git a/build/apply_locales.py b/build/apply_locales.py new file mode 100644 index 00000000000..6af7280fadc --- /dev/null +++ b/build/apply_locales.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +# Copyright (c) 2009 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# TODO: remove this script when GYP has for loops + +import sys +import optparse + +def main(argv): + + parser = optparse.OptionParser() + usage = 'usage: %s [options ...] format_string locale_list' + parser.set_usage(usage.replace('%s', '%prog')) + parser.add_option('-d', dest='dash_to_underscore', action="store_true", + default=False, + help='map "en-US" to "en" and "-" to "_" in locales') + + (options, arglist) = parser.parse_args(argv) + + if len(arglist) < 3: + print 'ERROR: need string and list of locales' + return 1 + + str_template = arglist[1] + locales = arglist[2:] + + results = [] + for locale in locales: + # For Cocoa to find the locale at runtime, it needs to use '_' instead + # of '-' (http://crbug.com/20441). Also, 'en-US' should be represented + # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578). + if options.dash_to_underscore: + if locale == 'en-US': + locale = 'en' + locale = locale.replace('-', '_') + results.append(str_template.replace('ZZLOCALE', locale)) + + # Quote each element so filename spaces don't mess up GYP's attempt to parse + # it into a list. + print ' '.join(["'%s'" % x for x in results]) + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/args/README.txt b/build/args/README.txt new file mode 100644 index 00000000000..825bf64c693 --- /dev/null +++ b/build/args/README.txt @@ -0,0 +1,31 @@ +This directory is here to hold .gni files that contain sets of GN build +arguments for given configurations. + +(Currently this directory is empty because we removed the only thing here, but +this has come up several times so I'm confident we'll need this again. If this +directory is still empty by 2017, feel free to delete it. --Brett) + +Some projects or bots may have build configurations with specific combinations +of flags. Rather than making a new global flag for your specific project and +adding it all over the build to each arg it should affect, you can add a .gni +file here with the variables. 
+ +For example, for project foo you may put in build/args/foo.gni: + + target_os = "android" + use_pulseaudio = false + use_ozone = true + system_libdir = "foo" + +Users wanting to build this configuration would run: + + $ gn args out/mybuild + +And add the following line to their args for that build directory: + + import("//build/args/foo.gni") + # You can set any other args here like normal. + is_component_build = false + +This way everybody can agree on a set of flags for a project, and their builds +stay in sync as the flags in foo.gni are modified. diff --git a/build/args/blimp_client.gn b/build/args/blimp_client.gn new file mode 100644 index 00000000000..37fc5ac8dd1 --- /dev/null +++ b/build/args/blimp_client.gn @@ -0,0 +1,12 @@ +# GN args template for blimp Android client. +# +# Add import to arg.gn in out directory and run gn gen on the directory to use. +# E.g. for out directory out/foo: +# echo "import(\"//build/args/blimp_client.gn\")" > out/foo/args.gn +# gn gen out/foo +# +# Use gn args to add your own build preference args. + +target_os = "android" +is_component_build = true +enable_blimp_client = true diff --git a/build/args/blimp_engine.gn b/build/args/blimp_engine.gn new file mode 100644 index 00000000000..32dd68231a0 --- /dev/null +++ b/build/args/blimp_engine.gn @@ -0,0 +1,29 @@ +# GN args template for a blimp engine. Works within a docker container. +# +# Add import to arg.gn in out directory and run gn gen on the directory to use. +# E.g. for out directory out/foo: +# echo "import(\"//build/args/blimp_engine.gn\")" > out/foo/args.gn +# gn gen out/foo +# +# This file contains Blimp engine build args common to both +# official builds and personal development builds. +# Use gn args to add your own build preference args. + +use_aura = true +use_ozone = true +ozone_auto_platforms = false +ozone_platform = "headless" +ozone_platform_headless = true +metrics_use_blimp = true +use_low_quality_image_interpolation = true +use_external_popup_menu = true + +# Not available within docker container. +use_alsa = false +use_pulseaudio = false +use_cups = false +use_glib = false + +# udev is not supported on targeted platform and also not something that will +# be used by Blimp Engine. +use_udev = false diff --git a/build/args/bots/chromium.fyi/headless_linux_dbg.gn b/build/args/bots/chromium.fyi/headless_linux_dbg.gn new file mode 100644 index 00000000000..2e8224dfc07 --- /dev/null +++ b/build/args/bots/chromium.fyi/headless_linux_dbg.gn @@ -0,0 +1,2 @@ +import("//build/args/headless.gn") +is_debug = true diff --git a/build/args/bots/chromium.linux/blimp_linux_dbg.gn b/build/args/bots/chromium.linux/blimp_linux_dbg.gn new file mode 100644 index 00000000000..a0bbe745513 --- /dev/null +++ b/build/args/bots/chromium.linux/blimp_linux_dbg.gn @@ -0,0 +1,4 @@ +import("//build/args/blimp_engine.gn") +is_debug = true +use_goma = true +is_component_build = true diff --git a/build/args/bots/official.android/blimp-engine.gn b/build/args/bots/official.android/blimp-engine.gn new file mode 100644 index 00000000000..6fbc7b6f4fe --- /dev/null +++ b/build/args/bots/official.android/blimp-engine.gn @@ -0,0 +1,16 @@ +import("//build/args/blimp_engine.gn") + +# Use release build to improve engine performance, especially WebP encoding. +is_debug = false + +# Designate this as an official build to pick up breakpad symbols. +is_official_build = true + +# is_official_build will by default build with loads of symbols that we don't +# use, thus bloating the engine binary size. 
+# Build with symbol_level = 1 to keep binary size reasonable while preserving
+# enough to generate backtraces with source line numbers.
+symbol_level = 1
+
+# Turn on DCHECKs to find bugs.
+dcheck_always_on = true
diff --git a/build/args/headless.gn b/build/args/headless.gn
new file mode 100644
index 00000000000..0c0f6e58c42
--- /dev/null
+++ b/build/args/headless.gn
@@ -0,0 +1,32 @@
+# GN args template for the Headless Chrome library
+#
+# Add an import to args.gn in the out directory and run gn gen on that
+# directory to use. E.g. for out directory out/foo:
+#   echo 'import("//build/args/headless.gn")' > out/foo/args.gn
+#   gn gen out/foo
+#
+# Use gn args to add your own build preference args.
+
+use_ozone = true
+ozone_auto_platforms = false
+ozone_platform = "headless"
+ozone_platform_headless = true
+
+# In order to simplify deployment we build the ICU data file
+# into the binary.
+icu_use_data_file = false
+
+enable_basic_printing = false
+enable_nacl = false
+enable_print_preview = false
+enable_remoting = false
+use_alsa = false
+use_ash = false
+use_cups = false
+use_dbus = false
+use_gconf = false
+use_gio = false
+use_kerberos = false
+use_libpci = false
+use_pulseaudio = false
+use_udev = false
diff --git a/build/branding_value.sh b/build/branding_value.sh
new file mode 100644
index 00000000000..9fcb550caa2
--- /dev/null
+++ b/build/branding_value.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a wrapper for fetching values from the BRANDING files. Pass the
+# value of GYP's branding variable followed by the key you want, and the
+# right file is checked.
+#
+#   branding_value.sh Chromium COPYRIGHT
+#   branding_value.sh Chromium PRODUCT_FULLNAME
+#
+
+set -e
+
+if [ $# -ne 2 ] ; then
+  echo "error: expect two arguments, branding and key" >&2
+  exit 1
+fi
+
+BUILD_BRANDING=$1
+THE_KEY=$2
+
+pushd $(dirname "${0}") > /dev/null
+BUILD_DIR=$(pwd)
+popd > /dev/null
+
+TOP="${BUILD_DIR}/.."
+
+case ${BUILD_BRANDING} in
+  Chromium)
+    BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
+    ;;
+  Chrome)
+    BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
+    ;;
+  *)
+    echo "error: unknown branding: ${BUILD_BRANDING}" >&2
+    exit 1
+    ;;
esac
+
+BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
+
+if [ -z "${BRANDING_VALUE}" ] ; then
+  echo "error: failed to find key '${THE_KEY}'" >&2
+  exit 1
+fi
+
+echo "${BRANDING_VALUE}"
diff --git a/build/build-ctags.sh b/build/build-ctags.sh
new file mode 100644
index 00000000000..61e017e3298
--- /dev/null
+++ b/build/build-ctags.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
+  cat <<EOF
+  You must be running Exuberant Ctags, not just standard GNU ctags. If you are
+  on Debian or a related flavor of Linux, you may want to try running
+  apt-get install exuberant-ctags.
+EOF
+  exit
+fi
+
+CHROME_SRC_DIR="$PWD"
+
+fail() {
+  echo "Failed to create ctags for $1"
+  exit 1
+}
+
+ctags_cmd() {
+  echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
+}
+
+build_dir() {
+  local extraexcludes=""
+  if [[ a"$1" == "a--extra-excludes" ]]; then
+    extraexcludes="--exclude=third_party --exclude=build --exclude=out"
+    shift
+  fi
+
+  cd "$CHROME_SRC_DIR/$1" || fail $1
+  # Redirect error messages so they aren't seen because they are almost always
+  # errors about components that you just happen to not have built (NaCl, for
+  # example).
+  $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
+  mv -f .tmp_tags tags
+}
+
+# We always build the top level but leave all submodules as optional.
+build_dir --extra-excludes "" "top level"
+
+# Build any other directories that are listed on the command line.
+for dir in $@; do
+  build_dir "$1"
+  shift
+done
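The sed lookup in branding_value.sh above is easy to mirror in Python if a
build script ever needs the same values. A hedged sketch, assuming only the
KEY=value BRANDING format and the two file paths hard-coded in the script;
read_branding_value is a hypothetical helper, not an existing build API:

  import os
  import sys

  # Maps GYP's branding value to the BRANDING file to consult, mirroring the
  # case statement in branding_value.sh.
  BRANDING_FILES = {
      'Chromium': 'chrome/app/theme/chromium/BRANDING',
      'Chrome': 'chrome/app/theme/google_chrome/BRANDING',
  }

  def read_branding_value(src_root, branding, key):
      """Returns the value for |key| in the BRANDING file for |branding|."""
      path = os.path.join(src_root, BRANDING_FILES[branding])
      with open(path) as f:
          for line in f:
              # BRANDING files are plain KEY=value lines.
              if line.startswith(key + '='):
                  return line.split('=', 1)[1].strip()
      raise KeyError('failed to find key %r' % key)

  if __name__ == '__main__':
      # Mirrors: branding_value.sh Chromium PRODUCT_FULLNAME
      print(read_branding_value('.', sys.argv[1], sys.argv[2]))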
diff --git a/build/build_config.h b/build/build_config.h
new file mode 100644
index 00000000000..5785abf61e8
--- /dev/null
+++ b/build/build_config.h
@@ -0,0 +1,171 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+//  Operating System:
+//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX) /
+//    OS_NACL (NACL_SFI or NACL_NONSFI) / OS_NACL_SFI / OS_NACL_NONSFI
+//  Compiler:
+//    COMPILER_MSVC / COMPILER_GCC
+//  Processor:
+//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+// A set of macros to use for platform detection.
+#if defined(__native_client__)
+// __native_client__ must be first, so that other OS_ defines are not set.
+#define OS_NACL 1
+// OS_NACL comes in two sandboxing technology flavors, SFI or Non-SFI.
+// PNaCl toolchain defines __native_client_nonsfi__ macro in Non-SFI build
+// mode, while it does not in SFI build mode.
+#if defined(__native_client_nonsfi__)
+#define OS_NACL_NONSFI
+#else
+#define OS_NACL_SFI
+#endif
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__APPLE__)
+// Only include TargetConditionals after testing ANDROID, as some Android
+// builds on Mac don't have this header available; it's not needed unless the
+// target is really mac/ios.
+#include <TargetConditionals.h>
+#define OS_MACOSX 1
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(__linux__)
+#define OS_LINUX 1
+// Include a system header to pull in features.h for glibc/uclibc macros.
+#include <unistd.h>
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
+// We really are using glibc, not uClibc pretending to be glibc.
+#define LIBC_GLIBC 1
+#endif
+#elif defined(_WIN32)
+#define OS_WIN 1
+#define TOOLKIT_VIEWS 1
+#elif defined(__FreeBSD__)
+#define OS_FREEBSD 1
+#elif defined(__NetBSD__)
+#define OS_NETBSD 1
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#elif defined(__QNXNTO__)
+#define OS_QNX 1
+#else
+#error Please add support for your platform in build/build_config.h
+#endif
+
+#if defined(USE_OPENSSL_CERTS) && defined(USE_NSS_CERTS)
+#error Cannot use both OpenSSL and NSS for certificates
+#endif
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_FREEBSD) || defined(OS_NETBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_FREEBSD) ||     \
+    defined(OS_NETBSD) || defined(OS_OPENBSD) || defined(OS_SOLARIS) ||   \
+    defined(OS_ANDROID) || defined(OS_NACL) || defined(OS_QNX)
+#define OS_POSIX 1
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX) || defined(OS_ANDROID)) && \
+    !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Compiler detection.
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+// Processor architecture detection.
For more info on what's defined, see: +// http://msdn.microsoft.com/en-us/library/b0084kay.aspx +// http://www.agner.org/optimize/calling_conventions.pdf +// or with gcc, run: "echo | gcc -E -dM -" +#if defined(_M_X64) || defined(__x86_64__) +#define ARCH_CPU_X86_FAMILY 1 +#define ARCH_CPU_X86_64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(_M_IX86) || defined(__i386__) +#define ARCH_CPU_X86_FAMILY 1 +#define ARCH_CPU_X86 1 +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__ARMEL__) +#define ARCH_CPU_ARM_FAMILY 1 +#define ARCH_CPU_ARMEL 1 +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__aarch64__) +#define ARCH_CPU_ARM_FAMILY 1 +#define ARCH_CPU_ARM64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__pnacl__) +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__MIPSEL__) +#if defined(__LP64__) +#define ARCH_CPU_MIPS_FAMILY 1 +#define ARCH_CPU_MIPS64EL 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#else +#define ARCH_CPU_MIPS_FAMILY 1 +#define ARCH_CPU_MIPSEL 1 +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#endif +#else +#error Please add support for your architecture in build/build_config.h +#endif + +// Type detection for wchar_t. +#if defined(OS_WIN) +#define WCHAR_T_IS_UTF16 +#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \ + defined(__WCHAR_MAX__) && \ + (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff) +#define WCHAR_T_IS_UTF32 +#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \ + defined(__WCHAR_MAX__) && \ + (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff) +// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to +// compile in this mode (in particular, Chrome doesn't). This is intended for +// other projects using base who manage their own dependencies and make sure +// short wchar works for them. +#define WCHAR_T_IS_UTF16 +#else +#error Please add support for your compiler in build/build_config.h +#endif + +#if defined(OS_ANDROID) +// The compiler thinks std::string::const_iterator and "const char*" are +// equivalent types. +#define STD_STRING_ITERATOR_IS_CHAR_POINTER +// The compiler thinks base::string16::const_iterator and "char16*" are +// equivalent types. +#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER +#endif + +#endif // BUILD_BUILD_CONFIG_H_ diff --git a/build/buildflag.h b/build/buildflag.h new file mode 100644 index 00000000000..5776a754c42 --- /dev/null +++ b/build/buildflag.h @@ -0,0 +1,47 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_BUILDFLAG_H_ +#define BUILD_BUILDFLAG_H_ + +// These macros un-mangle the names of the build flags in a way that looks +// natural, and gives errors if the flag is not defined. Normally in the +// preprocessor it's easy to make mistakes that interpret "you haven't done +// the setup to know what the flag is" as "flag is off". Normally you would +// include the generated header rather than include this file directly. +// +// This is for use with generated headers. See build/buildflag_header.gni. + +// This dance of two macros does a concatenation of two preprocessor args using +// ## doubly indirectly because using ## directly prevents macros in that +// parameter from being expanded. 
+#define BUILDFLAG_CAT_INDIRECT(a, b) a ## b
+#define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b)
+
+// Accessor for build flags.
+//
+// To test for a value, if the build file specifies:
+//
+//   ENABLE_FOO=true
+//
+// Then you would check at build-time in source code with:
+//
+//   #include "foo_flags.h"  // The header the build file specified.
+//
+//   #if BUILDFLAG(ENABLE_FOO)
+//     ...
+//   #endif
+//
+// There will be no #define called ENABLE_FOO, so if you accidentally test
+// for whether that is defined, it will always be negative. You can also use
+// the value in expressions:
+//
+//   const char kSpamServerName[] = BUILDFLAG(SPAM_SERVER_NAME);
+//
+// Because the flag is accessed as a preprocessor macro with (), an error
+// will be thrown if the proper header defining the internal flag value has
+// not been included.
+#define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)())
+
+#endif  // BUILD_BUILDFLAG_H_
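To make the indirection concrete: whatever generates the header has to emit
one function-style BUILDFLAG_INTERNAL_<flag>() macro per flag, because that is
the name BUILDFLAG_CAT pastes together and calls. The Python sketch below
illustrates that contract only; it is not the real
build/write_buildflag_header.py, and render_buildflag_header plus its exact
output format are assumptions made for this example:

  # Renders "NAME=value" flag strings into the function-style macros that the
  # BUILDFLAG() accessor expects to find in a generated header.
  def render_buildflag_header(guard, flags):
      # GN booleans arrive as the strings "true"/"false"; map them to 1/0 so
      # the flags read naturally in #if checks.
      gn_bools = {'true': '1', 'false': '0'}
      lines = ['#ifndef %s' % guard, '#define %s' % guard, '']
      for flag in flags:
          name, _, value = flag.partition('=')
          lines.append('#define BUILDFLAG_INTERNAL_%s() (%s)' %
                       (name, gn_bools.get(value, value)))
      lines += ['', '#endif  // %s' % guard]
      return '\n'.join(lines)

  # BUILDFLAG(ENABLE_FOO) then expands to (BUILDFLAG_INTERNAL_ENABLE_FOO())
  # and evaluates to (1) under the header printed here.
  print(render_buildflag_header('FOO_FEATURES_H_', ['ENABLE_FOO=true']))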
" +# +# header_dir [optional, string] +# Override the default location of the generated header. The string will +# be treated as a subdirectory of the root_gen_dir. For example: +# header_dir = "foo/bar" +# Then you can include the header as: +# #include "foo/bar/baz.h" +# +# deps, public_deps, testonly, visibility +# Normal meaning. +# +# +# Grit defines +# +# If one .grd file uses a flag, just add to the grit target: +# +# defines = [ +# "enable_doom_melon=$enable_doom_melon", +# ] +# +# If multiple .grd files use it, you'll want to put the defines in a .gni file +# so it can be shared. Generally this .gni file should include all grit defines +# for a given module (for some definition of "module"). Then do: +# +# defines = ui_grit_defines +# +# If you forget to do this, the flag will be implicitly false in the .grd file +# and those resources won't be compiled. You'll know because the resource +# #define won't be generated and any code that uses it won't compile. If you +# see a missing IDS_* string, this is probably the reason. +# +# +# Example +# +# buildflag_header("foo_features") { +# header = "foo_features.h" +# +# flags = [ +# # This uses the GN build flag enable_doom_melon as the definition. +# "ENABLE_DOOM_MELON=$enable_doom_melon", +# +# # This force-enables the flag. +# "ENABLE_SPACE_LASER=true", +# +# # This will expand to the quoted C string when used in source code. +# "SPAM_SERVER_URL=\"http://www.example.com/\"", +# ] +# } +template("buildflag_header") { + action(target_name) { + script = "//build/write_buildflag_header.py" + + if (defined(invoker.header_dir)) { + header_file = "${invoker.header_dir}/${invoker.header}" + } else { + # Compute the path from the root to this file. + header_file = rebase_path(".", "//") + "/${invoker.header}" + } + + outputs = [ + "$root_gen_dir/$header_file", + ] + + # Always write --flags to the file so it's not empty. Empty will confuse GN + # into thinking the response file isn't used. + response_file_contents = [ "--flags" ] + if (defined(invoker.flags)) { + response_file_contents += invoker.flags + } + + args = [ + "--output", + header_file, # Not rebased, Python script puts it inside gen-dir. + "--rulename", + get_label_info(":$target_name", "label_no_toolchain"), + "--gen-dir", + rebase_path(root_gen_dir, root_build_dir), + "--definitions", + "{{response_file_name}}", + ] + + forward_variables_from(invoker, + [ + "deps", + "public_deps", + "testonly", + "visibility", + ]) + } +} diff --git a/build/buildflag_header.gypi b/build/buildflag_header.gypi new file mode 100644 index 00000000000..83b505a4f3e --- /dev/null +++ b/build/buildflag_header.gypi @@ -0,0 +1,123 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Generates a header with preprocessor defines specified by the build file. +# +# The canonical documentation is in build/buildflag_header.gni. You should +# write the GN build, get it working, and then transform it into GYP. +# +# In every target that uses your generated header you must include a dependency +# on the GYP target that generates the header (this is implicit in GN). +# Otherwise, clean builds may not necessarily create the header before the +# source code is compiled. 
+# +# Assuming your GN code looks like this: +# +# buildflag_header("foo_features") { +# header = "foo_features.h" +# flags = [ +# "ENABLE_DOOM_MELON=$enable_doom_melon", +# "ENABLE_SPACE_LASER=true", +# "SPAM_SERVER_URL=\"http://www.example.com/\"", +# ] +# } +# +# Write a GYP target like this: +# +# { +# # GN version: //foo:foo_features +# 'target_name': 'foo_foo_features', +# 'includes': [ '../build/buildflag_header.gypi' ], +# 'variables': { +# 'buildflag_header_path': 'foo/foo_features.h', +# 'buildflag_flags': [ +# 'ENABLE_DOOM_MELON=<(enable_doom_melon)', +# 'ENABLE_SPACE_LASER=true', +# 'SPAM_SERVER_URL="http://www.example.com/"', +# ], +# }, +# } +# +# Variables +# +# target_name +# Base this on the GN label, replacing / and : with _ to make it globally +# unique. +# +# buildflag_header_path +# This must be the full path to the header from the source root. In GN +# you only say "features.h" and it uses the BUILD file's path implicitly. +# Use the path to BUILD.gn followed by your header name to produce the +# same output file. +# +# buildflag_flags (optional) +# List of the same format as GN's "flags". To expand variables, use +# "<(foo)" where GN would have used "$foo". +# +# includes +# List the relative path to build/buildflag_header.gypi from the .gyp +# file including this code, Note: If your code is in a .gypi file in a +# different directory, this must be relative to the .gyp including your +# file. +# +# +# Grit defines +# +# Follow the same advice as in the buildflag_header.gni, except on the grit +# action use the variable name 'grit_additional_defines' and explicitly add a +# '-D' in front: +# +# 'grit_grd_file': 'foo.grd', +# 'grit_additional_defines': [ +# '-D', 'enable_doom_melon=<(enable_doom_melon)', +# ], +# +# Put shared lists of defines in a .gypi. + +{ + 'type': 'none', + 'hard_dependency': 1, + + 'actions': [ + { + 'action_name': 'buildflag_header', + 'variables': { + # Default these values to empty if they're not defined. + 'variables': { + 'buildflag_flags%': [], + }, + + # Writes the flags to a response file with a name based on the name of + # this target. + 'response_file_name': '<|(<(_target_name)_buildflag_header.rsp --flags <@(buildflag_flags))', + + 'build_header_script': '<(DEPTH)/build/write_buildflag_header.py', + }, + + 'message': 'Generating build header.', + + 'inputs': [ + '<(build_header_script)', + '<(response_file_name)', + ], + + 'outputs': [ + '<(SHARED_INTERMEDIATE_DIR)/<(buildflag_header_path)', + ], + + 'action': [ + 'python', '<(build_header_script)', + '--output', '<(buildflag_header_path)', + '--rulename', '<(_target_name)', + '--gen-dir', '<(SHARED_INTERMEDIATE_DIR)', + '--definitions', '<(response_file_name)', + ], + } + ], + + # Allow the file to be included based on the given buildflag_header_path. + 'direct_dependent_settings': { + 'include_dirs': [ '<(SHARED_INTERMEDIATE_DIR)' ], + }, +} diff --git a/build/check_return_value.py b/build/check_return_value.py new file mode 100644 index 00000000000..c659d1e9674 --- /dev/null +++ b/build/check_return_value.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""This program wraps an arbitrary command and prints "1" if the command ran +successfully.""" + +import os +import subprocess +import sys + +devnull = open(os.devnull, 'wb') +if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull): + print 1 +else: + print 0 diff --git a/build/chrome_settings.gypi b/build/chrome_settings.gypi new file mode 100644 index 00000000000..ce06b2d3b5e --- /dev/null +++ b/build/chrome_settings.gypi @@ -0,0 +1,32 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file contains settings for ../chrome/chrome.gyp that other gyp files +# also use. +{ + 'variables': { + # TODO: remove this helper when we have loops in GYP + 'apply_locales_cmd': ['python', '<(DEPTH)/build/apply_locales.py'], + 'grit_defines': ['-D', 'version=<(version_full)'], + 'includes': ['util/version.gypi'], + + 'conditions': [ + ['OS=="mac"', { + 'conditions': [ + ['branding=="Chrome"', { + 'mac_bundle_id': 'com.google.Chrome', + 'mac_creator': 'rimZ', + # The policy .grd file also needs the bundle id. + 'grit_defines': ['-D', 'mac_bundle_id=com.google.Chrome'], + }, { # else: branding!="Chrome" + 'mac_bundle_id': 'io.nwjs.nw', + 'mac_creator': 'Cr24', + # The policy .grd file also needs the bundle id. + 'grit_defines': ['-D', 'mac_bundle_id=io.nwjs.nw'], + }], # branding + ], # conditions + }], # OS=="mac" + ], # conditions + }, # variables +} diff --git a/build/clobber.py b/build/clobber.py new file mode 100644 index 00000000000..1229f7bf8d4 --- /dev/null +++ b/build/clobber.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""This script provides methods for clobbering build directories.""" + +import argparse +import os +import shutil +import subprocess +import sys + + +def extract_gn_build_commands(build_ninja_file): + """Extracts from a build.ninja the commands to run GN. + + The commands to run GN are the gn rule and build.ninja build step at the + top of the build.ninja file. We want to keep these when deleting GN builds + since we want to preserve the command-line flags to GN. + + On error, returns the empty string.""" + result = "" + with open(build_ninja_file, 'r') as f: + # Read until the second blank line. The first thing GN writes to the file + # is the "rule gn" and the second is the section for "build build.ninja", + # separated by blank lines. + num_blank_lines = 0 + while num_blank_lines < 2: + line = f.readline() + if len(line) == 0: + return '' # Unexpected EOF. + result += line + if line[0] == '\n': + num_blank_lines = num_blank_lines + 1 + return result + + +def delete_dir(build_dir): + # For unknown reasons (anti-virus?) rmtree of Chromium build directories + # often fails on Windows. + if sys.platform.startswith('win'): + subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True) + else: + shutil.rmtree(build_dir) + + +def delete_build_dir(build_dir): + # GN writes a build.ninja.d file. Note that not all GN builds have args.gn. + build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d') + if not os.path.exists(build_ninja_d_file): + delete_dir(build_dir) + return + + # GN builds aren't automatically regenerated when you sync. 
diff --git a/build/clobber.py b/build/clobber.py
new file mode 100644
index 00000000000..1229f7bf8d4
--- /dev/null
+++ b/build/clobber.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script provides methods for clobbering build directories."""
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+
+def extract_gn_build_commands(build_ninja_file):
+  """Extracts from a build.ninja the commands to run GN.
+
+  The commands to run GN are the gn rule and build.ninja build step at the
+  top of the build.ninja file. We want to keep these when deleting GN builds
+  since we want to preserve the command-line flags to GN.
+
+  On error, returns the empty string."""
+  result = ""
+  with open(build_ninja_file, 'r') as f:
+    # Read until the second blank line. The first thing GN writes to the file
+    # is the "rule gn" and the second is the section for "build build.ninja",
+    # separated by blank lines.
+    num_blank_lines = 0
+    while num_blank_lines < 2:
+      line = f.readline()
+      if len(line) == 0:
+        return ''  # Unexpected EOF.
+      result += line
+      if line[0] == '\n':
+        num_blank_lines = num_blank_lines + 1
+  return result
+
+
+def delete_dir(build_dir):
+  # For unknown reasons (anti-virus?) rmtree of Chromium build directories
+  # often fails on Windows.
+  if sys.platform.startswith('win'):
+    subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True)
+  else:
+    shutil.rmtree(build_dir)
+
+
+def delete_build_dir(build_dir):
+  # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
+  build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
+  if not os.path.exists(build_ninja_d_file):
+    delete_dir(build_dir)
+    return
+
+  # GN builds aren't automatically regenerated when you sync. To avoid
+  # messing with the GN workflow, erase everything but the args file, and
+  # write a dummy build.ninja file that will automatically rerun GN the next
+  # time Ninja is run.
+  build_ninja_file = os.path.join(build_dir, 'build.ninja')
+  build_commands = extract_gn_build_commands(build_ninja_file)
+
+  try:
+    gn_args_file = os.path.join(build_dir, 'args.gn')
+    with open(gn_args_file, 'r') as f:
+      args_contents = f.read()
+  except IOError:
+    args_contents = ''
+
+  e = None
+  try:
+    # delete_dir and os.mkdir() may fail, such as when chrome.exe is running,
+    # and we still want to restore args.gn/build.ninja/build.ninja.d, so catch
+    # the exception and rethrow it later.
+    delete_dir(build_dir)
+    os.mkdir(build_dir)
+  except Exception as e:
+    pass
+
+  # Put back the args file (if any).
+  if args_contents != '':
+    with open(gn_args_file, 'w') as f:
+      f.write(args_contents)
+
+  # Write the build.ninja file sufficiently to regenerate itself.
+  with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
+    if build_commands != '':
+      f.write(build_commands)
+    else:
+      # Couldn't parse the build.ninja file, write a default thing.
+      f.write('''rule gn
+command = gn -q gen //out/%s/
+description = Regenerating ninja files
+
+build build.ninja: gn
+generator = 1
+depfile = build.ninja.d
+''' % (os.path.split(build_dir)[1]))
+
+  # Write a .d file for the build which references a nonexistent file. This
+  # will make Ninja always mark the build as dirty.
+  with open(build_ninja_d_file, 'w') as f:
+    f.write('build.ninja: nonexistent_file.gn\n')
+
+  if e:
+    # Rethrow the exception we caught earlier.
+    raise e
+
+
+def clobber(out_dir):
+  """Clobber contents of build directory.
+
+  Don't delete the directory itself: some checkouts have the build directory
+  mounted."""
+  for f in os.listdir(out_dir):
+    path = os.path.join(out_dir, f)
+    if os.path.isfile(path):
+      os.unlink(path)
+    elif os.path.isdir(path):
+      delete_build_dir(path)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('out_dir', help='The output directory to clobber')
+  args = parser.parse_args()
+  clobber(args.out_dir)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/common.croc b/build/common.croc
new file mode 100644
index 00000000000..fde7a8b2982
--- /dev/null
+++ b/build/common.croc
@@ -0,0 +1,127 @@
+# -*- python -*-
+# Crocodile config file for Chromium - settings common to all platforms
+#
+# This should be specified before the platform-specific config, for example:
+#   croc -c chrome_common.croc -c linux/chrome_linux.croc
+
+{
+  # List of root directories, applied in order
+  'roots' : [
+    # Sub-paths we specifically care about and want to call out
+    {
+      'root' : '_/src',
+      'altname' : 'CHROMIUM',
+    },
+  ],
+
+  # List of rules, applied in order
+  # Note that any 'include':0 rules here will be overridden by the 'include':1
+  # rules in the platform-specific configs.
+  'rules' : [
+    # Don't scan for executable lines in uninstrumented C++ header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '',
+      'group' : 'source',
+    },
+    {
+      'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.(c|h)$',
+      'language' : 'C',
+    },
+    {
+      'regexp' : '.*\\.(cc|cpp|hpp)$',
+      'language' : 'C++',
+    },
+
+    # Files/paths to include. Specify these before the excludes, since rules
+    # are in order.
+ { + 'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/', + 'include' : 1, + }, + # Don't include subversion or mercurial SCM dirs + { + 'regexp' : '.*/(\\.svn|\\.hg)/', + 'include' : 0, + }, + # Don't include output dirs + { + 'regexp' : '.*/(Debug|Release|out|xcodebuild)/', + 'include' : 0, + }, + # Don't include third-party source + { + 'regexp' : '.*/third_party/', + 'include' : 0, + }, + # We don't run the V8 test suite, so we don't care about V8 coverage. + { + 'regexp' : '.*/v8/', + 'include' : 0, + }, + ], + + # Paths to add source from + 'add_files' : [ + 'CHROMIUM' + ], + + # Statistics to print + 'print_stats' : [ + { + 'stat' : 'files_executable', + 'format' : '*RESULT FilesKnown: files_executable= %d files', + }, + { + 'stat' : 'files_instrumented', + 'format' : '*RESULT FilesInstrumented: files_instrumented= %d files', + }, + { + 'stat' : '100.0 * files_instrumented / files_executable', + 'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent', + }, + { + 'stat' : 'lines_executable', + 'format' : '*RESULT LinesKnown: lines_known= %d lines', + }, + { + 'stat' : 'lines_instrumented', + 'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines', + }, + { + 'stat' : 'lines_covered', + 'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines', + 'group' : 'source', + }, + { + 'stat' : 'lines_covered', + 'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines', + 'group' : 'test', + }, + { + 'stat' : '100.0 * lines_covered / lines_executable', + 'format' : '*RESULT PercentCovered: percent_covered= %g percent', + }, + { + 'stat' : '100.0 * lines_covered / lines_executable', + 'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent', + 'group' : 'source', + }, + { + 'stat' : '100.0 * lines_covered / lines_executable', + 'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent', + 'group' : 'test', + }, + ], +} diff --git a/build/common.gypi b/build/common.gypi new file mode 100644 index 00000000000..b3a3523b7b9 --- /dev/null +++ b/build/common.gypi @@ -0,0 +1,6360 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# IMPORTANT: +# Please don't directly include this file if you are building via gyp_chromium, +# since gyp_chromium is automatically forcing its inclusion. +{ + # Variables expected to be overriden on the GYP command line (-D) or by + # ~/.gyp/include.gypi. + 'variables': { + # Putting a variables dict inside another variables dict looks kind of + # weird. This is done so that 'host_arch', 'chromeos', etc are defined as + # variables within the outer variables dict here. This is necessary + # to get these variables defined for the conditions within this variables + # dict that operate on these variables. + 'variables': { + 'variables': { + 'variables': { + 'variables': { + # Whether we're building a ChromeOS build. + 'chromeos%': 0, + + # Whether we're building the cast (chromecast) shell + 'chromecast%': 0, + + # Whether or not we are using the Aura windowing framework. + 'use_aura%': 0, + + # Whether or not we are building the Ash shell. + 'use_ash%': 0, + + # Whether or not we are using CRAS, the ChromeOS Audio Server. + 'use_cras%': 0, + + # Use a raw surface abstraction. + 'use_ozone%': 0, + + # Configure the build for small devices. 
See crbug.com/318413 + 'embedded%': 0, + + 'conditions': [ + # Compute the architecture that we're building on. + ['OS=="win" or OS=="ios"', { + 'host_arch%': 'ia32', + }, { + 'host_arch%': '= 4.8 or clang. + # http://gcc.gnu.org/wiki/DebugFission + ['OS=="linux" and target_arch=="x64"', { + 'linux_use_debug_fission%': 1, + }, { + 'linux_use_debug_fission%': 0, + }], + + ['OS=="android" or OS=="ios"', { + 'enable_captive_portal_detection%': 0, + }, { + 'enable_captive_portal_detection%': 1, + }], + + # Enable Skia UI text drawing incrementally on different platforms. + # http://crbug.com/105550 + # + # On Aura, this allows per-tile painting to be used in the browser + # compositor. + ['OS!="android" and OS!="ios"', { + 'use_canvas_skia%': 1, + }], + + ['chromeos==1', { + 'enable_basic_printing%': 1, + 'enable_print_preview%': 1, + }], + + # Whether tests targets should be run, archived or just have the + # dependencies verified. All the tests targets have the '_run' suffix, + # e.g. base_unittests_run runs the target base_unittests. The test + # target always calls tools/swarming_client/isolate.py. See the script's + # --help for more information. Meant to be overriden with GYP_DEFINES. + # TODO(maruel): Remove the conditions as more configurations are + # supported. + ['OS!="ios" and OS!="android" and chromeos==0 and OS!="openbsd" and OS!="freebsd"', { + 'test_isolation_mode%': 'check', + }, { + 'test_isolation_mode%': 'noop', + }], + # Whether Android build uses OpenMAX DL FFT. + ['OS=="android" and ((target_arch=="arm" and arm_version >= 7) or target_arch=="ia32" or target_arch=="x64" or target_arch=="arm64" or target_arch=="mipsel")', { + # Currently only supported on Android ARMv7+, ARM64, ia32, x64 and mipsel. + # When enabled, this will also enable WebAudio support on + # Android for these architectures. Default is enabled. Whether + # WebAudio is actually available depends on runtime settings + # and flags. + 'use_openmax_dl_fft%': 1, + }, { + 'use_openmax_dl_fft%': 0, + }], + ['OS=="win" or OS=="linux" or OS=="openbsd" or OS=="freebsd"', { + 'enable_mdns%' : 1, + }], + + # Disable various features by default on embedded. + ['embedded==1', { + 'remoting%': 0, + 'enable_basic_printing%': 0, + 'enable_print_preview%': 0, + }], + + ['sysroot!=""', { + 'pkg-config': '<(chroot_cmd) <(DEPTH)/build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)" "<(system_libdir)"', + }, { + 'pkg-config': 'pkg-config' + }], + + # Enable WebVR support by default on Android + # Still requires command line flag to access API + ['OS=="android"', { + 'enable_webvr%': 1, + }, { + 'enable_webvr%': 0, + }], + + ['order_profiling==0', { + # Set to 1 to enable fast builds. Set to 2 for even faster builds + # (it disables debug info for fastest compilation - only for use + # on compile-only bots). + 'fastbuild%': 0, + }, { + # With instrumentation enabled, debug info puts libchrome.so over 4gb, + # which causes the linker to produce an invalid ELF. + # http://crbug.com/574476 + 'fastbuild%': 2, + }], + ], + + # Kasko reporting is disabled by default, but may get enabled below. + 'kasko%': 0, + + # Setting this to '0' will cause V8's startup snapshot to be + # embedded in the binary instead of being a external files. + 'v8_use_external_startup_data%': 1, + + # Set this to 1 to enable use of concatenated impulse responses + # for the HRTF panner in WebAudio. 
+    'use_concatenated_impulse_responses': 1,
+
+    # You can set the variable 'use_official_google_api_keys' to 1
+    # to use the Google-internal file containing official API keys
+    # for Google Chrome even in a developer build.  Setting this
+    # variable explicitly to 1 will cause your build to fail if the
+    # internal file is missing.
+    #
+    # The variable is documented here, but not handled in this file;
+    # see //google_apis/determine_use_official_keys.gypi for the
+    # implementation.
+    #
+    # Set the variable to 0 to not use the internal file, even when
+    # it exists in your checkout.
+    #
+    # Leave it unset in your include.gypi to have the variable
+    # implicitly set to 1 if you have
+    # src/google_apis/internal/google_chrome_api_keys.h in your
+    # checkout, and implicitly set to 0 if not.
+    #
+    # Note that official builds always behave as if the variable
+    # was explicitly set to 1, i.e. they always use official keys,
+    # and will fail to build if the internal file is missing.
+    #
+    # NOTE: You MUST NOT explicitly set the variable to 2 in your
+    # include.gypi or by other means. Due to subtleties of GYP, this
+    # is not the same as leaving the variable unset, even though its
+    # default value in
+    # //google_apis/determine_use_official_keys.gypi is 2.
+
+    # Set these to bake the specified API keys and OAuth client
+    # IDs/secrets into your build.
+    #
+    # If you create a build without values baked in, you can instead
+    # set environment variables to provide the keys at runtime (see
+    # src/google_apis/google_api_keys.h for details).  Features that
+    # require server-side APIs may fail to work if no keys are
+    # provided.
+    #
+    # Note that if you are building an official build or if
+    # use_official_google_api_keys has been set to 1 (explicitly or
+    # implicitly), these values will be ignored and the official
+    # keys will be used instead.
+    'google_api_key%': '',
+    'google_default_client_id%': '',
+    'google_default_client_secret%': '',
+
+    # Native Client is disabled by default.
+    'disable_nacl%': '1',
+
+    # Native Client toolchains, enabled by default.
+    'disable_pnacl%': 0,
+    'disable_newlib%': 0,
+
+    # Sets the default version name and code for the Android app; by default
+    # we do a developer build.
+    'android_app_version_name%': 'Developer Build',
+    'android_app_version_code%': 1,
+
+    # Use the internal version of the framework to build Android WebView.
+    'use_webview_internal_framework%': 0,
+  },
+
+  # Copy conditionally-set variables out one scope.
+ 'nwjs_sdk%': '<(nwjs_sdk)', + 'branding%': '<(branding)', + 'branding_path_component%': '<(branding_path_component)', + 'buildtype%': '<(buildtype)', + 'target_arch%': '<(target_arch)', + 'target_subarch%': '<(target_subarch)', + 'mips_arch_variant%': '<(mips_arch_variant)', + 'mips_dsp_rev%': '<(mips_dsp_rev)', + 'mips_msa%': '<(mips_msa)', + 'host_arch%': '<(host_arch)', + 'toolkit_views%': '<(toolkit_views)', + 'ui_compositor_image_transport%': '<(ui_compositor_image_transport)', + 'use_aura%': '<(use_aura)', + 'use_ash%': '<(use_ash)', + 'use_cras%': '<(use_cras)', + 'use_libpci%': '<(use_libpci)', + 'use_openssl_certs%': '<(use_openssl_certs)', + 'use_external_popup_menu%': '<(use_external_popup_menu)', + 'use_nss_certs%': '<(use_nss_certs)', + 'use_udev%': '<(use_udev)', + 'os_bsd%': '<(os_bsd)', + 'os_posix%': '<(os_posix)', + 'use_dbus%': '<(use_dbus)', + 'use_glib%': '<(use_glib)', + 'use_pango%': '<(use_pango)', + 'use_cairo%': '<(use_cairo)', + 'use_ozone%': '<(use_ozone)', + 'use_ozone_evdev%': '<(use_ozone_evdev)', + 'use_xkbcommon%': '<(use_xkbcommon)', + 'use_gtk3%': '<(use_gtk3)', + 'use_clipboard_aurax11%': '<(use_clipboard_aurax11)', + 'desktop_linux%': '<(desktop_linux)', + 'use_x11%': '<(use_x11)', + 'use_gnome_keyring%': '<(use_gnome_keyring)', + 'linux_fpic%': '<(linux_fpic)', + 'chromeos%': '<(chromeos)', + 'chromecast%': '<(chromecast)', + 'is_cast_desktop_build%': '<(is_cast_desktop_build)', + 'enable_viewport%': '<(enable_viewport)', + 'enable_hidpi%': '<(enable_hidpi)', + 'enable_wayland_server%': '<(enable_wayland_server)', + 'enable_wifi_display%': '<(enable_wifi_display)', + 'image_loader_extension%': '<(image_loader_extension)', + 'fastbuild%': '<(fastbuild)', + 'win_z7%': '<(win_z7)', + 'dcheck_always_on%': '<(dcheck_always_on)', + 'tracing_like_official_build%': '<(tracing_like_official_build)', + 'fieldtrial_testing_like_official_build%': '<(fieldtrial_testing_like_official_build)', + 'arm_version%': '<(arm_version)', + 'arm_neon%': '<(arm_neon)', + 'arm_neon_optional%': '<(arm_neon_optional)', + 'sysroot%': '<(sysroot)', + 'use_sysroot%': '<(use_sysroot)', + 'pkg-config%': '<(pkg-config)', + 'chroot_cmd%': '<(chroot_cmd)', + 'system_libdir%': '<(system_libdir)', + 'component%': '<(component)', + 'win_analyze%': '<(win_analyze)', + 'win_fastlink%': '<(win_fastlink)', + 'chrome_pgo_phase%': '<(chrome_pgo_phase)', + 'full_wpo_on_official%': '<(full_wpo_on_official)', + 'enable_resource_whitelist_generation%': '<(enable_resource_whitelist_generation)', + 'use_titlecase_in_grd%': '<(use_titlecase_in_grd)', + 'remoting%': '<(remoting)', + 'enable_one_click_signin%': '<(enable_one_click_signin)', + 'enable_media_router%': '<(enable_media_router)', + 'enable_webrtc%': '<(enable_webrtc)', + 'chromium_win_pch%': '<(chromium_win_pch)', + 'configuration_policy': '<(configuration_policy)', + 'safe_browsing%': '<(safe_browsing)', + 'enable_web_speech%': '<(enable_web_speech)', + 'enable_hotwording%': '<(enable_hotwording)', + 'notifications%': '<(notifications)', + 'clang_use_chrome_plugins%': '<(clang_use_chrome_plugins)', + 'mac_want_real_dsym%': '<(mac_want_real_dsym)', + 'asan%': '<(asan)', + 'asan_blacklist%': '<(asan_blacklist)', + 'sanitizer_coverage%': '<(sanitizer_coverage)', + 'asan_field_padding%': '<(asan_field_padding)', + 'use_sanitizer_options%': '<(use_sanitizer_options)', + 'syzyasan%': '<(syzyasan)', + 'kasko%': '<(kasko)', + 'syzygy_optimize%': '<(syzygy_optimize)', + 'lsan%': '<(lsan)', + 'msan%': '<(msan)', + 'msan_blacklist%': 
'<(msan_blacklist)', + 'msan_track_origins%': '<(msan_track_origins)', + 'tsan%': '<(tsan)', + 'tsan_blacklist%': '<(tsan_blacklist)', + 'ubsan%': '<(ubsan)', + 'ubsan_blacklist%': '<(ubsan_blacklist)', + 'ubsan_security%': '<(ubsan_security)', + 'ubsan_security_blacklist%': '<(ubsan_security_blacklist)', + 'ubsan_vptr%': '<(ubsan_vptr)', + 'ubsan_vptr_blacklist%': '<(ubsan_vptr_blacklist)', + 'use_instrumented_libraries%': '<(use_instrumented_libraries)', + 'use_prebuilt_instrumented_libraries%': '<(use_prebuilt_instrumented_libraries)', + 'use_custom_libcxx%': '<(use_custom_libcxx)', + 'order_profiling%': '<(order_profiling)', + 'order_text_section%': '<(order_text_section)', + 'enable_extensions%': '<(enable_extensions)', + 'enable_pdf%': '<(enable_pdf)', + 'pdf_enable_v8%': '<(pdf_enable_v8)', + 'pdf_enable_xfa%': '<(pdf_enable_xfa)', + 'enable_plugin_installation%': '<(enable_plugin_installation)', + 'enable_plugins%': '<(enable_plugins)', + 'enable_session_service%': '<(enable_session_service)', + 'enable_themes%': '<(enable_themes)', + 'linux_use_bundled_gold%': '<(linux_use_bundled_gold)', + 'linux_use_bundled_binutils%': '<(linux_use_bundled_binutils)', + 'linux_use_gold_flags%': '<(linux_use_gold_flags)', + 'linux_use_debug_fission%': '<(linux_use_debug_fission)', + 'use_canvas_skia%': '<(use_canvas_skia)', + 'test_isolation_mode%': '<(test_isolation_mode)', + 'enable_basic_printing%': '<(enable_basic_printing)', + 'enable_print_preview%': '<(enable_print_preview)', + 'enable_spellcheck%': '<(enable_spellcheck)', + 'use_browser_spellchecker%': '<(use_browser_spellchecker)', + 'use_minikin_hyphenation%': '<(use_minikin_hyphenation)', + 'cld2_table_size%': '<(cld2_table_size)', + 'enable_captive_portal_detection%': '<(enable_captive_portal_detection)', + 'disable_file_support%': '<(disable_file_support)', + 'disable_ftp_support%': '<(disable_ftp_support)', + 'use_platform_icu_alternatives%': '<(use_platform_icu_alternatives)', + 'disable_brotli_filter%': '<(disable_brotli_filter)', + 'enable_task_manager%': '<(enable_task_manager)', + 'wix_path%': '<(wix_path)', + 'use_libjpeg_turbo%': '<(use_libjpeg_turbo)', + 'use_system_libjpeg%': '<(use_system_libjpeg)', + 'android_channel%': '<(android_channel)', + 'icu_use_data_file_flag%': '<(icu_use_data_file_flag)', + 'gyp_managed_install%': 0, + 'create_standalone_apk%': 1, + 'enable_app_list%': '<(enable_app_list)', + 'use_default_render_theme%': '<(use_default_render_theme)', + 'google_api_key%': '<(google_api_key)', + 'google_default_client_id%': '<(google_default_client_id)', + 'google_default_client_secret%': '<(google_default_client_secret)', + 'enable_supervised_users%': '<(enable_supervised_users)', + 'enable_mdns%' : '<(enable_mdns)', + 'enable_service_discovery%' : '<(enable_service_discovery)', + 'enable_hangout_services_extension%' : '<(enable_hangout_services_extension)', + 'proprietary_codecs%': '<(proprietary_codecs)', + 'use_goma%': '<(use_goma)', + 'gomadir%': '<(gomadir)', + 'use_lto%': '<(use_lto)', + 'use_lto_o2%': '<(use_lto_o2)', + 'gold_icf_level%': '<(gold_icf_level)', + 'video_hole%': '<(video_hole)', + 'v8_use_external_startup_data%': '<(v8_use_external_startup_data)', + 'cfi_vptr%': '<(cfi_vptr)', + 'cfi_cast%': '<(cfi_cast)', + 'cfi_diag%': '<(cfi_diag)', + 'cfi_blacklist%': '<(cfi_blacklist)', + 'mac_views_browser%': '<(mac_views_browser)', + 'android_app_version_name%': '<(android_app_version_name)', + 'android_app_version_code%': '<(android_app_version_code)', + 'use_webview_internal_framework%': 
'<(use_webview_internal_framework)', + 'enable_webvr%': '<(enable_webvr)', + + # Turns on compiler optimizations in V8 in Debug build. + 'v8_optimized_debug%': 1, + + # Use system protobuf instead of bundled one. + 'use_system_protobuf%': 0, + + # Use system yasm instead of bundled one. + 'use_system_yasm%': 0, + + # Use system ICU instead of bundled one. + 'use_system_icu%' : 0, + + # Default to enabled PIE; this is important for ASLR but we may need to be + # able to turn it off for various reasons. + 'linux_disable_pie%': 0, + + # The release channel that this build targets. This is used to restrict + # channel-specific build options, like which installer packages to create. + # The default is 'all', which does no channel-specific filtering. + 'channel%': 'all', + + # Override chromium_mac_pch and set it to 0 to suppress the use of + # precompiled headers on the Mac. Prefix header injection may still be + # used, but prefix headers will not be precompiled. This is useful when + # using distcc to distribute a build to compile slaves that don't + # share the same compiler executable as the system driving the compilation, + # because precompiled headers rely on pointers into a specific compiler + # executable's image. Setting this to 0 is needed to use an experimental + # Linux-Mac cross compiler distcc farm. + 'chromium_mac_pch%': 1, + + # The default value for mac_strip in target_defaults. This cannot be + # set there, per the comment about variable% in a target_defaults. + 'mac_strip_release%': 1, + + # Set to 1 to enable java code coverage. Instruments classes during build + # to produce .ec files during runtime. + 'emma_coverage%': 0, + + # EMMA filter string consisting of a list of inclusion/exclusion patterns + # separated with whitespace and/or comma. Only has effect if + # 'emma_coverage=1'. + 'emma_filter%': '', + + # Set to 1 to enable running Android lint on java/class files. + 'android_lint%': 1, + + # Although base/allocator lets you select a heap library via an + # environment variable, the shim it uses sometimes gets in the way. + # To disable it entirely, and switch to normal msvcrt, do e.g. + # 'win_use_allocator_shim': 0, + # 'win_release_RuntimeLibrary': 2 + # to ~/.gyp/include.gypi, gclient runhooks --force, and do a release build. + 'win_use_allocator_shim%': 1, # 1 = shim allocator; 0 = msvcrt + + # Enables the unified allocator shim (experimental) which routes all the + # alloc calls to base/. Right now is supported on Linux Desktop only. + # http://crbug.com/550886 . + 'use_experimental_allocator_shim%': 0, + + # TODO(bradnelson): eliminate this when possible. + # To allow local gyp files to prevent release.vsprops from being included. + # Yes(1) means include release.vsprops. + # Once all vsprops settings are migrated into gyp, this can go away. + 'msvs_use_common_release%': 1, + + # TODO(bradnelson): eliminate this when possible. + # To allow local gyp files to override additional linker options for msvs. + # Yes(1) means set use the common linker options. + 'msvs_use_common_linker_extras%': 1, + + # TODO(sgk): eliminate this if possible. + # It would be nicer to support this via a setting in 'target_defaults' + # in chrome/app/locales/locales.gypi overriding the setting in the + # 'Debug' configuration in the 'target_defaults' dict below, + # but that doesn't work as we'd like. + 'msvs_debug_link_incremental%': '2', + + # Needed for some of the largest modules. 
+ 'msvs_debug_link_nonincremental%': '1', + + # Turns on Use Library Dependency Inputs for linking chrome.dll on Windows + # to get incremental linking to be faster in debug builds. + 'incremental_chrome_dll%': '0', + + # Experimental setting to break chrome.dll into multiple pieces based on + # process type. + 'chrome_multiple_dll%': '0', + + # Whether the VS xtree header has been patched to disable warning 4702. If + # it has, then we don't need to disable 4702 (unreachable code warning). + # The patch is preapplied to the internal toolchain and hence all bots. + 'msvs_xtree_patched%': '. Additional + # documentation on these macros is available at + # http://developer.apple.com/mac/library/technotes/tn2002/tn2064.html#SECTION3 + # Chrome normally builds with the Mac OS X 10.10 SDK and sets the + # deployment target to 10.7. Other projects, such as O3D, may + # override these defaults. + + # Normally, mac_sdk_min is used to find an SDK that Xcode knows + # about that is at least the specified version. In official builds, + # the SDK must match mac_sdk_min exactly. If the SDK is installed + # someplace that Xcode doesn't know about, set mac_sdk_path to the + # path to the SDK; when set to a non-empty string, SDK detection + # based on mac_sdk_min will be bypassed entirely. + 'mac_deployment_target%': '10.7', + 'mac_sdk_min%': '10.10', + 'mac_sdk_path%': '', + }, + + 'mac_sdk_min': '<(mac_sdk_min)', + 'mac_sdk_path': '<(mac_sdk_path)', + 'mac_deployment_target': '<(mac_deployment_target)', + + # Compile in Breakpad support by default so that it can be + # tested, even if it is not enabled by default at runtime. + 'mac_breakpad_compiled_in%': 1, + 'conditions': [ + # mac_product_name is set to the name of the .app bundle as it should + # appear on disk. This duplicates data from + # chrome/app/theme/chromium/BRANDING and + # chrome/app/theme/google_chrome/BRANDING, but is necessary to get + # these names into the build system. + ['branding=="Chrome"', { + 'mac_product_name%': 'Google Chrome', + }, { # else: branding!="Chrome" + 'mac_product_name%': 'nwjs', + }], + # Official mac builds require a specific OS X SDK, but iOS and + # non-official mac builds do not. + ['branding=="Chrome" and buildtype=="Official" and OS=="mac"', { + 'mac_sdk%': '(nacl_untrusted_build)==1', { + 'defines': [ + 'USE_OPENSSL_CERTS=1', + ], + }], + ['<(use_glib)==1 and >(nacl_untrusted_build)==0', { + 'defines': ['USE_GLIB=1'], + }], + ['<(use_nss_certs)==1 and >(nacl_untrusted_build)==0', { + 'defines': ['USE_NSS_CERTS=1'], + }], + ['<(chromeos)==1 and >(nacl_untrusted_build)==0', { + 'defines': ['OS_CHROMEOS=1'], + }], + ['<(asan)==1 and >(nacl_untrusted_build)==0', { + 'defines': [ + 'ADDRESS_SANITIZER', + 'MEMORY_TOOL_REPLACES_ALLOCATOR', + 'MEMORY_SANITIZER_INITIAL_SIZE', + ], + }], + ['enable_wexit_time_destructors==1 and OS!="win"', { + # TODO: Enable on Windows too, http://crbug.com/404525 + 'variables': { 'clang_warning_flags': ['-Wexit-time-destructors']}, + }], + ['chromium_code==0', { + 'variables': { + 'clang_warning_flags': [ + # Lots of third-party libraries have unused variables. Instead of + # suppressing them individually, we just blanket suppress them here. + '-Wno-unused-variable', + ], + }, + 'conditions': [ + [ 'os_posix==1 and OS!="mac" and OS!="ios"', { + # Remove -Wextra for third-party code. + 'cflags!': [ '-Wextra' ], + 'cflags_cc': [ + # Don't warn about hash_map in third-party code. 
+ '-Wno-deprecated', + ], + }], + [ 'os_posix==1 and clang!=1 and OS!="mac" and OS!="ios"', { + # When we don't control the compiler, don't use -Wall for + # third-party code either. + 'cflags!': [ '-Wall' ], + }], + # TODO: Fix all warnings on chromeos too. + [ 'os_posix==1 and OS!="mac" and OS!="ios" and (clang!=1 or chromeos==1)', { + 'cflags!': [ + '-Werror', + ], + }], + [ 'OS=="win"', { + 'defines': [ + '_CRT_SECURE_NO_DEPRECATE', + '_CRT_NONSTDC_NO_WARNINGS', + '_CRT_NONSTDC_NO_DEPRECATE', + '_SCL_SECURE_NO_DEPRECATE', + ], + 'msvs_disabled_warnings': [ + 4800, 4275, 4267, 4090, 4146, 4334, 4068 + ], + 'msvs_settings': { + 'VCCLCompilerTool': { + 'WarningLevel': '3', + 'WarnAsError': 'true', + 'Detect64BitPortabilityProblems': 'false', + }, + }, + 'conditions': [ + ['buildtype=="Official"', { + 'msvs_settings': { + 'VCCLCompilerTool': { 'WarnAsError': 'false' }, + } + }], + [ '1 == 1', { + # TODO(darin): Unfortunately, some third_party code depends on base. + 'msvs_disabled_warnings': [ + 4251, # class 'std::xx' needs to have dll-interface. + ], + }], + ], + }], + + [ 'OS=="mac" or OS=="ios"', { + 'xcode_settings': { + 'WARNING_CFLAGS!': ['-Wextra'], + }, + 'conditions': [ + ['buildtype=="Official"', { + 'xcode_settings': { + 'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO', # -Werror + }, + }], + ], + }], + [ 'OS=="ios"', { + 'xcode_settings': { + 'RUN_CLANG_STATIC_ANALYZER': 'NO', + # Several internal ios directories generate numerous warnings for + # -Wobjc-missing-property-synthesis. + 'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO', + }, + }], + ], + }, { + 'includes': [ + # Rules for excluding e.g. foo_win.cc from the build on non-Windows. + 'filename_rules.gypi', + ], + # In Chromium code, we define __STDC_foo_MACROS in order to get the + # C99 macros on Mac and Linux. + 'defines': [ + '__STDC_CONSTANT_MACROS', + '__STDC_FORMAT_MACROS', + ], + 'conditions': [ + ['OS=="win"', { + # turn on warnings for signed/unsigned mismatch on chromium code. + 'msvs_settings': { + 'VCCLCompilerTool': { + 'AdditionalOptions': ['/we4389'], + }, + }, + }], + ['1 == 1', { + 'msvs_disabled_warnings': [ + 4251, # class 'std::xx' needs to have dll-interface. + ], + }], + ], + }], + ], # target_conditions for 'target_defaults' + 'default_configuration': 'Debug', + 'configurations': { + # VCLinkerTool LinkIncremental values below: + # 0 == default + # 1 == /INCREMENTAL:NO + # 2 == /INCREMENTAL + # Debug links incremental, Release does not. + # + # Abstract base configurations to cover common attributes. + # + 'Common_Base': { + 'abstract': 1, + 'msvs_configuration_attributes': { + 'OutputDirectory': '<(DEPTH)\\build\\<(build_dir_prefix)$(ConfigurationName)', + 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)', + 'CharacterSet': '1', + }, + 'msvs_settings':{ + 'VCCLCompilerTool': { + 'AdditionalOptions': [ + '/bigobj', + # Tell the compiler to crash on failures. This is undocumented + # and unsupported but very handy. + '/d2FastFail', + ], + }, + 'VCLinkerTool': { + # Add the default import libs. + 'AdditionalDependencies': [ + 'kernel32.lib', + 'gdi32.lib', + 'winspool.lib', + 'comdlg32.lib', + 'advapi32.lib', + 'shell32.lib', + 'ole32.lib', + 'oleaut32.lib', + 'user32.lib', + 'uuid.lib', + 'odbc32.lib', + 'odbccp32.lib', + 'delayimp.lib', + 'credui.lib', + ], + 'AdditionalOptions': [ + # Suggested by Microsoft Devrel to avoid + # LINK : fatal error LNK1248: image size (80000000) exceeds maximum allowable size (80000000) + # which started happening more regularly after VS2013 Update 4. 
+ # Needs to be a bit lower for VS2015, or else errors out. + '/maxilksize:0x7ff00000', + # Tell the linker to crash on failures. + '/fastfail', + ], + }, + }, + 'conditions': [ + ['OS=="win" and win_fastlink==1 and MSVS_VERSION != "2013"', { + 'msvs_settings': { + 'VCLinkerTool': { + # /PROFILE is incompatible with /debug:fastlink + 'Profile': 'false', + 'AdditionalOptions': [ + # Tell VS 2015+ to create a PDB that references debug + # information in .obj and .lib files instead of copying + # it all. + '/DEBUG:FASTLINK', + ], + }, + }, + }], + ['OS=="win" and MSVS_VERSION == "2015"', { + 'msvs_settings': { + 'VCCLCompilerTool': { + 'AdditionalOptions': [ + # Work around crbug.com/526851, bug in VS 2015 RTM compiler. + '/Zc:sizedDealloc-', + # Disable thread-safe statics to avoid overhead and because + # they are disabled on other platforms. See crbug.com/587210 + # and -fno-threadsafe-statics. + '/Zc:threadSafeInit-', + ], + }, + }, + }], + ], + }, + 'x86_Base': { + 'abstract': 1, + 'msvs_settings': { + 'VCLinkerTool': { + 'MinimumRequiredVersion': '5.01', # XP. + 'TargetMachine': '1', + }, + 'VCLibrarianTool': { + 'TargetMachine': '1', + }, + }, + 'msvs_configuration_platform': 'Win32', + }, + 'x64_Base': { + 'abstract': 1, + 'msvs_configuration_platform': 'x64', + 'msvs_settings': { + 'VCLinkerTool': { + # Make sure to understand http://crbug.com/361720 if you want to + # increase this. + 'MinimumRequiredVersion': '5.02', # Server 2003. + 'TargetMachine': '17', # x86 - 64 + 'AdditionalLibraryDirectories!': + ['<(windows_sdk_path)/Lib/10.0.10586.0/um/x86'], + 'AdditionalLibraryDirectories': + ['<(windows_sdk_path)/Lib/10.0.10586.0/um/x64'], + # Doesn't exist x64 SDK. Should use oleaut32 in any case. + 'IgnoreDefaultLibraryNames': [ 'olepro32.lib' ], + }, + 'VCLibrarianTool': { + 'AdditionalLibraryDirectories!': + ['<(windows_sdk_path)/Lib/10.0.10586.0/um/x86'], + 'AdditionalLibraryDirectories': + ['<(windows_sdk_path)/Lib/10.0.10586.0/um/x64'], + 'TargetMachine': '17', # x64 + }, + }, + }, + 'Debug_Base': { + 'abstract': 1, + 'defines': [ + 'DYNAMIC_ANNOTATIONS_ENABLED=1', + 'WTF_USE_DYNAMIC_ANNOTATIONS=1', + ], + 'xcode_settings': { + 'GCC_OPTIMIZATION_LEVEL': '<(mac_debug_optimization)', + 'OTHER_CFLAGS': [ + '<@(debug_extra_cflags)', + ], + }, + 'msvs_settings': { + 'VCCLCompilerTool': { + 'Optimization': '<(win_debug_Optimization)', + 'PreprocessorDefinitions': ['_DEBUG'], + 'BasicRuntimeChecks': '<(win_debug_RuntimeChecks)', + 'RuntimeLibrary': '<(win_debug_RuntimeLibrary)', + 'conditions': [ + # According to MSVS, InlineFunctionExpansion=0 means + # "default inlining", not "/Ob0". + # Thus, we have to handle InlineFunctionExpansion==0 separately. + ['win_debug_InlineFunctionExpansion==0', { + 'AdditionalOptions': ['/Ob0'], + }], + ['win_debug_InlineFunctionExpansion!=""', { + 'InlineFunctionExpansion': + '<(win_debug_InlineFunctionExpansion)', + }], + ['win_debug_disable_iterator_debugging==1', { + 'PreprocessorDefinitions': ['_HAS_ITERATOR_DEBUGGING=0'], + }], + + # if win_debug_OmitFramePointers is blank, leave as default + ['win_debug_OmitFramePointers==1', { + 'OmitFramePointers': 'true', + }], + ['win_debug_OmitFramePointers==0', { + 'OmitFramePointers': 'false', + # The above is not sufficient (http://crbug.com/106711): it + # simply eliminates an explicit "/Oy", but both /O2 and /Ox + # perform FPO regardless, so we must explicitly disable. + # We still want the false setting above to avoid having + # "/Oy /Oy-" and warnings about overriding. 
+ 'AdditionalOptions': ['/Oy-'],
+ }],
+ ],
+ 'AdditionalOptions': [ '<@(win_debug_extra_cflags)', ],
+ },
+ 'VCLinkerTool': {
+ 'LinkIncremental': '<(msvs_debug_link_incremental)',
+ # ASLR makes debugging with windbg difficult because Chrome.exe and
+ # Chrome.dll share the same base name. As a result, windbg will
+ # name the Chrome.dll module like chrome_<base address>, where
+ # <base address> typically changes with each launch. This in turn
+ # means that breakpoints in Chrome.dll don't stick from one launch
+ # to the next. For this reason, we turn ASLR off in debug builds.
+ # Note that this is a three-way bool, where 0 means to pick up
+ # the default setting, 1 is off and 2 is on.
+ 'RandomizedBaseAddress': 1,
+ },
+ 'VCResourceCompilerTool': {
+ 'PreprocessorDefinitions': ['_DEBUG'],
+ },
+ },
+ 'variables': {
+ 'clang_warning_flags': [
+ # Allow comparing the address of references and 'this' against 0
+ # in debug builds. Technically, these can never be null in
+ # well-defined C/C++ and Clang can optimize such checks away in
+ # release builds, but they may be used in asserts in debug builds.
+ '-Wno-undefined-bool-conversion',
+ '-Wno-tautological-undefined-compare',
+ ],
+ },
+ 'conditions': [
+ ['OS=="linux" or OS=="android"', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '<@(debug_extra_cflags)',
+ ],
+ }],
+ ],
+ }],
+ ['OS=="linux" and target_arch!="ia32" and disable_glibcxx_debug==0', {
+ # Enable libstdc++ debugging facilities to help catch problems
+ # early, see http://crbug.com/65151 .
+ # TODO(phajdan.jr): Should we enable this for all of POSIX?
+ 'defines': ['_GLIBCXX_DEBUG=1',],
+ }],
+ ['release_valgrind_build==0', {
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fstack-protector-strong', # Implies -fstack-protector
+ ],
+ },
+ }],
+ ],
+ },
+ 'Release_Base': {
+ 'abstract': 1,
+ 'defines': [
+ 'NDEBUG',
+ ],
+ 'xcode_settings': {
+ 'DEAD_CODE_STRIPPING': 'YES', # -Wl,-dead_strip
+ 'GCC_OPTIMIZATION_LEVEL': '<(mac_release_optimization)',
+ 'OTHER_CFLAGS': [ '<@(release_extra_cflags)', ],
+ },
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'RuntimeLibrary': '<(win_release_RuntimeLibrary)',
+ 'conditions': [
+ # In official builds, each target will self-select
+ # an optimization level.
+ ['buildtype!="Official"', {
+ 'Optimization': '<(win_release_Optimization)',
+ },
+ ],
+ # According to MSVS, InlineFunctionExpansion=0 means
+ # "default inlining", not "/Ob0".
+ # Thus, we have to handle InlineFunctionExpansion==0 separately.
+ ['win_release_InlineFunctionExpansion==0', {
+ 'AdditionalOptions': ['/Ob0'],
+ }],
+ ['win_release_InlineFunctionExpansion!=""', {
+ 'InlineFunctionExpansion':
+ '<(win_release_InlineFunctionExpansion)',
+ }],
+
+ # if win_release_OmitFramePointers is blank, leave as default
+ ['win_release_OmitFramePointers==1', {
+ 'OmitFramePointers': 'true',
+ }],
+ ['win_release_OmitFramePointers==0', {
+ 'OmitFramePointers': 'false',
+ # The above is not sufficient (http://crbug.com/106711): it
+ # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+ # perform FPO regardless, so we must explicitly disable.
+ # We still want the false setting above to avoid having
+ # "/Oy /Oy-" and warnings about overriding.
+ 'AdditionalOptions': ['/Oy-'],
+ }],
+ ['asan==0', {
+ # Put data in separate COMDATs. This allows the linker
+ # to put bit-identical constants at the same address even if
+ # they're unrelated constants, which saves binary size.
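
RandomizedBaseAddress above is the same kind of tri-state. Assuming the conventional MSVS mapping of this setting onto /DYNAMICBASE (an assumption, not stated in this file), the decoding looks like:

def aslr_linker_flag(randomized_base_address):
    return {0: None,                  # pick up the linker default
            1: '/DYNAMICBASE:NO',     # ASLR off: stable windbg breakpoints
            2: '/DYNAMICBASE'}[randomized_base_address]

assert aslr_linker_flag(1) == '/DYNAMICBASE:NO'   # Debug builds, per above
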
+ # This optimization can't be used when ASan is enabled because + # it is not compatible with the ASan ODR checker. + 'AdditionalOptions': ['/Gw'], + }], + ], + 'AdditionalOptions': [ + '/d2Zi+', # Improve debugging of Release builds. + '/Zc:inline', # Remove unreferenced COMDAT (faster links). + '<@(win_release_extra_cflags)', + ], + }, + 'VCLinkerTool': { + # LinkIncremental is a tri-state boolean, where 0 means default + # (i.e., inherit from parent solution), 1 means false, and + # 2 means true. + 'LinkIncremental': '1', + # This corresponds to the /PROFILE flag which ensures the PDB + # file contains FIXUP information (growing the PDB file by about + # 5%) but does not otherwise alter the output binary. This + # information is used by the Syzygy optimization tool when + # decomposing the release image. + 'Profile': 'true', + }, + }, + 'conditions': [ + ['msvs_use_common_release', { + 'includes': ['release.gypi'], + }], + ['release_valgrind_build==0 and tsan==0', { + 'defines': [ + 'NVALGRIND', + 'DYNAMIC_ANNOTATIONS_ENABLED=0', + ], + }, { + 'defines': [ + 'MEMORY_TOOL_REPLACES_ALLOCATOR', + 'MEMORY_SANITIZER_INITIAL_SIZE', + 'DYNAMIC_ANNOTATIONS_ENABLED=1', + 'WTF_USE_DYNAMIC_ANNOTATIONS=1', + ], + }], + ['OS=="win" and win_use_allocator_shim==1', { + 'defines': [ + 'ALLOCATOR_SHIM' + ], + }], + # _FORTIFY_SOURCE isn't really supported by Clang now, see + # http://llvm.org/bugs/show_bug.cgi?id=16821. + # It seems to work fine with Ubuntu 12 headers though, so use it + # in official builds. + ['os_posix==1 and (asan!=1 and msan!=1 and tsan!=1 and lsan!=1 and ubsan!=1) and (OS!="linux" or clang!=1 or buildtype=="Official")', { + 'target_conditions': [ + ['chromium_code==1', { + # Non-chromium code is not guaranteed to compile cleanly + # with _FORTIFY_SOURCE. Also, fortified build may fail + # when optimizations are disabled, so only do that for Release + # build. + 'defines': [ + '_FORTIFY_SOURCE=2', + ], + }], + ], + }], + ['OS=="linux" or OS=="android"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': [ + '<@(release_extra_cflags)', + ], + 'conditions': [ + ['enable_resource_whitelist_generation==1', { + 'cflags': [ + '-Wunknown-pragmas -Wno-error=unknown-pragmas', + ], + }], + ], + }], + ], + }], + ['OS=="ios"', { + 'defines': [ + 'NS_BLOCK_ASSERTIONS=1', + ], + }], + ], + }, + # + # Concrete configurations + # + 'Debug': { + 'inherit_from': ['Common_Base', 'x86_Base', 'Debug_Base'], + }, + 'Release': { + 'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'], + }, + 'conditions': [ + [ 'OS=="ios"', { + 'Profile': { + 'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'], + 'target_conditions': [ + [ '_type=="executable"', { + # To get a real .dSYM bundle produced by dsymutil, set the + # debug information format to dwarf-with-dsym. Since + # strip_from_xcode will not be used, set Xcode to do the + # stripping as well. + 'xcode_settings': { + 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym', + 'DEPLOYMENT_POSTPROCESSING': 'YES', + 'STRIP_INSTALLED_PRODUCT': 'YES', + }, + }], + ], + }, + }], + [ 'OS=="win"', { + # TODO(bradnelson): add a gyp mechanism to make this more graceful. + 'Debug_x64': { + 'inherit_from': ['Common_Base', 'x64_Base', 'Debug_Base'], + }, + 'Release_x64': { + 'inherit_from': ['Common_Base', 'x64_Base', 'Release_Base'], + }, + }], + ], + }, + }, + 'conditions': [ + ['os_posix==1', { + 'target_defaults': { + 'ldflags': [ + '-Wl,-z,now', + '-Wl,-z,relro', + ], + # TODO(glider): enable the default options on other systems. 
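
The _FORTIFY_SOURCE condition above is dense; restated as a Python predicate with illustrative parameter names (sanitizers standing in for asan/msan/tsan/lsan/ubsan):

def use_fortify_source(os_posix, sanitizers, os_name, clang, buildtype,
                       chromium_code):
    # Release-only in the .gypi (it sits under Release_Base); skipped
    # under any sanitizer, and with clang on Linux only trusted in
    # Official builds (http://llvm.org/bugs/show_bug.cgi?id=16821).
    if not os_posix or any(sanitizers):
        return False
    if os_name == 'linux' and clang and buildtype != 'Official':
        return False
    # Non-chromium code is not guaranteed to compile cleanly with it.
    return chromium_code

assert use_fortify_source(True, [], 'linux', True, 'Official', True)
assert not use_fortify_source(True, [True], 'mac', False, 'Dev', True)
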
+ 'conditions': [
+ ['use_sanitizer_options==1 and ((OS=="linux" and (chromeos==0 or target_arch!="ia32")) or OS=="mac")', {
+ 'dependencies': [
+ '<(DEPTH)/build/sanitizers/sanitizers.gyp:sanitizer_options',
+ ],
+ }],
+ ],
+ },
+ }],
+ # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+ ['os_posix==1 and disable_fatal_linker_warnings==0 and use_evdev_gestures==0 and (chromeos==0 or target_arch!="arm")', {
+ 'target_defaults': {
+ 'ldflags': [
+ '-Wl,--fatal-warnings',
+ ],
+ },
+ }],
+ # -Wl,-z,defs doesn't work with the sanitizers, http://crbug.com/452065
+ ['(OS=="linux" or OS=="android") and asan==0 and msan==0 and tsan==0 and ubsan==0 and ubsan_security==0 and ubsan_vptr==0 and cfi_diag==0', {
+ 'target_defaults': {
+ 'ldflags': [
+ '-Wl,-z,defs',
+ ],
+ },
+ }],
+ ['os_posix==1 and chromeos==0', {
+ # Chrome OS enables -fstack-protector-strong via its build wrapper,
+ # and we want to avoid overriding this, so stack-protector is only
+ # enabled when not building on Chrome OS.
+ # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc
+ # supports it. See also https://crbug.com/533294
+ 'target_defaults': {
+ 'cflags': [
+ '-fstack-protector',
+ '--param=ssp-buffer-size=4',
+ ],
+ },
+ }],
+ ['os_posix==1 and OS=="linux"', {
+ 'defines': [
+ '_LARGEFILE_SOURCE',
+ '_LARGEFILE64_SOURCE',
+ '_FILE_OFFSET_BITS=64',
+ ],
+ }],
+ ['os_posix==1 and OS!="mac" and OS!="ios"', {
+ 'target_defaults': {
+ # Enable -Werror by default, but put it in a variable so it can
+ # be disabled in ~/.gyp/include.gypi on the valgrind builders.
+ 'variables': {
+ 'werror%': '-Werror',
+ 'libraries_for_target%': '',
+ 'conditions' : [
+ # Enable -Wextra for chromium_code when we control the compiler.
+ ['clang==1', { 'wextra': '-Wextra' }, { 'wextra': '-Wno-extra' }],
+ ],
+ },
+ 'defines': [
+ '_FILE_OFFSET_BITS=64',
+ ],
+ 'cflags': [
+ '<(werror)', # See note above about the werror variable.
+ '-pthread',
+ '-fno-strict-aliasing', # See http://crbug.com/32204
+ '-Wall',
+ '<(wextra)',
+ # Don't warn about unused function params. We use those everywhere.
+ '-Wno-unused-parameter',
+ # Don't warn about the "struct foo f = {0};" initialization pattern.
+ '-Wno-missing-field-initializers',
+ # Don't export any symbols (for example, to plugins we dlopen()).
+ # Note: this is *required* to make some plugins work.
+ '-fvisibility=hidden',
+ '-pipe',
+ ],
+ 'cflags_cc': [
+ '-fno-exceptions',
+ '-fno-rtti',
+ # If this is removed then remove the corresponding /Zc:threadSafeInit-
+ # for Windows.
+ '-fno-threadsafe-statics',
+ # Make inline functions have hidden visibility by default.
+ # Surprisingly, not covered by -fvisibility=hidden.
+ '-fvisibility-inlines-hidden',
+ ],
+ 'ldflags': [
+ '-pthread', '-Wl,-z,noexecstack',
+ ],
+ 'libraries' : [
+ '<(libraries_for_target)',
+ ],
+ 'configurations': {
+ 'Debug_Base': {
+ 'variables': {
+ 'debug_optimize%': '0',
+ },
+ 'defines': [
+ '_DEBUG',
+ ],
+ 'cflags': [
+ '-O>(debug_optimize)',
+ '-g',
+ ],
+ 'conditions' : [
+ ['OS=="android" and target_arch!="mipsel" and target_arch!="mips64el"', {
+ # TODO(jdduke) Re-enable on mips after resolving linking
+ # issues with libc++ (crbug.com/456380).
+ 'ldflags': [
+ # Warn in case of text relocations.
+ '-Wl,--warn-shared-textrel',
+ ],
+ }],
+ ['OS=="android" and android_full_debug==0', {
+ # Some configurations are copied from Release_Base to reduce
+ # the binary size.
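
The werror% variable above relies on GYP's convention that a trailing '%' marks an overridable default. A rough Python model of that merge rule (the function name is illustrative):

def apply_defaults(user_vars, defaults):
    # Keys ending in '%' only take effect when the user hasn't already
    # provided the same variable, mirroring GYP's werror% convention.
    merged = dict(user_vars)
    for key, value in defaults.items():
        if key.endswith('%'):
            merged.setdefault(key[:-1], value)
        else:
            merged[key] = value
    return merged

# A valgrind builder's ~/.gyp/include.gypi can blank out -Werror:
assert apply_defaults({'werror': ''}, {'werror%': '-Werror'}) == {'werror': ''}
assert apply_defaults({}, {'werror%': '-Werror'}) == {'werror': '-Werror'}
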
+ 'variables': { + 'debug_optimize%': 's', + }, + 'cflags': [ + '-fdata-sections', + '-ffunction-sections', + ], + 'ldflags': [ + '-Wl,-O1', + '-Wl,--as-needed', + ], + }], + ['OS=="android" and android_full_debug==0 and target_arch!="arm64"', { + # We don't omit frame pointers on arm64 since they are required + # to correctly unwind stackframes which contain system library + # function frames (crbug.com/391706). + 'cflags': [ + '-fomit-frame-pointer', + ], + }], + ['OS=="linux" and target_arch=="ia32"', { + 'ldflags': [ + '-Wl,--no-as-needed', + ], + }], + ['debug_unwind_tables==1', { + 'cflags': ['-funwind-tables'], + }, { + 'cflags': ['-fno-unwind-tables', '-fno-asynchronous-unwind-tables'], + 'defines': ['NO_UNWIND_TABLES'], + }], + ['linux_use_debug_fission==1 and linux_use_gold_flags==1 and binutils_version>=223', { + 'cflags': ['-gsplit-dwarf'], + }], + ], + }, + 'Release_Base': { + 'variables': { + 'release_optimize%': '2', + # Binaries become big and gold is unable to perform GC + # and remove unused sections for some of test targets + # on 32 bit platform. + # (This is currently observed only in chromeos valgrind bots) + # The following flag is to disable --gc-sections linker + # option for these bots. + 'no_gc_sections%': 0, + + # TODO(bradnelson): reexamine how this is done if we change the + # expansion of configurations + 'release_valgrind_build%': 0, + }, + 'cflags': [ + '-O<(release_optimize)', + # Don't emit the GCC version ident directives, they just end up + # in the .comment section taking up binary size. + '-fno-ident', + # Put data and code in their own sections, so that unused symbols + # can be removed at link time with --gc-sections. + '-fdata-sections', + '-ffunction-sections', + ], + 'ldflags': [ + # Specifically tell the linker to perform optimizations. + # See http://lwn.net/Articles/192624/ . + '-Wl,-O1', + '-Wl,--as-needed', + ], + 'conditions' : [ + ['no_gc_sections==0', { + 'ldflags': [ + '-Wl,--gc-sections', + ], + }], + ['OS=="android" and target_arch!="arm64"', { + # We don't omit frame pointers on arm64 since they are required + # to correctly unwind stackframes which contain system library + # function frames (crbug.com/391706). + 'cflags': [ + '-fomit-frame-pointer', + ] + }], + ['OS=="android" and target_arch!="mipsel" and target_arch!="mips64el"', { + # TODO(jdduke) Re-enable on mips after resolving linking + # issues with libc++ (crbug.com/456380). + 'ldflags': [ + # Warn in case of text relocations. + '-Wl,--warn-shared-textrel', + ], + }], + ['OS=="android"', { + 'variables': { + 'release_optimize%': 's', + }, + }, { + 'ldflags': [ + # TODO(pcc): Fix linker bug which requires us to link pthread + # unconditionally here (crbug.com/623236). + '-Wl,--no-as-needed', + '-lpthread', + '-Wl,--as-needed', + ], + }], + ['profiling==1', { + 'cflags': [ + '-fno-omit-frame-pointer', + '-g', + ], + 'conditions' : [ + ['profiling_full_stack_frames==1', { + 'cflags': [ + '-fno-inline', + '-fno-optimize-sibling-calls', + ], + }], + ], + }], + ['release_unwind_tables==1', { + 'cflags': ['-funwind-tables'], + }, { + 'cflags': ['-fno-unwind-tables', '-fno-asynchronous-unwind-tables'], + 'defines': ['NO_UNWIND_TABLES'], + }], + ], + }, + }, + 'conditions': [ + ['target_arch=="ia32"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'asflags': [ + # Needed so that libs with .s files (e.g. libicudata.a) + # are compatible with the general 32-bit-ness. + '-32', + ], + # All floating-point computations on x87 happens in 80-bit + # precision. 
Because the C and C++ language standards allow + # the compiler to keep the floating-point values in higher + # precision than what's specified in the source and doing so + # is more efficient than constantly rounding up to 64-bit or + # 32-bit precision as specified in the source, the compiler, + # especially in the optimized mode, tries very hard to keep + # values in x87 floating-point stack (in 80-bit precision) + # as long as possible. This has important side effects, that + # the real value used in computation may change depending on + # how the compiler did the optimization - that is, the value + # kept in 80-bit is different than the value rounded down to + # 64-bit or 32-bit. There are possible compiler options to + # make this behavior consistent (e.g. -ffloat-store would keep + # all floating-values in the memory, thus force them to be + # rounded to its original precision) but they have significant + # runtime performance penalty. + # + # -mfpmath=sse -msse2 makes the compiler use SSE instructions + # which keep floating-point values in SSE registers in its + # native precision (32-bit for single precision, and 64-bit + # for double precision values). This means the floating-point + # value used during computation does not change depending on + # how the compiler optimized the code, since the value is + # always kept in its specified precision. + # + # Refer to http://crbug.com/348761 for rationale behind SSE2 + # being a minimum requirement for 32-bit Linux builds and + # http://crbug.com/313032 for an example where this has "bit" + # us in the past. + 'cflags': [ + '-msse2', + '-mfpmath=sse', + '-mmmx', # Allows mmintrin.h for MMX intrinsics. + '-m32', + ], + 'ldflags': [ + '-m32', + ], + 'conditions': [ + # Use gold linker for Android ia32 target. + ['OS=="android"', { + # Use gold linker for Android ia32 target. + 'ldflags': [ + '-fuse-ld=gold', + ], + # Use -mstackrealign due to a bug on ia32 Jelly Bean. + # See crbug.com/521527 + 'cflags': [ + '-mstackrealign', + ], + }], + ], + }], + ], + }], + ['target_arch=="x64"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'conditions': [ + # Use gold linker for Android x64 target. + ['OS=="android"', { + 'ldflags': [ + '-fuse-ld=gold', + ], + }], + ], + 'cflags': [ + '-m64', + '-march=x86-64', + ], + 'ldflags': [ + '-m64', + ], + }], + ], + }], + ['target_arch=="arm"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'conditions': [ + ['clang==0', { + 'cflags': [ + # Don't warn about "maybe" uninitialized. Clang doesn't + # include this in -Wall but gcc does, and it gives false + # positives. + '-Wno-maybe-uninitialized', + ], + 'cflags_cc': [ + # The codesourcery arm-2009q3 toolchain warns at that the ABI + # has changed whenever it encounters a varargs function. This + # silences those warnings, as they are not helpful and + # clutter legitimate warnings. 
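
The ia32 block above boils down to a short flag list driven by the SSE2 determinism requirement from the long comment; a sketch, with the Android special case included:

def ia32_target_cflags(os_name):
    flags = ['-msse2', '-mfpmath=sse',  # deterministic FP (crbug.com/348761)
             '-mmmx',                   # mmintrin.h for MMX intrinsics
             '-m32']
    if os_name == 'android':
        # ia32 Jelly Bean misaligns the stack (crbug.com/521527).
        flags.append('-mstackrealign')
    return flags

assert '-mstackrealign' in ia32_target_cflags('android')
assert '-mstackrealign' not in ia32_target_cflags('linux')
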
+ '-Wno-abi', + ], + }], + ['clang==1 and arm_arch!="" and OS!="android"', { + 'cflags': [ + '-target arm-linux-gnueabihf', + ], + 'ldflags': [ + '-target arm-linux-gnueabihf', + ], + }], + ['arm_arch!=""', { + 'cflags': [ + '-march=<(arm_arch)', + ], + 'conditions': [ + ['use_lto==1 or use_lto_o2==1', { + 'ldflags': [ + '-march=<(arm_arch)', + ], + }], + ], + }], + ['arm_tune!=""', { + 'cflags': [ + '-mtune=<(arm_tune)', + ], + 'conditions': [ + ['use_lto==1 or use_lto_o2==1', { + 'ldflags': [ + '-mtune=<(arm_tune)', + ], + }], + ], + }], + ['arm_fpu!=""', { + 'cflags': [ + '-mfpu=<(arm_fpu)', + ], + 'conditions': [ + ['use_lto==1 or use_lto_o2==1', { + 'ldflags': [ + '-mfpu=<(arm_fpu)', + ], + }], + ], + }], + ['arm_float_abi!=""', { + 'cflags': [ + '-mfloat-abi=<(arm_float_abi)', + ], + 'conditions': [ + ['use_lto==1 or use_lto_o2==1', { + 'ldflags': [ + '-mfloat-abi=<(arm_float_abi)', + ], + }], + ], + }], + ['arm_thumb==1', { + 'cflags': [ + '-mthumb', + ], + 'conditions': [ + ['use_lto==1 or use_lto_o2==1', { + 'ldflags': [ + '-mthumb', + ], + }], + ], + }], + ['OS=="android"', { + # Most of the following flags are derived from what Android + # uses by default when building for arm, reference for which + # can be found in the following file in the Android NDK: + # toolchains/arm-linux-androideabi-4.9/setup.mk + 'cflags': [ + # The tree-sra optimization (scalar replacement for + # aggregates enabling subsequent optimizations) leads to + # invalid code generation when using the Android NDK's + # compiler (r5-r7). This can be verified using + # webkit_unit_tests' WTF.Checked_int8_t test. + '-fno-tree-sra', + # The following option is disabled to improve binary + # size and performance in gcc 4.9. + '-fno-caller-saves', + '-Wno-psabi', + ], + # Android now supports .relro sections properly. + # NOTE: While these flags enable the generation of .relro + # sections, the generated libraries can still be loaded on + # older Android platform versions. + 'ldflags': [ + '-Wl,-z,relro', + '-Wl,-z,now', + '-fuse-ld=gold', + ], + 'conditions': [ + ['arm_thumb==1', { + 'cflags': [ '-mthumb-interwork' ], + }], + ['profiling==1', { + 'cflags': [ + # Thumb code with frame pointer makes chrome crash + # early. + '-marm', + '-mapcs-frame', # Required by -fno-omit-frame-pointer. + # The perf report sometimes incorrectly attributes + # code from tail calls. + '-fno-optimize-sibling-calls', + ], + 'cflags!': [ + '-fomit-frame-pointer', + ], + }], + ['clang==1', { + 'cflags!': [ + # Clang does not support the following options. + '-mapcs-frame', + '-mthumb-interwork', + '-finline-limit=64', + '-fno-tree-sra', + '-fno-caller-saves', + '-Wno-psabi', + ], + }], + ['clang==1 and linux_use_bundled_gold==0', { + 'ldflags': [ + # Let clang find the ld.gold in the NDK. + '--gcc-toolchain=<(android_toolchain)/..', + ], + }], + ['asan==1', { + 'cflags': [ + '-marm', # Required for frame pointer based stack traces. + ], + }], + ], + }], + ['chromecast==1', { + 'cflags': [ + # We set arm_arch to "" so that -march compiler option + # is not set. Otherwise a gcc bug that would complain + # about it conflicting with '-mcpu=cortex-a9'. The flag + # '-march=armv7-a' is actually redundant anyway because + # it is enabled by default when we built the toolchain. + # And using '-mcpu=cortex-a9' should be sufficient. + '-mcpu=cortex-a9', + '-funwind-tables', + # Breakpad requires symbols with debugging information + '-g', + ], + 'ldflags': [ + # We want to statically link libstdc++/libgcc. 
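
Each non-empty arm_* variable above contributes one compiler flag, and under LTO the same flag is mirrored into ldflags because code generation then happens at link time. A compact sketch of that repeated pattern (names illustrative):

def arm_flags(arm_arch, arm_tune, arm_fpu, arm_float_abi, thumb, lto):
    cflags, ldflags = [], []
    pairs = [('-march=', arm_arch), ('-mtune=', arm_tune),
             ('-mfpu=', arm_fpu), ('-mfloat-abi=', arm_float_abi),
             ('-mthumb', thumb)]
    for prefix, value in pairs:
        if not value:
            continue  # empty variable: leave the toolchain default
        flag = prefix if value is True else prefix + value
        cflags.append(flag)
        if lto:  # LTO generates code at link time, so mirror the flag.
            ldflags.append(flag)
    return cflags, ldflags

assert arm_flags('armv7-a', '', 'neon', '', True, True)[1] == \
    ['-march=armv7-a', '-mfpu=neon', '-mthumb']
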
+ '-static-libstdc++', + '-static-libgcc', + # Don't allow visible symbols from libraries that contain + # assembly code with symbols that aren't hidden properly. + # http://b/26390825 + '-Wl,--exclude-libs=libffmpeg.a', + ], + 'cflags!': [ + # Some components in Chromium (e.g. v8, skia, ffmpeg) + # define their own cflags for arm builds that could + # conflict with the flags we set here (e.g. + # '-mcpu=cortex-a9'). Remove these flags explicitly. + '-march=armv7-a', + '-mtune=cortex-a8', + ], + 'target_conditions': [ + [ '_type=="executable" and OS!="android"', { + # Statically link whole libstdc++ and libgcc in + # executables to ensure only one copy at runtime. + 'ldflags': [ + # Note executables also get -static-stdlibc++/libgcc. + # Despite including libstdc++/libgcc archives, we + # still need to specify static linking for them in + # order to prevent the executable from having a + # dynamic dependency on them. + + # Export stdlibc++ and libgcc symbols to force shlibs + # to refer to these symbols from the executable. + '-Wl,--export-dynamic', + + '-lm', # stdlibc++ requires math.h + + # In case we redefined stdlibc++ symbols + # (e.g. tc_malloc) + '-Wl,--allow-multiple-definition', + + '-Wl,--whole-archive', + '-l:libstdc++.a', + '-l:libgcc.a', + '-Wl,--no-whole-archive', + ], + }] + ], + }], + ], + }], + ], + }], + ['target_arch=="arm64"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'conditions': [ + ['OS=="android"', { + 'cflags!': [ + '-fstack-protector', # stack protector is always enabled on arm64. + ], + }], + ['clang==1 and arm_arch!="" and OS!="android"', { + 'cflags': [ + '-target aarch64-linux-gnu', + ], + 'ldflags': [ + '-target aarch64-linux-gnu', + ], + }], + ], + }], + ], + }], + ['target_arch=="mipsel"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'conditions': [ + ['mips_arch_variant=="r6"', { + 'conditions': [ + ['clang==1', { + 'conditions': [ + ['OS=="android"', { + 'cflags': [ '-target mipsel-linux-android', '-march=mipsel', '-mcpu=mips32r6', ], + 'ldflags': [ '-target mipsel-linux-android', ], + }], + ], + }, { # clang==0 + 'cflags': ['-mips32r6', '-Wa,-mips32r6', ], + }], + ['clang==0 and OS=="android"', { + 'ldflags': ['-mips32r6', '-Wl,-melf32ltsmip',], + }], + ['mips_msa==1', { + 'cflags': ['-mmsa', '-mfp64', '-msched-weight', '-mload-store-pairs'], + }], + ], + 'cflags': [ '-mfp64', '-mno-odd-spreg' ], + 'ldflags': [ '-mfp64', '-mno-odd-spreg' ], + }], + ['mips_arch_variant=="r2"', { + 'conditions': [ + ['mips_float_abi=="hard" and mips_fpu_mode!=""', { + 'cflags': ['-m<(mips_fpu_mode)'], + }], + ['clang==1', { + 'conditions': [ + ['OS=="android"', { + 'cflags': [ '-target mipsel-linux-android', '-march=mipsel', '-mcpu=mips32r2'], + 'ldflags': [ '-target mipsel-linux-android', ], + }, { + 'cflags': [ '-target mipsel-linux-gnu', '-march=mipsel', '-mcpu=mips32r2'], + 'ldflags': [ '-target mipsel-linux-gnu', ], + }], + ], + }, { # clang==0 + 'cflags': ['-mips32r2', '-Wa,-mips32r2', ], + }], + ], + }], + ['mips_arch_variant=="r1"', { + 'conditions': [ + ['clang==1', { + 'conditions': [ + ['OS=="android"', { + 'cflags': [ '-target mipsel-linux-android', '-march=mipsel', '-mcpu=mips32'], + 'ldflags': [ '-target mipsel-linux-android', ], + }, { + 'cflags': [ '-target mipsel-linux-gnu', '-march=mipsel', '-mcpu=mips32'], + 'ldflags': [ '-target mipsel-linux-gnu', ], + }], + ], + }, { # clang==0 + 'cflags': ['-mips32', '-Wa,-mips32', ], + }], + ], + }], + ['clang==1', { + 'cflags!': [ + # Clang does not support the following options. 
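
The static libstdc++/libgcc recipe above can be read as a single ordered flag list; a sketch with an illustrative helper name:

def static_cxx_runtime_ldflags(redefines_runtime_symbols=True):
    flags = ['-Wl,--export-dynamic',   # shlibs resolve libstdc++/libgcc
                                       # symbols against the executable
             '-lm']                    # libstdc++ requires math.h
    if redefines_runtime_symbols:      # e.g. tc_malloc overrides
        flags.append('-Wl,--allow-multiple-definition')
    flags += ['-Wl,--whole-archive',   # pull in every runtime object,
              '-l:libstdc++.a',        #   not just those already
              '-l:libgcc.a',           #   referenced at this point
              '-Wl,--no-whole-archive']
    return flags

print(' '.join(static_cxx_runtime_ldflags()))
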
+ '-finline-limit=64', + ], + # TODO(gordanac) Enable integrated-as. + 'cflags': [ '-fno-integrated-as' ], + 'conditions': [ + ['OS=="android"', { + 'cflags': [ + # Else /usr/bin/as gets picked up. + '-B<(android_toolchain)', + ], + }], + ], + }], + ['clang==1 and OS=="android"', { + 'ldflags': [ + # Let clang find the ld in the NDK. + '--gcc-toolchain=<(android_toolchain)/..', + ], + }], + ['mips_dsp_rev==1', { + 'cflags': ['-mdsp'], + }], + ['mips_dsp_rev==2', { + 'cflags': ['-mdspr2'], + }], + ], + 'cflags': [ + '-m<(mips_float_abi)-float' + ], + 'ldflags': [ + '-Wl,--no-keep-memory' + ], + 'cflags_cc': [ + '-Wno-uninitialized', + ], + }], + ['_toolset=="target" and _type=="executable"', { + 'conditions': [ + ['OS=="linux"', { + 'ldflags': ['-pie'], + }], + ], + }], + ], + }], + ['target_arch=="mips64el"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'conditions': [ + ['mips_arch_variant=="r6"', { + 'conditions': [ + ['clang==1', { + 'conditions': [ + ['OS=="android"', { + 'cflags': [ '-target mips64el-linux-android', '-march=mips64el', '-mcpu=mips64r6', ], + 'ldflags': [ '-target mips64el-linux-android', ], + }], + ], + }, { # clang==0 + 'cflags': ['-mips64r6', '-Wa,-mips64r6'], + 'ldflags': ['-mips64r6'], + }], + ['mips_msa==1', { + 'cflags': ['-mmsa', '-mfp64', '-msched-weight', '-mload-store-pairs'], + }], + ], + }], + ['mips_arch_variant=="r2"', { + 'cflags': ['-mips64r2', '-Wa,-mips64r2'], + 'ldflags': ['-mips64r2'], + }], + ['clang==1', { + 'cflags!': [ + # Clang does not support the following options. + '-finline-limit=64', + ], + # TODO(gordanac) Enable integrated-as. + 'cflags': [ '-fno-integrated-as' ], + 'conditions': [ + ['OS=="android"', { + 'cflags': [ + # Else /usr/bin/as gets picked up. + '-B<(android_toolchain)', + ], + }], + ], + }], + ['clang==1 and OS=="android"', { + 'ldflags': [ + # Let clang find the ld in the NDK. + '--gcc-toolchain=<(android_toolchain)/..', + ], + }], + ], + + 'cflags_cc': [ + '-Wno-uninitialized', + ], + }], + ], + }], + ['linux_fpic==1', { + 'cflags': [ + '-fPIC', + ], + 'ldflags': [ + '-fPIC', + ], + }], + ['sysroot!=""', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': [ + '--sysroot=<(sysroot)', + ], + 'ldflags': [ + '--sysroot=<(sysroot)', + '=223', { + # Newer binutils don't set DT_RPATH unless you disable "new" dtags + # and the new DT_RUNPATH doesn't work without --no-as-needed flag. + # FIXME(mithro): Figure out the --as-needed/--no-as-needed flags + # inside this file to allow usage of --no-as-needed and removal of + # this flag. + 'ldflags': [ + '-Wl,--disable-new-dtags', + ], + }], + ['clang==0', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags_cc': [ + '-std=gnu++11', + # See comment for -Wno-c++11-narrowing. + '-Wno-narrowing', + ], + }], + ], + }], + ['clang==0 and host_clang==0', { + 'target_conditions': [ + ['_toolset=="host"', { + 'cflags_cc': [ + '-std=gnu++11', + # See comment for -Wno-c++11-narrowing. + '-Wno-narrowing', + ], + }], + ], + }], + ['clang==0 and chromeos==1', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags_cc': [ + # TODO(thakis): Remove, http://crbug.com/263960 + '-Wno-literal-suffix', + ], + }], + ], + }], + ['clang==0 and host_clang==0 and chromeos==1', { + 'target_conditions': [ + ['_toolset=="host"', { + 'cflags_cc': [ + # TODO(thakis): Remove, http://crbug.com/263960 + '-Wno-literal-suffix', + ], + }], + ], + }], + ], + }, + }], + # *BSD-specific options; note that most *BSD options are set above, + # with Linux. 
+ ['OS=="openbsd" or OS=="freebsd"', {
+ 'target_defaults': {
+ 'ldflags': [
+ '-Wl,--no-keep-memory',
+ ],
+ },
+ }],
+ # Android-specific options; note that most are set above with Linux.
+ ['OS=="android"', {
+ 'variables': {
+ # Placing this variable here prevents us from forking libvpx, which
+ # is used by remoting. Remoting is off, so it needn't be built,
+ # and forking its deps seems like overkill.
+ # But this variable needs to be defined for gyp to run properly.
+ # A proper solution is to have an OS==android conditional
+ # in third_party/libvpx/libvpx.gyp to define it.
+ 'libvpx_path': 'lib/linux/arm',
+ },
+ 'target_defaults': {
+ 'variables': {
+ 'release_extra_cflags%': '',
+ 'conditions': [
+ # If we're using the components build, append "cr" to all shared
+ # libraries to avoid naming collisions with android system library
+ # versions with the same name (e.g. skia, icu).
+ ['component=="shared_library"', {
+ 'android_product_extension': 'cr.so',
+ }, {
+ 'android_product_extension': 'so',
+ } ],
+ ],
+ },
+ 'target_conditions': [
+ ['_type=="shared_library"', {
+ 'product_extension': '<(android_product_extension)',
+ }],
+
+ # Settings for building device targets using Android's toolchain.
+ # These are based on the setup.mk file from the Android NDK.
+ #
+ # The NDK Android executable link step looks as follows:
+ # $LDFLAGS
+ # $(TARGET_CRTBEGIN_DYNAMIC_O) <-- crtbegin.o
+ # $(PRIVATE_OBJECTS) <-- The .o that we built
+ # $(PRIVATE_STATIC_LIBRARIES) <-- The .a that we built
+ # $(TARGET_LIBGCC) <-- libgcc.a
+ # $(PRIVATE_SHARED_LIBRARIES) <-- The .so that we built
+ # $(PRIVATE_LDLIBS) <-- System .so
+ # $(TARGET_CRTEND_O) <-- crtend.o
+ #
+ # For now the above are approximated for executables by adding
+ # crtbegin.o to the end of the ldflags and 'crtend.o' to the end
+ # of 'libraries'.
+ #
+ # The NDK Android shared library link step looks as follows:
+ # $LDFLAGS
+ # $(PRIVATE_OBJECTS) <-- The .o that we built
+ # -Wl,--whole-archive
+ # $(PRIVATE_WHOLE_STATIC_LIBRARIES)
+ # -Wl,--no-whole-archive
+ # $(PRIVATE_STATIC_LIBRARIES) <-- The .a that we built
+ # $(TARGET_LIBGCC) <-- libgcc.a
+ # $(PRIVATE_SHARED_LIBRARIES) <-- The .so that we built
+ # $(PRIVATE_LDLIBS) <-- System .so
+ #
+ # For now, assume that whole static libraries are not needed.
+ #
+ # For both executables and shared libraries, add the proper
+ # libgcc.a to the start of libraries which puts it in the
+ # proper spot after .o and .a files get linked in.
+ #
+ # TODO: The proper thing to do longer-term would be proper gyp
+ # support for a custom link command line.
+ ['_toolset=="target"', {
+ 'cflags!': [
+ '-pthread', # Not supported by Android toolchain.
+ ],
+ 'cflags': [
+ '-ffunction-sections',
+ '-funwind-tables',
+ '-g',
+ '-fstack-protector',
+ '-fno-short-enums',
+ '-finline-limit=64',
+ '<@(release_extra_cflags)',
+ '--sysroot=<(android_ndk_sysroot)',
+ ],
+ 'cflags_cc': [
+ # NOTE: The libc++ header include paths below are specified in
+ # cflags rather than include_dirs because they need to come
+ # after include_dirs.
+ # The include ordering here is important; change with caution.
+ '-isystem<(android_libcpp_include)',
+ '-isystem<(android_ndk_root)/sources/cxx-stl/llvm-libc++abi/libcxxabi/include',
+ '-isystem<(android_ndk_root)/sources/android/support/include',
+ ],
+ 'defines': [
+ 'ANDROID',
+ '__GNU_SOURCE=1', # Necessary for clone()
+ # The NDK has these things, but doesn't define the constants
+ # to say that it does. Define them here instead.
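
The NDK link-step comment above describes a fixed ordering that the .gypi approximates with ldflags and libraries entries. A sketch of the executable ordering it documents (file names illustrative):

def android_executable_link_line(objs, static_libs, shared_libs, ldlibs):
    # Approximation described above: crtbegin.o at the end of the
    # ldflags, libgcc.a ahead of the shared libraries, crtend.o last.
    line = ['crtbegin_dynamic.o']
    line += objs              # the .o files we built
    line += static_libs       # the .a files we built
    line += ['libgcc.a']      # the NDK's compiler runtime
    line += shared_libs       # the .so files we built
    line += ldlibs            # system .so (e.g. -llog)
    line += ['crtend_android.o']
    return line

print(android_executable_link_line(['foo.o'], ['libbase.a'],
                                   ['libcontent.so'], ['-llog']))
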
+ 'HAVE_SYS_UIO_H', + 'ANDROID_NDK_VERSION=<(android_ndk_version)', + ], + 'ldflags!': [ + '-pthread', # Not supported by Android toolchain. + ], + 'ldflags': [ + '-Wl,--build-id=sha1', + '-Wl,--no-undefined', + '--sysroot=<(android_ndk_sysroot)', + '-nostdlib', + '-L<(android_libcpp_libs_dir)', + # Don't allow visible symbols from libgcc or libc++ to be + # re-exported. + '-Wl,--exclude-libs=libgcc.a', + '-Wl,--exclude-libs=libc++_static.a', + # Don't allow visible symbols from libraries that contain + # assembly code with symbols that aren't hidden properly. + # http://crbug.com/448386 + '-Wl,--exclude-libs=libcommon_audio.a', + '-Wl,--exclude-libs=libcommon_audio_neon.a', + '-Wl,--exclude-libs=libcommon_audio_sse2.a', + '-Wl,--exclude-libs=libiSACFix.a', + '-Wl,--exclude-libs=libisac_neon.a', + '-Wl,--exclude-libs=libopus.a', + '-Wl,--exclude-libs=libvpx.a', + ], + 'libraries': [ + '-l<(android_libcpp_library)', + '-latomic', + # Manually link the libgcc.a that the cross compiler uses. + '(_target_name).map', + ], + }, + }], + ], + }], + ['_mac_bundle', { + 'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']}, + 'target_conditions': [ + ['_type=="executable"', { + 'conditions': [ + ['asan==1', { + 'postbuilds': [ + { + 'variables': { + # Define copy_asan_dylib_path in a variable ending in + # _path so that gyp understands it's a path and + # performs proper relativization during dict merging. + 'copy_asan_dylib_path': + 'mac/copy_asan_runtime_dylib.sh', + }, + 'postbuild_name': 'Copy ASan runtime dylib', + 'action': [ + '<(copy_asan_dylib_path)', + ], + }, + ], + }], + ], + }], + ], + }], + ], # target_conditions + }, # target_defaults + }], # OS=="mac" or OS=="ios" + ['OS=="mac"', { + 'target_defaults': { + 'defines': [ + # Prevent Mac OS X AssertMacros.h from defining macros that collide + # with common names, like 'check', 'require', and 'verify'. + # (Included by system header. Also exists on iOS but not included.) + # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h + '__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0', + ], + 'variables': { + # These should end with %, but there seems to be a bug with % in + # variables that are intended to be set to different values in + # different targets, like these. + # Strip debugging symbols from the target. + 'mac_strip': '<(mac_strip_release)', + 'conditions': [ + ['asan==1', { + 'conditions': [ + ['mac_want_real_dsym=="default"', { + 'mac_real_dsym': 1, + }, { + 'mac_real_dsym': '<(mac_want_real_dsym)' + }], + ], + }, { + 'conditions': [ + ['mac_want_real_dsym=="default"', { + 'mac_real_dsym': 0, # Fake .dSYMs are fine in most cases. + }, { + 'mac_real_dsym': '<(mac_want_real_dsym)' + }], + ], + }], + ], + }, + 'configurations': { + 'Release_Base': { + 'conditions': [ + ['mac_breakpad == 1', { + 'xcode_settings': { + 'OTHER_CFLAGS': [ + # The Google Chrome Framework dSYM generated by dsymutil has + # grown larger than 4GB, which dsymutil can't handle. Reduce + # the amount of debug symbols. + '-fno-standalone-debug', # See http://crbug.com/479841 + ] + }, + }], + ], + }, # configuration "Release" + }, # configurations + 'xcode_settings': { + # Tell the compiler to use libc++'s headers and the linker to link + # against libc++. The latter part normally requires OS X 10.7, + # but we still support running on 10.6. How does this work? Two + # parts: + # 1. 
Chromium's clang doesn't error on -mmacosx-version-min=10.6 + # combined with -stdlib=libc++ (it normally silently produced a + # binary that doesn't run on 10.6) + # 2. Further down, library_dirs is set to + # third_party/libc++-static, which contains a static + # libc++.a library. The linker then links against that instead + # of against /usr/lib/libc++.dylib when it sees the -lc++ flag + # added by the driver. + # + # In component builds, just link to the system libc++. This has + # the effect of making everything depend on libc++, which means + # component-build binaries won't run on 10.6 (no libc++ there), + # but for a developer-only configuration that's ok. (We don't + # want to raise the deployment target yet so that official and + # dev builds have the same deployment target. This affects + # things like which functions are considered deprecated.) + 'CLANG_CXX_LIBRARY': 'libc++', # -stdlib=libc++ + + 'GCC_DYNAMIC_NO_PIC': 'NO', # No -mdynamic-no-pic + # (Equivalent to -fPIC) + # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min + 'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)', + # Keep pch files below xcodebuild/. + 'SHARED_PRECOMPS_DIR': '$(CONFIGURATION_BUILD_DIR)/SharedPrecompiledHeaders', + 'OTHER_CFLAGS': [ + # Someday this can be replaced by an 'GCC_STRICT_ALIASING': 'NO' + # xcode_setting, but not until all downstream projects' mac bots are + # using xcode >= 4.6, because that's when the default value of the + # flag in the compiler switched. Pre-4.6, the value 'NO' for that + # setting is a no-op as far as xcode is concerned, but the compiler + # behaves differently based on whether -fno-strict-aliasing is + # specified or not. + '-fno-strict-aliasing', # See http://crbug.com/32204. + ], + }, + 'target_conditions': [ + ['>(nacl_untrusted_build)==0 and component=="static_library"', { + # See the comment for CLANG_CXX_LIBRARY above for what this does. + # The NaCl toolchains have their own toolchain and don't need this. + # ASan requires 10.7+ and clang implicitly adds -lc++abi in ASan + # mode. Our libc++.a contains both libc++ and libc++abi in one + # library, so it doesn't work in that mode. + 'conditions': [ + ['asan==0', { + 'library_dirs': [ '<(DEPTH)/third_party/libc++-static' ], + }], + ], + }], + ['_type=="executable"', { + # Turn on position-independence (ASLR) for executables. When + # PIE is on for the Chrome executables, the framework will + # also be subject to ASLR. + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-Wl,-pie', # Position-independent executable (MH_PIE) + ], + }, + }], + ['(_type=="executable" or _type=="shared_library" or \ + _type=="loadable_module") and mac_strip!=0', { + 'target_conditions': [ + ['mac_real_dsym == 1', { + # To get a real .dSYM bundle produced by dsymutil, set the + # debug information format to dwarf-with-dsym. Since + # strip_from_xcode will not be used, set Xcode to do the + # stripping as well. + 'configurations': { + 'Release_Base': { + 'xcode_settings': { + 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym', + 'DEPLOYMENT_POSTPROCESSING': 'YES', + 'STRIP_INSTALLED_PRODUCT': 'YES', + 'conditions': [ + # Only strip non-ASan builds. + ['asan==0', { + 'target_conditions': [ + ['_type=="shared_library" or _type=="loadable_module"', { + # The Xcode default is to strip debugging symbols + # only (-S). Local symbols should be stripped as + # well, which will be handled by -x. Xcode will + # continue to insert -S when stripping even when + # additional flags are added with STRIPFLAGS. 
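
The stripping rules above combine target type, ASan, and mac_real_dsym into a handful of Xcode settings. As one decision function (a sketch, not the build's actual logic):

def mac_strip_settings(target_type, asan, mac_real_dsym):
    strippable = target_type in ('executable', 'shared_library',
                                 'loadable_module')
    if not strippable:
        return {}
    if not mac_real_dsym:
        # Fast fake .dSYM: strip via the strip_from_xcode postbuild.
        return {'postbuild': 'mac/strip_from_xcode'}
    settings = {'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
                'DEPLOYMENT_POSTPROCESSING': 'YES',
                'STRIP_INSTALLED_PRODUCT': 'YES'}
    if asan:
        settings['STRIPFLAGS'] = '-S'   # debug symbols only
    elif target_type != 'executable':
        settings['STRIPFLAGS'] = '-x'   # also drop local symbols
    return settings

assert mac_strip_settings('shared_library', False, True)['STRIPFLAGS'] == '-x'
assert mac_strip_settings('executable', True, True)['STRIPFLAGS'] == '-S'
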
+ 'STRIPFLAGS': '-x', + }], # _type=="shared_library" or _type=="loadable_module" + ], # target_conditions + }, { # asan != 0 + 'STRIPFLAGS': '-S', + }], + ], + }, # xcode_settings + }, # configuration "Release" + }, # configurations + }, { # mac_real_dsym != 1 + # To get a fast fake .dSYM bundle, use a post-build step to + # produce the .dSYM and strip the executable. strip_from_xcode + # only operates in the Release configuration. + 'postbuilds': [ + { + 'variables': { + # Define strip_from_xcode in a variable ending in _path + # so that gyp understands it's a path and performs proper + # relativization during dict merging. + 'strip_from_xcode_path': 'mac/strip_from_xcode', + }, + 'postbuild_name': 'Strip If Needed', + 'action': ['<(strip_from_xcode_path)'], + }, + ], # postbuilds + }], # mac_real_dsym + ], # target_conditions + }], # (_type=="executable" or _type=="shared_library" or + # _type=="loadable_module") and mac_strip!=0 + ], # target_conditions + }, # target_defaults + }], # OS=="mac" + ['OS=="ios"', { + 'includes': [ + 'ios/coverage.gypi', + ], + 'target_defaults': { + 'xcode_settings' : { + 'ENABLE_BITCODE': 'NO', + 'CLANG_CXX_LIBRARY': 'libc++', # -stdlib=libc++ + + 'conditions': [ + # Older Xcodes do not support -Wno-deprecated-register, so pass an + # additional flag to suppress the "unknown compiler option" error. + # Restrict this flag to builds that are either compiling with Xcode + # or compiling with Xcode's Clang. This will allow Ninja builds to + # continue failing on unknown compiler options. + # TODO(rohitrao): This flag is temporary and should be removed as + # soon as the iOS bots are updated to use Xcode 5.1. + ['clang_xcode==1', { + 'WARNING_CFLAGS': [ + '-Wno-unknown-warning-option', + # It's not possible to achieve nullability completeness before + # all builders are running Xcode 7. crbug.com/499809 + '-Wno-nullability-completeness', + ], + 'OTHER_CPLUSPLUSFLAGS': [ + '$(inherited)', + # TODO(ios): Remove once Xcode's libc++ has LLVM r256325 + '-isystem (win_exe_compatibility_manifest)"!=""', { + 'VCManifestTool': { + 'AdditionalManifestFiles': [ + '>(win_exe_compatibility_manifest)', + ], + }, + }], + ], + 'conditions': [ + # Building with Clang on Windows is a work in progress and very + # experimental. See crbug.com/82385. + # Keep this in sync with the similar blocks in build/config/compiler/BUILD.gn + ['clang==1', { + 'VCCLCompilerTool': { + 'AdditionalOptions': [ + # Don't warn about unused function parameters. + # (This is also used on other platforms.) + '-Wno-unused-parameter', + # Don't warn about the "struct foo f = {0};" initialization + # pattern. + '-Wno-missing-field-initializers', + + # TODO(hans): Make this list shorter eventually, http://crbug.com/504657 + '-Wno-microsoft-enum-value', # http://crbug.com/505296 + '-Wno-unknown-pragmas', # http://crbug.com/505314 + '-Wno-microsoft-cast', # http://crbug.com/550065 + ], + }, + }], + ['clang==1 and clang_use_chrome_plugins==1', { + 'VCCLCompilerTool': { + 'AdditionalOptions': [ + '<@(clang_chrome_plugins_flags)', + ], + }, + }], + ['clang==1 and MSVS_VERSION == "2013"', { + 'VCCLCompilerTool': { + 'AdditionalOptions': [ + '-fmsc-version=1800', + ], + }, + }], + ['clang==1 and MSVS_VERSION == "2015"', { + 'VCCLCompilerTool': { + 'AdditionalOptions': [ + '-fmsc-version=1900', + ], + }, + }], + ['clang==1 and " version. + + +def Usage(program_name): + print '%s MODE TOOL' % os.path.basename(program_name) + print 'MODE: host or target.' + print 'TOOL: assembler or compiler or linker.' 
+ return 1 + + +def ParseArgs(args): + if len(args) != 2: + raise Exception('Invalid number of arguments') + mode = args[0] + tool = args[1] + if mode not in ('host', 'target'): + raise Exception('Invalid mode: %s' % mode) + if tool not in ('assembler',): + raise Exception('Invalid tool: %s' % tool) + return mode, tool + + +def GetEnvironFallback(var_list, default): + """Look up an environment variable from a possible list of variable names.""" + for var in var_list: + if var in os.environ: + return os.environ[var] + return default + + +def GetVersion(compiler, tool): + tool_output = tool_error = None + cache_key = (compiler, tool) + cached_version = compiler_version_cache.get(cache_key) + if cached_version: + return cached_version + try: + # Note that compiler could be something tricky like "distcc g++". + if tool == "assembler": + compiler = compiler + " --version -Xassembler -x assembler -c /dev/null" + # Unmodified: GNU assembler (GNU Binutils) 2.24 + # Ubuntu: GNU assembler (GNU Binutils for Ubuntu) 2.22 + # Fedora: GNU assembler version 2.23.2 + version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M) + else: + raise Exception("Unknown tool %s" % tool) + + # Force the locale to C otherwise the version string could be localized + # making regex matching fail. + env = os.environ.copy() + env["LC_ALL"] = "C" + pipe = subprocess.Popen(compiler, shell=True, env=env, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + tool_output, tool_error = pipe.communicate() + if pipe.returncode: + raise subprocess.CalledProcessError(pipe.returncode, compiler) + + parsed_output = version_re.match(tool_output) + result = parsed_output.group(1) + parsed_output.group(2) + compiler_version_cache[cache_key] = result + return result + except Exception, e: + if tool_error: + sys.stderr.write(tool_error) + print >> sys.stderr, "compiler_version.py failed to execute:", compiler + print >> sys.stderr, e + return "" + + +def main(args): + try: + (mode, tool) = ParseArgs(args[1:]) + except Exception, e: + sys.stderr.write(e.message + '\n\n') + return Usage(args[0]) + + ret_code, result = ExtractVersion(mode, tool) + if ret_code == 0: + print result + return ret_code + + +def DoMain(args): + """Hook to be called from gyp without starting a separate python + interpreter.""" + (mode, tool) = ParseArgs(args) + ret_code, result = ExtractVersion(mode, tool) + if ret_code == 0: + return result + raise Exception("Failed to extract compiler version for args: %s" % args) + + +def ExtractVersion(mode, tool): + # Check if various CXX environment variables exist and use them if they + # exist. The preferences and fallback order is a close approximation of + # GenerateOutputForConfig() in GYP's ninja generator. + # The main difference being not supporting GYP's make_global_settings. + environments = ['CXX_target', 'CXX'] + if mode == 'host': + environments = ['CXX_host'] + environments; + compiler = GetEnvironFallback(environments, 'c++') + + if compiler: + compiler_version = GetVersion(compiler, tool) + if compiler_version != "": + return (0, compiler_version) + return (1, None) + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/build/config/BUILD.gn b/build/config/BUILD.gn new file mode 100644 index 00000000000..d4ecc4be3f3 --- /dev/null +++ b/build/config/BUILD.gn @@ -0,0 +1,503 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
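
The version regex in GetVersion() above can be sanity-checked against the three banner formats listed in its comment; this snippet is Python 3 even though the script itself is Python 2:

import re

# Same pattern as GetVersion() above; the literal space before the first
# group anchors it to the major version, and the unescaped dot between
# the two groups matches the '.' separator in practice.
version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)

for banner in ("GNU assembler (GNU Binutils) 2.24",
               "GNU assembler (GNU Binutils for Ubuntu) 2.22",
               "GNU assembler version 2.23.2"):
    m = version_re.match(banner)
    print(m.group(1) + m.group(2))   # -> 224, 222, 223
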
+ +import("//build/config/allocator.gni") +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +import("//build/config/crypto.gni") +import("//build/config/dcheck_always_on.gni") +import("//build/config/features.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/ui.gni") +import("//build/toolchain/goma.gni") + +# One common error that happens is that GYP-generated headers within gen/ get +# included rather than the GN-generated ones within gen/ subdirectories. +# TODO(GYP_GONE): Remove once GYP is gone (as well as exec_script exception). +assert( + exec_script("//build/dir_exists.py", [ "obj.host" ], "string") == "False", + "GYP artifacts detected in $root_build_dir.$0x0A" + + "You must wipe this directory before building with GN.") + +declare_args() { + # When set (the default) enables C++ iterator debugging in debug builds. + # Iterator debugging is always off in release builds (technically, this flag + # affects the "debug" config, which is always available but applied by + # default only in debug builds). + # + # Iterator debugging is generally useful for catching bugs. But it can + # introduce extra locking to check the state of an iterator against the state + # of the current object. For iterator- and thread-heavy code, this can + # significantly slow execution. + enable_iterator_debugging = true + + # Normally we try to decide whether to use precompiled headers or + # not based on the other build arguments, but in some cases it is + # easiest to force them off explicitly. + disable_precompiled_headers = false +} + +# ============================================== +# PLEASE DO NOT ADD MORE THINGS TO THIS LIST +# ============================================== +# +# Legacy feature defines applied to all targets. +# +# These are applied to every single compile in the build and most of them are +# only relevant to a few files. This bloats command lines and causes +# unnecessary recompiles when flags are flipped. +# +# To pass defines to source code from the build, use the buildflag system which +# will write headers containing the defines you need. This isolates the define +# and means its definition can participate in the build graph, only recompiling +# things when it actually changes. +# +# See //build/buildflag_header.gni for inntructions on generating headers. +# +# This will also allow you to scope your build flag to a BUILD.gn file (or a +# .gni file if you need it from more than one place) rather than making global +# flags. See //build/config/BUILDCONFIG.gn for advice on where to define +# build flags. +config("feature_flags") { + # Don't use deprecated V8 APIs anywhere. + defines = [ "V8_DEPRECATION_WARNINGS" ] + if (enable_mdns) { + defines += [ "ENABLE_MDNS=1" ] + } + if (enable_notifications) { + defines += [ "ENABLE_NOTIFICATIONS" ] + } + if (enable_pepper_cdms) { + # TODO(brettw) should probably be "=1" + defines += [ "ENABLE_PEPPER_CDMS" ] + } + if (enable_browser_cdms) { + # TODO(brettw) should probably be "=1" + defines += [ "ENABLE_BROWSER_CDMS" ] + } + if (enable_plugins) { + defines += [ "ENABLE_PLUGINS=1" ] + } + if (enable_pdf) { + defines += [ "ENABLE_PDF=1" ] + } + if (enable_basic_printing || enable_print_preview) { + # Convenience define for ENABLE_BASIC_PRINTING || ENABLE_PRINT_PREVIEW. + defines += [ "ENABLE_PRINTING=1" ] + if (enable_basic_printing) { + # Enable basic printing support and UI. 
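
The comment above recommends generated buildflag headers over global defines. A minimal sketch of such a generator, loosely modeled on the BUILDFLAG_INTERNAL_*() shape used by //build/buildflag_header.gni (the exact output format here is an assumption):

def write_buildflag_header(path, flags):
    # Each flag becomes a function-style macro so that a misspelled name
    # or a missing include fails to compile instead of silently reading
    # as 0, which is the failure mode of plain -D defines.
    lines = ['// Generated buildflag header.', '#pragma once']
    for name, value in sorted(flags.items()):
        lines.append('#define BUILDFLAG_INTERNAL_%s() (%d)'
                     % (name.upper(), 1 if value else 0))
    with open(path, 'w') as f:
        f.write('\n'.join(lines) + '\n')

write_buildflag_header('example_buildflags.h', {'enable_mdns': True})
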
+ defines += [ "ENABLE_BASIC_PRINTING=1" ] + } + if (enable_print_preview) { + # Enable printing with print preview. + # Can be defined without ENABLE_BASIC_PRINTING. + defines += [ "ENABLE_PRINT_PREVIEW=1" ] + } + if ((enable_basic_printing && is_win) || enable_print_preview) { + # Windows basic printing or print preview requires pdf enabled. + assert(enable_pdf, + "Windows basic printing or print preview needs pdf: " + + "set enable_pdf=true.") + } + } + if (enable_spellcheck) { + defines += [ "ENABLE_SPELLCHECK=1" ] + } + if (use_browser_spellchecker) { + defines += [ "USE_BROWSER_SPELLCHECKER=1" ] + } + if (dcheck_always_on) { + defines += [ "DCHECK_ALWAYS_ON=1" ] + } + if (use_udev) { + # TODO(brettw) should probably be "=1". + defines += [ "USE_UDEV" ] + } + if (ui_compositor_image_transport) { + # TODO(brettw) should probably be "=1". + defines += [ "UI_COMPOSITOR_IMAGE_TRANSPORT" ] + } + if (use_ash) { + defines += [ "USE_ASH=1" ] + } + if (use_aura) { + defines += [ "USE_AURA=1" ] + } + if (use_pango) { + defines += [ "USE_PANGO=1" ] + } + if (use_cairo) { + defines += [ "USE_CAIRO=1" ] + } + if (use_clipboard_aurax11) { + defines += [ "USE_CLIPBOARD_AURAX11=1" ] + } + if (use_default_render_theme) { + defines += [ "USE_DEFAULT_RENDER_THEME=1" ] + } + if (use_glib) { + defines += [ "USE_GLIB=1" ] + } + if (use_openssl_certs) { + defines += [ "USE_OPENSSL_CERTS=1" ] + } + if (use_nss_certs) { + defines += [ "USE_NSS_CERTS=1" ] + } + if (use_ozone) { + defines += [ "USE_OZONE=1" ] + } + if (use_x11) { + defines += [ "USE_X11=1" ] + } + if (use_allocator != "tcmalloc") { + defines += [ "NO_TCMALLOC" ] + } + if (is_asan || is_lsan || is_tsan || is_msan) { + defines += [ + "MEMORY_TOOL_REPLACES_ALLOCATOR", + "MEMORY_SANITIZER_INITIAL_SIZE", + ] + } + if (is_asan) { + defines += [ "ADDRESS_SANITIZER" ] + } + if (is_lsan) { + defines += [ + "LEAK_SANITIZER", + "WTF_USE_LEAK_SANITIZER=1", + ] + } + if (is_tsan) { + defines += [ + "THREAD_SANITIZER", + "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1", + "WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1", + ] + } + if (is_msan) { + defines += [ "MEMORY_SANITIZER" ] + } + if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) { + defines += [ "UNDEFINED_SANITIZER" ] + } + if (use_external_popup_menu) { + defines += [ "USE_EXTERNAL_POPUP_MENU=1" ] + } + if (enable_webrtc) { + defines += [ "ENABLE_WEBRTC=1" ] + } + if (!enable_nacl) { + defines += [ "DISABLE_NACL" ] + } + if (enable_extensions) { + defines += [ "ENABLE_EXTENSIONS=1" ] + } + if (enable_task_manager) { + defines += [ "ENABLE_TASK_MANAGER=1" ] + } + if (enable_themes) { + defines += [ "ENABLE_THEMES=1" ] + } + if (enable_captive_portal_detection) { + defines += [ "ENABLE_CAPTIVE_PORTAL_DETECTION=1" ] + } + if (enable_session_service) { + defines += [ "ENABLE_SESSION_SERVICE=1" ] + } + if (enable_rlz) { + defines += [ "ENABLE_RLZ" ] + } + if (enable_plugin_installation) { + defines += [ "ENABLE_PLUGIN_INSTALLATION=1" ] + } + if (enable_app_list) { + defines += [ "ENABLE_APP_LIST=1" ] + } + if (enable_supervised_users) { + defines += [ "ENABLE_SUPERVISED_USERS=1" ] + } + if (enable_service_discovery) { + defines += [ "ENABLE_SERVICE_DISCOVERY=1" ] + } + if (enable_image_loader_extension) { + defines += [ "IMAGE_LOADER_EXTENSION=1" ] + } + if (enable_wayland_server) { + defines += [ "ENABLE_WAYLAND_SERVER=1" ] + } + if (enable_wifi_display) { + defines += [ "ENABLE_WIFI_DISPLAY=1" ] + } + if (proprietary_codecs) { + defines += [ "USE_PROPRIETARY_CODECS" ] + } + if 
(enable_hangout_services_extension) { + defines += [ "ENABLE_HANGOUT_SERVICES_EXTENSION=1" ] + } + if (enable_video_hole) { + defines += [ "VIDEO_HOLE=1" ] + } + if (safe_browsing_mode == 1) { + defines += [ "FULL_SAFE_BROWSING" ] + defines += [ "SAFE_BROWSING_CSD" ] + defines += [ "SAFE_BROWSING_DB_LOCAL" ] + } else if (safe_browsing_mode == 2) { + defines += [ "SAFE_BROWSING_DB_REMOTE" ] + } + if (is_official_build) { + defines += [ "OFFICIAL_BUILD" ] + } + if (nwjs_sdk) { + defines += [ "NWJS_SDK" ] + } + if (is_chrome_branded) { + defines += [ "GOOGLE_CHROME_BUILD" ] + } else { + defines += [ "CHROMIUM_BUILD" ] + } + if (enable_media_router) { + defines += [ "ENABLE_MEDIA_ROUTER=1" ] + } + if (enable_webvr) { + defines += [ "ENABLE_WEBVR" ] + } + if (is_syzyasan) { + defines += [ + "SYZYASAN", + "MEMORY_TOOL_REPLACES_ALLOCATOR", + "MEMORY_SANITIZER_INITIAL_SIZE", + ] + } + if (!fieldtrial_testing_like_official_build && !is_chrome_branded) { + defines += [ "FIELDTRIAL_TESTING_ENABLED" ] + } + + # ============================================== + # PLEASE DO NOT ADD MORE THINGS TO THIS LIST + # ============================================== + # + # See the comment at the top. +} + +# Debug/release ---------------------------------------------------------------- + +config("debug") { + defines = [ + "_DEBUG", + "DYNAMIC_ANNOTATIONS_ENABLED=1", + "WTF_USE_DYNAMIC_ANNOTATIONS=1", + ] + + if (is_nacl) { + defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ] + } + + if (is_win) { + if (!enable_iterator_debugging) { + # Iterator debugging is enabled by default by the compiler on debug + # builds, and we have to tell it to turn it off. + defines += [ "_HAS_ITERATOR_DEBUGGING=0" ] + } + } else if (is_linux && current_cpu == "x64" && enable_iterator_debugging) { + # Enable libstdc++ debugging facilities to help catch problems early, see + # http://crbug.com/65151 . + # TODO(phajdan.jr): Should we enable this for all of POSIX? + defines += [ "_GLIBCXX_DEBUG=1" ] + } +} + +config("release") { + defines = [ "NDEBUG" ] + + # Sanitizers. + if (is_tsan) { + defines += [ + "DYNAMIC_ANNOTATIONS_ENABLED=1", + "WTF_USE_DYNAMIC_ANNOTATIONS=1", + ] + } else { + defines += [ "NVALGRIND" ] + if (!is_nacl) { + # NaCl always enables dynamic annotations. Currently this value is set to + # 1 for all .nexes. + defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ] + } + } +} + +# Default libraries ------------------------------------------------------------ + +# This config defines the default libraries applied to all targets. +config("default_libs") { + if (is_win) { + # TODO(brettw) this list of defaults should probably be smaller, and + # instead the targets that use the less common ones (e.g. wininet or + # winspool) should include those explicitly. + libs = [ + "advapi32.lib", + "comdlg32.lib", + "dbghelp.lib", + "delayimp.lib", + "dnsapi.lib", + "gdi32.lib", + "kernel32.lib", + "msimg32.lib", + "odbc32.lib", + "odbccp32.lib", + "ole32.lib", + "oleaut32.lib", + "psapi.lib", + "shell32.lib", + "shlwapi.lib", + "user32.lib", + "usp10.lib", + "uuid.lib", + "version.lib", + "wininet.lib", + "winmm.lib", + "winspool.lib", + "ws2_32.lib", + + # Please don't add more stuff here. We should actually be making this + # list smaller, since all common things should be covered. If you need + # some extra libraries, please just add a libs = [ "foo.lib" ] to your + # target that needs it. 
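
The per-OS default_libs choices above condense to a small table; the Windows list is omitted here for brevity (a sketch only):

def default_libs(target_os):
    return {'android': ['dl', 'm'],
            'mac': [],   # frameworks are linked explicitly per target
            'ios': ['CoreFoundation.framework', 'CoreGraphics.framework',
                    'CoreText.framework', 'Foundation.framework'],
            'linux': ['dl', 'rt']}.get(target_os, [])

assert default_libs('linux') == ['dl', 'rt']
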
+ ] + } else if (is_android) { + libs = [ + "dl", + "m", + ] + } else if (is_mac) { + # Targets should choose to explicitly link frameworks they require. Since + # linking can have run-time side effects, nothing should be listed here. + libs = [] + } else if (is_ios) { + # The libraries listed here will be specified for both the target and the + # host. Only the common ones should be listed here. + libs = [ + "CoreFoundation.framework", + "CoreGraphics.framework", + "CoreText.framework", + "Foundation.framework", + ] + } else if (is_linux) { + libs = [ + "dl", + "rt", + ] + } +} + +# Executable configs ----------------------------------------------------------- + +# Windows linker setup for EXEs and DLLs. +if (is_win) { + _windows_linker_configs = [ + "//build/config/win:sdk_link", + "//build/config/win:common_linker_setup", + ] +} + +# This config defines the configs applied to all executables. +config("executable_config") { + configs = [] + + if (is_win) { + configs += _windows_linker_configs + } else if (is_mac) { + configs += [ + "//build/config/mac:mac_dynamic_flags", + "//build/config/mac:mac_executable_flags", + ] + } else if (is_ios) { + configs += [ "//build/config/ios:ios_dynamic_flags" ] + } else if (is_linux || is_android) { + configs += [ "//build/config/gcc:executable_ldconfig" ] + if (is_android) { + configs += [ "//build/config/android:executable_config" ] + } else if (is_chromecast) { + configs += [ "//build/config/chromecast:executable_config" ] + } + } + + # If we're using the prebuilt instrumented libraries with the sanitizers, we + # need to add ldflags to every binary to make sure they are picked up. + if (prebuilt_instrumented_libraries_available) { + configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ] + } + if (use_locally_built_instrumented_libraries) { + configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ] + } + configs += [ "//build/config/sanitizers:link_executable" ] +} + +# Shared library configs ------------------------------------------------------- + +# This config defines the configs applied to all shared libraries. +config("shared_library_config") { + configs = [] + + if (is_win) { + configs += _windows_linker_configs + } else if (is_mac) { + configs += [ "//build/config/mac:mac_dynamic_flags" ] + } else if (is_ios) { + configs += [ "//build/config/ios:ios_dynamic_flags" ] + } else if (is_chromecast) { + configs += [ "//build/config/chromecast:shared_library_config" ] + } + + # If we're using the prebuilt instrumented libraries with the sanitizers, we + # need to add ldflags to every binary to make sure they are picked up. + if (prebuilt_instrumented_libraries_available) { + configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ] + } + if (use_locally_built_instrumented_libraries) { + configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ] + } + configs += [ "//build/config/sanitizers:link_shared_library" ] +} + +# Add this config to your target to enable precompiled headers. +# +# Precompiled headers are done on a per-target basis. If you have just a couple +# of files, the time it takes to precompile (~2 seconds) can actually be longer +# than the time saved. On a Z620, a 100 file target compiles about 2 seconds +# faster with precompiled headers, with greater savings for larger targets. +# +# Recommend precompiled headers for targets with more than 50 .cc files. 
+config("precompiled_headers") { + if (!is_official_build && !use_goma && !disable_precompiled_headers) { + if (is_win) { + # This is a string rather than a file GN knows about. It has to match + # exactly what's in the /FI flag below, and what might appear in the + # source code in quotes for an #include directive. + precompiled_header = "build/precompile.h" + + # This is a file that GN will compile with the above header. It will be + # implicitly added to the sources (potentially multiple times, with one + # variant for each language used in the target). + precompiled_source = "//build/precompile.cc" + + # Force include the header. + cflags = [ "/FI$precompiled_header" ] + + # Disable warning for "this file was empty after preprocessing". This + # error is generated only in C mode for ANSI compatibility. It conflicts + # with precompiled headers since the source file that's "compiled" for + # making the precompiled header is empty. + # + # This error doesn't happen every time. In VS2013, it seems if the .pch + # file doesn't exist, no error will be generated (probably MS tested this + # case but forgot the other one?). To reproduce this error, do a build, + # then delete the precompile.c.obj file, then build again. + cflags_c = [ "/wd4206" ] + } else if (is_mac) { + precompiled_header = "build/precompile.h" + precompiled_source = "//build/precompile.h" + } + } +} diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn new file mode 100644 index 00000000000..812fa7bcb8f --- /dev/null +++ b/build/config/BUILDCONFIG.gn @@ -0,0 +1,650 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# ============================================================================= +# WHAT IS THIS FILE? +# ============================================================================= +# +# This is the master GN build configuration. This file is loaded after the +# build args (args.gn) for the build directory and after the toplevel ".gn" +# file (which points to this file as the build configuration). +# +# This file will be executed and the resulting context will be used to execute +# every other file in the build. So variables declared here (that don't start +# with an underscore) will be implicitly global. + +# ============================================================================= +# PLATFORM SELECTION +# ============================================================================= +# +# There are two main things to set: "os" and "cpu". The "toolchain" is the name +# of the GN thing that encodes combinations of these things. +# +# Users typically only set the variables "target_os" and "target_cpu" in "gn +# args", the rest are set up by our build and internal to GN. +# +# There are three different types of each of these things: The "host" +# represents the computer doing the compile and never changes. The "target" +# represents the main thing we're trying to build. The "current" represents +# which configuration is currently being defined, which can be either the +# host, the target, or something completely different (like nacl). GN will +# run the same build file multiple times for the different required +# configuration in the same build. +# +# This gives the following variables: +# - host_os, host_cpu, host_toolchain +# - target_os, target_cpu, default_toolchain +# - current_os, current_cpu, current_toolchain. 
+#
+# Note the default_toolchain isn't symmetrical (you would expect
+# target_toolchain). This is because the "default" toolchain is a GN built-in
+# concept, and "target" is something our build sets up that's symmetrical with
+# its GYP counterpart. Potentially the built-in default_toolchain variable
+# could be renamed in the future.
+#
+# When writing build files, to do something only for the host:
+#   if (current_toolchain == host_toolchain) { ...

+if (target_os == "") {
+  target_os = host_os
+}
+
+if (target_cpu == "") {
+  if (target_os == "android") {
+    # If we're building for Android, we should assume that we want to
+    # build for ARM by default, not the host_cpu (which is likely x64).
+    # This allows us to not have to specify both target_os and target_cpu
+    # on the command line.
+    target_cpu = "arm"
+  } else {
+    target_cpu = host_cpu
+  }
+}
+
+if (current_cpu == "") {
+  current_cpu = target_cpu
+}
+if (current_os == "") {
+  current_os = target_os
+}
+
+# =============================================================================
+# BUILD FLAGS
+# =============================================================================
+#
+# This block lists input arguments to the build, along with their default
+# values.
+#
+# If a value is specified on the command line, it will overwrite the defaults
+# given in a declare_args block, otherwise the default will be used.
+#
+# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
+# the build to declare build flags. If you need a flag for a single component,
+# you can just declare it in the corresponding BUILD.gn file.
+#
+# - If your feature is a single target, say //components/foo, you can put
+#   a declare_args() block in //components/foo/BUILD.gn and use it there.
+#   Nobody else in the build needs to see the flag.
+#
+# - Defines based on build variables should be implemented via the generated
+#   build flag header system. See //build/buildflag_header.gni. You can put
+#   the buildflag_header target in the same file as the build flag itself. You
+#   should almost never set "defines" directly.
+#
+# - If your flag toggles a target on and off or toggles between different
+#   versions of similar things, write a "group" target that forwards to the
+#   right target (or no target) depending on the value of the build flag. This
+#   group can be in the same BUILD.gn file as the build flag, and targets can
+#   depend unconditionally on the group rather than duplicating flag checks
+#   across many targets.
+#
+# - If a semi-random set of build files REALLY needs to know about a define and
+#   the above pattern for isolating the build logic in a forwarding group
+#   doesn't work, you can put the argument in a .gni file. This should be put
+#   in the lowest level of the build that knows about this feature (which
+#   should almost always be outside of the //build directory!).
+#
+# Other flag advice:
+#
+# - Use boolean values when possible. If you need a default value that expands
+#   to some complex thing in the default case (like the location of the
+#   compiler which would be computed by a script), use a default value of -1 or
+#   the empty string. Outside of the declare_args block, conditionally expand
+#   the default value as necessary.
+#
+# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
+#   your feature) rather than just "foo".
+#
+# - Write good comments directly above the declaration with no blank line.
+#   These comments will appear as documentation in "gn args --list".
+#
+# - Don't call exec_script inside declare_args. This will execute the script
+#   even if the value is overridden, which is wasteful. See first bullet.

+declare_args() {
+  # Set to enable the official build level of optimization. This has nothing
+  # to do with branding, but enables an additional level of optimization above
+  # release (!is_debug). This might be better expressed as a tri-state
+  # (debug, release, official) but for historical reasons there are two
+  # separate flags.
+  is_official_build = false
+
+  # Debug build. Enabling official builds automatically sets is_debug to
+  # false.
+  is_debug = !is_official_build
+
+  # Whether to build the NW.js SDK flavor of the product. Adds the NWJS_SDK
+  # define (see //build/config/BUILD.gn).
+  nwjs_sdk = true
+
+  # Whether we're a traditional desktop unix.
+  is_desktop_linux = current_os == "linux"
+
+  # Set to true when compiling with the Clang compiler. Typically this is used
+  # to configure warnings.
+  is_clang = current_os == "mac" || current_os == "ios" ||
+             current_os == "linux" || current_os == "chromeos"
+
+  # Allows the path to a custom target toolchain to be injected as a single
+  # argument, and set as the default toolchain.
+  custom_toolchain = ""
+
+  # This should not normally be set as a build argument. It's here so that
+  # every toolchain can pass through the "global" value via toolchain_args().
+  host_toolchain = ""
+
+  # DON'T ADD MORE FLAGS HERE. Read the comment above.
+}
+
+declare_args() {
+  # Component build. Setting to true compiles targets declared as "components"
+  # as shared libraries loaded dynamically. This speeds up development time.
+  # When false, components will be linked statically.
+  #
+  # For more information see
+  # https://chromium.googlesource.com/chromium/src/+/master/docs/component_build.md
+  is_component_build = is_debug && current_os != "ios"
+}
+
+assert(!(is_debug && is_official_build), "Can't do official debug builds")
+
+# ==============================================================================
+# TOOLCHAIN SETUP
+# ==============================================================================
+#
+# Here we set the default toolchain, as well as the variable host_toolchain
+# which will identify the toolchain corresponding to the local system when
+# doing cross-compiles. When not cross-compiling, this will be the same as the
+# default toolchain.
+#
+# We do this before anything else to make sure we complain about any
+# unsupported os/cpu combinations as early as possible.

+if (host_toolchain == "") {
+  # This should only happen in the top-level context.
+  # In a specific toolchain context, the toolchain_args()
+  # block should have propagated a value down.
+  # TODO(dpranke): Add some sort of assert here that verifies that
+  # no toolchain omitted host_toolchain from its toolchain_args().
+
+  if (host_os == "linux") {
+    if (target_os != "linux") {
+      # TODO(dpranke) - is_clang normally applies only to the target
+      # build, and there is no way to indicate that you want to override
+      # it for both the target build *and* the host build. Do we need to
+      # support this?
+      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    } else if (is_clang) {
+      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    } else {
+      host_toolchain = "//build/toolchain/linux:$host_cpu"
+    }
+  } else if (host_os == "mac") {
+    host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
+  } else if (host_os == "win") {
+    # On Windows always use the target CPU for host builds. On the
+    # configurations we support this will always work and it saves build
+    # steps.
+    if (is_clang) {
+      host_toolchain = "//build/toolchain/win:clang_$target_cpu"
+    } else {
+      host_toolchain = "//build/toolchain/win:$target_cpu"
+    }
+  } else {
+    assert(false, "Unsupported host_os: $host_os")
+  }
+}
+
+_default_toolchain = ""
+
+if (target_os == "android") {
+  assert(host_os == "linux" || host_os == "mac",
+         "Android builds are only supported on Linux and Mac hosts.")
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/android:clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/android:$target_cpu"
+  }
+} else if (target_os == "chromeos" || target_os == "linux") {
+  # See comments in build/toolchain/cros/BUILD.gn about board compiles.
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/linux:clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/linux:$target_cpu"
+  }
+} else if (target_os == "ios") {
+  _default_toolchain = "//build/toolchain/mac:ios_clang_$target_cpu"
+} else if (target_os == "mac") {
+  assert(host_os == "mac", "Mac cross-compiles are unsupported.")
+  _default_toolchain = host_toolchain
+} else if (target_os == "win") {
+  # On Windows we use the same toolchain for host and target by default.
+  assert(target_os == host_os, "Win cross-compiles only work on win hosts.")
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/win:clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/win:$target_cpu"
+  }
+} else if (target_os == "winrt_81" || target_os == "winrt_81_phone" ||
+           target_os == "winrt_10") {
+  _default_toolchain = "//build/toolchain/win:winrt_$target_cpu"
+} else {
+  assert(false, "Unsupported target_os: $target_os")
+}
+
+# If a custom toolchain has been set in the args, set it as default. Otherwise,
+# set the default toolchain for the platform (if any).
+if (custom_toolchain != "") {
+  set_default_toolchain(custom_toolchain)
+} else if (_default_toolchain != "") {
+  set_default_toolchain(_default_toolchain)
+}
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+#   except Windows).
+# - is_linux is true for desktop Linux and ChromeOS, but not Android (which is
+#   generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
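+#
+# For example, instead of adding a hypothetical is_freebsd flag, write:
+#
+#   if (current_os == "freebsd") {
+#     ...
+#   }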
+ +if (current_os == "win" || current_os == "winrt_81" || + current_os == "winrt_81_phone" || current_os == "winrt_10") { + is_android = false + is_chromeos = false + is_ios = false + is_linux = false + is_mac = false + is_nacl = false + is_posix = false + is_win = true +} else if (current_os == "mac") { + is_android = false + is_chromeos = false + is_ios = false + is_linux = false + is_mac = true + is_nacl = false + is_posix = true + is_win = false +} else if (current_os == "android") { + is_android = true + is_chromeos = false + is_ios = false + is_linux = false + is_mac = false + is_nacl = false + is_posix = true + is_win = false +} else if (current_os == "chromeos") { + is_android = false + is_chromeos = true + is_ios = false + is_linux = true + is_mac = false + is_nacl = false + is_posix = true + is_win = false +} else if (current_os == "nacl") { + # current_os == "nacl" will be passed by the nacl toolchain definition. + # It is not set by default or on the command line. We treat is as a + # Posix variant. + is_android = false + is_chromeos = false + is_ios = false + is_linux = false + is_mac = false + is_nacl = true + is_posix = true + is_win = false +} else if (current_os == "ios") { + is_android = false + is_chromeos = false + is_ios = true + is_linux = false + is_mac = false + is_nacl = false + is_posix = true + is_win = false +} else if (current_os == "linux") { + is_android = false + is_chromeos = false + is_ios = false + is_linux = true + is_mac = false + is_nacl = false + is_posix = true + is_win = false +} + +# ============================================================================= +# SOURCES FILTERS +# ============================================================================= +# +# These patterns filter out platform-specific files when assigning to the +# sources variable. The magic variable |sources_assignment_filter| is applied +# to each assignment or appending to the sources variable and matches are +# automatically removed. +# +# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path +# boundary = end of string or slash) are supported, and the entire string +# must match the pattern (so you need "*.cc" to match all .cc files, for +# example). + +# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call +# below. 
+sources_assignment_filter = []
+if (!is_posix) {
+  sources_assignment_filter += [
+    "*_posix.h",
+    "*_posix.cc",
+    "*_posix_unittest.h",
+    "*_posix_unittest.cc",
+    "*\bposix/*",
+  ]
+}
+if (!is_win) {
+  sources_assignment_filter += [
+    "*_win.cc",
+    "*_win.h",
+    "*_win_unittest.cc",
+    "*\bwin/*",
+    "*.def",
+    "*.rc",
+  ]
+}
+if (!is_mac) {
+  sources_assignment_filter += [
+    "*_mac.h",
+    "*_mac.cc",
+    "*_mac.mm",
+    "*_mac_unittest.h",
+    "*_mac_unittest.cc",
+    "*_mac_unittest.mm",
+    "*\bmac/*",
+    "*_cocoa.h",
+    "*_cocoa.cc",
+    "*_cocoa.mm",
+    "*_cocoa_unittest.h",
+    "*_cocoa_unittest.cc",
+    "*_cocoa_unittest.mm",
+    "*\bcocoa/*",
+  ]
+}
+if (!is_ios) {
+  sources_assignment_filter += [
+    "*_ios.h",
+    "*_ios.cc",
+    "*_ios.mm",
+    "*_ios_unittest.h",
+    "*_ios_unittest.cc",
+    "*_ios_unittest.mm",
+    "*\bios/*",
+  ]
+}
+if (!is_mac && !is_ios) {
+  sources_assignment_filter += [ "*.mm" ]
+}
+if (!is_linux) {
+  sources_assignment_filter += [
+    "*_linux.h",
+    "*_linux.cc",
+    "*_linux_unittest.h",
+    "*_linux_unittest.cc",
+    "*\blinux/*",
+  ]
+}
+if (!is_android) {
+  sources_assignment_filter += [
+    "*_android.h",
+    "*_android.cc",
+    "*_android_unittest.h",
+    "*_android_unittest.cc",
+    "*\bandroid/*",
+  ]
+}
+if (!is_chromeos) {
+  sources_assignment_filter += [
+    "*_chromeos.h",
+    "*_chromeos.cc",
+    "*_chromeos_unittest.h",
+    "*_chromeos_unittest.cc",
+    "*\bchromeos/*",
+  ]
+}
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+
+# Actually save this list.
+#
+# These patterns are executed for every file in the source tree of every run.
+# Therefore, adding more patterns slows down the build for everybody. We should
+# only add automatic patterns for configurations affecting hundreds of files
+# across many projects in the tree.
+#
+# Therefore, we only add rules to this list corresponding to platforms on the
+# Chromium waterfall. This is not for non-officially-supported platforms
+# (FreeBSD, etc.), toolkits (X11, GTK, etc.), or features. For these cases,
+# write a conditional in the target to remove the file(s) from the list when
+# your platform/toolkit/feature doesn't apply.
+set_sources_assignment_filter(sources_assignment_filter)
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add or remove to the settings
+# here as needed.

+# Holds all configs used for running the compiler.
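+# Individual targets can subtract entries they don't want, e.g.:
+#
+#   configs -= [ "//build/config/compiler:no_rtti" ]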
+default_compiler_configs = [
+  "//build/config:feature_flags",
+  "//build/config/compiler:compiler",
+  "//build/config/compiler:clang_stackrealign",
+  "//build/config/compiler:compiler_arm_fpu",
+  "//build/config/compiler:chromium_code",
+  "//build/config/compiler:default_include_dirs",
+  "//build/config/compiler:default_optimization",
+  "//build/config/compiler:default_symbols",
+  "//build/config/compiler:no_rtti",
+  "//build/config/compiler:runtime_library",
+  "//build/config/sanitizers:default_sanitizer_flags",
+]
+if (is_win) {
+  default_compiler_configs += [
+    "//build/config/win:lean_and_mean",
+    "//build/config/win:nominmax",
+    "//build/config/win:unicode",
+    "//build/config/win:winver",
+    "//build/config/win:vs_code_analysis",
+  ]
+}
+if (current_os == "winrt_81" || current_os == "winrt_81_phone" ||
+    current_os == "winrt_10") {
+  default_compiler_configs += [ "//build/config/win:target_winrt" ]
+}
+if (is_posix) {
+  default_compiler_configs += [
+    "//build/config/gcc:no_exceptions",
+    "//build/config/gcc:symbol_visibility_hidden",
+  ]
+}
+
+if (is_android) {
+  default_compiler_configs +=
+      [ "//build/config/android:default_cygprofile_instrumentation" ]
+}
+
+if (is_clang && !is_nacl) {
+  default_compiler_configs += [
+    "//build/config/clang:find_bad_constructs",
+    "//build/config/clang:extra_warnings",
+  ]
+}
+
+# Debug/release-related defines.
+if (is_debug) {
+  default_compiler_configs += [ "//build/config:debug" ]
+} else {
+  default_compiler_configs += [ "//build/config:release" ]
+}
+
+# Static libraries and source sets use only the compiler ones.
+set_defaults("static_library") {
+  configs = default_compiler_configs
+}
+set_defaults("source_set") {
+  configs = default_compiler_configs
+}
+
+# Compute the set of configs common to all linked targets (shared libraries,
+# loadable modules, executables) to avoid duplication below.
+if (is_win) {
+  # Many targets remove these configs, so they are not contained within
+  # //build/config:executable_config for easy removal.
+  _linker_configs = [
+    "//build/config/win:default_incremental_linking",
+
+    # Default to console-mode apps. Most of our targets are tests and such
+    # that shouldn't use the windows subsystem.
+    "//build/config/win:console",
+  ]
+} else if (is_mac || is_ios) {
+  _linker_configs = [ "//build/config/mac:strip_all" ]
+} else {
+  _linker_configs = []
+}
+
+# Executable defaults.
+default_executable_configs = default_compiler_configs + [
+                               "//build/config:default_libs",
+                               "//build/config:executable_config",
+                             ] + _linker_configs
+set_defaults("executable") {
+  configs = default_executable_configs
+}
+
+# Shared library and loadable module defaults (also for components in component
+# mode).
+default_shared_library_configs = default_compiler_configs + [
+                                   "//build/config:default_libs",
+                                   "//build/config:shared_library_config",
+                                 ] + _linker_configs
+if (is_android) {
+  # Strip native JNI exports from shared libraries by default. Binaries that
+  # want this can remove this config.
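+  # (configs -= [ "//build/config/android:hide_native_jni_exports" ])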
+  default_shared_library_configs +=
+      [ "//build/config/android:hide_native_jni_exports" ]
+}
+set_defaults("shared_library") {
+  configs = default_shared_library_configs
+}
+set_defaults("loadable_module") {
+  configs = default_shared_library_configs
+}
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# Defines a component, which equates to a shared_library when
+# is_component_build == true and a static_library otherwise.
+#
+# Use static libraries for the static build rather than source sets because
+# many of our test binaries link many large dependencies but often don't
+# use large portions of them. The static libraries are much more efficient to
+# link in this situation since only the necessary object files are linked.
+#
+# The invoker can override the type of the target in the non-component-build
+# case by setting static_component_type to either "source_set" or
+# "static_library". If unset, the default will be used.
+template("component") {
+  if (is_component_build) {
+    _component_mode = "shared_library"
+  } else if (defined(invoker.static_component_type)) {
+    assert(invoker.static_component_type == "static_library" ||
+           invoker.static_component_type == "source_set")
+    _component_mode = invoker.static_component_type
+  } else if (is_android || !defined(invoker.sources)) {
+    # When there are no sources defined, use a source set to avoid creating
+    # an empty static library (which generally doesn't work).
+    #
+    # When we changed components to default from source sets to static
+    # libraries, an Android benchmark regressed slightly
+    # (https://crbug.com/619593). We don't have a good theory on why this
+    # might be since theoretically it should be the same. It could be
+    # something as silly as random code locality luck.
+    #
+    # There seems to be no build-time performance hit to using source sets on
+    # Android (the normal reason for defaulting to static libraries), so we
+    # make source sets the default on Android.
+    #
+    # If it's been a long time since this was added and you're skeptical,
+    # please feel free to remove the Android exception and see if any
+    # benchmarks obviously regress. If not, it would be great to standardize
+    # with the rest of the platforms.
+    _component_mode = "source_set"
+  } else {
+    _component_mode = "static_library"
+  }
+  target(_component_mode, target_name) {
+    # Explicitly forward visibility, implicitly forward everything else.
+    # Forwarding "*" doesn't recurse into nested scopes (to avoid copying all
+    # globals into each template invocation), so won't pick up file-scoped
+    # variables. Normally this isn't too bad, but visibility is commonly
+    # defined at the file scope. Explicitly forwarding visibility and then
+    # excluding it from the "*" set works around this problem.
+    # See http://crbug.com/594610
+    forward_variables_from(invoker, [ "visibility" ])
+    forward_variables_from(invoker, "*", [ "visibility" ])
+
+    # All shared libraries must have the sanitizer deps to properly link in
+    # asan mode (this target will be empty in other cases).
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ "//build/config/sanitizers:deps" ]
+  }
+}
+
+# Component defaults
+set_defaults("component") {
+  if (is_component_build) {
+    configs = default_shared_library_configs
+    if (is_android) {
+      configs -= [ "//build/config/android:hide_native_jni_exports" ]
+    }
+  } else {
+    configs = default_compiler_configs
+  }
+}
diff --git a/build/config/OWNERS b/build/config/OWNERS
new file mode 100644
index 00000000000..bd53091f46a
--- /dev/null
+++ b/build/config/OWNERS
@@ -0,0 +1,6 @@
+brettw@chromium.org
+dpranke@chromium.org
+scottmg@chromium.org
+
+per-file BUILDCONFIG.gn=brettw@chromium.org
+per-file BUILDCONFIG.gn=set noparent
diff --git a/build/config/allocator.gni b/build/config/allocator.gni
new file mode 100644
index 00000000000..84f5eaf5590
--- /dev/null
+++ b/build/config/allocator.gni
@@ -0,0 +1,53 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+# Temporarily disable tcmalloc on arm64 linux to get rid of compilation errors.
+if (is_android || current_cpu == "mipsel" || is_mac || is_ios || is_asan ||
+    is_lsan || is_tsan || is_msan || is_win || is_syzyasan ||
+    (is_linux && target_cpu == "arm64")) {
+  _default_allocator = "none"
+} else {
+  _default_allocator = "none"
+}
+
+# The debug CRT on Windows has some debug features that are incompatible with
+# the shim. NaCl in particular does seem to link some binaries statically
+# against the debug CRT with "is_nacl=false".
+if ((is_linux || is_android || (is_win && !is_component_build && !is_debug)) &&
+    !is_syzyasan && !is_asan && !is_lsan && !is_tsan && !is_msan) {
+  _default_use_experimental_allocator_shim = true
+} else {
+  _default_use_experimental_allocator_shim = false
+}
+
+declare_args() {
+  # Memory allocator to use. Set to "none" to use default allocator.
+  use_allocator = _default_allocator
+
+  # TODO(primiano): this should just become the default without having a flag,
+  # but we need to get there first. http://crbug.com/550886 .
+  # Causes all the allocations to be routed via allocator_shim.cc.
+  use_experimental_allocator_shim = _default_use_experimental_allocator_shim
+}
+
+if (is_nacl) {
+  # Turn off the build flag for NaCl builds to minimize confusion, as NaCl
+  # doesn't support the heap shim.
+  use_experimental_allocator_shim = false
+}
+
+assert(use_allocator == "none" || use_allocator == "tcmalloc")
+
+assert(!is_win || use_allocator == "none", "Tcmalloc doesn't work on Windows.")
+
+assert(
+    !use_experimental_allocator_shim || is_linux || is_android || is_win,
+    "use_experimental_allocator_shim supported only on Linux, Android and Windows targets")
+
+if (is_win && use_experimental_allocator_shim) {
+  assert(!is_component_build,
+         "The allocator shim doesn't work for the component build on Windows.")
+}
diff --git a/build/config/android/BUILD.gn b/build/config/android/BUILD.gn
new file mode 100644
index 00000000000..8343861ab10
--- /dev/null
+++ b/build/config/android/BUILD.gn
@@ -0,0 +1,197 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+assert(is_android)
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets.
+# It is here to separate out the logic that is Android-only.
+config("compiler") {
+  cflags = [
+    "-ffunction-sections",
+    "-fno-short-enums",
+  ]
+  defines = [
+    "ANDROID",
+
+    # The NDK has these things, but doesn't define the constants to say that
+    # it does. Define them here instead.
+    "HAVE_SYS_UIO_H",
+
+    # Forces full rebuilds on NDK rolls.
+    "ANDROID_NDK_VERSION=${android_ndk_version}",
+  ]
+
+  if (is_clang) {
+    rebased_android_toolchain_root =
+        rebase_path(android_toolchain_root, root_build_dir)
+    assert(rebased_android_toolchain_root != "")  # Mark as used.
+    if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+      cflags += [
+        # TODO(gordanac) Enable integrated-as.
+        "-fno-integrated-as",
+        "-B${rebased_android_toolchain_root}/bin",  # Else /usr/bin/as gets picked up.
+      ]
+    }
+  } else {
+    # Clang doesn't support these flags.
+    cflags += [ "-finline-limit=64" ]
+  }
+
+  ldflags = [
+    "-Wl,--build-id=sha1",
+    "-Wl,--no-undefined",
+
+    # Don't allow visible symbols from libgcc or libc++ to be
+    # re-exported.
+    "-Wl,--exclude-libs=libgcc.a",
+    "-Wl,--exclude-libs=libc++_static.a",
+
+    # Don't allow visible symbols from libraries that contain
+    # assembly code with symbols that aren't hidden properly.
+    # http://crbug.com/448386
+    "-Wl,--exclude-libs=libvpx_assembly_arm.a",
+  ]
+
+  if (is_clang) {
+    if (current_cpu == "arm") {
+      abi_target = "arm-linux-androideabi"
+    } else if (current_cpu == "x86") {
+      abi_target = "i686-linux-androideabi"
+    } else if (current_cpu == "arm64") {
+      # Placeholder for arm64 support, not tested.
+      # TODO: Enable clang support for Android Arm64. http://crbug.com/539781
+      abi_target = "aarch64-linux-android"
+    } else if (current_cpu == "x64") {
+      # Placeholder for x64 support, not tested.
+      # TODO: Enable clang support for Android x64. http://crbug.com/539781
+      abi_target = "x86_64-linux-androideabi"
+    } else if (current_cpu == "mipsel") {
+      abi_target = "mipsel-linux-android"
+    } else if (current_cpu == "mips64el") {
+      # Placeholder for mips64 support, not tested.
+      abi_target = "mips64el-linux-androideabi"
+    } else {
+      assert(false, "Architecture not supported")
+    }
+    cflags += [ "--target=$abi_target" ]
+    ldflags += [ "--target=$abi_target" ]
+  }
+
+  # Assign any flags set for the C compiler to asmflags so that they are sent
+  # to the assembler.
+  asmflags = cflags
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Android-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  # NOTE: The libc++ header include paths below are specified in cflags_cc
+  # rather than include_dirs because they need to come after include_dirs.
+  # Think of them like system headers, but don't use '-isystem' because the
+  # arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
+  # strange errors. The include ordering here is important; change with
+  # caution.
+  cflags_cc = [
+    "-isystem" +
+        rebase_path("$android_libcpp_root/libcxx/include", root_build_dir),
+    "-isystem" + rebase_path(
+            "$android_ndk_root/sources/cxx-stl/llvm-libc++abi/libcxxabi/include",
+            root_build_dir),
+    "-isystem" +
+        rebase_path("$android_ndk_root/sources/android/support/include",
+                    root_build_dir),
+  ]
+
+  defines = [ "__GNU_SOURCE=1" ]  # Necessary for clone().
+  ldflags = [ "-nostdlib" ]
+  lib_dirs = [ android_libcpp_lib_dir ]
+
+  # The libc++ runtime library (must come first).
+  # ASan needs to dynamically link to libc++ even in static builds so
+  # that it can interpose operator new.
+  if (is_component_build || is_asan) {
+    libs = [ "c++_shared" ]
+  } else {
+    libs = [ "c++_static" ]
+  }
+
+  # Manually link the libgcc.a that the cross compiler uses. This is
+  # absolute because the linker will look inside the sysroot if it's not.
+  libs += [
+    rebase_path(android_libgcc_file),
+    "c",
+  ]
+
+  # Clang with libc++ does not require an explicit atomic library reference.
+  if (!is_clang) {
+    libs += [ "atomic" ]
+  }
+
+  if (is_clang) {
+    # Work around incompatibilities between bionic and clang headers.
+    defines += [
+      "__compiler_offsetof=__builtin_offsetof",
+      "nan=__builtin_nan",
+    ]
+  }
+
+  # TODO(jdduke) Re-enable on mips after resolving linking
+  # issues with libc++ (crbug.com/456380).
+  if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+    ldflags += [ "-Wl,--warn-shared-textrel" ]
+  }
+}
+
+config("executable_config") {
+  cflags = [ "-fPIE" ]
+  asmflags = [ "-fPIE" ]
+  ldflags = [ "-pie" ]
+}
+
+config("hide_native_jni_exports") {
+  ldflags = [ "-Wl,--version-script=" +
+              rebase_path("//build/android/android_no_jni_exports.lst") ]
+}
+
+# Instrumentation -------------------------------------------------------------
+#
+# The BUILDCONFIG file sets the "default_cygprofile_instrumentation" config on
+# targets by default. You can override whether the cygprofile instrumentation
+# is used on a per-target basis:
+#
+#   configs -= [ "//build/config/android:default_cygprofile_instrumentation" ]
+#   configs += [ "//build/config/android:no_cygprofile_instrumentation" ]

+config("default_cygprofile_instrumentation") {
+  if (use_order_profiling) {
+    configs = [ ":cygprofile_instrumentation" ]
+  } else {
+    configs = [ ":no_cygprofile_instrumentation" ]
+  }
+}
+
+config("cygprofile_instrumentation") {
+  defines = [ "CYGPROFILE_INSTRUMENTATION=1" ]
+  cflags = [ "-finstrument-functions" ]
+
+  if (!is_clang) {
+    cflags += [
+      # Allow mmx intrinsics to inline, so that the compiler can expand the
+      # intrinsics.
+      "-finstrument-functions-exclude-file-list=mmintrin.h",
+
+      # Avoid errors with current NDK:
+      # "third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/include/arm_neon.h:3426:3: error: argument must be a constant"
+      "-finstrument-functions-exclude-file-list=arm_neon.h",
+    ]
+  }
+}
+
+config("no_cygprofile_instrumentation") {
+}
diff --git a/build/config/android/OWNERS b/build/config/android/OWNERS
new file mode 100644
index 00000000000..39f58e9eb69
--- /dev/null
+++ b/build/config/android/OWNERS
@@ -0,0 +1 @@
+agrieve@chromium.org
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
new file mode 100644
index 00000000000..75ab921e272
--- /dev/null
+++ b/build/config/android/config.gni
@@ -0,0 +1,325 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains common system config stuff for the Android build.
+
+if (is_android) {
+  assert(rebase_path("//", root_build_dir) == "../../",
+         "Android output directory must be nested 2 levels within src/ " +
+             "(e.g.: out-gn/Debug). http://crbug.com/412935")
http://crbug.com/412935") + + import("//build_overrides/build.gni") + + has_chrome_android_internal = + exec_script("//build/dir_exists.py", + [ rebase_path("//clank", root_build_dir) ], + "string") == "True" + + if (has_chrome_android_internal) { + import("//clank/config.gni") + } + + if (!defined(extra_chrome_shared_library_configs)) { + extra_chrome_shared_library_configs = [] + } + + if (!defined(default_android_ndk_root)) { + default_android_ndk_major_version = "10" + default_android_ndk_root = "//third_party/android_tools/ndk" + default_android_ndk_version = "r10e" + } else { + assert(defined(default_android_ndk_version)) + } + + # The same version of lint should be used for building all targets, + # and these args ensure that the upstream version of lint is used for + # both upstream and downstream targets. + lint_android_sdk_root = "//third_party/android_tools/sdk" + lint_android_sdk_version = "23" + + if (!defined(default_android_sdk_root)) { + # Reuse the lint variables to ensure that the lint version gets updated + # when the upstream SDK version is updated. + default_android_sdk_root = lint_android_sdk_root + default_android_sdk_version = lint_android_sdk_version + default_android_sdk_build_tools_version = "23.0.1" + } + + if (!defined(default_android_keystore_path)) { + default_android_keystore_path = + "//build/android/ant/chromium-debug.keystore" + default_android_keystore_name = "chromiumdebugkey" + default_android_keystore_password = "chromium" + } + + if (!defined(google_play_services_library)) { + google_play_services_library = + "//third_party/android_tools:google_play_services_default_java" + } + + if (!defined(google_play_services_resources)) { + google_play_services_resources = + "//third_party/android_tools:google_play_services_default_resources" + } + + webview_public_framework_jar = + "//third_party/android_platform/webview/frameworks_6.0.jar" + if (!defined(webview_framework_jar)) { + webview_framework_jar = webview_public_framework_jar + } + + declare_args() { + android_ndk_major_version = default_android_ndk_major_version + android_ndk_root = default_android_ndk_root + android_ndk_version = default_android_ndk_version + + android_sdk_root = default_android_sdk_root + android_sdk_version = default_android_sdk_version + android_sdk_build_tools_version = default_android_sdk_build_tools_version + + # Libc++ library directory. Override to use a custom libc++ binary. + android_libcpp_lib_dir = "" + + # Android versionCode for android_apk()s that don't expclitly set one. + android_default_version_code = "1" + + # Android versionName for android_apk()s that don't expclitly set one. + android_default_version_name = "Developer Build" + + # The path to the keystore to use for signing builds. + android_keystore_path = default_android_keystore_path + + # The name of the keystore to use for signing builds. + android_keystore_name = default_android_keystore_name + + # The password for the keystore to use for signing builds. + android_keystore_password = default_android_keystore_password + + # Set to true to run findbugs on JAR targets. + run_findbugs = false + + # Set to true to enable verbose findbugs logging. This does nothing if + # run_findbugs is false. + findbugs_verbose = false + + # Enables verbose proguard output (summaries and unfiltered output). + proguard_verbose = false + + # Java debug on Android. Having this on enables multidexing, and turning it + # off will enable proguard. 
+    is_java_debug = is_debug
+
+    # Set to true to enable the Errorprone compiler.
+    use_errorprone_java_compiler = false
+
+    # Enables EMMA Java code coverage. Instruments classes during build to
+    # produce .ec files during runtime.
+    emma_coverage = false
+
+    # EMMA filter string consisting of a list of inclusion/exclusion patterns
+    # separated with whitespace and/or comma. Only has effect if
+    # emma_coverage==true.
+    emma_filter = ""
+
+    # Disables process isolation when building _incremental targets.
+    # Required for Android M+ due to SELinux policies (stronger sandboxing).
+    disable_incremental_isolated_processes = false
+
+    # Speed up incremental compiles by compiling only changed files.
+    enable_incremental_javac = false
+
+    # Adds instrumentation to each function. Writes a file with the order
+    # that functions are called at startup.
+    use_order_profiling = false
+  }
+
+  # We need a second declare_args block to make sure we are using the
+  # overridden value of the arguments set above.
+  declare_args() {
+    # Speed up dexing using dx --incremental.
+    enable_incremental_dx = is_java_debug
+  }
+
+  # Neither of these should ever be used for release builds since they are
+  # somewhat experimental and dx --incremental is known to not produce
+  # byte-for-byte identical output.
+  assert(!(enable_incremental_dx && !is_java_debug))
+  assert(!(enable_incremental_javac && !is_java_debug))
+
+  # Host stuff -----------------------------------------------------------------
+
+  # Defines the name the Android build gives to the current host CPU
+  # architecture, which is different than the names GN uses.
+  if (host_cpu == "x64") {
+    android_host_arch = "x86_64"
+  } else if (host_cpu == "x86") {
+    android_host_arch = "x86"
+  } else {
+    assert(false, "Need Android toolchain support for your build CPU arch.")
+  }
+
+  # Defines the name the Android build gives to the current host operating
+  # system, which is different than the names GN uses.
+  if (host_os == "linux") {
+    android_host_os = "linux"
+  } else if (host_os == "mac") {
+    android_host_os = "darwin"
+  } else {
+    assert(false, "Need Android toolchain support for your build OS.")
+  }
+
+  # Directories and files ------------------------------------------------------
+  #
+  # We define many of the dirs strings here for each output architecture
+  # (rather than just the current one) since these are needed by the Android
+  # toolchain file to define toolchains for all possible targets in one pass.
+
+  android_sdk = "${android_sdk_root}/platforms/android-${android_sdk_version}"
+
+  # Path to the Android NDK and SDK.
+  android_ndk_include_dir = "$android_ndk_root/usr/include"
+
+  android_sdk_tools = "${android_sdk_root}/tools"
+  android_sdk_build_tools =
+      "${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
+
+  # Path to the SDK's android.jar.
+  android_sdk_jar = "$android_sdk/android.jar"
+
+  zipalign_path = "$android_sdk_build_tools/zipalign"
+
+  # Subdirectories inside android_ndk_root that contain the sysroot for the
+  # associated platform.
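+  # (e.g. "platforms/android-16/arch-arm", as assembled below.)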
+  _android_api_level = 16
+  x86_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-x86"
+  arm_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-arm"
+  mips_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-mips"
+  _android64_api_level = 21
+  x86_64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-x86_64"
+  arm64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-arm64"
+  mips64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-mips64"
+
+  # Toolchain root directory for each build. The actual binaries are inside
+  # a "bin" directory inside of these.
+  _android_toolchain_version = "4.9"
+  x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  x86_64_android_toolchain_root = "$android_ndk_root/toolchains/x86_64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm64_android_toolchain_root = "$android_ndk_root/toolchains/aarch64-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips64_android_toolchain_root = "$android_ndk_root/toolchains/mips64el-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+
+  # Location of libgcc. This is only needed for the current GN toolchain, so we
+  # only need to define the current one, rather than one for every platform
+  # like the toolchain roots.
+ if (current_cpu == "x86") { + android_prebuilt_arch = "android-x86" + _binary_prefix = "i686-linux-android" + android_toolchain_root = "$x86_android_toolchain_root" + android_libgcc_file = "$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_version}/libgcc.a" + } else if (current_cpu == "arm") { + android_prebuilt_arch = "android-arm" + _binary_prefix = "arm-linux-androideabi" + android_toolchain_root = "$arm_android_toolchain_root" + android_libgcc_file = "$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_version}/libgcc.a" + } else if (current_cpu == "mipsel") { + android_prebuilt_arch = "android-mips" + _binary_prefix = "mipsel-linux-android" + android_toolchain_root = "$mips_android_toolchain_root" + android_libgcc_file = "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_version}/libgcc.a" + } else if (current_cpu == "x64") { + android_prebuilt_arch = "android-x86_64" + _binary_prefix = "x86_64-linux-android" + android_toolchain_root = "$x86_64_android_toolchain_root" + android_libgcc_file = "$android_toolchain_root/lib/gcc/x86_64-linux-android/${_android_toolchain_version}/libgcc.a" + } else if (current_cpu == "arm64") { + android_prebuilt_arch = "android-arm64" + _binary_prefix = "aarch64-linux-android" + android_toolchain_root = "$arm64_android_toolchain_root" + android_libgcc_file = "$android_toolchain_root/lib/gcc/aarch64-linux-android/${_android_toolchain_version}/libgcc.a" + } else if (current_cpu == "mips64el") { + android_prebuilt_arch = "android-mips64" + _binary_prefix = "mips64el-linux-android" + android_toolchain_root = "$mips64_android_toolchain_root" + android_libgcc_file = "$android_toolchain_root/lib/gcc/mips64el-linux-android/${_android_toolchain_version}/libgcc.a" + } else { + assert(false, "Need android libgcc support for your target arch.") + } + + android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-" + android_readelf = "${android_tool_prefix}readelf" + android_objcopy = "${android_tool_prefix}objcopy" + android_gdbserver = + "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver" + + # Toolchain stuff ------------------------------------------------------------ + + android_libcpp_root = "$android_ndk_root/sources/cxx-stl/llvm-libc++" + + # ABI ------------------------------------------------------------------------ + + if (current_cpu == "x86") { + android_app_abi = "x86" + } else if (current_cpu == "arm") { + import("//build/config/arm.gni") + if (arm_version < 7) { + android_app_abi = "armeabi" + } else { + android_app_abi = "armeabi-v7a" + } + } else if (current_cpu == "mipsel") { + android_app_abi = "mips" + } else if (current_cpu == "x64") { + android_app_abi = "x86_64" + } else if (current_cpu == "arm64") { + android_app_abi = "arm64-v8a" + } else if (current_cpu == "mips64el") { + android_app_abi = "mips64" + } else { + assert(false, "Unknown Android ABI: " + current_cpu) + } + + if (android_libcpp_lib_dir == "") { + android_libcpp_lib_dir = "${android_libcpp_root}/libs/${android_app_abi}" + } + + # Secondary ABI ------------------------------------------------------------- + if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el") { + android_64bit_target_cpu = true + } else if (target_cpu == "arm" || target_cpu == "x86" || + target_cpu == "mipsel") { + android_64bit_target_cpu = false + } else { + assert(false, "Unknown target CPU: $target_cpu") + } + + # Intentionally do not define android_app_secondary_abi_cpu and + # 
+  # android_app_secondary_abi for 32-bit target_cpu, since they are not used.
+  if (target_cpu == "arm64") {
+    android_secondary_abi_cpu = "arm"
+    android_app_secondary_abi = "armeabi-v7a"
+  } else if (target_cpu == "x64") {
+    android_secondary_abi_cpu = "x86"
+    android_app_secondary_abi = "x86"
+  } else if (target_cpu == "mips64el") {
+    android_secondary_abi_cpu = "mipsel"
+    android_app_secondary_abi = "mips"
+  }
+
+  if (defined(android_secondary_abi_cpu)) {
+    if (is_clang) {
+      android_secondary_abi_toolchain =
+          "//build/toolchain/android:clang_${android_secondary_abi_cpu}"
+    } else {
+      android_secondary_abi_toolchain =
+          "//build/toolchain/android:${android_secondary_abi_cpu}"
+    }
+  }
+}
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
new file mode 100644
index 00000000000..c3ce1ba65b6
--- /dev/null
+++ b/build/config/android/internal_rules.gni
@@ -0,0 +1,2637 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+assert(is_android)
+
+# These identify targets that have .build_config files (except for android_apk,
+# java_binary, resource_rewriter, since we never need to depend on these).
+_java_target_whitelist = [
+  "*:*_java",
+  "*:*_javalib",
+  "*:*_java_*",  # e.g. java_test_support
+  "*:java",
+  "*:junit",
+  "*:junit_*",
+  "*:*_junit_*",
+  "*:*javatests",
+  "*:*_assets",
+  "*android*:assets",
+  "*:*_apk_*resources",
+  "*android*:resources",
+  "*:*_resources",
+  "*:*_grd",
+  "*:*locale_paks",
+
+  # TODO(agrieve): Rename targets below to match above patterns.
+  "*android_webview/glue:glue",
+  "//build/android/pylib/device/commands:chromium_commands",
+  "//build/android/rezip:rezip",
+  "//chrome/test/android/cast_emulator:cast_emulator",
+  "//components/cronet/android:cronet_api",
+  "//components/cronet/android:cronet_javadoc_classpath",
+  "//components/policy:app_restrictions_resources",
+  "//device/battery/android:battery_monitor_android",
+  "//device/vibration/android:vibration_manager_android",
+  "//mojo/public/java:bindings",
+  "//mojo/public/java:system",
+  "//third_party/android_tools:emma_device",
+  "//third_party/cardboard-java:cardboard-java",
+  "//third_party/custom_tabs_client:custom_tabs_client_shared_lib",
+  "//third_party/custom_tabs_client:custom_tabs_support_lib",
+  "//third_party/errorprone:chromium_errorprone",
+  "//third_party/haha:haha",
+  "//third_party/junit:hamcrest",
+  "//third_party/netty4:netty_all",
+  "//third_party/netty-tcnative:netty-tcnative",
+  "//third_party/robolectric:android-all-4.3_r2-robolectric-0",
+  "//third_party/robolectric:android-all-5.0.0_r2-robolectric-1",
+  "//third_party/robolectric:json-20080701",
+  "//third_party/robolectric:tagsoup-1.2",
+  "//third_party/robolectric:shadows-core-3.0-18",
+  "//third_party/robolectric:shadows-core-3.0-21",
+  "//third_party/robolectric:shadows-multidex-3.0",
+]
+
+# Targets that match the whitelist but are not actually java targets.
+_java_target_blacklist = [
+  "//chrome:packed_extra_resources",
+  "//chrome:packed_resources",
+  "//remoting/android:remoting_android_raw_resources",
+  "*:*_unpack_aar",
+]
+
+# Write the target's .build_config file. This is a json file that contains a
+# dictionary of information about how to build this target (things that
+# require knowledge about this target's dependencies and cannot be calculated
+# at gn-time).
+# There is a special syntax to add a value in that dictionary to an
+# action/action_foreach's args:
+#   --python-arg=@FileArg($rebased_build_config_path:key0:key1)
+# At runtime, such an arg will be replaced by the value in the build_config.
+# See build/android/gyp/write_build_config.py and
+# build/android/gyp/util/build_utils.py:ExpandFileArgs
+template("write_build_config") {
+  type = invoker.type
+
+  # Don't need to enforce naming scheme for these targets since we never
+  # consider them in dependency chains.
+  if (type != "android_apk" && type != "java_binary" &&
+      type != "resource_rewriter") {
+    set_sources_assignment_filter(_java_target_whitelist)
+    _parent_invoker = invoker.invoker
+    _target_label =
+        get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain")
+    sources = [
+      _target_label,
+    ]
+    if (sources != []) {
+      set_sources_assignment_filter(_java_target_blacklist)
+      sources = []
+      sources = [
+        _target_label,
+      ]
+      if (sources != []) {
+        assert(false, "Invalid java target name: $_target_label")
+      }
+    }
+    sources = []
+  }
+
+  action(target_name) {
+    set_sources_assignment_filter([])
+    build_config = invoker.build_config
+
+    assert(type == "android_apk" || type == "java_library" ||
+           type == "android_resources" || type == "deps_dex" ||
+           type == "android_assets" || type == "resource_rewriter" ||
+           type == "java_binary" || type == "group" || type == "java_prebuilt")
+
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "testonly",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+
+    script = "//build/android/gyp/write_build_config.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = []
+
+    _deps_configs = []
+    if (defined(invoker.possible_config_deps)) {
+      foreach(_possible_dep, invoker.possible_config_deps) {
+        set_sources_assignment_filter(_java_target_whitelist)
+        _target_label = get_label_info(_possible_dep, "label_no_toolchain")
+        sources = [
+          _target_label,
+        ]
+        if (sources == []) {
+          set_sources_assignment_filter(_java_target_blacklist)
+          sources = []
+          sources = [
+            _target_label,
+          ]
+          if (sources != []) {
+            deps += [ "${_target_label}__build_config" ]
+            _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir")
+            _dep_name = get_label_info(_possible_dep, "name")
+            _deps_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
+          }
+        }
+        sources = []
+      }
+      set_sources_assignment_filter([])
+    }
+    _rebased_deps_configs = rebase_path(_deps_configs, root_build_dir)
+
+    outputs = [
+      depfile,
+      build_config,
+    ]
+
+    args = [
+      "--type",
+      type,
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--deps-configs=$_rebased_deps_configs",
+      "--build-config",
+      rebase_path(build_config, root_build_dir),
+    ]
+
+    is_java = type == "java_library" || type == "java_binary" ||
+              type == "java_prebuilt"
+    is_apk = type == "android_apk"
+    is_android_assets = type == "android_assets"
+    is_android_resources = type == "android_resources"
+    is_deps_dex = type == "deps_dex"
+    is_group = type == "group"
+
+    supports_android = is_apk || is_android_assets || is_android_resources ||
+                       is_deps_dex || is_group ||
+                       (is_java && defined(invoker.supports_android) &&
+                        invoker.supports_android)
+    requires_android =
+        is_apk || is_android_assets || is_android_resources || is_deps_dex ||
+        (is_java && defined(invoker.requires_android) &&
+         invoker.requires_android)
+
+    assert(!requires_android || supports_android,
+           "requires_android requires supports_android")
+
+    # Mark these variables as used.
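+    # (GN complains about variables that are assigned but never read; putting
+    # each one in a trivially-true assert reads it with no other effect.)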
+    assert(is_java || true)
+    assert(is_apk || true)
+    assert(is_android_resources || true)
+    assert(is_deps_dex || true)
+    assert(is_group || true)
+
+    if (is_java || is_apk) {
+      args += [
+        "--jar-path",
+        rebase_path(invoker.jar_path, root_build_dir),
+      ]
+    }
+
+    if (is_apk || is_deps_dex || (is_java && supports_android)) {
+      args += [
+        "--dex-path",
+        rebase_path(invoker.dex_path, root_build_dir),
+      ]
+    }
+    if (supports_android) {
+      args += [ "--supports-android" ]
+    }
+    if (requires_android) {
+      args += [ "--requires-android" ]
+    }
+    if (defined(invoker.bypass_platform_checks) &&
+        invoker.bypass_platform_checks) {
+      args += [ "--bypass-platform-checks" ]
+    }
+
+    if (defined(invoker.apk_under_test)) {
+      deps += [ "${invoker.apk_under_test}__build_config" ]
+      apk_under_test_gen_dir =
+          get_label_info(invoker.apk_under_test, "target_gen_dir")
+      apk_under_test_name = get_label_info(invoker.apk_under_test, "name")
+      apk_under_test_config =
+          "$apk_under_test_gen_dir/$apk_under_test_name.build_config"
+      args += [
+        "--tested-apk-config",
+        rebase_path(apk_under_test_config, root_build_dir),
+      ]
+    }
+
+    if (is_android_assets) {
+      if (defined(invoker.asset_sources)) {
+        _rebased_asset_sources =
+            rebase_path(invoker.asset_sources, root_build_dir)
+        args += [ "--asset-sources=$_rebased_asset_sources" ]
+      }
+      if (defined(invoker.asset_renaming_sources)) {
+        _rebased_asset_renaming_sources =
+            rebase_path(invoker.asset_renaming_sources, root_build_dir)
+        args += [ "--asset-renaming-sources=$_rebased_asset_renaming_sources" ]
+
+        # These are zip paths, so no need to rebase.
+        args += [ "--asset-renaming-destinations=${invoker.asset_renaming_destinations}" ]
+      }
+      if (defined(invoker.disable_compression) &&
+          invoker.disable_compression) {
+        args += [ "--disable-asset-compression" ]
+      }
+    }
+
+    if (is_android_resources || is_apk) {
+      assert(defined(invoker.resources_zip))
+      args += [
+        "--resources-zip",
+        rebase_path(invoker.resources_zip, root_build_dir),
+      ]
+      if (defined(invoker.android_manifest)) {
+        inputs += [ invoker.android_manifest ]
+        args += [
+          "--android-manifest",
+          rebase_path(invoker.android_manifest, root_build_dir),
+        ]
+      } else {
+        assert(!is_apk, "apk build configs require an android_manifest")
+      }
+      if (defined(invoker.custom_package)) {
+        args += [
+          "--package-name",
+          invoker.custom_package,
+        ]
+      }
+      if (defined(invoker.r_text)) {
+        args += [
+          "--r-text",
+          rebase_path(invoker.r_text, root_build_dir),
+        ]
+      }
+      if (defined(invoker.is_locale_resource) && invoker.is_locale_resource) {
+        args += [ "--is-locale-resource" ]
+      }
+      if (defined(invoker.has_alternative_locale_resource) &&
+          invoker.has_alternative_locale_resource) {
+        args += [ "--has-alternative-locale-resource" ]
+      }
+    }
+
+    if (is_android_resources && defined(invoker.resource_dirs)) {
+      resource_dirs = rebase_path(invoker.resource_dirs, root_build_dir)
+      args += [ "--resource-dirs=$resource_dirs" ]
+    }
+
+    if (is_apk) {
+      if (defined(invoker.shared_libraries_runtime_deps_file)) {
+        # Don't list shared_libraries_runtime_deps_file as an input in order
+        # to avoid having to depend on the runtime_deps target. See comment
+        # in rules.gni for why we do this.
+ args += [
+ "--shared-libraries-runtime-deps",
+ rebase_path(invoker.shared_libraries_runtime_deps_file,
+ root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
+ args += [
+ "--proguard-enabled",
+ "--proguard-info",
+ rebase_path(invoker.proguard_info, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.apk_path)) {
+ _rebased_apk_path = rebase_path(invoker.apk_path, root_build_dir)
+ _rebased_incremental_apk_path =
+ rebase_path(invoker.incremental_apk_path, root_build_dir)
+ _rebased_incremental_install_script_path =
+ rebase_path(invoker.incremental_install_script_path, root_build_dir)
+ args += [ "--apk-path=$_rebased_apk_path" ]
+ args += [ "--incremental-apk-path=$_rebased_incremental_apk_path" ]
+ args += [ "--incremental-install-script-path=$_rebased_incremental_install_script_path" ]
+ }
+ }
+
+ if (defined(invoker.java_sources_file)) {
+ args += [
+ "--java-sources-file",
+ rebase_path(invoker.java_sources_file, root_build_dir),
+ ]
+ }
+ if (defined(invoker.srcjar)) {
+ args += [
+ "--srcjar",
+ rebase_path(invoker.srcjar, root_build_dir),
+ ]
+ }
+ if (defined(invoker.bundled_srcjars)) {
+ _rebased_bundled_srcjars =
+ rebase_path(invoker.bundled_srcjars, root_build_dir)
+ args += [ "--bundled-srcjars=$_rebased_bundled_srcjars" ]
+ }
+ if (defined(invoker.input_jars_paths)) {
+ _rebased_input_jars_paths =
+ rebase_path(invoker.input_jars_paths, root_build_dir)
+ args += [ "--extra-classpath-jars=$_rebased_input_jars_paths" ]
+ }
+ if (current_toolchain != default_toolchain) {
+ # This has to be a build-time error rather than a GN assert because many
+ # packages have a mix of java and non-java targets. For example, the
+ # following would fail even though nothing depends on :bar(//baz):
+ #
+ # shared_library("foo") {
+ # }
+ #
+ # android_library("bar") {
+ # deps = [ ":foo(//baz)" ]
+ # assert(current_toolchain == default_toolchain)
+ # }
+ _msg = [
+ "Tried to build an Android target in a non-default toolchain.",
+ "target: " + get_label_info(":$target_name", "label_with_toolchain"),
+ "default_toolchain: $default_toolchain",
+ ]
+ args += [ "--fail=$_msg" ]
+ }
+ }
+}
+
+template("copy_ex") {
+ set_sources_assignment_filter([])
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "deps",
+ "inputs",
+ "sources",
+ "testonly",
+ "visibility",
+ ])
+ if (!defined(sources)) {
+ sources = []
+ }
+ script = "//build/android/gyp/copy_ex.py"
+
+ depfile = "$target_gen_dir/$target_name.d"
+ outputs = [
+ depfile,
+ ]
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--dest",
+ rebase_path(invoker.dest, root_build_dir),
+ ]
+ rebased_sources = rebase_path(sources, root_build_dir)
+ args += [ "--files=$rebased_sources" ]
+
+ if (defined(invoker.clear_dir) && invoker.clear_dir) {
+ args += [ "--clear" ]
+ }
+
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+
+ if (defined(invoker.renaming_sources) &&
+ defined(invoker.renaming_destinations)) {
+ sources += invoker.renaming_sources
+ rebased_renaming_sources =
+ rebase_path(invoker.renaming_sources, root_build_dir)
+ args += [ "--renaming-sources=$rebased_renaming_sources" ]
+
+ renaming_destinations = invoker.renaming_destinations
+ args += [ "--renaming-destinations=$renaming_destinations" ]
+ }
+ }
+}
+
+template("device_isolate") {
+ testonly = true
+ _runtime_deps_file = "$target_gen_dir/$target_name.runtime_deps"
+ group("${target_name}__write_deps") {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ "deps",
+ 
"public_deps", + ]) + write_runtime_deps = _runtime_deps_file + } + + action(target_name) { + script = "//build/android/gn/generate_isolate.py" + inputs = [ + _runtime_deps_file, + ] + outputs = [ + invoker.output, + ] + args = [ + "--output-directory=.", + "--out-file", + rebase_path(invoker.output, root_build_dir), + "--runtime-deps-file", + rebase_path(_runtime_deps_file, root_build_dir), + "--apply-android-filters", + "--apply-device-filters", + ] + _assert_no_odd_data = + defined(invoker.assert_no_odd_data) && invoker.assert_no_odd_data + if (_assert_no_odd_data) { + args += [ "--assert-no-odd-data" ] + } + if (defined(invoker.command)) { + _isolate_dir = get_path_info(invoker.output, "dir") + args += [ + "--command", + rebase_path(invoker.command, _isolate_dir), + ] + } + deps = [ + ":${invoker.target_name}__write_deps", + ] + } +} + +# Generates a script in the output bin directory which runs the test +# target using the test runner script in build/android/test_runner.py. +template("test_runner_script") { + testonly = true + _test_name = invoker.test_name + _test_type = invoker.test_type + _incremental_install = + defined(invoker.incremental_install) && invoker.incremental_install + + action(target_name) { + forward_variables_from(invoker, + [ + "data_deps", + "deps", + ]) + if (!defined(deps)) { + deps = [] + } + if (!defined(data_deps)) { + data_deps = [] + } + + script = "//build/android/gyp/create_test_runner_script.py" + depfile = "$target_gen_dir/$target_name.d" + + data_deps += [ "//build/android:test_runner_py" ] + data = [] + + test_runner_args = [ + _test_type, + "--output-directory", + rebase_path(root_build_dir, root_build_dir), + ] + + # apk_target is not used for native executable tests + # (e.g. breakpad_unittests). + if (defined(invoker.apk_target)) { + assert(!defined(invoker.executable_dist_dir)) + deps += [ "${invoker.apk_target}__build_config" ] + _apk_build_config = + get_label_info(invoker.apk_target, "target_gen_dir") + "/" + + get_label_info(invoker.apk_target, "name") + ".build_config" + _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir) + assert(_rebased_apk_build_config != "") # Mark as used. 
+ } else if (_test_type == "gtest") { + assert( + defined(invoker.executable_dist_dir), + "Must define either apk_target or executable_dist_dir for test_runner_script()") + test_runner_args += [ + "--executable-dist-dir", + rebase_path(invoker.executable_dist_dir, root_build_dir), + ] + } + + if (_test_type == "gtest") { + assert(defined(invoker.test_suite)) + test_runner_args += [ + "--suite", + invoker.test_suite, + ] + } else if (_test_type == "instrumentation") { + _test_apk = "@FileArg($_rebased_apk_build_config:deps_info:apk_path)" + if (_incremental_install) { + _test_apk = "@FileArg($_rebased_apk_build_config:deps_info:incremental_apk_path)" + } + test_runner_args += [ "--test-apk=$_test_apk" ] + if (defined(invoker.apk_under_test)) { + deps += [ "${invoker.apk_under_test}__build_config" ] + _apk_under_test_build_config = + get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" + + get_label_info(invoker.apk_under_test, "name") + ".build_config" + _rebased_apk_under_test_build_config = + rebase_path(_apk_under_test_build_config, root_build_dir) + _apk_under_test = + "@FileArg($_rebased_apk_under_test_build_config:deps_info:apk_path)" + if (_incremental_install) { + _apk_under_test = "@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path)" + } + test_runner_args += [ "--apk-under-test=$_apk_under_test" ] + } + if (emma_coverage) { + # Set a default coverage output directory (can be overridden by user + # passing the same flag). + test_runner_args += [ + "--coverage-dir", + rebase_path("$root_out_dir/coverage", root_build_dir), + ] + } + } else if (_test_type == "junit") { + assert(defined(invoker.test_suite)) + test_runner_args += [ + "--test-suite", + invoker.test_suite, + ] + } else { + assert(false, "Invalid test type: $_test_type.") + } + + if (defined(invoker.additional_apks)) { + foreach(additional_apk, invoker.additional_apks) { + deps += [ "${additional_apk}__build_config" ] + _build_config = get_label_info(additional_apk, "target_gen_dir") + "/" + + get_label_info(additional_apk, "name") + ".build_config" + _rebased_build_config = rebase_path(_build_config, root_build_dir) + test_runner_args += [ + "--additional-apk", + "@FileArg($_rebased_build_config:deps_info:apk_path)", + ] + } + } + if (defined(invoker.isolate_file)) { + data += [ invoker.isolate_file ] + test_runner_args += [ + "--isolate-file-path", + rebase_path(invoker.isolate_file, root_build_dir), + ] + } + if (defined(invoker.shard_timeout)) { + test_runner_args += [ "--shard-timeout=${invoker.shard_timeout}" ] + } + if (_incremental_install) { + test_runner_args += [ + "--test-apk-incremental-install-script", + "@FileArg($_rebased_apk_build_config:deps_info:incremental_install_script_path)", + ] + if (defined(invoker.apk_under_test)) { + test_runner_args += [ + "--apk-under-test-incremental-install-script", + "@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_install_script_path)", + ] + } + test_runner_args += [ "--fast-local-dev" ] + } + if (is_asan) { + test_runner_args += [ "--tool=asan" ] + } + + generated_script = "$root_build_dir/bin/run_${_test_name}" + outputs = [ + depfile, + generated_script, + ] + data += [ generated_script ] + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--script-output-path", + rebase_path(generated_script, root_build_dir), + ] + args += test_runner_args + } +} + +if (enable_java_templates) { + import("//build/config/zip.gni") + import("//third_party/ijar/ijar.gni") + + rebased_android_sdk = 
rebase_path(android_sdk, root_build_dir) + rebased_android_sdk_build_tools = + rebase_path(android_sdk_build_tools, root_build_dir) + + android_sdk_jar = "$android_sdk/android.jar" + rebased_android_sdk_jar = rebase_path(android_sdk_jar, root_build_dir) + android_default_aapt_path = "$rebased_android_sdk_build_tools/aapt" + + android_configuration_name = "Release" + if (is_debug) { + android_configuration_name = "Debug" + } + + template("android_lint") { + action(target_name) { + forward_variables_from(invoker, + [ + "deps", + "data_deps", + "public_deps", + "testonly", + ]) + if (!defined(deps)) { + deps = [] + } + _cache_dir = "$root_build_dir/android_lint_cache" + _result_path = "$target_gen_dir/$target_name/result.xml" + _config_path = "$target_gen_dir/$target_name/config.xml" + _suppressions_file = "//build/android/lint/suppressions.xml" + _platform_xml_path = + "${android_sdk_root}/platform-tools/api/api-versions.xml" + _rebased_lint_android_sdk_root = + rebase_path(lint_android_sdk_root, root_build_dir) + + script = "//build/android/gyp/lint.py" + depfile = "$target_gen_dir/$target_name.d" + inputs = [ + _platform_xml_path, + _suppressions_file, + invoker.android_manifest, + ] + + outputs = [ + depfile, + _config_path, + _result_path, + ] + + args = [ + "--lint-path=$_rebased_lint_android_sdk_root/tools/lint", + "--cache-dir", + rebase_path(_cache_dir, root_build_dir), + "--platform-xml-path", + rebase_path(_platform_xml_path, root_build_dir), + "--android-sdk-version=${lint_android_sdk_version}", + "--depfile", + rebase_path(depfile, root_build_dir), + "--config-path", + rebase_path(_suppressions_file, root_build_dir), + "--manifest-path", + rebase_path(invoker.android_manifest, root_build_dir), + "--product-dir=.", + "--processed-config-path", + rebase_path(_config_path, root_build_dir), + "--result-path", + rebase_path(_result_path, root_build_dir), + "--enable", + ] + + if (defined(invoker.create_cache) && invoker.create_cache) { + args += [ + "--create-cache", + "--silent", + ] + } else { + inputs += invoker.java_files + inputs += [ + invoker.jar_path, + invoker.build_config, + ] + if (invoker.java_files != []) { + inputs += [ invoker.java_sources_file ] + _rebased_java_sources_file = + rebase_path(invoker.java_sources_file, root_build_dir) + args += [ "--java-sources-file=$_rebased_java_sources_file" ] + } + deps += [ "//build/android:prepare_android_lint_cache" ] + + _rebased_build_config = + rebase_path(invoker.build_config, root_build_dir) + args += [ + "--jar-path", + rebase_path(invoker.jar_path, root_build_dir), + "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)", + "--resource-sources=@FileArg($_rebased_build_config:deps_info:owned_resources_dirs)", + "--resource-sources=@FileArg($_rebased_build_config:deps_info:owned_resources_zips)", + ] + } + } + } + + template("proguard") { + action(target_name) { + set_sources_assignment_filter([]) + forward_variables_from(invoker, + [ + "deps", + "data_deps", + "public_deps", + "testonly", + ]) + script = "//build/android/gyp/proguard.py" + if (defined(invoker.proguard_jar_path)) { + _proguard_jar_path = invoker.proguard_jar_path + } else { + _proguard_jar_path = "//third_party/proguard/lib/proguard.jar" + } + _output_jar_path = invoker.output_jar_path + inputs = [ + _proguard_jar_path, + ] + if (defined(invoker.alternative_android_sdk_jar)) { + inputs += [ invoker.alternative_android_sdk_jar ] + _rebased_android_sdk_jar = + rebase_path(invoker.alternative_android_sdk_jar) + } else { + inputs += [ 
android_sdk_jar ]
+ _rebased_android_sdk_jar = rebased_android_sdk_jar
+ }
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+ depfile = "${target_gen_dir}/${target_name}.d"
+ outputs = [
+ depfile,
+ _output_jar_path,
+ "$_output_jar_path.dump",
+ "$_output_jar_path.seeds",
+ "$_output_jar_path.mapping",
+ "$_output_jar_path.usage",
+ ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--proguard-path",
+ rebase_path(_proguard_jar_path, root_build_dir),
+ "--output-path",
+ rebase_path(_output_jar_path, root_build_dir),
+ "--classpath",
+ _rebased_android_sdk_jar,
+ ]
+ if (proguard_verbose) {
+ args += [ "--verbose" ]
+ }
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+ if (defined(invoker.proguard_jar_path)) {
+ # We assume that if we are using a different ProGuard, this new version
+ # can handle the 'dangerous' optimizations.
+ args += [ "--enable-dangerous-optimizations" ]
+ }
+ }
+ }
+
+ template("findbugs") {
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ script = "//build/android/findbugs_diff.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ _result_path = "$target_gen_dir/$target_name/result.xml"
+ _exclusions_file = "//build/android/findbugs_filter/findbugs_exclude.xml"
+
+ _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+
+ inputs = [
+ "//build/android/pylib/utils/findbugs.py",
+ _exclusions_file,
+ invoker.jar_path,
+ invoker.build_config,
+ ]
+
+ outputs = [
+ depfile,
+ _result_path,
+ ]
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--exclude",
+ rebase_path(_exclusions_file, root_build_dir),
+ "--auxclasspath-gyp",
+ "@FileArg($_rebased_build_config:javac:classpath)",
+ "--output-file",
+ rebase_path(_result_path, root_build_dir),
+ rebase_path(invoker.jar_path, root_build_dir),
+ ]
+
+ if (findbugs_verbose) {
+ args += [ "-vv" ]
+ }
+ }
+ }
+
+ # Generates a script in the output bin directory to run a java binary.
+ #
+ # Variables
+ # main_class: The class containing the program entry point.
+ # jar_path: The path to the jar to run.
+ # script_name: Name of the script to generate.
+ # build_config: Path to .build_config for the jar (contains classpath).
+ # wrapper_script_args: List of extra arguments to pass to the executable.
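+ #
+ # Example (illustrative sketch; the "foo" names and paths are hypothetical):
+ #   java_binary_script("foo_script") {
+ #     main_class = "org.example.FooMain"
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     jar_path = "$root_out_dir/lib.java/foo.jar"
+ #     script_name = "foo"
+ #   }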
+ # + template("java_binary_script") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + _main_class = invoker.main_class + _build_config = invoker.build_config + _jar_path = invoker.jar_path + _script_name = invoker.script_name + + action(target_name) { + script = "//build/android/gyp/create_java_binary_script.py" + depfile = "$target_gen_dir/$_script_name.d" + java_script = "$root_build_dir/bin/$_script_name" + inputs = [ + _build_config, + ] + outputs = [ + depfile, + java_script, + ] + forward_variables_from(invoker, [ "deps" ]) + _rebased_build_config = rebase_path(_build_config, root_build_dir) + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--output", + rebase_path(java_script, root_build_dir), + "--classpath=@FileArg($_rebased_build_config:deps_info:java:full_classpath)", + "--jar-path", + rebase_path(_jar_path, root_build_dir), + "--main-class", + _main_class, + ] + if (emma_coverage) { + args += [ + "--classpath", + rebase_path("//third_party/android_tools/sdk/tools/lib/emma.jar", + root_build_dir), + ] + args += [ "--noverify" ] + } + if (defined(invoker.wrapper_script_args)) { + args += [ "--" ] + invoker.wrapper_script_args + } + if (defined(invoker.bootclasspath)) { + inputs += [ invoker.bootclasspath ] + args += [ + "--bootclasspath", + rebase_path(invoker.bootclasspath, root_build_dir), + ] + } + } + } + + template("dex") { + set_sources_assignment_filter([]) + + _enable_multidex = + defined(invoker.enable_multidex) && invoker.enable_multidex + + if (_enable_multidex) { + _main_dex_list_path = invoker.output + ".main_dex_list" + _main_dex_list_target_name = "${target_name}__main_dex_list" + action(_main_dex_list_target_name) { + forward_variables_from(invoker, + [ + "deps", + "inputs", + "sources", + "testonly", + ]) + + script = "//build/android/gyp/main_dex_list.py" + depfile = "$target_gen_dir/$target_name.d" + + main_dex_rules = "//build/android/main_dex_classes.flags" + + outputs = [ + depfile, + _main_dex_list_path, + ] + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--android-sdk-tools", + rebased_android_sdk_build_tools, + "--main-dex-list-path", + rebase_path(_main_dex_list_path, root_build_dir), + "--main-dex-rules-path", + rebase_path(main_dex_rules, root_build_dir), + ] + + if (defined(invoker.args)) { + args += invoker.args + } + + if (defined(invoker.sources)) { + args += rebase_path(invoker.sources, root_build_dir) + } + } + } + + assert(defined(invoker.output)) + action(target_name) { + forward_variables_from(invoker, + [ + "deps", + "inputs", + "sources", + "testonly", + ]) + script = "//build/android/gyp/dex.py" + depfile = "$target_gen_dir/$target_name.d" + outputs = [ + depfile, + invoker.output, + ] + + rebased_output = rebase_path(invoker.output, root_build_dir) + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--android-sdk-tools", + rebased_android_sdk_build_tools, + "--dex-path", + rebased_output, + ] + + if (enable_incremental_dx) { + args += [ "--incremental" ] + } + + # EMMA requires --no-locals. 
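+ # (Presumably because EMMA's offline instrumentation rewrites bytecode in
+ # a way that invalidates local-variable debug info, so dx is told not to
+ # emit it.)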
+ if (emma_coverage) { + args += [ "--no-locals=1" ] + } + + if (_enable_multidex) { + args += [ + "--multi-dex", + "--main-dex-list-path", + rebase_path(_main_dex_list_path, root_build_dir), + ] + deps += [ ":${_main_dex_list_target_name}" ] + inputs += [ _main_dex_list_path ] + } + + if (defined(invoker.args)) { + args += invoker.args + } + + if (defined(invoker.sources)) { + args += rebase_path(invoker.sources, root_build_dir) + } + } + } + + template("process_java_prebuilt") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + assert(invoker.build_config != "") + _build_config = invoker.build_config + _rebased_build_config = rebase_path(_build_config, root_build_dir) + assert(_rebased_build_config != "" || true) # Mark used. + + _proguard_preprocess = + defined(invoker.proguard_preprocess) && invoker.proguard_preprocess + _input_jar_path = invoker.input_jar_path + _output_jar_path = invoker.output_jar_path + + _jar_excluded_patterns = [] + if (defined(invoker.jar_excluded_patterns)) { + _jar_excluded_patterns = invoker.jar_excluded_patterns + } + _strip_resource_classes = defined(invoker.strip_resource_classes) && + invoker.strip_resource_classes + _filter_jar = _jar_excluded_patterns != [] || _strip_resource_classes + + if (_filter_jar) { + _filter_target = "${target_name}__filter" + _output_jar_target = _filter_target + } + if (_proguard_preprocess) { + _proguard_target = "${target_name}__proguard_process" + _output_jar_target = _proguard_target + } + if (!_filter_jar && !_proguard_preprocess) { + _copy_target = "${target_name}__copy" + _output_jar_target = _copy_target + } + + if (_filter_jar) { + _filtered_jar_path = _output_jar_path + if (_proguard_preprocess) { + _filtered_jar_path = "$target_out_dir/$target_name-filtered.jar" + } + action(_filter_target) { + script = "//build/android/gyp/jar.py" + forward_variables_from(invoker, + [ + "deps", + "public_deps", + ]) + inputs = [ + _build_config, + _input_jar_path, + ] + outputs = [ + _filtered_jar_path, + ] + args = [ + "--input-jar", + rebase_path(_input_jar_path, root_build_dir), + "--jar-path", + rebase_path(_filtered_jar_path, root_build_dir), + "--excluded-classes=$_jar_excluded_patterns", + ] + if (_strip_resource_classes) { + args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ] + } + } + } + + if (_proguard_preprocess) { + _proguard_config_path = invoker.proguard_config + proguard(_proguard_target) { + if (_filter_jar) { + _proguard_input_jar = _filtered_jar_path + public_deps = [ + ":$_filter_target", + ] + } else { + _proguard_input_jar = _input_jar_path + public_deps = [] + } + if (defined(invoker.deps)) { + deps = invoker.deps + } + if (defined(invoker.public_deps)) { + public_deps += invoker.public_deps + } + inputs = [ + _build_config, + _proguard_config_path, + _proguard_input_jar, + ] + output_jar_path = _output_jar_path + + _rebased_input_paths = [ rebase_path(_input_jar_path, root_build_dir) ] + _rebased_proguard_configs = + [ rebase_path(_proguard_config_path, root_build_dir) ] + args = [ + "--input-paths=$_rebased_input_paths", + "--proguard-configs=$_rebased_proguard_configs", + "--classpath=@FileArg($_rebased_build_config:javac:classpath)", + ] + } + } else if (!_filter_jar) { + copy(_copy_target) { + forward_variables_from(invoker, + [ + "deps", + "public_deps", + ]) + sources = [ + _input_jar_path, + ] + outputs = [ + _output_jar_path, + ] + } + } + + group(target_name) { + forward_variables_from(invoker, + [ + "data_deps", 
+ "visibility",
+ ])
+ public_deps = [
+ ":$_output_jar_target",
+ ]
+ }
+ }
+
+ template("emma_instr") {
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+
+ _coverage_file = "$target_out_dir/${target_name}.em"
+ _source_dirs_listing_file = "$target_out_dir/${target_name}_sources.txt"
+ _emma_jar = "${android_sdk_root}/tools/lib/emma.jar"
+
+ script = "//build/android/gyp/emma_instr.py"
+ depfile = "${target_gen_dir}/${target_name}.d"
+ inputs = invoker.java_files + [
+ _emma_jar,
+ invoker.input_jar_path,
+ ]
+ outputs = [
+ depfile,
+ _coverage_file,
+ _source_dirs_listing_file,
+ invoker.output_jar_path,
+ ]
+ args = [
+ "instrument_jar",
+ "--input-path",
+ rebase_path(invoker.input_jar_path, root_build_dir),
+ "--output-path",
+ rebase_path(invoker.output_jar_path, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--coverage-file",
+ rebase_path(_coverage_file, root_build_dir),
+ "--sources-list-file",
+ rebase_path(_source_dirs_listing_file, root_build_dir),
+ "--src-root",
+ rebase_path("//", root_build_dir),
+ "--emma-jar",
+ rebase_path(_emma_jar, root_build_dir),
+ ]
+ _rebased_java_sources_file =
+ rebase_path(invoker.java_sources_file, root_build_dir)
+ args += [ "--java-sources-file=$_rebased_java_sources_file" ]
+
+ if (emma_filter != "") {
+ args += [
+ "--filter-string",
+ emma_filter,
+ ]
+ }
+ }
+ }
+
+ # Creates an unsigned .apk.
+ #
+ # Variables
+ # assets_build_config: Path to android_apk .build_config containing merged
+ # asset information.
+ # deps: Specifies the dependencies of this target.
+ # dex_path: Path to classes.dex file to include (optional).
+ # resource_packaged_apk_path: Path to .ap_ to use.
+ # output_apk_path: Output path for the generated .apk.
+ # native_lib_placeholders: List of placeholder filenames to add to the apk
+ # (optional).
+ # native_libs: List of native libraries.
+ # native_libs_filearg: @FileArg() of additional native libraries.
+ # write_asset_list: Adds an extra file to the assets, which contains a list of
+ # all other asset files.
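+ #
+ # Example (illustrative sketch; names and paths are hypothetical):
+ #   package_apk("foo_apk__package") {
+ #     resource_packaged_apk_path = "$target_out_dir/foo.ap_"
+ #     output_apk_path = "$target_out_dir/foo.unfinished.apk"
+ #     dex_path = "$target_out_dir/foo.dex.jar"
+ #     native_libs = []
+ #   }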
+ template("package_apk") { + action(target_name) { + forward_variables_from(invoker, + [ + "deps", + "public_deps", + "testonly", + ]) + _native_lib_placeholders = [] + if (defined(invoker.native_lib_placeholders)) { + _native_lib_placeholders = invoker.native_lib_placeholders + } + + script = "//build/android/gyp/apkbuilder.py" + depfile = "$target_gen_dir/$target_name.d" + data_deps = [ + "//tools/android/md5sum", + ] # Used when deploying APKs + + inputs = invoker.native_libs + [ invoker.resource_packaged_apk_path ] + if (defined(invoker.dex_path)) { + inputs += [ invoker.dex_path ] + } + + outputs = [ + depfile, + invoker.output_apk_path, + ] + + _rebased_resource_packaged_apk_path = + rebase_path(invoker.resource_packaged_apk_path, root_build_dir) + _rebased_packaged_apk_path = + rebase_path(invoker.output_apk_path, root_build_dir) + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--resource-apk=$_rebased_resource_packaged_apk_path", + "--output-apk=$_rebased_packaged_apk_path", + ] + if (defined(invoker.assets_build_config)) { + inputs += [ invoker.assets_build_config ] + _rebased_build_config = + rebase_path(invoker.assets_build_config, root_build_dir) + args += [ + "--assets=@FileArg($_rebased_build_config:assets)", + "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)", + ] + } + if (defined(invoker.write_asset_list) && invoker.write_asset_list) { + args += [ "--write-asset-list" ] + } + if (defined(invoker.dex_path)) { + _rebased_dex_path = rebase_path(invoker.dex_path, root_build_dir) + args += [ "--dex-file=$_rebased_dex_path" ] + } + if (invoker.native_libs != [] || defined(invoker.native_libs_filearg) || + _native_lib_placeholders != []) { + args += [ "--android-abi=$android_app_abi" ] + } + if (invoker.native_libs != []) { + _rebased_native_libs = rebase_path(invoker.native_libs, root_build_dir) + args += [ "--native-libs=$_rebased_native_libs" ] + } + if (defined(invoker.native_libs_filearg)) { + args += [ "--native-libs=${invoker.native_libs_filearg}" ] + } + if (_native_lib_placeholders != []) { + args += [ "--native-lib-placeholders=$_native_lib_placeholders" ] + } + if (defined(invoker.secondary_native_libs) && + invoker.secondary_native_libs != []) { + assert(defined(android_app_secondary_abi)) + inputs += invoker.secondary_native_libs + _secondary_native_libs = rebase_path(invoker.secondary_native_libs) + args += [ + "--secondary-native-libs=$_secondary_native_libs", + "--secondary-android-abi=$android_app_secondary_abi", + ] + } + + if (defined(invoker.emma_instrument) && invoker.emma_instrument) { + _emma_device_jar = "$android_sdk_root/tools/lib/emma_device.jar" + _rebased_emma_device_jar = rebase_path(_emma_device_jar, root_build_dir) + args += [ "--emma-device-jar=$_rebased_emma_device_jar" ] + } + + if (defined(invoker.uncompress_shared_libraries) && + invoker.uncompress_shared_libraries) { + args += [ "--uncompress-shared-libraries" ] + } + } + } + + # Signs & zipaligns an apk. + # + # Variables + # input_apk_path: Path of the .apk to be finalized. + # output_apk_path: Output path for the generated .apk. + # keystore_path: Path to keystore to use for signing. + # keystore_name: Key alias to use. + # keystore_password: Keystore password. + # rezip_apk: Whether to add crazy-linker alignment. 
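+ #
+ # Example (illustrative sketch; the keystore values are hypothetical):
+ #   finalize_apk("foo_apk__finalize") {
+ #     input_apk_path = "$target_out_dir/foo.unfinished.apk"
+ #     output_apk_path = "$root_build_dir/apks/Foo.apk"
+ #     keystore_path = "//build/android/ant/chromium-debug.keystore"
+ #     keystore_name = "chromiumdebugkey"
+ #     keystore_password = "chromium"
+ #   }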
+ template("finalize_apk") { + action(target_name) { + deps = [] + script = "//build/android/gyp/finalize_apk.py" + depfile = "$target_gen_dir/$target_name.d" + forward_variables_from(invoker, + [ + "deps", + "data_deps", + "public_deps", + "testonly", + ]) + + sources = [ + invoker.input_apk_path, + ] + inputs = [ + invoker.keystore_path, + ] + outputs = [ + depfile, + invoker.output_apk_path, + ] + data = [ + invoker.output_apk_path, + ] + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--zipalign-path", + rebase_path(zipalign_path, root_build_dir), + "--unsigned-apk-path", + rebase_path(invoker.input_apk_path, root_build_dir), + "--final-apk-path", + rebase_path(invoker.output_apk_path, root_build_dir), + "--key-path", + rebase_path(invoker.keystore_path, root_build_dir), + "--key-name", + invoker.keystore_name, + "--key-passwd", + invoker.keystore_password, + ] + if (defined(invoker.rezip_apk) && invoker.rezip_apk) { + deps += [ "//build/android/rezip" ] + _rezip_jar_path = "$root_build_dir/lib.java/rezip_apk.jar" + args += [ + "--load-library-from-zip=1", + "--rezip-apk-jar-path", + rebase_path(_rezip_jar_path, root_build_dir), + ] + } + + if (defined(invoker.page_align_shared_libraries) && + invoker.page_align_shared_libraries) { + args += [ "--page-align-shared-libraries" ] + } + } + } + + # Packages resources, assets, dex, and native libraries into an apk. Signs and + # zipaligns the apk. + template("create_apk") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + _android_manifest = invoker.android_manifest + _base_path = invoker.base_path + _final_apk_path = invoker.apk_path + _incremental_final_apk_path_helper = + process_file_template( + [ _final_apk_path ], + "{{source_dir}}/{{source_name_part}}_incremental.apk") + _incremental_final_apk_path = _incremental_final_apk_path_helper[0] + + if (defined(invoker.resources_zip)) { + _resources_zip = invoker.resources_zip + assert(_resources_zip != "") # Mark as used. + } + if (defined(invoker.dex_path)) { + _dex_path = invoker.dex_path + } + _load_library_from_apk = invoker.load_library_from_apk + + _deps = [] + if (defined(invoker.deps)) { + _deps = invoker.deps + } + _incremental_deps = [] + if (defined(invoker.incremental_deps)) { + _incremental_deps = invoker.incremental_deps + } + _native_libs = [] + if (defined(invoker.native_libs)) { + _native_libs = invoker.native_libs + } + _native_libs_even_when_incremental = [] + if (defined(invoker.native_libs_even_when_incremental)) { + _native_libs_even_when_incremental = + invoker.native_libs_even_when_incremental + } + + _version_code = invoker.version_code + _version_name = invoker.version_name + assert(_version_code != -1) # Mark as used. + assert(_version_name != "") # Mark as used. + + _base_apk_path = _base_path + ".apk_intermediates" + + _resource_packaged_apk_path = _base_apk_path + ".ap_" + _incremental_resource_packaged_apk_path = + _base_apk_path + "_incremental.ap_" + _packaged_apk_path = _base_apk_path + ".unfinished.apk" + _incremental_packaged_apk_path = + _base_apk_path + "_incremental.unfinished.apk" + _shared_resources = + defined(invoker.shared_resources) && invoker.shared_resources + assert(_shared_resources || true) # Mark as used. + _app_as_shared_lib = + defined(invoker.app_as_shared_lib) && invoker.app_as_shared_lib + assert(_app_as_shared_lib || true) # Mark as used. 
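+ # shared_resources and app_as_shared_lib are mutually exclusive modes;
+ # the assert below enforces that at most one of them is set.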
+ assert(!(_shared_resources && _app_as_shared_lib)) + + _keystore_path = invoker.keystore_path + _keystore_name = invoker.keystore_name + _keystore_password = invoker.keystore_password + + _split_densities = [] + if (defined(invoker.create_density_splits) && + invoker.create_density_splits) { + _split_densities = [ + "hdpi", + "xhdpi", + "xxhdpi", + "xxxhdpi", + "tvdpi", + ] + } + + _split_languages = [] + if (defined(invoker.language_splits)) { + _split_languages = invoker.language_splits + } + + template("package_resources_helper") { + action(target_name) { + deps = invoker.deps + + script = "//build/android/gyp/package_resources.py" + depfile = "${target_gen_dir}/${target_name}.d" + inputs = [ + invoker.android_manifest, + ] + if (defined(_resources_zip)) { + inputs += [ _resources_zip ] + } + outputs = [ + depfile, + invoker.resource_packaged_apk_path, + ] + + if (defined(invoker.android_aapt_path)) { + _android_aapt_path = invoker.android_aapt_path + } else { + _android_aapt_path = android_default_aapt_path + } + + if (defined(invoker.alternative_android_sdk_jar)) { + _rebased_android_sdk_jar = + rebase_path(invoker.alternative_android_sdk_jar) + } else { + _rebased_android_sdk_jar = rebased_android_sdk_jar + } + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--android-sdk-jar", + _rebased_android_sdk_jar, + "--aapt-path", + _android_aapt_path, + "--configuration-name=$android_configuration_name", + "--android-manifest", + rebase_path(invoker.android_manifest, root_build_dir), + "--version-code", + _version_code, + "--version-name", + _version_name, + "--apk-path", + rebase_path(invoker.resource_packaged_apk_path, root_build_dir), + ] + + if (defined(_resources_zip)) { + args += [ + "--resource-zips", + rebase_path(_resources_zip, root_build_dir), + ] + } + if (_shared_resources) { + args += [ "--shared-resources" ] + } + if (_app_as_shared_lib) { + args += [ "--app-as-shared-lib" ] + } + if (_split_densities != []) { + args += [ "--create-density-splits" ] + foreach(_density, _split_densities) { + outputs += [ "${invoker.resource_packaged_apk_path}_${_density}" ] + } + } + if (_split_languages != []) { + args += [ "--language-splits=$_split_languages" ] + foreach(_language, _split_languages) { + outputs += [ "${invoker.resource_packaged_apk_path}_${_language}" ] + } + } + if (defined(invoker.extensions_to_not_compress)) { + args += [ + "--no-compress", + invoker.extensions_to_not_compress, + ] + } + } + } + + _package_resources_target_name = "${target_name}__package_resources" + package_resources_helper(_package_resources_target_name) { + forward_variables_from(invoker, + [ + "alternative_android_sdk_jar", + "android_aapt_path", + "extensions_to_not_compress", + ]) + deps = _deps + android_manifest = _android_manifest + resource_packaged_apk_path = _resource_packaged_apk_path + } + + _generate_incremental_manifest_target_name = + "${target_name}_incremental_generate_manifest" + _incremental_android_manifest = + get_label_info(_generate_incremental_manifest_target_name, + "target_gen_dir") + "/AndroidManifest.xml" + action(_generate_incremental_manifest_target_name) { + deps = _incremental_deps + script = + "//build/android/incremental_install/generate_android_manifest.py" + depfile = "${target_gen_dir}/${target_name}.d" + inputs = [ + _android_manifest, + ] + outputs = [ + depfile, + _incremental_android_manifest, + ] + + _rebased_src_manifest = rebase_path(_android_manifest, root_build_dir) + _rebased_incremental_manifest = + 
rebase_path(_incremental_android_manifest, root_build_dir) + _rebased_depfile = rebase_path(depfile, root_build_dir) + args = [ + "--src-manifest=$_rebased_src_manifest", + "--out-manifest=$_rebased_incremental_manifest", + "--depfile=$_rebased_depfile", + ] + if (disable_incremental_isolated_processes) { + args += [ "--disable-isolated-processes" ] + } + } + + _incremental_package_resources_target_name = + "${target_name}_incremental__package_resources" + + # TODO(agrieve): See if we can speed up this step by swapping the manifest + # from the result of the main package_resources step. + package_resources_helper(_incremental_package_resources_target_name) { + forward_variables_from(invoker, + [ + "alternative_android_sdk_jar", + "android_aapt_path", + "extensions_to_not_compress", + ]) + deps = + _incremental_deps + [ ":$_generate_incremental_manifest_target_name" ] + android_manifest = _incremental_android_manifest + resource_packaged_apk_path = _incremental_resource_packaged_apk_path + } + + package_target = "${target_name}__package" + package_apk(package_target) { + forward_variables_from(invoker, + [ + "assets_build_config", + "emma_instrument", + "native_lib_placeholders", + "native_libs_filearg", + "secondary_native_libs", + "uncompress_shared_libraries", + "write_asset_list", + ]) + deps = _deps + [ ":${_package_resources_target_name}" ] + native_libs = _native_libs + _native_libs_even_when_incremental + + if (defined(_dex_path)) { + dex_path = _dex_path + } + + output_apk_path = _packaged_apk_path + resource_packaged_apk_path = _resource_packaged_apk_path + } + + _incremental_package_target = "${target_name}_incremental__package" + package_apk(_incremental_package_target) { + forward_variables_from(invoker, + [ + "assets_build_config", + "emma_instrument", + "secondary_native_libs", + "uncompress_shared_libraries", + ]) + _dex_target = "//build/android/incremental_install:bootstrap_java__dex" + deps = _incremental_deps + [ + ":${_incremental_package_resources_target_name}", + _dex_target, + ] + + if (defined(_dex_path)) { + dex_path = + get_label_info(_dex_target, "target_gen_dir") + "/bootstrap.dex" + } + + native_libs = _native_libs_even_when_incremental + + # http://crbug.com/384638 + _has_native_libs = + defined(invoker.native_libs_filearg) || _native_libs != [] + if (_has_native_libs && _native_libs_even_when_incremental == []) { + native_lib_placeholders = [ "libfix.crbug.384638.so" ] + } + + output_apk_path = _incremental_packaged_apk_path + resource_packaged_apk_path = _incremental_resource_packaged_apk_path + } + + _finalize_apk_rule_name = "${target_name}__finalize" + finalize_apk(_finalize_apk_rule_name) { + forward_variables_from(invoker, [ "page_align_shared_libraries" ]) + + input_apk_path = _packaged_apk_path + output_apk_path = _final_apk_path + keystore_path = _keystore_path + keystore_name = _keystore_name + keystore_password = _keystore_password + rezip_apk = _load_library_from_apk + + public_deps = [ + # Generator of the _packaged_apk_path this target takes as input. 
+ ":$package_target", + ] + } + + _incremental_finalize_apk_rule_name = "${target_name}_incremental__finalize" + finalize_apk(_incremental_finalize_apk_rule_name) { + input_apk_path = _incremental_packaged_apk_path + output_apk_path = _incremental_final_apk_path + keystore_path = _keystore_path + keystore_name = _keystore_name + keystore_password = _keystore_password + + public_deps = [ + ":$_incremental_package_target", + ] + } + + _split_deps = [] + + template("finalize_split") { + finalize_apk(target_name) { + _config = invoker.split_config + _type = invoker.split_type + input_apk_path = "${_resource_packaged_apk_path}_${_config}" + _output_paths = process_file_template( + [ _final_apk_path ], + "{{source_dir}}/{{source_name_part}}-${_type}-${_config}.apk") + output_apk_path = _output_paths[0] + keystore_path = _keystore_path + keystore_name = _keystore_name + keystore_password = _keystore_password + deps = [ + ":${_package_resources_target_name}", + ] + } + } + + foreach(_split, _split_densities) { + _split_rule = "${target_name}__finalize_${_split}_split" + finalize_split(_split_rule) { + split_type = "density" + split_config = _split + } + _split_deps += [ ":$_split_rule" ] + } + foreach(_split, _split_languages) { + _split_rule = "${target_name}__finalize_${_split}_split" + finalize_split(_split_rule) { + split_type = "lang" + split_config = _split + } + _split_deps += [ ":$_split_rule" ] + } + + group(target_name) { + public_deps = [ ":${_finalize_apk_rule_name}" ] + _split_deps + } + group("${target_name}_incremental") { + public_deps = [ ":${_incremental_finalize_apk_rule_name}" ] + _split_deps + } + } + + template("java_prebuilt_impl") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + _supports_android = + defined(invoker.supports_android) && invoker.supports_android + + assert(defined(invoker.jar_path)) + _base_path = "${target_gen_dir}/$target_name" + + # Jar files can be needed at runtime (by Robolectric tests or java binaries), + # so do not put them under gen/. 
+ _target_dir_name = get_label_info(":$target_name", "dir") + _jar_path = "$root_out_dir/lib.java$_target_dir_name/$target_name.jar" + _ijar_path = + "$root_out_dir/lib.java$_target_dir_name/$target_name.interface.jar" + _build_config = _base_path + ".build_config" + + if (_supports_android) { + _dex_path = _base_path + ".dex.jar" + } + _deps = [] + if (defined(invoker.deps)) { + _deps = invoker.deps + } + _jar_deps = [] + if (defined(invoker.jar_dep)) { + _jar_deps = [ invoker.jar_dep ] + } + + _template_name = target_name + + _build_config_target_name = "${_template_name}__build_config" + _process_jar_target_name = "${_template_name}__process_jar" + _ijar_target_name = "${_template_name}__ijar" + if (_supports_android) { + _dex_target_name = "${_template_name}__dex" + } + + write_build_config(_build_config_target_name) { + type = "java_prebuilt" + forward_variables_from(invoker, [ "input_jars_paths" ]) + supports_android = _supports_android + requires_android = + defined(invoker.requires_android) && invoker.requires_android + + if (defined(invoker.deps)) { + possible_config_deps = _deps + } + build_config = _build_config + jar_path = _jar_path + if (_supports_android) { + dex_path = _dex_path + } + } + + process_java_prebuilt(_process_jar_target_name) { + forward_variables_from(invoker, + [ + "jar_excluded_patterns", + "strip_resource_classes", + ]) + + visibility = [ + ":$_ijar_target_name", + ":$_template_name", + ] + if (_supports_android) { + visibility += [ ":$_dex_target_name" ] + } + + if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) { + proguard_preprocess = true + proguard_config = invoker.proguard_config + } + + build_config = _build_config + input_jar_path = invoker.jar_path + output_jar_path = _jar_path + + deps = [ ":$_build_config_target_name" ] + _deps + _jar_deps + } + + generate_interface_jar(_ijar_target_name) { + input_jar = _jar_path + output_jar = _ijar_path + deps = [ + ":$_process_jar_target_name", + ] + } + + if (_supports_android) { + dex(_dex_target_name) { + sources = [ + _jar_path, + ] + output = _dex_path + deps = [ ":$_process_jar_target_name" ] + _deps + _jar_deps + } + } + + if (defined(invoker.main_class)) { + _binary_script_target_name = "${_template_name}__java_binary_script" + java_binary_script(_binary_script_target_name) { + forward_variables_from(invoker, + [ + "bootclasspath", + "deps", + "main_class", + "wrapper_script_args", + ]) + if (!defined(deps)) { + deps = [] + } + build_config = _build_config + jar_path = _jar_path + script_name = _template_name + if (defined(invoker.wrapper_script_name)) { + script_name = invoker.wrapper_script_name + } + deps += [ ":$_build_config_target_name" ] + } + } + + group(target_name) { + forward_variables_from(invoker, [ "data_deps" ]) + public_deps = [ + ":$_ijar_target_name", + ":$_process_jar_target_name", + ] + if (_supports_android) { + public_deps += [ ":$_dex_target_name" ] + } + if (defined(invoker.main_class)) { + # Some targets use the generated script while building, so make it a dep + # rather than a data_dep. + public_deps += [ ":$_binary_script_target_name" ] + } + } + } + + # Compiles and jars a set of java files. + # + # Outputs: + # $jar_path.jar + # $jar_path.interface.jar + # + # Variables + # java_files: List of .java files to compile (same as exists in java_sources_file) + # java_sources_file: Path to file containing list of files to compile. + # chromium_code: If true, enable extra warnings. + # srcjar_deps: List of srcjar dependencies. 
The .java files contained in the
+ # dependencies' srcjar outputs will be compiled and added to the output jar.
+ # jar_path: Use this to explicitly set the output jar path. Defaults to
+ # "${target_gen_dir}/${target_name}.jar".
+ template("compile_java") {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+
+ assert(defined(invoker.build_config))
+ assert(defined(invoker.jar_path))
+
+ _build_config = invoker.build_config
+
+ _chromium_code = false
+ if (defined(invoker.chromium_code)) {
+ _chromium_code = invoker.chromium_code
+ }
+
+ _supports_android = true
+ if (defined(invoker.supports_android)) {
+ _supports_android = invoker.supports_android
+ }
+
+ _requires_android =
+ defined(invoker.requires_android) && invoker.requires_android
+
+ _enable_errorprone = use_errorprone_java_compiler
+ if (!_chromium_code) {
+ _enable_errorprone = false
+ } else if (defined(invoker.enable_errorprone)) {
+ _enable_errorprone = invoker.enable_errorprone
+ }
+
+ _provider_configurations = []
+ if (defined(invoker.provider_configurations)) {
+ _provider_configurations = invoker.provider_configurations
+ }
+
+ _processors = []
+ _enable_interface_jars_javac = true
+ if (defined(invoker.processors_javac)) {
+ _processors = invoker.processors_javac
+ _enable_interface_jars_javac = _processors == []
+ }
+
+ _processor_args = []
+ if (defined(invoker.processor_args_javac)) {
+ _processor_args = invoker.processor_args_javac
+ }
+
+ _additional_jar_files = []
+ if (defined(invoker.additional_jar_files)) {
+ _additional_jar_files = invoker.additional_jar_files
+ }
+
+ if (defined(invoker.enable_incremental_javac_override)) {
+ # Use invoker-specified override.
+ _enable_incremental_javac = invoker.enable_incremental_javac_override
+ } else {
+ # Default to build arg if not overridden.
+ _enable_incremental_javac = enable_incremental_javac
+ }
+
+ _manifest_entries = []
+ if (defined(invoker.manifest_entries)) {
+ _manifest_entries = invoker.manifest_entries
+ }
+
+ _srcjar_deps = []
+ if (defined(invoker.srcjar_deps)) {
+ _srcjar_deps += invoker.srcjar_deps
+ }
+
+ _java_srcjars = []
+ if (defined(invoker.srcjars)) {
+ _java_srcjars = invoker.srcjars
+ }
+ foreach(dep, _srcjar_deps) {
+ _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+ _dep_name = get_label_info(dep, "name")
+ _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+ }
+
+ # Mark srcjar_deps as used.
+ assert(_srcjar_deps == [] || true) + + _javac_target_name = "${target_name}__javac" + _process_prebuilt_target_name = "${target_name}__process_prebuilt" + _ijar_target_name = "${target_name}__ijar" + _final_target_name = target_name + + _final_jar_path = invoker.jar_path + _javac_jar_path = "$target_gen_dir/$target_name.javac.jar" + _process_prebuilt_jar_path = _final_jar_path + _final_ijar_path = get_path_info(_final_jar_path, "dir") + "/" + + get_path_info(_final_jar_path, "name") + ".interface.jar" + + _emma_instrument = defined(invoker.emma_instrument) && + invoker.emma_instrument && invoker.java_files != [] + if (_emma_instrument) { + _emma_instr_target_name = "${target_name}__emma_instr" + _process_prebuilt_jar_path = + "$target_gen_dir/$target_name.process_prebuilt.jar" + } + + _rebased_build_config = rebase_path(_build_config, root_build_dir) + _rebased_jar_path = rebase_path(_javac_jar_path, root_build_dir) + + action(_javac_target_name) { + script = "//build/android/gyp/javac.py" + depfile = "$target_gen_dir/$target_name.d" + deps = _srcjar_deps + if (defined(invoker.deps)) { + deps += invoker.deps + } + + outputs = [ + depfile, + _javac_jar_path, + _javac_jar_path + ".md5.stamp", + ] + sources = invoker.java_files + _java_srcjars + inputs = [ + _build_config, + ] + if (invoker.java_files != []) { + inputs += [ invoker.java_sources_file ] + } + + _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir) + _rebased_depfile = rebase_path(depfile, root_build_dir) + args = [ + "--depfile=$_rebased_depfile", + "--jar-path=$_rebased_jar_path", + "--java-srcjars=$_rebased_java_srcjars", + "--java-srcjars=@FileArg($_rebased_build_config:javac:srcjars)", + ] + if (_enable_interface_jars_javac) { + args += [ "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)" ] + } else { + args += + [ "--classpath=@FileArg($_rebased_build_config:javac:classpath)" ] + } + if (_enable_incremental_javac) { + args += [ "--incremental" ] + deps += [ "//third_party/jmake($default_toolchain)" ] + inputs += [ "$root_build_dir/bin/jmake" ] + outputs += [ "${_javac_jar_path}.pdb" ] + } + if (_requires_android) { + if (defined(invoker.alternative_android_sdk_ijar)) { + deps += [ invoker.alternative_android_sdk_ijar_dep ] + _android_sdk_ijar = invoker.alternative_android_sdk_ijar + } else { + deps += [ "//build/android:android_ijar" ] + _android_sdk_ijar = "$root_out_dir/lib.java/android.interface.jar" + } + inputs += [ _android_sdk_ijar ] + _rebased_android_sdk_ijar = + rebase_path(_android_sdk_ijar, root_build_dir) + args += [ "--bootclasspath=$_rebased_android_sdk_ijar" ] + } + if (_supports_android) { + args += [ "--java-version=1.7" ] + } + foreach(e, _manifest_entries) { + args += [ "--manifest-entry=" + e ] + } + if (_chromium_code) { + args += [ "--chromium-code=1" ] + } + if (_enable_errorprone) { + deps += [ "//third_party/errorprone:chromium_errorprone" ] + args += [ + "--use-errorprone-path", + "bin/chromium_errorprone", + ] + } + foreach(e, _provider_configurations) { + args += [ "--provider-configuration=" + rebase_path(e, root_build_dir) ] + } + foreach(e, _processors) { + args += [ "--processor=" + e ] + } + foreach(e, _processor_args) { + args += [ "--processor-arg=" + e ] + } + foreach(file_tuple, _additional_jar_files) { + # Each element is of length two, [ path_to_file, path_to_put_in_jar ] + inputs += [ file_tuple[0] ] + args += + [ "--additional-jar-file=" + file_tuple[0] + ":" + file_tuple[1] ] + } + if (invoker.java_files != []) { + args += [ "@" + 
rebase_path(invoker.java_sources_file, root_build_dir) ] + } + } + + process_java_prebuilt(_process_prebuilt_target_name) { + forward_variables_from(invoker, [ "jar_excluded_patterns" ]) + build_config = _build_config + input_jar_path = _javac_jar_path + output_jar_path = _process_prebuilt_jar_path + if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) { + proguard_preprocess = invoker.proguard_preprocess + proguard_config = invoker.proguard_config + } + deps = [ + ":$_javac_target_name", + ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + } + + if (_emma_instrument) { + emma_instr(_emma_instr_target_name) { + forward_variables_from(invoker, + [ + "java_files", + "java_sources_file", + ]) + + input_jar_path = _process_prebuilt_jar_path + output_jar_path = _final_jar_path + + deps = [ + ":$_process_prebuilt_target_name", + ] + } + } + + generate_interface_jar(_ijar_target_name) { + input_jar = _final_jar_path + output_jar = _final_ijar_path + if (_emma_instrument) { + deps = [ + ":$_emma_instr_target_name", + ] + } else { + deps = [ + ":$_process_prebuilt_target_name", + ] + } + } + + group(_final_target_name) { + forward_variables_from(invoker, [ "visibility" ]) + public_deps = [ + ":$_ijar_target_name", + ] + if (_emma_instrument) { + public_deps += [ ":$_emma_instr_target_name" ] + } else { + public_deps += [ ":$_process_prebuilt_target_name" ] + } + } + } + + template("java_library_impl") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + _accumulated_deps = [] + if (defined(invoker.deps)) { + _accumulated_deps = invoker.deps + } + + assert(defined(invoker.java_files) || defined(invoker.srcjars) || + defined(invoker.srcjar_deps)) + _base_path = "$target_gen_dir/$target_name" + + # Jar files can be needed at runtime (by Robolectric tests or java binaries), + # so do not put them under gen/. + _jar_name = target_name + if (defined(invoker.jar_name)) { + _jar_name = invoker.jar_name + } + target_dir_name = get_label_info(":$target_name", "dir") + _jar_path = "$root_out_dir/lib.java$target_dir_name/$_jar_name.jar" + if (defined(invoker.jar_path)) { + _jar_path = invoker.jar_path + } + _template_name = target_name + + _final_deps = [] + + _supports_android = + defined(invoker.supports_android) && invoker.supports_android + _requires_android = + defined(invoker.requires_android) && invoker.requires_android + assert(_requires_android || true) # Mark as used. + _android_manifest = "//build/android/AndroidManifest.xml" + if (defined(invoker.android_manifest)) { + _android_manifest = invoker.android_manifest + } + assert(_android_manifest != "") # Mark as used. + + if (defined(invoker.run_findbugs_override)) { + _run_findbugs = invoker.run_findbugs_override + } else { + _run_findbugs = run_findbugs # Default to build arg if not overridden. + } + assert(_run_findbugs || true) # Mark as used. + + # Don't enable coverage, lint, findbugs unless the target has some + # non-generated files. + if (defined(invoker.chromium_code)) { + _chromium_code = invoker.chromium_code + } else { + _chromium_code = defined(invoker.java_files) && invoker.java_files != [] + if (_chromium_code) { + # Make chromium_code = false be the default for targets within + # third_party which contain no chromium-namespaced java files. 
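+ # (set_sources_assignment_filter() is used below as a pattern matcher:
+ # assigning to "sources" drops any entry matching the filter, so an empty
+ # result means the pattern matched the target's directory.)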
+ set_sources_assignment_filter([ "*\bthird_party\b*" ]) + sources = [ + get_label_info(":$target_name", "dir"), + ] + if (sources == []) { + set_sources_assignment_filter([ "*\bchromium\b*" ]) + sources = invoker.java_files + _chromium_code = invoker.java_files != sources + } + set_sources_assignment_filter([]) + sources = [] + } + } + + _emma_never_instrument = !_chromium_code + if (defined(invoker.emma_never_instrument)) { + _emma_never_instrument = invoker.emma_never_instrument + } + assert(_emma_never_instrument || true) # Mark as used + _emma_instrument = emma_coverage && !_emma_never_instrument + + if (_supports_android) { + _dex_path = _base_path + ".dex.jar" + if (defined(invoker.dex_path)) { + _dex_path = invoker.dex_path + } + } + + _java_files = [] + if (defined(invoker.java_files)) { + _java_files += invoker.java_files + } + if (_java_files != []) { + _java_sources_file = "$_base_path.sources" + write_file(_java_sources_file, rebase_path(_java_files, root_build_dir)) + } + + # Define build_config_deps which will be a list of targets required to + # build the _build_config. + if (defined(invoker.override_build_config)) { + _build_config = invoker.override_build_config + } else { + _build_config = _base_path + ".build_config" + build_config_target_name = "${_template_name}__build_config" + + write_build_config(build_config_target_name) { + forward_variables_from(invoker, [ "input_jars_paths" ]) + if (defined(invoker.is_java_binary) && invoker.is_java_binary) { + type = "java_binary" + } else { + type = "java_library" + } + if (defined(invoker.deps)) { + possible_config_deps = invoker.deps + } + supports_android = _supports_android + requires_android = _requires_android + bypass_platform_checks = defined(invoker.bypass_platform_checks) && + invoker.bypass_platform_checks + + build_config = _build_config + jar_path = _jar_path + if (_supports_android) { + dex_path = _dex_path + } + if (_java_files != []) { + java_sources_file = _java_sources_file + } + + if (defined(invoker.srcjar_deps)) { + bundled_srcjars = [] + foreach(d, invoker.srcjar_deps) { + _dep_gen_dir = get_label_info(d, "target_gen_dir") + _dep_name = get_label_info(d, "name") + bundled_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ] + } + } + } + _accumulated_deps += [ ":$build_config_target_name" ] + } + + _srcjar_deps = [] + if (defined(invoker.srcjar_deps)) { + _srcjar_deps = invoker.srcjar_deps + } + + _srcjars = [] + if (defined(invoker.srcjars)) { + _srcjars = invoker.srcjars + } + + assert(_java_files != [] || _srcjar_deps != [] || _srcjars != []) + + _compile_java_target = "${_template_name}__compile_java" + _final_deps += [ ":$_compile_java_target" ] + compile_java(_compile_java_target) { + forward_variables_from(invoker, + [ + "additional_jar_files", + "alternative_android_sdk_ijar", + "alternative_android_sdk_ijar_dep", + "dist_jar_path", + "enable_errorprone", + "enable_incremental_javac_override", + "jar_excluded_patterns", + "manifest_entries", + "processors_javac", + "processor_args_javac", + "proguard_config", + "proguard_preprocess", + "provider_configurations", + ]) + jar_path = _jar_path + build_config = _build_config + java_files = _java_files + if (_java_files != []) { + java_sources_file = _java_sources_file + } + srcjar_deps = _srcjar_deps + srcjars = _srcjars + chromium_code = _chromium_code + supports_android = _supports_android + requires_android = _requires_android + emma_instrument = _emma_instrument + deps = _accumulated_deps + } + _accumulated_deps += [ ":$_compile_java_target" ] + 
assert(_accumulated_deps != []) # Mark used.
+
+ if (defined(invoker.main_class)) {
+ # Targets might use the generated script while building, so make it a dep
+ # rather than a data_dep.
+ _final_deps += [ ":${_template_name}__java_binary_script" ]
+ java_binary_script("${_template_name}__java_binary_script") {
+ forward_variables_from(invoker,
+ [
+ "bootclasspath",
+ "main_class",
+ "wrapper_script_args",
+ ])
+ build_config = _build_config
+ jar_path = _jar_path
+ script_name = _template_name
+ if (defined(invoker.wrapper_script_name)) {
+ script_name = invoker.wrapper_script_name
+ }
+ deps = _accumulated_deps
+ }
+ }
+
+ _has_lint_target = false
+ if (_supports_android) {
+ if (_chromium_code) {
+ _has_lint_target = true
+ android_lint("${_template_name}__lint") {
+ android_manifest = _android_manifest
+ build_config = _build_config
+ jar_path = _jar_path
+ java_files = _java_files
+ if (_java_files != []) {
+ java_sources_file = _java_sources_file
+ }
+ deps = _accumulated_deps
+ }
+
+ if (_run_findbugs) {
+ findbugs("${_template_name}__findbugs") {
+ build_config = _build_config
+ jar_path = _jar_path
+ deps = _accumulated_deps
+ }
+ }
+
+ # Use an intermediate group() as the data_deps target rather than the
+ # analysis targets themselves, in order to avoid lint artifacts showing
+ # up as runtime_deps (while still having lint run in parallel with other
+ # targets).
+ group("${_template_name}__analysis") {
+ public_deps = [
+ ":${_template_name}__lint",
+ ]
+ if (_run_findbugs) {
+ public_deps += [ ":${_template_name}__findbugs" ]
+ }
+ }
+ }
+
+ _final_deps += [ ":${_template_name}__dex" ]
+ dex("${_template_name}__dex") {
+ sources = [
+ _jar_path,
+ ]
+ output = _dex_path
+ deps = [
+ ":$_compile_java_target",
+ ]
+ }
+ }
+
+ group(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ "visibility",
+ ])
+ if (!defined(data_deps)) {
+ data_deps = []
+ }
+ public_deps = _final_deps
+ if (_has_lint_target) {
+ data_deps += [ ":${_template_name}__analysis" ]
+ }
+ }
+ }
+
+ # Runs process_resources.py
+ template("process_resources") {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+
+ zip_path = invoker.zip_path
+ srcjar_path = invoker.srcjar_path
+ r_text_path = invoker.r_text_path
+ build_config = invoker.build_config
+ android_manifest = invoker.android_manifest
+
+ non_constant_id = true
+ if (defined(invoker.generate_constant_ids) &&
+ invoker.generate_constant_ids) {
+ non_constant_id = false
+ }
+
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "visibility",
+ ])
+ script = "//build/android/gyp/process_resources.py"
+
+ depfile = "$target_gen_dir/$target_name.d"
+ outputs = [
+ depfile,
+ zip_path,
+ srcjar_path,
+ r_text_path,
+ ]
+
+ _all_resource_dirs = []
+ sources = []
+
+ if (defined(invoker.resource_dirs)) {
+ _all_resource_dirs += invoker.resource_dirs
+
+ # Speed up "gn gen" by short-circuiting the empty directory.
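+ # (find.py is invoked through exec_script(), which runs synchronously at
+ # GN-gen time, so skipping it for the common empty-res-dir case saves a
+ # process launch per target.)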
+ if (invoker.resource_dirs != [ "//build/android/ant/empty/res" ] && + invoker.resource_dirs != []) { + _sources_build_rel = + exec_script("//build/android/gyp/find.py", + rebase_path(invoker.resource_dirs, root_build_dir), + "list lines") + sources += rebase_path(_sources_build_rel, ".", root_build_dir) + } + } + + if (defined(invoker.generated_resource_dirs)) { + assert(defined(invoker.generated_resource_files)) + _all_resource_dirs += invoker.generated_resource_dirs + sources += invoker.generated_resource_files + } + + inputs = [ + build_config, + android_manifest, + ] + + _rebased_all_resource_dirs = + rebase_path(_all_resource_dirs, root_build_dir) + rebase_build_config = rebase_path(build_config, root_build_dir) + + if (defined(invoker.android_aapt_path)) { + _android_aapt_path = invoker.android_aapt_path + } else { + _android_aapt_path = android_default_aapt_path + } + + if (defined(invoker.alternative_android_sdk_jar)) { + _rebased_android_sdk_jar = + rebase_path(invoker.alternative_android_sdk_jar) + } else { + _rebased_android_sdk_jar = rebased_android_sdk_jar + } + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--android-sdk-jar", + _rebased_android_sdk_jar, + "--aapt-path", + _android_aapt_path, + "--android-manifest", + rebase_path(android_manifest, root_build_dir), + "--resource-dirs=$_rebased_all_resource_dirs", + "--srcjar-out", + rebase_path(srcjar_path, root_build_dir), + "--resource-zip-out", + rebase_path(zip_path, root_build_dir), + "--r-text-out", + rebase_path(r_text_path, root_build_dir), + "--dependencies-res-zips=@FileArg($rebase_build_config:resources:dependency_zips)", + "--extra-res-packages=@FileArg($rebase_build_config:resources:extra_package_names)", + "--extra-r-text-files=@FileArg($rebase_build_config:resources:extra_r_text_files)", + ] + + if (non_constant_id) { + args += [ "--non-constant-id" ] + } + + if (defined(invoker.custom_package)) { + args += [ + "--custom-package", + invoker.custom_package, + ] + } + + if (defined(invoker.v14_skip) && invoker.v14_skip) { + args += [ "--v14-skip" ] + } + + if (defined(invoker.shared_resources) && invoker.shared_resources) { + args += [ "--shared-resources" ] + } + + if (defined(invoker.app_as_shared_lib) && invoker.app_as_shared_lib) { + args += [ "--app-as-shared-lib" ] + } + + if (defined(invoker.include_all_resources) && + invoker.include_all_resources) { + args += [ "--include-all-resources" ] + } + + if (defined(invoker.all_resources_zip_path)) { + all_resources_zip = invoker.all_resources_zip_path + outputs += [ all_resources_zip ] + args += [ + "--all-resources-zip-out", + rebase_path(all_resources_zip, root_build_dir), + ] + } + + if (defined(invoker.proguard_file)) { + outputs += [ invoker.proguard_file ] + args += [ + "--proguard-file", + rebase_path(invoker.proguard_file, root_build_dir), + ] + } + + if (defined(invoker.args)) { + args += invoker.args + } + } + } + + # Produces a single .dex.jar out of a set of Java dependencies. 
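+  #
+  # A minimal usage sketch (the target and output names here are hypothetical,
+  # for illustration only):
+  #
+  #   deps_dex("foo_deps_dex") {
+  #     deps = [ ":foo_java" ]
+  #     dex_path = "$root_build_dir/foo.dex.jar"
+  #   }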
+ template("deps_dex") { + set_sources_assignment_filter([]) + build_config = "$target_gen_dir/${target_name}.build_config" + build_config_target_name = "${target_name}__build_config" + + write_build_config(build_config_target_name) { + forward_variables_from(invoker, [ "dex_path" ]) + if (defined(invoker.deps)) { + possible_config_deps = invoker.deps + } + type = "deps_dex" + build_config = build_config + } + + rebased_build_config = rebase_path(build_config, root_build_dir) + dex(target_name) { + inputs = [ + build_config, + ] + output = invoker.dex_path + dex_arg_key = "${rebased_build_config}:final_dex:dependency_dex_files" + args = [ "--inputs=@FileArg($dex_arg_key)" ] + if (defined(invoker.excluded_jars)) { + excluded_jars = rebase_path(invoker.excluded_jars, root_build_dir) + args += [ "--excluded-paths=${excluded_jars}" ] + } + deps = [ + ":$build_config_target_name", + ] + } + } + + # Creates an AndroidManifest.xml for an APK split. + template("generate_split_manifest") { + assert(defined(invoker.main_manifest)) + assert(defined(invoker.out_manifest)) + assert(defined(invoker.split_name)) + + action(target_name) { + forward_variables_from(invoker, + [ + "deps", + "testonly", + ]) + depfile = "$target_gen_dir/$target_name.d" + args = [ + "--main-manifest", + rebase_path(invoker.main_manifest, root_build_dir), + "--out-manifest", + rebase_path(invoker.out_manifest, root_build_dir), + "--split", + invoker.split_name, + ] + if (defined(invoker.version_code)) { + args += [ + "--version-code", + invoker.version_code, + ] + } + if (defined(invoker.version_name)) { + args += [ + "--version-name", + invoker.version_name, + ] + } + if (defined(invoker.has_code)) { + args += [ + "--has-code", + invoker.has_code, + ] + } + args += [ + "--depfile", + rebase_path(depfile, root_build_dir), + ] + + script = "//build/android/gyp/generate_split_manifest.py" + outputs = [ + depfile, + invoker.out_manifest, + ] + inputs = [ + invoker.main_manifest, + ] + } + } +} diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni new file mode 100644 index 00000000000..e5ffa00c46c --- /dev/null +++ b/build/config/android/rules.gni @@ -0,0 +1,2707 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/config.gni") +import("//build/config/android/internal_rules.gni") +import("//build/config/dcheck_always_on.gni") +import("//build/toolchain/toolchain.gni") + +assert(is_android) + +# Creates a dist directory for a native executable. +# +# Running a native executable on a device requires all the shared library +# dependencies of that executable. To make it easier to install and run such an +# executable, this will create a directory containing the native exe and all +# it's library dependencies. +# +# Note: It's usually better to package things as an APK than as a native +# executable. +# +# Variables +# dist_dir: Directory for the exe and libraries. Everything in this directory +# will be deleted before copying in the exe and libraries. +# binary: Path to (stripped) executable. +# extra_files: List of extra files to copy in (optional). 
+# +# Example +# create_native_executable_dist("foo_dist") { +# dist_dir = "$root_build_dir/foo_dist" +# binary = "$root_build_dir/foo" +# deps = [ ":the_thing_that_makes_foo" ] +# } +template("create_native_executable_dist") { + forward_variables_from(invoker, [ "testonly" ]) + + _libraries_list = "${target_gen_dir}/${target_name}_library_dependencies.list" + + _find_deps_target_name = "${target_name}__find_library_dependencies" + + # TODO(agrieve): Extract dependent libs from GN rather than readelf. + action(_find_deps_target_name) { + forward_variables_from(invoker, [ "deps" ]) + + script = "//build/android/gyp/write_ordered_libraries.py" + depfile = "$target_gen_dir/$target_name.d" + inputs = [ + invoker.binary, + android_readelf, + ] + outputs = [ + depfile, + _libraries_list, + ] + rebased_binaries = rebase_path([ invoker.binary ], root_build_dir) + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--input-libraries=$rebased_binaries", + "--libraries-dir", + rebase_path(root_shlib_dir, root_build_dir), + "--output", + rebase_path(_libraries_list, root_build_dir), + "--readelf", + rebase_path(android_readelf, root_build_dir), + ] + } + + copy_ex(target_name) { + clear_dir = true + + inputs = [ + _libraries_list, + invoker.binary, + ] + + dest = invoker.dist_dir + data = [ + "${invoker.dist_dir}/", + ] + + _rebased_libraries_list = rebase_path(_libraries_list, root_build_dir) + _rebased_binaries_list = rebase_path([ invoker.binary ], root_build_dir) + args = [ + "--files=@FileArg($_rebased_libraries_list:lib_paths)", + "--files=$_rebased_binaries_list", + ] + if (defined(invoker.extra_files)) { + _rebased_extra_files = rebase_path(invoker.extra_files, root_build_dir) + args += [ "--files=$_rebased_extra_files" ] + } + + deps = [ + ":$_find_deps_target_name", + ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + } +} + +# Writes a script to root_out_dir/bin that passes --output-directory to the +# wrapped script, in addition to forwarding arguments. Most / all of these +# wrappers should be made deps of //tools/android:android_tools. +# +# Variables +# target: Script to wrap. +# flag_name: Default is "--output-directory" +# +# Example +# wrapper_script("foo_wrapper") { +# target = "//pkg/foo.py" +# } +template("wrapper_script") { + action(target_name) { + _name = get_path_info(invoker.target, "name") + _output = "$root_out_dir/bin/$_name" + + script = "//build/android/gyp/create_tool_wrapper.py" + outputs = [ + _output, + ] + + # The target isn't actually used by the script, but it's nice to have GN + # check that it exists. + inputs = [ + invoker.target, + ] + args = [ + "--output", + rebase_path(_output, root_build_dir), + "--target", + rebase_path(invoker.target, root_build_dir), + "--output-directory", + rebase_path(root_out_dir, root_build_dir), + ] + if (defined(invoker.flag_name)) { + args += [ "--flag-name=${invoker.flag_name}" ] + } + } +} + +if (enable_java_templates) { + import("//build/config/sanitizers/sanitizers.gni") + import("//third_party/android_platform/config.gni") + import("//tools/grit/grit_rule.gni") + + # Declare a jni target + # + # This target generates the native jni bindings for a set of .java files. + # + # See base/android/jni_generator/jni_generator.py for more info about the + # format of generating JNI bindings. 
+ # + # Variables + # sources: list of .java files to generate jni for + # jni_package: subdirectory path for generated bindings + # + # Example + # generate_jni("foo_jni") { + # sources = [ + # "android/java/src/org/chromium/foo/Foo.java", + # "android/java/src/org/chromium/foo/FooUtil.java", + # ] + # jni_package = "foo" + # } + template("generate_jni") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + assert(defined(invoker.sources)) + assert(defined(invoker.jni_package)) + jni_package = invoker.jni_package + base_output_dir = "${target_gen_dir}/${target_name}" + package_output_dir = "${base_output_dir}/${jni_package}" + jni_output_dir = "${package_output_dir}/jni" + + jni_generator_include = + "//base/android/jni_generator/jni_generator_helper.h" + + foreach_target_name = "${target_name}__jni_gen" + action_foreach(foreach_target_name) { + script = "//base/android/jni_generator/jni_generator.py" + depfile = "$target_gen_dir/$target_name.{{source_name_part}}.d" + sources = invoker.sources + outputs = [ + depfile, + "${jni_output_dir}/{{source_name_part}}_jni.h", + ] + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--input_file={{source}}", + "--optimize_generation=1", + "--ptr_type=long", + "--output_dir", + rebase_path(jni_output_dir, root_build_dir), + "--includes", + rebase_path(jni_generator_include, jni_output_dir), + "--native_exports_optional", + ] + } + + config("jni_includes_${target_name}") { + # TODO(cjhopman): #includes should probably all be relative to + # base_output_dir. Remove that from this config once the includes are + # updated. + include_dirs = [ + base_output_dir, + package_output_dir, + ] + } + + group(target_name) { + forward_variables_from(invoker, + [ + "deps", + "public_deps", + "visibility", + ]) + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ ":$foreach_target_name" ] + public_configs = [ ":jni_includes_${target_name}" ] + } + } + + # Declare a jni target for a prebuilt jar + # + # This target generates the native jni bindings for a set of classes in a .jar. + # + # See base/android/jni_generator/jni_generator.py for more info about the + # format of generating JNI bindings. + # + # Variables + # classes: list of .class files in the jar to generate jni for. These should + # include the full path to the .class file. + # jni_package: subdirectory path for generated bindings + # jar_file: the path to the .jar. If not provided, will default to the sdk's + # android.jar + # + # deps, public_deps: As normal + # + # Example + # generate_jar_jni("foo_jni") { + # classes = [ + # "android/view/Foo.class", + # ] + # jni_package = "foo" + # } + template("generate_jar_jni") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + assert(defined(invoker.classes)) + assert(defined(invoker.jni_package)) + + if (defined(invoker.jar_file)) { + jar_file = invoker.jar_file + } else { + jar_file = android_sdk_jar + } + + jni_package = invoker.jni_package + base_output_dir = "${root_gen_dir}/${target_name}/${jni_package}" + jni_output_dir = "${base_output_dir}/jni" + + jni_generator_include = + "//base/android/jni_generator/jni_generator_helper.h" + + # TODO(cjhopman): make jni_generator.py support generating jni for multiple + # .class files from a .jar. 
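+    # Until then, declare one jni_generator action per .class file and group
+    # them all together below.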
+    jni_actions = []
+    foreach(class, invoker.classes) {
+      _classname_list = []
+      _classname_list = process_file_template([ class ], "{{source_name_part}}")
+      classname = _classname_list[0]
+      jni_target_name = "${target_name}__jni_${classname}"
+      jni_actions += [ ":$jni_target_name" ]
+      action(jni_target_name) {
+        # The sources aren't compiled so don't check their dependencies.
+        check_includes = false
+        depfile = "$target_gen_dir/$target_name.d"
+        script = "//base/android/jni_generator/jni_generator.py"
+        sources = [
+          jar_file,
+        ]
+        outputs = [
+          depfile,
+          "${jni_output_dir}/${classname}_jni.h",
+        ]
+
+        args = [
+          "--depfile",
+          rebase_path(depfile, root_build_dir),
+          "--jar_file",
+          rebase_path(jar_file, root_build_dir),
+          "--input_file",
+          class,
+          "--optimize_generation=1",
+          "--ptr_type=long",
+          "--output_dir",
+          rebase_path(jni_output_dir, root_build_dir),
+          "--includes",
+          rebase_path(jni_generator_include, jni_output_dir),
+          "--native_exports_optional",
+        ]
+      }
+    }
+
+    config("jni_includes_${target_name}") {
+      include_dirs = [ base_output_dir ]
+    }
+
+    group(target_name) {
+      public_deps = []
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "public_deps",
+                               "visibility",
+                             ])
+      public_deps += jni_actions
+      public_configs = [ ":jni_includes_${target_name}" ]
+    }
+  }
+
+  # Declare a target for c-preprocessor-generated java files
+  #
+  # NOTE: For generating Java counterparts to enums prefer using the
+  # java_cpp_enum rule instead.
+  #
+  # This target generates java files using the host C pre-processor. Each file
+  # in sources will be compiled using the C pre-processor. If include_path is
+  # specified, it will be passed (with -I) to the pre-processor.
+  #
+  # This target will create a single .srcjar. Adding this target to an
+  # android_library target's srcjar_deps will make the generated java files be
+  # included in that library's final outputs.
+  #
+  # Variables
+  #   sources: list of files to be processed by the C pre-processor. For each
+  #     file in sources, there will be one .java file in the final .srcjar. For
+  #     a file named FooBar.template, a java file will be created with name
+  #     FooBar.java.
+  #   inputs: additional compile-time dependencies. Any files
+  #     `#include`-ed in the templates should be listed here.
+  #   package_name: this will be the subdirectory for each .java file in the
+  #     .srcjar.
+ # + # Example + # java_cpp_template("foo_generated_enum") { + # sources = [ + # "android/java/templates/Foo.template", + # ] + # inputs = [ + # "android/java/templates/native_foo_header.h", + # ] + # + # package_name = "org/chromium/base/library_loader" + # include_path = "android/java/templates" + # } + template("java_cpp_template") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + _include_path = "//" + if (defined(invoker.include_path)) { + _include_path = invoker.include_path + } + + _apply_gcc_target_name = "${target_name}__apply_gcc" + _base_gen_dir = "${target_gen_dir}/${target_name}/java_cpp_template" + + action_foreach(_apply_gcc_target_name) { + forward_variables_from(invoker, + [ + "deps", + "public_deps", + "data_deps", + ]) + script = "//build/android/gyp/gcc_preprocess.py" + if (defined(invoker.inputs)) { + inputs = invoker.inputs + [] + } + depfile = + "${target_gen_dir}/${invoker.target_name}_{{source_name_part}}.d" + + sources = invoker.sources + + outputs = [ + depfile, + "$_base_gen_dir/${invoker.package_name}/{{source_name_part}}.java", + ] + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--include-path", + rebase_path(_include_path, root_build_dir), + "--output", + rebase_path(outputs[1], root_build_dir), + "--template={{source}}", + ] + + if (defined(invoker.defines)) { + foreach(def, invoker.defines) { + args += [ + "--defines", + def, + ] + } + } + } + + # Filter out .d files. + set_sources_assignment_filter([ "*.d" ]) + sources = get_target_outputs(":$_apply_gcc_target_name") + + zip(target_name) { + forward_variables_from(invoker, [ "visibility" ]) + inputs = sources + output = "${target_gen_dir}/${target_name}.srcjar" + base_dir = _base_gen_dir + deps = [ + ":$_apply_gcc_target_name", + ] + } + } + + # Declare a target for generating Java classes from C++ enums. + # + # This target generates Java files from C++ enums using a script. + # + # This target will create a single .srcjar. Adding this target to an + # android_library target's srcjar_deps will make the generated java files be + # included in that library's final outputs. + # + # Variables + # sources: list of files to be processed by the script. For each annotated + # enum contained in the sources files the script will generate a .java + # file with the same name as the name of the enum. + # + # Example + # java_cpp_enum("foo_generated_enum") { + # sources = [ + # "src/native_foo_header.h", + # ] + # } + template("java_cpp_enum") { + action(target_name) { + # The sources aren't compiled so don't check their dependencies. + check_includes = false + set_sources_assignment_filter([]) + + assert(defined(invoker.sources)) + forward_variables_from(invoker, + [ + "sources", + "testonly", + "visibility", + ]) + + script = "//build/android/gyp/java_cpp_enum.py" + depfile = "$target_gen_dir/$target_name.d" + + _srcjar_path = "${target_gen_dir}/${target_name}.srcjar" + _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir) + _rebased_sources = rebase_path(invoker.sources, root_build_dir) + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--srcjar=$_rebased_srcjar_path", + ] + _rebased_sources + outputs = [ + depfile, + _srcjar_path, + ] + } + } + + # Declare a target for processing a Jinja template. + # + # Variables + # input: The template file to be processed. + # output: Where to save the result. + # variables: (Optional) A list of variables to make available to the template + # processing environment, e.g. 
["name=foo", "color=red"]. + # + # Example + # jinja_template("chrome_public_manifest") { + # input = "java/AndroidManifest.xml" + # output = "$target_gen_dir/AndroidManifest.xml" + # } + template("jinja_template") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + assert(defined(invoker.input)) + assert(defined(invoker.output)) + + action(target_name) { + forward_variables_from(invoker, + [ + "visibility", + "deps", + ]) + + sources = [ + invoker.input, + ] + script = "//build/android/gyp/jinja_template.py" + depfile = "$target_gen_dir/$target_name.d" + + outputs = [ + depfile, + invoker.output, + ] + + args = [ + "--loader-base-dir", + rebase_path("//", root_build_dir), + "--inputs", + rebase_path(invoker.input, root_build_dir), + "--output", + rebase_path(invoker.output, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + ] + if (defined(invoker.variables)) { + variables = invoker.variables + args += [ "--variables=${variables}" ] + } + } + } + + # Declare a target for processing Android resources as Jinja templates. + # + # This takes an Android resource directory where each resource is a Jinja + # template, processes each template, then packages the results in a zip file + # which can be consumed by an android resources, library, or apk target. + # + # If this target is included in the deps of an android resources/library/apk, + # the resources will be included with that target. + # + # Variables + # resources: The list of resources files to process. + # res_dir: The resource directory containing the resources. + # variables: (Optional) A list of variables to make available to the template + # processing environment, e.g. ["name=foo", "color=red"]. + # + # Example + # jinja_template_resources("chrome_public_template_resources") { + # res_dir = "res_template" + # resources = ["res_template/xml/syncable.xml"] + # variables = ["color=red"] + # } + template("jinja_template_resources") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + assert(defined(invoker.resources)) + assert(defined(invoker.res_dir)) + + _base_path = "$target_gen_dir/$target_name" + _resources_zip = _base_path + ".resources.zip" + _build_config = _base_path + ".build_config" + + write_build_config("${target_name}__build_config") { + build_config = _build_config + resources_zip = _resources_zip + type = "android_resources" + if (defined(invoker.deps)) { + possible_config_deps = invoker.deps + } + } + + action("${target_name}__template") { + forward_variables_from(invoker, [ "deps" ]) + sources = invoker.resources + script = "//build/android/gyp/jinja_template.py" + depfile = "$target_gen_dir/$target_name.d" + + outputs = [ + depfile, + _resources_zip, + ] + + rebased_resources = rebase_path(invoker.resources, root_build_dir) + args = [ + "--inputs=${rebased_resources}", + "--inputs-base-dir", + rebase_path(invoker.res_dir, root_build_dir), + "--outputs-zip", + rebase_path(_resources_zip, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + ] + if (defined(invoker.variables)) { + variables = invoker.variables + args += [ "--variables=${variables}" ] + } + } + + group(target_name) { + public_deps = [ + ":${target_name}__build_config", + ":${target_name}__template", + ] + } + } + + # Creates a resources.zip with locale.pak files placed into appropriate + # resource configs (e.g. en-GB.pak -> res/raw-en/en_gb.pak). 
+  # Also generates a locale_paks TypedArray so that resource files can be
+  # enumerated at runtime.
+  #
+  # If this target is included in the deps of an android resources/library/apk,
+  # the resources will be included with that target.
+  #
+  # Variables:
+  #   sources: List of .pak files. Names must be of the form "en.pak" or
+  #     "en-US.pak".
+  #   deps: (optional) List of dependencies that might be needed to generate
+  #     the .pak files.
+  #
+  # Example
+  #   locale_pak_resources("locale_paks") {
+  #     sources = [ "path/en-US.pak", "path/fr.pak", ... ]
+  #   }
+  template("locale_pak_resources") {
+    set_sources_assignment_filter([])
+    assert(defined(invoker.sources))
+
+    _base_path = "$target_gen_dir/$target_name"
+    _resources_zip = _base_path + ".resources.zip"
+    _build_config = _base_path + ".build_config"
+
+    write_build_config("${target_name}__build_config") {
+      build_config = _build_config
+      resources_zip = _resources_zip
+      type = "android_resources"
+      is_locale_resource = true
+    }
+
+    action("${target_name}__create_resources_zip") {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "sources",
+                             ])
+      script = "//build/android/gyp/locale_pak_resources.py"
+      depfile = "$target_gen_dir/$target_name.d"
+
+      outputs = [
+        depfile,
+        _resources_zip,
+      ]
+
+      _rebased_sources = rebase_path(sources, root_build_dir)
+      args = [
+        "--locale-paks=${_rebased_sources}",
+        "--resources-zip",
+        rebase_path(_resources_zip, root_build_dir),
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+      ]
+    }
+
+    group(target_name) {
+      public_deps = [
+        ":${target_name}__build_config",
+        ":${target_name}__create_resources_zip",
+      ]
+    }
+  }
+
+  # Declare an Android resources target
+  #
+  # This creates a resources zip file that will be used when building an
+  # Android library or apk and included into a final apk.
+  #
+  # To include these resources in a library/apk, this target should be listed
+  # in the library's deps. A library/apk will also include any resources used
+  # by its own dependencies.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Any Android resources
+  #     listed in deps will be included by libraries/apks that depend on this
+  #     target.
+  #   resource_dirs: List of directories containing resources for this target.
+  #   generated_resource_dirs: List of directories containing resources for
+  #     this target which are *generated* by a dependency.
+  #     |generated_resource_files| must be specified if
+  #     |generated_resource_dirs| is specified.
+  #   generated_resource_files: List of all files in |generated_resource_dirs|.
+  #     |generated_resource_dirs| must be specified if
+  #     |generated_resource_files| is specified.
+  #   android_manifest: AndroidManifest.xml for this target. Defaults to
+  #     //build/android/AndroidManifest.xml.
+  #   android_manifest_dep: Target that generates AndroidManifest (if
+  #     applicable)
+  #   custom_package: java package for generated .java files.
+  #   v14_skip: If true, don't run v14 resource generator on this. Defaults to
+  #     false. (see build/android/gyp/generate_v14_compatible_resources.py)
+  #   shared_resources: If true make a resource package that can be loaded by a
+  #     different application at runtime to access the package's resources.
+  #   app_as_shared_lib: If true make a resource package that can be loaded as
+  #     both shared_resources and normal application.
+  #
+  # Example:
+  #   android_resources("foo_resources") {
+  #     deps = [":foo_strings_grd"]
+  #     resource_dirs = ["res"]
+  #     custom_package = "org.chromium.foo"
+  #   }
+  #
+  #   android_resources("foo_resources_overrides") {
+  #     deps = [":foo_resources"]
+  #     resource_dirs = ["res_overrides"]
+  #   }
+  template("android_resources") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(defined(invoker.resource_dirs))
+
+    base_path = "$target_gen_dir/$target_name"
+    zip_path = base_path + ".resources.zip"
+    srcjar_path = base_path + ".srcjar"
+    r_text_path = base_path + "_R.txt"
+    build_config = base_path + ".build_config"
+
+    build_config_target_name = "${target_name}__build_config"
+    process_resources_target_name = "${target_name}__process_resources"
+    final_target_name = target_name
+
+    write_build_config(build_config_target_name) {
+      type = "android_resources"
+      forward_variables_from(invoker,
+                             [
+                               "android_manifest",
+                               "custom_package",
+                             ])
+      resource_dirs = []
+      if (defined(invoker.resource_dirs)) {
+        resource_dirs += invoker.resource_dirs
+      }
+      if (defined(invoker.generated_resource_dirs)) {
+        resource_dirs += invoker.generated_resource_dirs
+      }
+
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+      if (defined(invoker.android_manifest_dep)) {
+        deps = [
+          invoker.android_manifest_dep,
+        ]
+      }
+
+      # No package means resources override their deps.
+      if (defined(custom_package) || defined(android_manifest)) {
+        r_text = r_text_path
+      } else {
+        assert(defined(invoker.deps),
+               "Must specify deps when custom_package is omitted.")
+      }
+
+      resources_zip = zip_path
+      srcjar = srcjar_path
+    }
+
+    process_resources(process_resources_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "app_as_shared_lib",
+                               "android_manifest",
+                               "custom_package",
+                               "deps",
+                               "generated_resource_dirs",
+                               "generated_resource_files",
+                               "resource_dirs",
+                               "shared_resources",
+                               "v14_skip",
+                             ])
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ ":$build_config_target_name" ]
+      if (defined(invoker.android_manifest_dep)) {
+        deps += [ invoker.android_manifest_dep ]
+      }
+
+      # Always generate the R.onResourcesLoaded() method; it is required to
+      # compile ResourceRewriter, and it has no side effect because the
+      # generated R.class isn't used in the final apk.
+      shared_resources = true
+      if (!defined(android_manifest)) {
+        android_manifest = "//build/android/AndroidManifest.xml"
+      }
+    }
+
+    group(final_target_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      public_deps = [
+        ":${target_name}__process_resources",
+      ]
+    }
+  }
+
+  # Declare an Android assets target.
+  #
+  # Defines a set of files to include as assets in a dependent apk.
+  #
+  # To include these assets in an apk, this target should be listed in
+  # the apk's deps, or in the deps of a library target used by an apk.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Any Android assets
+  #     listed in deps will be included by libraries/apks that depend on this
+  #     target.
+  #   sources: List of files to include as assets.
+  #   renaming_sources: List of files to include as assets and be renamed.
+  #   renaming_destinations: List of asset paths for files in renaming_sources.
+  #   disable_compression: Whether to disable compression for files that are
+  #     known to be compressible (default: false).
+ # + # Example: + # android_assets("content_shell_assets") { + # deps = [ + # ":generates_foo", + # ":other_assets", + # ] + # sources = [ + # "//path/asset1.png", + # "//path/asset2.png", + # "$target_gen_dir/foo.dat", + # ] + # } + # + # android_assets("overriding_content_shell_assets") { + # deps = [ ":content_shell_assets" ] + # # Override foo.dat from content_shell_assets. + # sources = [ "//custom/foo.dat" ] + # renaming_sources = [ "//path/asset2.png" ] + # renaming_destinations = [ "renamed/asset2.png" ] + # } + template("android_assets") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + _build_config = "$target_gen_dir/$target_name.build_config" + _build_config_target_name = "${target_name}__build_config" + + write_build_config(_build_config_target_name) { + type = "android_assets" + build_config = _build_config + + forward_variables_from(invoker, [ "disable_compression" ]) + + if (defined(invoker.deps)) { + possible_config_deps = invoker.deps + } + + if (defined(invoker.sources)) { + asset_sources = invoker.sources + } + if (defined(invoker.renaming_sources)) { + assert(defined(invoker.renaming_destinations)) + _source_count = 0 + foreach(_, invoker.renaming_sources) { + _source_count += 1 + } + _dest_count = 0 + foreach(_, invoker.renaming_destinations) { + _dest_count += 1 + } + assert( + _source_count == _dest_count, + "android_assets() renaming_sources.length != renaming_destinations.length") + asset_renaming_sources = invoker.renaming_sources + asset_renaming_destinations = invoker.renaming_destinations + } + } + + group(target_name) { + forward_variables_from(invoker, + [ + "deps", + "visibility", + ]) + public_deps = [ + ":$_build_config_target_name", + ] + } + } + + # Declare a group() that supports forwarding java dependency information. + # + # Example + # java_group("conditional_deps") { + # if (enable_foo) { + # deps = [":foo_java"] + # } + # } + template("java_group") { + write_build_config("${target_name}__build_config") { + type = "group" + build_config = "$target_gen_dir/${invoker.target_name}.build_config" + + if (defined(invoker.deps)) { + possible_config_deps = invoker.deps + } + } + group(target_name) { + forward_variables_from(invoker, "*") + if (!defined(deps)) { + deps = [] + } + deps += [ ":${target_name}__build_config" ] + } + } + + # Declare a target that generates localized strings.xml from a .grd file. + # + # If this target is included in the deps of an android resources/library/apk, + # the strings.xml will be included with that target. + # + # Variables + # deps: Specifies the dependencies of this target. + # grd_file: Path to the .grd file to generate strings.xml from. + # outputs: Expected grit outputs (see grit rule). + # + # Example + # java_strings_grd("foo_strings_grd") { + # grd_file = "foo_strings.grd" + # } + template("java_strings_grd") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + base_path = "$target_gen_dir/$target_name" + resources_zip = base_path + ".resources.zip" + build_config = base_path + ".build_config" + + write_build_config("${target_name}__build_config") { + type = "android_resources" + } + + # Put grit files into this subdirectory of target_gen_dir. 
+ extra_output_path = target_name + "_grit_output" + + grit_target_name = "${target_name}__grit" + grit_output_dir = "$target_gen_dir/$extra_output_path" + grit(grit_target_name) { + forward_variables_from(invoker, [ "deps" ]) + grit_flags = [ + "-E", + "ANDROID_JAVA_TAGGED_ONLY=false", + ] + output_dir = grit_output_dir + resource_ids = "" + source = invoker.grd_file + outputs = invoker.outputs + } + + # This needs to get outputs from grit's internal target, not the final + # source_set. + generate_strings_outputs = get_target_outputs(":${grit_target_name}_grit") + + zip("${target_name}__zip") { + base_dir = grit_output_dir + inputs = generate_strings_outputs + output = resources_zip + deps = [ + ":$grit_target_name", + ] + } + + group(target_name) { + public_deps = [ + ":${target_name}__build_config", + ":${target_name}__zip", + ] + } + } + + # Declare a target that packages strings.xml generated from a grd file. + # + # If this target is included in the deps of an android resources/library/apk, + # the strings.xml will be included with that target. + # + # Variables + # grit_output_dir: directory containing grit-generated files. + # generated_files: list of android resource files to package. + # + # Example + # java_strings_grd_prebuilt("foo_strings_grd") { + # grit_output_dir = "$root_gen_dir/foo/grit" + # generated_files = [ + # "values/strings.xml" + # ] + # } + template("java_strings_grd_prebuilt") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + base_path = "$target_gen_dir/$target_name" + resources_zip = base_path + ".resources.zip" + build_config = base_path + ".build_config" + + build_config_target_name = "${target_name}__build_config" + zip_target_name = "${target_name}__zip" + final_target_name = target_name + + write_build_config(build_config_target_name) { + type = "android_resources" + } + + zip(zip_target_name) { + visibility = [ ":$final_target_name" ] + + base_dir = invoker.grit_output_dir + inputs = rebase_path(invoker.generated_files, ".", base_dir) + output = resources_zip + deps = [ + ":$build_config_target_name", + ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + } + + group(final_target_name) { + forward_variables_from(invoker, [ "visibility" ]) + public_deps = [ + ":$zip_target_name", + ] + } + } + + # Declare a Java executable target + # + # This target creates an executable from java code and libraries. The executable + # will be in the output folder's /bin/ directory. + # + # Variables + # deps: Specifies the dependencies of this target. Java targets in this list + # will be included in the executable (and the javac classpath). + # java_files: List of .java files included in this library. + # srcjar_deps: List of srcjar dependencies. The .java files in the srcjars + # will be added to java_files and be included in this library. + # srcjars: List of srcjars to be included in this library, together with the + # ones obtained from srcjar_deps. + # bypass_platform_checks: Disables checks about cross-platform (Java/Android) + # dependencies for this target. This will allow depending on an + # android_library target, for example. + # chromium_code: If true, extra analysis warning/errors will be enabled. + # enable_errorprone: If true, enables the errorprone compiler. + # enable_incremental_javac_override: Overrides the + # global enable_incremental_javac. + # main_class: When specified, a wrapper script is created within + # $root_build_dir/bin to launch the binary with the given class as the + # entrypoint. 
+ # wrapper_script_args: List of additional arguments for the wrapper script. + # + # data_deps, testonly + # + # Example + # java_binary("foo") { + # java_files = [ "org/chromium/foo/FooMain.java" ] + # deps = [ ":bar_java" ] + # main_class = "org.chromium.foo.FooMain" + # } + template("java_binary") { + set_sources_assignment_filter([]) + + java_library_impl(target_name) { + forward_variables_from(invoker, "*") + supports_android = false + main_class = invoker.main_class + is_java_binary = true + } + } + + # Declare a Junit executable target + # + # This target creates an executable from java code for running as a junit test + # suite. The executable will be in the output folder's /bin/ directory. + # + # Variables + # deps: Specifies the dependencies of this target. Java targets in this list + # will be included in the executable (and the javac classpath). + # + # java_files: List of .java files included in this library. + # srcjar_deps: List of srcjar dependencies. The .java files in the srcjars + # will be added to java_files and be included in this library. + # srcjars: List of srcjars to be included in this library, together with the + # ones obtained from srcjar_deps. + # + # chromium_code: If true, extra analysis warning/errors will be enabled. + # + # Example + # junit_binary("foo") { + # java_files = [ "org/chromium/foo/FooTest.java" ] + # deps = [ ":bar_java" ] + # } + template("junit_binary") { + set_sources_assignment_filter([]) + testonly = true + + _java_binary_target_name = "${target_name}__java_binary" + _test_runner_target_name = "${target_name}__test_runner_script" + + test_runner_script(_test_runner_target_name) { + test_name = invoker.target_name + test_suite = invoker.target_name + test_type = "junit" + } + + java_binary(_java_binary_target_name) { + deps = [] + jar_name = invoker.target_name + forward_variables_from(invoker, "*") + testonly = true + bypass_platform_checks = true + main_class = "org.chromium.testing.local.JunitTestMain" + wrapper_script_name = "helper/$target_name" + deps += [ + "//testing/android/junit:junit_test_support", + "//third_party/junit", + "//third_party/mockito:mockito_java", + "//third_party/robolectric:android-all-5.0.0_r2-robolectric-1", + "//third_party/robolectric:robolectric_annotations_java", + "//third_party/robolectric:robolectric_java", + "//third_party/robolectric:robolectric_resources_java", + "//third_party/robolectric:robolectric_utils_java", + "//third_party/robolectric:shadows-core-3.0-21", + "//third_party/robolectric:shadows-multidex-3.0", + ] + } + group(target_name) { + public_deps = [ + ":$_java_binary_target_name", + ":$_test_runner_target_name", + ] + } + } + + # Declare a java library target + # + # Variables + # deps: Specifies the dependencies of this target. Java targets in this list + # will be added to the javac classpath. + # + # java_files: List of .java files included in this library. + # srcjar_deps: List of srcjar dependencies. The .java files in the srcjars + # will be added to java_files and be included in this library. + # srcjars: List of srcjars to be included in this library, together with the + # ones obtained from srcjar_deps. + # + # input_jars_paths: A list of paths to the jars that should be included + # in the classpath. These are in addition to library .jars that + # appear in deps. + # + # chromium_code: If true, extra analysis warning/errors will be enabled. + # enable_errorprone: If true, enables the errorprone compiler. 
+ # enable_incremental_javac_override: Overrides the global + # enable_incremental_javac. + # + # jar_excluded_patterns: List of patterns of .class files to exclude from the + # final jar. + # + # proguard_preprocess: If true, proguard preprocessing will be run. This can + # be used to remove unwanted parts of the library. + # proguard_config: Path to the proguard config for preprocessing. + # + # supports_android: If true, Android targets (android_library, android_apk) + # may depend on this target. Note: if true, this target must only use the + # subset of Java available on Android. + # bypass_platform_checks: Disables checks about cross-platform (Java/Android) + # dependencies for this target. This will allow depending on an + # android_library target, for example. + # + # additional_jar_files: Use to package additional files into the output jar. + # Pass a list of length-2 lists with format + # [ [ path_to_file, path_to_put_in_jar ] ] + # + # + # data_deps, testonly + # + # Example + # java_library("foo_java") { + # java_files = [ + # "org/chromium/foo/Foo.java", + # "org/chromium/foo/FooInterface.java", + # "org/chromium/foo/FooService.java", + # ] + # deps = [ + # ":bar_java" + # ] + # srcjar_deps = [ + # ":foo_generated_enum" + # ] + # jar_excluded_patterns = [ + # "*/FooService.class", "*/FooService\$*.class" + # ] + # } + template("java_library") { + set_sources_assignment_filter([]) + java_library_impl(target_name) { + forward_variables_from(invoker, "*") + } + } + + # Declare a java library target for a prebuilt jar + # + # Variables + # deps: Specifies the dependencies of this target. Java targets in this list + # will be added to the javac classpath. + # jar_path: Path to the prebuilt jar. + # jar_dep: Target that builds jar_path (optional). + # proguard_preprocess: If true, proguard preprocessing will be run. This can + # be used to remove unwanted parts of the library. + # proguard_config: Path to the proguard config for preprocessing. + # supports_android: If true, Android targets (android_library, android_apk) + # may depend on this target. Note: if true, this target must only use the + # subset of Java available on Android. + # + # Example + # java_prebuilt("foo_java") { + # jar_path = "foo.jar" + # deps = [ + # ":foo_resources", + # ":bar_java" + # ] + # } + template("java_prebuilt") { + set_sources_assignment_filter([]) + java_prebuilt_impl(target_name) { + forward_variables_from(invoker, "*") + } + } + + # Declare an Android library target + # + # This target creates an Android library containing java code and Android + # resources. + # + # Variables + # deps: Specifies the dependencies of this target. Java targets in this list + # will be added to the javac classpath. Android resources in dependencies + # will be used when building this library. + # + # java_files: List of .java files included in this library. + # srcjar_deps: List of srcjar dependencies. The .java files in the srcjars + # will be added to java_files and be included in this library. + # srcjars: List of srcjars to be included in this library, together with the + # ones obtained from srcjar_deps. + # + # input_jars_paths: A list of paths to the jars that should be included + # in the classpath. These are in addition to library .jars that + # appear in deps. + # + # chromium_code: If true, extra analysis warning/errors will be enabled. + # enable_errorprone: If true, enables the errorprone compiler. + # enable_incremental_javac_override: Overrides the global + # enable_incremental_javac. 
+ # + # jar_excluded_patterns: List of patterns of .class files to exclude from the + # final jar. + # + # proguard_preprocess: If true, proguard preprocessing will be run. This can + # be used to remove unwanted parts of the library. + # proguard_config: Path to the proguard config for preprocessing. + # + # dex_path: If set, the resulting .dex.jar file will be placed under this + # path. + # + # alternative_android_sdk_ijar: if set, the given android_sdk_ijar file + # replaces the default android_sdk_ijar. + # + # alternative_android_sdk_ijar_dep: the target that generates + # alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar + # is used. + # + # emma_never_instrument: Disables EMMA Java code coverage for this target. + # + # Example + # android_library("foo_java") { + # java_files = [ + # "android/org/chromium/foo/Foo.java", + # "android/org/chromium/foo/FooInterface.java", + # "android/org/chromium/foo/FooService.java", + # ] + # deps = [ + # ":bar_java" + # ] + # srcjar_deps = [ + # ":foo_generated_enum" + # ] + # jar_excluded_patterns = [ + # "*/FooService.class", "*/FooService\$*.class" + # ] + # } + template("android_library") { + set_sources_assignment_filter([]) + assert(!defined(invoker.jar_path), + "android_library does not support a custom jar path") + + if (defined(invoker.alternative_android_sdk_ijar)) { + assert(defined(invoker.alternative_android_sdk_ijar_dep)) + } + + java_library_impl(target_name) { + forward_variables_from(invoker, "*") + + supports_android = true + requires_android = true + + if (!defined(jar_excluded_patterns)) { + jar_excluded_patterns = [] + } + jar_excluded_patterns += [ + "*/R.class", + "*/R\$*.class", + "*/Manifest.class", + "*/Manifest\$*.class", + ] + } + } + + # Declare a target that packages a set of Java dependencies into a standalone + # .dex.jar. + # + # Variables + # deps: specifies the dependencies of this target. Android libraries in deps + # will be packaged into the resulting .dex.jar file. + # dex_path: location at which the output file will be put + template("android_standalone_library") { + set_sources_assignment_filter([]) + deps_dex(target_name) { + forward_variables_from(invoker, + [ + "deps", + "dex_path", + "excluded_jars", + ]) + } + } + + # Declare an Android library target for a prebuilt jar + # + # This target creates an Android library containing java code and Android + # resources. + # + # Variables + # deps: Specifies the dependencies of this target. Java targets in this list + # will be added to the javac classpath. Android resources in dependencies + # will be used when building this library. + # jar_path: Path to the prebuilt jar. + # proguard_preprocess: If true, proguard preprocessing will be run. This can + # be used to remove unwanted parts of the library. + # proguard_config: Path to the proguard config for preprocessing. + # + # Example + # android_java_prebuilt("foo_java") { + # jar_path = "foo.jar" + # deps = [ + # ":foo_resources", + # ":bar_java" + # ] + # } + template("android_java_prebuilt") { + set_sources_assignment_filter([]) + java_prebuilt_impl(target_name) { + forward_variables_from(invoker, "*") + supports_android = true + requires_android = true + strip_resource_classes = true + } + } + + # Declare an Android apk target + # + # This target creates an Android APK containing java code, resources, assets, + # and (possibly) native libraries. + # + # Variables + # alternative_android_sdk_jar: The alternative android sdk jar used in + # proguard. 
+  #   android_aapt_path: Android aapt tool to replace the default one used to
+  #     build resources.
+  #   android_manifest: Path to AndroidManifest.xml.
+  #   android_manifest_dep: Target that generates AndroidManifest (if
+  #     applicable)
+  #   chromium_code: If true, extra analysis warning/errors will be enabled.
+  #   create_dist_ijar: Whether to define the "${target_name}_dist_ijar" target
+  #     (used by instrumentation_test_apk).
+  #   data_deps: List of dependencies needed at runtime. These will be built
+  #     but won't change the generated .apk in any way (in fact they may be
+  #     built after the .apk is).
+  #   deps: List of dependencies. All Android java resources and libraries in
+  #     the "transitive closure" of these dependencies will be included in the
+  #     apk. Note: this "transitive closure" actually only includes such
+  #     targets if they are depended on through android_library or
+  #     android_resources targets (and so not through builtin targets like
+  #     'action', 'group', etc).
+  #   install_script_name: Name of wrapper script (default=target_name).
+  #   java_files: List of .java files to include in the apk.
+  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+  #     will be added to java_files and be included in this apk.
+  #   apk_name: Name for final apk.
+  #   final_apk_path: Path to final built apk. Default is
+  #     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+  #   loadable_modules: List of paths to native libraries to include. Different
+  #     from |shared_libraries| in that:
+  #       * dependencies of this .so are not automatically included
+  #       * ".cr.so" is never added
+  #       * they are not side-loaded for _incremental targets.
+  #       * load_library_from_apk, use_chromium_linker,
+  #         and enable_relocation_packing do not apply
+  #     Use this instead of shared_libraries when you are going to load the
+  #     library conditionally, and only when shared_libraries doesn't work for
+  #     you.
+  #   shared_libraries: List of shared_library targets to bundle. If these
+  #     libraries depend on other shared_library targets, those dependencies
+  #     will also be included in the apk (e.g. for is_component_build).
+  #   native_lib_placeholders: List of placeholder filenames to add to the apk
+  #     (optional).
+  #   apk_under_test: For an instrumentation test apk, this is the target of
+  #     the tested apk.
+  #   include_all_resources: If true include all resource IDs in all generated
+  #     R.java files.
+  #   testonly: Marks this target as "test-only".
+  #   write_asset_list: Adds an extra file to the assets, which contains a list
+  #     of all other asset files.
+  #   alternative_locale_resource_dep: The locale resource target which
+  #     overrides any existing locale resources in the dep graph.
+  #   requires_sdk_api_level_23: If defined and true, the apk is intended for
+  #     installation only on Android M or later. In these releases the system
+  #     linker does relocation unpacking, so we can enable it unconditionally.
+  #   secondary_native_libs: The path of native libraries for secondary app
+  #     abi.
+  #   run_findbugs_override: Forces run_findbugs on or off. If undefined, the
+  #     default will use the build arg run_findbugs.
+  #   proguard_jar_path: The path to proguard.jar you wish to use. If
+  #     undefined, the proguard used will be the checked-in one in
+  #     //third_party/proguard.
+ # + # Example + # android_apk("foo_apk") { + # android_manifest = "AndroidManifest.xml" + # java_files = [ + # "android/org/chromium/foo/FooApplication.java", + # "android/org/chromium/foo/FooActivity.java", + # ] + # deps = [ + # ":foo_support_java" + # ":foo_resources" + # ] + # srcjar_deps = [ + # ":foo_generated_enum" + # ] + # shared_libraries = [ + # ":my_shared_lib", + # ] + # } + template("android_apk") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + assert(defined(invoker.final_apk_path) || defined(invoker.apk_name)) + assert(defined(invoker.android_manifest)) + gen_dir = "$target_gen_dir/$target_name" + base_path = "$gen_dir/$target_name" + _build_config = "$target_gen_dir/$target_name.build_config" + resources_zip_path = "$base_path.resources.zip" + _all_resources_zip_path = "$base_path.resources.all.zip" + _jar_path = "$base_path.jar" + _lib_dex_path = "$base_path.dex.jar" + _rebased_lib_dex_path = rebase_path(_lib_dex_path, root_build_dir) + _template_name = target_name + + enable_multidex = + defined(invoker.enable_multidex) && invoker.enable_multidex + if (enable_multidex) { + final_dex_path = "$gen_dir/classes.dex.zip" + } else { + final_dex_path = "$gen_dir/classes.dex" + } + final_dex_target_name = "${_template_name}__final_dex" + + _final_apk_path = "" + if (defined(invoker.final_apk_path)) { + _final_apk_path = invoker.final_apk_path + } else if (defined(invoker.apk_name)) { + _final_apk_path = "$root_build_dir/apks/" + invoker.apk_name + ".apk" + } + _final_apk_path_no_ext_list = + process_file_template([ _final_apk_path ], + "{{source_dir}}/{{source_name_part}}") + _final_apk_path_no_ext = _final_apk_path_no_ext_list[0] + assert(_final_apk_path_no_ext != "") # Mark as used. + + _install_script_name = "install_$_template_name" + if (defined(invoker.install_script_name)) { + _install_script_name = invoker.install_script_name + } + _incremental_install_script_path = + "${root_out_dir}/bin/${_install_script_name}_incremental" + + _version_code = android_default_version_code + if (defined(invoker.version_code)) { + _version_code = invoker.version_code + } + + _version_name = android_default_version_name + if (defined(invoker.version_name)) { + _version_name = invoker.version_name + } + _keystore_path = android_keystore_path + _keystore_name = android_keystore_name + _keystore_password = android_keystore_password + + if (defined(invoker.keystore_path)) { + _keystore_path = invoker.keystore_path + _keystore_name = invoker.keystore_name + _keystore_password = invoker.keystore_password + } + + _srcjar_deps = [] + if (defined(invoker.srcjar_deps)) { + _srcjar_deps += invoker.srcjar_deps + } + + _use_chromium_linker = + defined(invoker.use_chromium_linker) && invoker.use_chromium_linker + _enable_relocation_packing = defined(invoker.enable_relocation_packing) && + invoker.enable_relocation_packing + _load_library_from_apk = + defined(invoker.load_library_from_apk) && invoker.load_library_from_apk + _requires_sdk_api_level_23 = defined(invoker.requires_sdk_api_level_23) && + invoker.requires_sdk_api_level_23 + + assert(_use_chromium_linker || true) # Mark as used. 
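+    # (These always-true asserts merely reference flags that may otherwise
+    # only be read on some paths, keeping GN's unused-variable check quiet.)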
+    assert(_requires_sdk_api_level_23 || true)
+    if (_enable_relocation_packing) {
+      assert(_use_chromium_linker || _requires_sdk_api_level_23,
+             "enable_relocation_packing requires either use_chromium_linker " +
+                 "or requires_sdk_api_level_23")
+    }
+    if (_load_library_from_apk) {
+      assert(_use_chromium_linker || _requires_sdk_api_level_23,
+             "load_library_from_apk requires use_chromium_linker " +
+                 "or requires_sdk_api_level_23")
+    }
+
+    # The dependency that builds the chromium linker, if one is needed.
+    _native_libs_deps = []
+
+    if (defined(invoker.shared_libraries) && invoker.shared_libraries != []) {
+      _native_libs_deps += invoker.shared_libraries
+
+      if (is_component_build || is_asan) {
+        _native_libs_deps += [ "//build/android:cpplib_stripped" ]
+      }
+
+      # To determine the filenames of all dependent shared libraries, write the
+      # runtime deps of |shared_libraries| to a file during "gn gen".
+      # write_build_config.py will then grep this file for *.so to obtain the
+      # complete list.
+      _runtime_deps_file =
+          "$target_gen_dir/${_template_name}.native.runtimedeps"
+      group("${_template_name}__runtime_deps") {
+        deps = _native_libs_deps
+        write_runtime_deps = _runtime_deps_file
+      }
+
+      _native_lib_version_rule = ""
+      if (defined(invoker.native_lib_version_rule)) {
+        _native_lib_version_rule = invoker.native_lib_version_rule
+      }
+      _native_lib_version_arg = "\"\""
+      if (defined(invoker.native_lib_version_arg)) {
+        _native_lib_version_arg = invoker.native_lib_version_arg
+      }
+    }
+
+    if (defined(invoker.deps)) {
+      set_sources_assignment_filter([ "*manifest*" ])
+      sources = invoker.deps
+      set_sources_assignment_filter([])
+      if (sources != invoker.deps) {
+        _bad_deps = invoker.deps - sources
+        assert(
+            false,
+            "Possible manifest-generating dep found in deps. Use android_manifest_dep for this instead. Found: $_bad_deps")
+      }
+      sources = []
+    }
+    _android_manifest_deps = []
+    if (defined(invoker.android_manifest_dep)) {
+      _android_manifest_deps = [ invoker.android_manifest_dep ]
+    }
+    _android_manifest = invoker.android_manifest
+
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    _create_abi_split =
+        defined(invoker.create_abi_split) && invoker.create_abi_split
+    _create_density_splits =
+        defined(invoker.create_density_splits) && invoker.create_density_splits
+    _create_language_splits =
+        defined(invoker.language_splits) && invoker.language_splits != []
+
+    # Help GN understand that _create_abi_split is not unused (bug in GN).
+    assert(_create_abi_split || true)
+
+    _proguard_enabled =
+        defined(invoker.proguard_enabled) && invoker.proguard_enabled
+    if (_proguard_enabled) {
+      _proguard_output_jar_path = "$base_path.proguard.jar"
+    }
+
+    _emma_never_instrument = defined(invoker.testonly) && invoker.testonly
+
+    build_config_target = "${_template_name}__build_config"
+    write_build_config(build_config_target) {
+      forward_variables_from(invoker, [ "apk_under_test" ])
+      type = "android_apk"
+      jar_path = _jar_path
+      dex_path = final_dex_path
+      apk_path = _final_apk_path
+      incremental_apk_path = "${_final_apk_path_no_ext}_incremental.apk"
+      incremental_install_script_path = _incremental_install_script_path
+      resources_zip = resources_zip_path
+      build_config = _build_config
+      android_manifest = _android_manifest
+
+      deps = _android_manifest_deps
+
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+
+      if (defined(invoker.alternative_locale_resource_dep)) {
+        possible_config_deps += [ invoker.alternative_locale_resource_dep ]
+        has_alternative_locale_resource = true
+      }
+
+      # Add emma to the target's classpath via its .build_config.
+      if (emma_coverage && !_emma_never_instrument) {
+        possible_config_deps += [ "//third_party/android_tools:emma_device" ]
+      }
+
+      proguard_enabled = _proguard_enabled
+      if (_proguard_enabled) {
+        proguard_info = "$_proguard_output_jar_path.info"
+      }
+
+      # Don't depend on the runtime_deps target in order to avoid having to
+      # build the native libraries just to create the .build_config file.
+      # The dep is unnecessary since the runtime_deps file is created by gn gen
+      # and the runtime_deps file is added to write_build_config.py's depfile.
+      if (_native_libs_deps != []) {
+        shared_libraries_runtime_deps_file = _runtime_deps_file
+      }
+    }
+
+    _final_deps = []
+
+    _generated_proguard_config = "$base_path.resources.proguard.txt"
+    process_resources_target = "${_template_name}__process_resources"
+    process_resources(process_resources_target) {
+      forward_variables_from(invoker,
+                             [
+                               "alternative_android_sdk_jar",
+                               "android_aapt_path",
+                               "app_as_shared_lib",
+                               "include_all_resources",
+                               "shared_resources",
+                             ])
+      srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+      r_text_path = "${target_gen_dir}/${target_name}_R.txt"
+      android_manifest = _android_manifest
+      resource_dirs = [ "//build/android/ant/empty/res" ]
+      zip_path = resources_zip_path
+      all_resources_zip_path = _all_resources_zip_path
+      generate_constant_ids = true
+      proguard_file = _generated_proguard_config
+
+      build_config = _build_config
+      deps = _android_manifest_deps + [ ":$build_config_target" ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+      if (defined(invoker.alternative_locale_resource_dep)) {
+        deps += [ invoker.alternative_locale_resource_dep ]
+      }
+    }
+    _srcjar_deps += [ ":$process_resources_target" ]
+
+    if (_native_libs_deps != []) {
+      _enable_chromium_linker_tests = false
+      if (defined(invoker.enable_chromium_linker_tests)) {
+        _enable_chromium_linker_tests = invoker.enable_chromium_linker_tests
+      }
+      _ordered_libraries_json =
+          "$target_gen_dir/$target_name.ordered_libraries.json"
+      _rebased_ordered_libraries_json =
+          rebase_path(_ordered_libraries_json, root_build_dir)
+      _ordered_libraries_target = "${_template_name}__write_ordered_libraries"
+
+      # TODO(agrieve): Make GN write runtime deps in dependency order so as to
+      # not need this manual sorting step.
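+      # write_ordered_libraries.py inspects the libraries with readelf to
+      # recover their transitive dependencies in load order.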
+ action(_ordered_libraries_target) { + script = "//build/android/gyp/write_ordered_libraries.py" + deps = _native_libs_deps + [ ":$build_config_target" ] + outputs = [ + _ordered_libraries_json, + ] + _rebased_android_readelf = rebase_path(android_readelf, root_build_dir) + args = [ + "--readelf=$_rebased_android_readelf", + "--output=$_rebased_ordered_libraries_json", + "--libraries-dir=.", + "--input-libraries=@FileArg($_rebased_build_config:native:libraries)", + ] + } + + java_cpp_template("${_template_name}__native_libraries_java") { + package_name = "org/chromium/base/library_loader" + sources = [ + "//base/android/java/templates/NativeLibraries.template", + ] + inputs = [ + _ordered_libraries_json, + ] + deps = [ + ":${_ordered_libraries_target}", + ] + if (_native_lib_version_rule != "") { + deps += [ _native_lib_version_rule ] + } + + defines = [ + "NATIVE_LIBRARIES_LIST=" + + "@FileArg($_rebased_ordered_libraries_json:java_libraries_list)", + "NATIVE_LIBRARIES_VERSION_NUMBER=$_native_lib_version_arg", + ] + if (_use_chromium_linker) { + defines += [ "ENABLE_CHROMIUM_LINKER" ] + } + if (_load_library_from_apk) { + defines += [ "ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE" ] + } + if (_enable_chromium_linker_tests) { + defines += [ "ENABLE_CHROMIUM_LINKER_TESTS" ] + } + } + _srcjar_deps += [ ":${_template_name}__native_libraries_java" ] + } + + if (!defined(invoker.apk_under_test)) { + java_cpp_template("${_template_name}__build_config_java") { + package_name = "org/chromium/base" + sources = [ + "//base/android/java/templates/BuildConfig.template", + ] + + defines = [] + if (enable_multidex) { + defines += [ "ENABLE_MULTIDEX" ] + } + if (is_java_debug || dcheck_always_on) { + defines += [ "_DCHECK_IS_ON" ] + } + } + _srcjar_deps += [ ":${_template_name}__build_config_java" ] + } + + java_target = "${_template_name}__java" + java_library_impl(java_target) { + forward_variables_from(invoker, + [ + "chromium_code", + "java_files", + "run_findbugs_override", + ]) + supports_android = true + requires_android = true + override_build_config = _build_config + deps = _android_manifest_deps + [ ":$build_config_target" ] + + android_manifest = _android_manifest + srcjar_deps = _srcjar_deps + jar_path = _jar_path + dex_path = _lib_dex_path + emma_never_instrument = _emma_never_instrument + + if (defined(invoker.deps)) { + deps += invoker.deps + } + if (defined(invoker.apk_under_test)) { + deps += [ "${invoker.apk_under_test}__java" ] + } + if (emma_coverage && !_emma_never_instrument) { + deps += [ "//third_party/android_tools:emma_device" ] + } + } + + # TODO(cjhopman): This is only ever needed to calculate the list of tests to + # run. See build/android/pylib/instrumentation/test_jar.py. We should be + # able to just do that calculation at build time instead. + if (defined(invoker.create_dist_ijar) && invoker.create_dist_ijar) { + _dist_ijar_path = "$root_build_dir/test.lib.java/" + + get_path_info(_final_apk_path, "name") + ".jar" + action("${_template_name}_dist_ijar") { + script = "//build/android/gyp/create_dist_jar.py" + depfile = "$target_gen_dir/$target_name.d" + inputs = [ + _build_config, + ] + outputs = [ + depfile, + "${_dist_ijar_path}", + ] + data = [ + _dist_ijar_path, + ] + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--output", + rebase_path("${_dist_ijar_path}", root_build_dir), + "--inputs=@FileArg($_rebased_build_config:dist_jar:all_interface_jars)", + ] + deps = [ + ":$build_config_target", # Generates the build config file. 
+ ":$java_target", # Generates the jar file. + ] + } + } + + if (_proguard_enabled) { + _proguard_configs = [ _generated_proguard_config ] + if (defined(invoker.proguard_configs)) { + _proguard_configs += invoker.proguard_configs + } + assert(_proguard_configs != []) # Mark as used. + _proguard_target = "${_template_name}__proguard" + proguard(_proguard_target) { + forward_variables_from(invoker, + [ + "alternative_android_sdk_jar", + "proguard_jar_path", + ]) + deps = [ + ":$build_config_target", + ":$java_target", + ":$process_resources_target", + ] + inputs = [ + _build_config, + _jar_path, + ] + _proguard_configs + + output_jar_path = _proguard_output_jar_path + _rebased_proguard_configs = + rebase_path(_proguard_configs, root_build_dir) + args = [ + "--proguard-configs=$_rebased_proguard_configs", + "--input-paths=@FileArg($_rebased_build_config:proguard:input_paths)", + "--classpath=@FileArg($_rebased_build_config:proguard:lib_paths)", + ] + if (defined(invoker.apk_under_test)) { + deps += [ + "${invoker.apk_under_test}__build_config", + "${invoker.apk_under_test}__proguard", + ] + _apk_under_test_build_config = + get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" + + get_label_info(invoker.apk_under_test, "name") + ".build_config" + _rebased_apk_under_test_build_config = + rebase_path(_apk_under_test_build_config, root_build_dir) + args += [ "--tested-apk-info=@FileArg($_rebased_apk_under_test_build_config:deps_info:proguard_info)" ] + } + } + _dex_sources = [ _proguard_output_jar_path ] + _dex_deps = [ ":$_proguard_target" ] + + _copy_proguard_mapping_target = "${_template_name}__copy_proguard_mapping" + copy(_copy_proguard_mapping_target) { + sources = [ + "$_proguard_output_jar_path.mapping", + ] + outputs = [ + "$_final_apk_path.mapping", + ] + deps = [ + ":$_proguard_target", + ] + } + } else { + if (enable_multidex) { + _dex_sources = [ _jar_path ] + } else { + _dex_sources = [ _lib_dex_path ] + } + _dex_deps = [ ":$java_target" ] + } + + dex("$final_dex_target_name") { + deps = _dex_deps + [ ":$build_config_target" ] + inputs = [ + _build_config, + ] + sources = _dex_sources + output = final_dex_path + + # All deps are already included in _dex_sources when proguard is used. 
+    if (!_proguard_enabled) {
+      if (enable_multidex) {
+        _dex_arg_key = "${_rebased_build_config}:dist_jar:dependency_jars"
+      } else {
+        _dex_arg_key =
+            "${_rebased_build_config}:final_dex:dependency_dex_files"
+      }
+      args = [ "--inputs=@FileArg($_dex_arg_key)" ]
+    }
+  }
+
+  _native_libs_file_arg_dep = ":$build_config_target"
+  _native_libs_file_arg = "@FileArg($_rebased_build_config:native:libraries)"
+
+  if (_native_libs_deps != [] && _enable_relocation_packing) {
+    _prepare_native_target_name = "${_template_name}__prepare_native"
+    _native_libs_dir = "$gen_dir/packed-libs"
+    _native_libs_json = "$gen_dir/packed-libs/filelist.json"
+    _rebased_native_libs_json = rebase_path(_native_libs_json, root_build_dir)
+
+    _native_libs_file_arg_dep = ":$_prepare_native_target_name"
+    _native_libs_file_arg = "@FileArg($_rebased_native_libs_json:files)"
+
+    action(_prepare_native_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "public_deps",
+                             ])
+      script = "//build/android/gyp/pack_relocations.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      outputs = [
+        depfile,
+        _native_libs_json,
+      ]
+
+      inputs = [
+        _build_config,
+      ]
+
+      deps += _native_libs_deps
+      deps += [
+        ":$build_config_target",
+        relocation_packer_target,
+      ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--enable-packing=1",
+        "--android-pack-relocations",
+        rebase_path(relocation_packer_exe, root_build_dir),
+        "--stripped-libraries-dir",
+        rebase_path(root_build_dir, root_build_dir),
+        "--packed-libraries-dir",
+        rebase_path(_native_libs_dir, root_build_dir),
+        "--libraries=@FileArg(${_rebased_build_config}:native:libraries)",
+        "--filelistjson=$_rebased_native_libs_json",
+      ]
+    }
+  }
+
+  _extra_native_libs = []
+  _extra_native_libs_deps = []
+  _extra_native_libs_even_when_incremental = []
+  _extra_native_libs_even_when_incremental_deps = []
+  assert(_extra_native_libs_even_when_incremental_deps == [])  # Mark as used.
+  if (_native_libs_deps != []) {
+    # zipalign can't align gdbserver, so don't pack it for now.
+    if (is_debug && (!defined(invoker.page_align_shared_libraries) ||
+                     !invoker.page_align_shared_libraries)) {
+      _extra_native_libs_even_when_incremental = [ android_gdbserver ]
+    }
+
+    if (_use_chromium_linker) {
+      _extra_native_libs =
+          [ "$root_shlib_dir/libchromium_android_linker$shlib_extension" ]
+      _extra_native_libs_deps +=
+          [ "//base/android/linker:chromium_android_linker" ]
+    }
+  }
+  if (defined(invoker.loadable_modules) && invoker.loadable_modules != []) {
+    _extra_native_libs_even_when_incremental += invoker.loadable_modules
+  }
+
+  _final_deps += [ ":${_template_name}__create" ]
+  create_apk("${_template_name}__create") {
+    forward_variables_from(invoker,
+                           [
+                             "alternative_android_sdk_jar",
+                             "android_aapt_path",
+                             "app_as_shared_lib",
+                             "deps",
+                             "extensions_to_not_compress",
+                             "language_splits",
+                             "page_align_shared_libraries",
+                             "public_deps",
+                             "secondary_native_libs",
+                             "shared_resources",
+                             "uncompress_shared_libraries",
+                             "write_asset_list",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    apk_path = _final_apk_path
+    android_manifest = _android_manifest
+    assets_build_config = _build_config
+    resources_zip = _all_resources_zip_path
+    dex_path = final_dex_path
+    load_library_from_apk = _load_library_from_apk
+    create_density_splits = _create_density_splits
+    emma_instrument = emma_coverage && !_emma_never_instrument
+
+    if (!defined(extensions_to_not_compress)) {
+      # Allow icu data, v8 snapshots, and pak files to be loaded directly from
+      # the .apk.
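+      # For example, "icudtl.dat", "snapshot_blob.bin", and "resources.pak"
+      # all match the default suffix list below.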
+ # Note: These are actually suffix matches, not necessarily extensions. + extensions_to_not_compress = ".dat,.bin,.pak" + } + + version_code = _version_code + version_name = _version_name + + keystore_name = _keystore_name + keystore_path = _keystore_path + keystore_password = _keystore_password + + # Incremental apk does not use native libs nor final dex. + incremental_deps = deps + _android_manifest_deps + [ + ":$build_config_target", + ":$process_resources_target", + ] + + # This target generates the input file _all_resources_zip_path. + deps += _android_manifest_deps + [ + ":$build_config_target", + ":$process_resources_target", + ":$final_dex_target_name", + ] + + if ((_native_libs_deps != [] || + _extra_native_libs_even_when_incremental != []) && + !_create_abi_split) { + deps += _native_libs_deps + _extra_native_libs_deps + + _extra_native_libs_even_when_incremental_deps + + [ _native_libs_file_arg_dep ] + native_libs_filearg = _native_libs_file_arg + native_libs = _extra_native_libs + native_libs_even_when_incremental = + _extra_native_libs_even_when_incremental + } + + # Placeholders necessary for some older devices. + # http://crbug.com/395038 + forward_variables_from(invoker, [ "native_lib_placeholders" ]) + } + + if ((_native_libs_deps != [] || + _extra_native_libs_even_when_incremental != []) && _create_abi_split) { + _manifest_rule = + "${_template_name}__split_manifest_abi_${android_app_abi}" + generate_split_manifest(_manifest_rule) { + main_manifest = _android_manifest + out_manifest = + "$gen_dir/split-manifests/${android_app_abi}/AndroidManifest.xml" + split_name = "abi_${android_app_abi}" + deps = _android_manifest_deps + } + + _apk_rule = "${_template_name}__split_apk_abi_${android_app_abi}" + _final_deps += [ ":$_apk_rule" ] + + create_apk(_apk_rule) { + apk_path = "${_final_apk_path_no_ext}-abi-${android_app_abi}.apk" + base_path = "$gen_dir/$_apk_rule" + + manifest_outputs = get_target_outputs(":${_manifest_rule}") + android_manifest = manifest_outputs[1] + load_library_from_apk = _load_library_from_apk + + version_code = _version_code + version_name = _version_name + + keystore_name = _keystore_name + keystore_path = _keystore_path + keystore_password = _keystore_password + + # Placeholders necessary for some older devices. 
+      # http://crbug.com/395038
+      deps = []
+      forward_variables_from(invoker,
+                             [
+                               "alternative_android_sdk_jar",
+                               "android_aapt_path",
+                               "deps",
+                               "native_lib_placeholders",
+                               "public_deps",
+                             ])
+
+      incremental_deps =
+          deps + _extra_native_libs_even_when_incremental_deps +
+          [ ":$_manifest_rule" ]
+      deps = []
+      deps = incremental_deps + _native_libs_deps + _extra_native_libs_deps +
+             [ _native_libs_file_arg_dep ]
+      native_libs_filearg = _native_libs_file_arg
+      native_libs = _extra_native_libs
+      native_libs_even_when_incremental =
+          _extra_native_libs_even_when_incremental
+    }
+  }
+
+  _create_incremental_script_rule_name =
+      "${_template_name}__incremental_script"
+  action(_create_incremental_script_rule_name) {
+    script = "//build/android/incremental_install/create_install_script.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    deps = [
+      _native_libs_file_arg_dep,
+    ]
+
+    outputs = [
+      depfile,
+      _incremental_install_script_path,
+    ]
+
+    _rebased_apk_path_no_ext =
+        rebase_path(_final_apk_path_no_ext, root_build_dir)
+    _rebased_incremental_install_script_path =
+        rebase_path(_incremental_install_script_path, root_build_dir)
+    _rebased_depfile = rebase_path(depfile, root_build_dir)
+    _dex_arg_key = "${_rebased_build_config}:final_dex:dependency_dex_files"
+    args = [
+      "--apk-path=${_rebased_apk_path_no_ext}_incremental.apk",
+      "--script-output-path=$_rebased_incremental_install_script_path",
+      "--dex-file=$_rebased_lib_dex_path",
+      "--dex-file-list=@FileArg($_dex_arg_key)",
+      "--depfile=$_rebased_depfile",
+    ]
+    if (_proguard_enabled) {
+      args += [ "--show-proguard-warning" ]
+    }
+    if (defined(_native_libs_file_arg)) {
+      args += [ "--native-libs=$_native_libs_file_arg" ]
+    }
+    if (_extra_native_libs != []) {
+      # Don't pass in _extra_native_libs_even_when_incremental, since these
+      # end up in the apk and are not side-loaded.
+      _rebased_extra_native_libs =
+          rebase_path(_extra_native_libs, root_build_dir)
+      args += [ "--native-libs=$_rebased_extra_native_libs" ]
+    }
+    if (_create_density_splits) {
+      args += [ "--split=${_rebased_apk_path_no_ext}-density-*.apk" ]
+    }
+    if (_create_language_splits) {
+      args += [ "--split=${_rebased_apk_path_no_ext}-language-*.apk" ]
+    }
+    if (_load_library_from_apk) {
+      args += [ "--dont-even-try=Incremental builds do not work with load_library_from_apk. Try setting is_component_build=true in your GN args." ]
+    }
+  }
+
+  group(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "data_deps",
+                           ])
+    public_deps = _final_deps
+
+    # Make the proguard .mapping file easy to find by putting it beside the .apk.
+    if (_proguard_enabled) {
+      deps = [
+        ":$_copy_proguard_mapping_target",
+      ]
+    }
+  }
+  group("${target_name}_incremental") {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "data_deps",
+                           ])
+    if (!defined(data_deps)) {
+      data_deps = []
+    }
+
+    # device/commands is used by the installer script to push files via .zip.
+    data_deps += [ "//build/android/pylib/device/commands" ] +
+                 _native_libs_deps + _extra_native_libs_deps
+
+    # Since the _incremental.apk does not include the .so or .dex files from
+    # the actual target, but instead loads them at runtime, we need to
+    # explicitly depend on them here.
+    public_deps = [
+      ":${_create_incremental_script_rule_name}",
+      ":${_template_name}__create_incremental",
+      ":${java_target}",
+    ]
+  }
+}
+
+# Declare an Android instrumentation test apk
+#
+# This target creates an Android instrumentation test apk.
+#
+# Variables
+#   android_manifest: Path to AndroidManifest.xml.
+#   data_deps: List of dependencies needed at runtime. These will be built but
+#     won't change the generated .apk in any way (in fact they may be built
+#     after the .apk is).
+#   deps: List of dependencies. All Android java resources and libraries in the
+#     "transitive closure" of these dependencies will be included in the apk.
+#     Note: this "transitive closure" actually only includes such targets if
+#     they are depended on through android_library or android_resources targets
+#     (and so not through builtin targets like 'action', 'group', etc).
+#   java_files: List of .java files to include in the apk.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this apk.
+#   apk_name: Name for final apk.
+#   final_apk_path: Path to final built apk. Default is
+#     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+#   shared_libraries: List of shared_library targets to bundle. If these
+#     libraries depend on other shared_library targets, those dependencies will
+#     also be included in the apk (e.g. for is_component_build).
+#   apk_under_test: The apk being tested.
+#
+# Example
+#   instrumentation_test_apk("foo_test_apk") {
+#     android_manifest = "AndroidManifest.xml"
+#     apk_name = "FooTest"
+#     apk_under_test = "Foo"
+#     java_files = [
+#       "android/org/chromium/foo/FooTestCase.java",
+#       "android/org/chromium/foo/FooExampleTest.java",
+#     ]
+#     deps = [
+#       ":foo_test_support_java"
+#     ]
+#   }
+template("instrumentation_test_apk") {
+  testonly = true
+  _apk_target_name = "${target_name}__apk"
+  _gen_isolate_target_name = "${target_name}__isolate"
+  _test_runner_target_name = "${target_name}__test_runner_script"
+  _install_script_name = "install_$target_name"
+
+  _target_dir_name = get_label_info(":$target_name", "dir")
+  _device_isolate_path =
+      "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.device.isolate"
+  device_isolate(_gen_isolate_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                           ])
+    output = _device_isolate_path
+  }
+
+  test_runner_script(_test_runner_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "additional_apks",
+                             "apk_under_test",
+                           ])
+    test_name = invoker.target_name
+    test_type = "instrumentation"
+    apk_target = ":$_apk_target_name"
+    isolate_file = _device_isolate_path
+    deps = [
+      ":$_gen_isolate_target_name",
+    ]
+  }
+
+  test_runner_script("${_test_runner_target_name}_incremental") {
+    forward_variables_from(invoker,
+                           [
+                             "additional_apks",
+                             "apk_under_test",
+                           ])
+    test_name = "${invoker.target_name}_incremental"
+    test_type = "instrumentation"
+    apk_target = ":$_apk_target_name"
+    incremental_install = true
+    isolate_file = _device_isolate_path
+    deps = [
+      ":$_gen_isolate_target_name",
+    ]
+  }
+
+  android_apk(_apk_target_name) {
+    deps = []
+    data_deps = []
+    forward_variables_from(invoker, "*")
+    install_script_name = _install_script_name
+    deps += [ "//testing/android/broker:broker_java" ]
+    data_deps += [
+      "//testing/android/driver:driver_apk",
+      "//tools/android/forwarder2",
+      "//tools/android/md5sum",
+    ]
+    if (defined(invoker.additional_apks)) {
+      data_deps += invoker.additional_apks
+    }
+
+    if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
+      # When ProGuard is on, we use ProGuard to combine the java code under
+      # test with the test java code. This is to allow us to apply all the
+      # ProGuard optimizations that we ship with, but not have them break
+      # tests. The apk under test will still have the same resources, assets,
+      # and manifest, all of which are the ones used in the tests.
+      proguard_configs = [ "//testing/android/proguard_for_test.flags" ]
+    }
+
+    create_dist_ijar = true
+    if (defined(invoker.run_findbugs_override)) {
+      # Only allow findbugs when there are java files.
+      run_findbugs_override =
+          invoker.run_findbugs_override && defined(invoker.java_files)
+    }
+  }
+
+  group(target_name) {
+    public_deps = [
+      ":$_apk_target_name",
+      ":$_test_runner_target_name",
+
+      # Required by the test runner to enumerate the test list.
+      ":${_apk_target_name}_dist_ijar",
+    ]
+    if (defined(invoker.apk_under_test)) {
+      public_deps += [ invoker.apk_under_test ]
+    }
+  }
+
+  # TODO: Delete once recipes no longer use this target.
+  group("${target_name}_run") {
+    public_deps = [
+      ":${invoker.target_name}",
+    ]
+  }
+  group("${target_name}_incremental") {
+    public_deps = [
+      ":${_apk_target_name}_dist_ijar",
+      ":${_apk_target_name}_incremental",
+      ":${_test_runner_target_name}_incremental",
+    ]
+    if (defined(invoker.apk_under_test)) {
+      public_deps += [ "${invoker.apk_under_test}_incremental" ]
+    }
+  }
+}
+
+# Declare an Android gtest apk
+#
+# This target creates an Android apk for running gtest-based unittests.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. These will be passed to
+#     the underlying android_apk invocation and should include the java and
+#     resource dependencies of the apk.
+#   shared_library: shared_library target that contains the unit tests.
+#   apk_name: The name of the produced apk. If unspecified, it uses the name
+#     of the shared_library target suffixed with "_apk".
+#   use_default_launcher: Whether the default activity (NativeUnitTestActivity)
+#     should be used for launching tests.
+#   use_native_activity: Test implements ANativeActivity_onCreate().
+#
+# Example
+#   unittest_apk("foo_unittests_apk") {
+#     deps = [ ":foo_java", ":foo_resources" ]
+#     shared_library = ":foo_unittests"
+#   }
+template("unittest_apk") {
+  _use_native_activity =
+      defined(invoker.use_native_activity) && invoker.use_native_activity
+  _android_manifest = "$target_gen_dir/$target_name/AndroidManifest.xml"
+  assert(invoker.shared_library != "")
+
+  # This trivial assert is needed in case android_manifest is defined, as
+  # otherwise _use_native_activity and _android_manifest would not be used.
+ assert(_use_native_activity != "" && _android_manifest != "") + + if (!defined(invoker.android_manifest)) { + jinja_template("${target_name}_manifest") { + _native_library_name = get_label_info(invoker.shared_library, "name") + input = "//testing/android/native_test/java/AndroidManifest.xml.jinja2" + output = _android_manifest + variables = [ + "is_component_build=${is_component_build}", + "native_library_name=${_native_library_name}", + "use_native_activity=${_use_native_activity}", + ] + } + } + + android_apk(target_name) { + set_sources_assignment_filter([]) + data_deps = [] + deps = [] + forward_variables_from(invoker, "*") + testonly = true + + assert(!defined(invoker.proguard_enabled) || !invoker.proguard_enabled || + invoker.proguard_configs != []) + + if (!defined(apk_name)) { + apk_name = get_label_info(invoker.shared_library, "name") + } + + if (!defined(android_manifest)) { + android_manifest_dep = ":${target_name}_manifest" + android_manifest = _android_manifest + } + + final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk" + + if (!defined(use_default_launcher) || use_default_launcher) { + deps += [ "//testing/android/native_test:native_test_java" ] + } + shared_libraries = [ invoker.shared_library ] + deps += [ + "//base:base_java", + "//testing/android/appurify_support:appurify_support_java", + "//testing/android/reporter:reporter_java", + ] + data_deps += [ + "//build/android/pylib/remote/device/dummy:remote_device_dummy_apk", + "//tools/android/md5sum", + ] + if (host_os == "linux") { + data_deps += [ "//tools/android/forwarder2" ] + } + } + } + + # Generate .java files from .aidl files. + # + # This target will store the .java files in a srcjar and should be included in + # an android_library or android_apk's srcjar_deps. + # + # Variables + # sources: Paths to .aidl files to compile. + # import_include: Path to directory containing .java files imported by the + # .aidl files. + # interface_file: Preprocessed aidl file to import. + # + # Example + # android_aidl("foo_aidl") { + # import_include = "java/src" + # sources = [ + # "java/src/com/foo/bar/FooBarService.aidl", + # "java/src/com/foo/bar/FooBarServiceCallback.aidl", + # ] + # } + template("android_aidl") { + set_sources_assignment_filter([]) + forward_variables_from(invoker, [ "testonly" ]) + + srcjar_path = "${target_gen_dir}/${target_name}.srcjar" + aidl_path = "${android_sdk_build_tools}/aidl" + framework_aidl = "$android_sdk/framework.aidl" + + action(target_name) { + script = "//build/android/gyp/aidl.py" + sources = invoker.sources + + imports = [ framework_aidl ] + if (defined(invoker.interface_file)) { + assert(invoker.interface_file != "") + imports += [ invoker.interface_file ] + } + + inputs = [ aidl_path ] + imports + + depfile = "${target_gen_dir}/${target_name}.d" + outputs = [ + depfile, + srcjar_path, + ] + rebased_imports = rebase_path(imports, root_build_dir) + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--aidl-path", + rebase_path(aidl_path, root_build_dir), + "--imports=$rebased_imports", + "--srcjar", + rebase_path(srcjar_path, root_build_dir), + ] + if (defined(invoker.import_include) && invoker.import_include != "") { + # TODO(cjhopman): aidl supports creating a depfile. We should be able to + # switch to constructing a depfile for the overall action from that + # instead of having all the .java files in the include paths as inputs. 
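+      # For now, every .java file under import_include is enumerated at
+      # "gn gen" time via find.py below and added to this action's inputs.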
+      rebased_import_includes =
+          rebase_path([ invoker.import_include ], root_build_dir)
+      args += [ "--includes=$rebased_import_includes" ]
+
+      _java_files_build_rel =
+          exec_script("//build/android/gyp/find.py",
+                      rebase_path([ invoker.import_include ], root_build_dir),
+                      "list lines")
+      _java_files = rebase_path(_java_files_build_rel, ".", root_build_dir)
+      inputs += _java_files
+    }
+    args += rebase_path(sources, root_build_dir)
+  }
+}
+
+# Compile a protocol buffer to java.
+#
+# This generates java files from protocol buffers and creates an Android
+# library containing the classes.
+#
+# Variables
+#   sources: Paths to .proto files to compile.
+#   proto_path: Root directory of .proto files.
+#
+# Example:
+#   proto_java_library("foo_proto_java") {
+#     proto_path = "src/foo"
+#     sources = [ "$proto_path/foo.proto" ]
+#   }
+template("proto_java_library") {
+  set_sources_assignment_filter([])
+  forward_variables_from(invoker, [ "testonly" ])
+  _protoc_dep =
+      "//third_party/android_protobuf:android_protoc($host_toolchain)"
+  _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
+  _protoc_bin = "$_protoc_out_dir/android_protoc"
+  _proto_path = invoker.proto_path
+
+  _template_name = target_name
+
+  action("${_template_name}__protoc_java") {
+    srcjar_path = "$target_gen_dir/$target_name.srcjar"
+    script = "//build/protoc_java.py"
+
+    deps = [
+      _protoc_dep,
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    sources = invoker.sources
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      srcjar_path,
+    ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--protoc",
+      rebase_path(_protoc_bin, root_build_dir),
+      "--proto-path",
+      rebase_path(_proto_path, root_build_dir),
+      "--srcjar",
+      rebase_path(srcjar_path, root_build_dir),
+    ] + rebase_path(sources, root_build_dir)
+  }
+
+  android_library(target_name) {
+    chromium_code = false
+    java_files = []
+    srcjar_deps = [ ":${_template_name}__protoc_java" ]
+    deps = [
+      "//third_party/android_protobuf:protobuf_nano_javalib",
+    ]
+  }
+}
+
+# Declare an Android library target for a prebuilt AAR.
+#
+# This target creates an Android library containing java code and Android
+# resources. For libraries without resources, it will not generate
+# corresponding android_resources targets.
+#
+# Variables
+#   aar_path: Path to the AAR.
+#   requires_android: Whether this target can only be used for compiling
+#     Android-related targets.
+#     TODO(jbudorick@): remove this argument after crbug.com/522043 is fixed.
+#
+# Example
+#   android_aar_prebuilt("foo_java") {
+#     aar_path = "foo.aar"
+#   }
+template("android_aar_prebuilt") {
+  assert(defined(invoker.aar_path))
+  _output_path = "${target_gen_dir}/${target_name}"
+  _unpack_target_name = "${target_name}__unpack_aar"
+
+  # Scan the AAR file and determine the resources and jar files.
+  # Some libraries might not have resources; others might have two jars.
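+  # The scope returned by "aar.py --list" has at least the fields used below;
+  # an illustrative example for an AAR with one jar and a few resources:
+  #   resources = [ "res/values/values.xml" ]
+  #   jars = [ "classes.jar" ]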
+ _scanned_files = + exec_script("//build/android/gyp/aar.py", + [ + "--input-file", + rebase_path(invoker.aar_path, root_build_dir), + "--list", + ], + "scope") + + action(_unpack_target_name) { + script = "//build/android/gyp/aar.py" # Unzips the AAR + args = [ + "--input-file", + rebase_path(invoker.aar_path, root_build_dir), + "--output-dir", + rebase_path(_output_path, root_build_dir), + "--extract", + ] + inputs = [ + invoker.aar_path, + ] + outputs = [ + "${_output_path}/AndroidManifest.xml", + ] + + if (_scanned_files.resources != []) { + outputs += [ "${_output_path}/R.txt" ] + outputs += get_path_info( + rebase_path(_scanned_files.resources, "", _output_path), + "abspath") + } + if (defined(_scanned_files.jars)) { + outputs += + get_path_info(rebase_path(_scanned_files.jars, "", _output_path), + "abspath") + } + } + + _resource_targets = [] + + # Create the android_resources target for resources. + if (_scanned_files.resources != []) { + _res_target_name = "${target_name}__res" + _resource_targets += [ ":$_res_target_name" ] + android_resources(_res_target_name) { + forward_variables_from(invoker, [ "deps" ]) + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_unpack_target_name" ] + resource_dirs = [] + generated_resource_dirs = [ "${_output_path}/res" ] + generated_resource_files = + rebase_path(_scanned_files.resources, "", _output_path) + android_manifest_dep = ":$_unpack_target_name" + android_manifest = "${_output_path}/AndroidManifest.xml" + v14_skip = true + } + } + + # Create android_java_prebuilt targets for jar files. + _jar_targets = [] + _counter = 0 + foreach(jar, _scanned_files.jars) { + _counter += 1 + _current_target = "${target_name}__jar_$_counter" + _jar_targets += [ ":$_current_target" ] + java_prebuilt(_current_target) { + forward_variables_from(invoker, + [ + "deps", + "input_jars_paths", + "requires_android", + "jar_excluded_patterns", + ]) + if (!defined(deps)) { + deps = [] + } + deps += _resource_targets + [ ":$_unpack_target_name" ] + if (!defined(requires_android)) { + requires_android = true + } + supports_android = true + jar_path = "${_output_path}/$jar" + } + } + + java_group(target_name) { + deps = _resource_targets + _jar_targets + } + } +} diff --git a/build/config/arm.gni b/build/config/arm.gni new file mode 100644 index 00000000000..6eeca62778b --- /dev/null +++ b/build/config/arm.gni @@ -0,0 +1,106 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/v8_target_cpu.gni") + +# These are primarily relevant in current_cpu == "arm" contexts, where +# ARM code is being compiled. But they can also be relevant in the +# other contexts when the code will change its behavior based on the +# cpu it wants to generate code for. +if (current_cpu == "arm" || v8_current_cpu == "arm") { + declare_args() { + # Version of the ARM processor when compiling on ARM. Ignored on non-ARM + # platforms. + arm_version = 7 + + # The ARM floating point mode. This is either the string "hard", "soft", or + # "softfp". An empty string means to use the default one for the + # arm_version. + arm_float_abi = "" + + # The ARM variant-specific tuning mode. This will be a string like "armv6" + # or "cortex-a15". An empty string means to use the default for the + # arm_version. + arm_tune = "" + + # Whether to use the neon FPU instruction set or not. + arm_use_neon = "" + + # Whether to enable optional NEON code paths. 
+    arm_optionally_use_neon = false
+
+    # Thumb is a reduced instruction set available on some ARM processors that
+    # has increased code density.
+    arm_use_thumb = true
+  }
+
+  assert(arm_float_abi == "" || arm_float_abi == "hard" ||
+         arm_float_abi == "soft" || arm_float_abi == "softfp")
+
+  if (arm_use_neon == "") {
+    if (current_os == "linux" && current_cpu != v8_current_cpu) {
+      # Don't use neon on V8 simulator builds as a default.
+      arm_use_neon = false
+    } else {
+      arm_use_neon = true
+    }
+  }
+
+  if (arm_version == 6) {
+    arm_arch = "armv6"
+    if (arm_tune != "") {
+      arm_tune = ""
+    }
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+    arm_fpu = "vfp"
+    arm_use_thumb = false
+  } else if (arm_version == 7) {
+    arm_arch = "armv7-a"
+    if (arm_tune == "") {
+      arm_tune = "generic-armv7-a"
+    }
+
+    if (arm_float_abi == "") {
+      if (current_os == "android" || target_os == "android") {
+        arm_float_abi = "softfp"
+      } else if (current_os == "linux" && current_cpu != v8_current_cpu) {
+        # Default to the same as Android for V8 simulator builds.
+        arm_float_abi = "softfp"
+      } else {
+        arm_float_abi = "hard"
+      }
+    }
+
+    if (arm_use_neon) {
+      arm_fpu = "neon"
+    } else {
+      arm_fpu = "vfpv3-d16"
+    }
+  } else if (arm_version == 8) {
+    arm_arch = "armv8-a"
+    if (arm_tune == "") {
+      arm_tune = "generic-armv8-a"
+    }
+
+    if (arm_float_abi == "") {
+      if (current_os == "android" || target_os == "android") {
+        arm_float_abi = "softfp"
+      } else {
+        arm_float_abi = "hard"
+      }
+    }
+
+    if (arm_use_neon) {
+      arm_fpu = "neon"
+    } else {
+      arm_fpu = "vfpv3-d16"
+    }
+  }
+} else if (current_cpu == "arm64" || v8_current_cpu == "arm64") {
+  # arm64 supports only "hard".
+  arm_float_abi = "hard"
+  arm_use_neon = true
+}
diff --git a/build/config/chrome_build.gni b/build/config/chrome_build.gni
new file mode 100644
index 00000000000..7b527b6bd56
--- /dev/null
+++ b/build/config/chrome_build.gni
@@ -0,0 +1,22 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Select the desired branding flavor. False means normal Chromium branding,
+  # true means official Google Chrome branding (requires extra Google-internal
+  # resources).
+  is_chrome_branded = false
+
+  # Break chrome.dll into multiple pieces based on process type. Only available
+  # on Windows.
+  is_multi_dll_chrome = false  #is_win && !is_component_build
+}
+
+# Refers to the subdirectory for branding in various places including
+# chrome/app/theme.
+if (is_chrome_branded) {
+  branding_path_component = "google_chrome"
+} else {
+  branding_path_component = "chromium"
+}
diff --git a/build/config/chromecast/BUILD.gn b/build/config/chromecast/BUILD.gn
new file mode 100644
index 00000000000..f1cbfe61946
--- /dev/null
+++ b/build/config/chromecast/BUILD.gn
@@ -0,0 +1,69 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+
+assert(is_chromecast)
+
+config("static_config") {
+  ldflags = [
+    # We want to statically link libstdc++/libgcc.
+    "-static-libstdc++",
+    "-static-libgcc",
+
+    # Don't allow visible symbols from libraries that contain
+    # assembly code with symbols that aren't hidden properly.
+    # http://b/26390825
+    "-Wl,--exclude-libs=libffmpeg.a",
+  ]
+}
+
+config("ldconfig") {
+  visibility = [ ":*" ]
+
+  # Chromecast executables depend on several shared libraries in
+  # /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary.
+  # This is explicitly disabled in Chrome for security reasons (see comments in
+  # //build/config/gcc/BUILD.gn), but necessary on Chromecast so that OEMs may
+  # override the default libraries shipped in the Cast receiver package.
+  ldflags = [
+    "-Wl,-rpath=/oem_cast_shlib",
+    "-Wl,-rpath=\$ORIGIN/lib",
+    "-Wl,-rpath=\$ORIGIN",
+  ]
+}
+
+config("executable_config") {
+  configs = [ ":ldconfig" ]
+
+  if (current_cpu == "arm") {
+    ldflags = [
+      # Export libstdc++ and libgcc symbols to force shlibs to refer to these
+      # symbols from the executable.
+      "-Wl,--export-dynamic",
+
+      "-lm",  # libstdc++ requires math.h
+
+      # In case we redefined libstdc++ symbols (e.g. tc_malloc)
+      "-Wl,--allow-multiple-definition",
+
+      "-Wl,--whole-archive",
+      "-l:libstdc++.a",
+      "-l:libgcc.a",
+      "-Wl,--no-whole-archive",
+    ]
+
+    # Despite including libstdc++/libgcc archives, we still need to specify
+    # static linking for them in order to prevent the executable from having a
+    # dynamic dependency on them.
+    configs += [ ":static_config" ]
+  }
+}
+
+config("shared_library_config") {
+  configs = [ ":ldconfig" ]
+  if (current_cpu == "arm") {
+    configs += [ ":static_config" ]
+  }
+}
diff --git a/build/config/chromecast_build.gni b/build/config/chromecast_build.gni
new file mode 100644
index 00000000000..b2259704983
--- /dev/null
+++ b/build/config/chromecast_build.gni
@@ -0,0 +1,36 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The args declared in this file should be referenced by components outside of
+# //chromecast. Args needed only in //chromecast should be declared in
+# //chromecast/chromecast.gni.
+declare_args() {
+  # Set this true for a Chromecast build. Chromecast builds are supported on
+  # Linux and Android.
+  is_chromecast = false
+
+  # Set this true for an audio-only Chromecast build.
+  disable_display = false
+}
+
+# Note(slan): This arg depends on the value of is_chromecast, and thus must be
+# declared in a separate block. These blocks can be combined when/if
+# crbug.com/542846 is resolved.
+declare_args() {
+  # True if the Chromecast build targets desktop Linux. This type of build
+  # is useful for testing and development, but currently supports only a subset
+  # of Cast functionality. Though this defaults to true for x86 Linux devices,
+  # it should be overridden manually for an embedded x86 build.
+  # TODO(slan): Remove instances of this when x86 is a fully supported platform.
+  is_cast_desktop_build = is_chromecast && target_os == "linux" &&
+                          (target_cpu == "x86" || target_cpu == "x64")
+}
+
+# Assert that Chromecast is being built for a supported platform.
+assert(is_linux || is_android || !is_chromecast,
+       "Chromecast builds are not supported on $target_os")
+
+# Assert that disable_display and is_cast_desktop_build are both false on a
+# non-Chromecast build.
+assert(is_chromecast || !(disable_display || is_cast_desktop_build))
diff --git a/build/config/clang/BUILD.gn b/build/config/clang/BUILD.gn
new file mode 100644
index 00000000000..4e0d88b268d
--- /dev/null
+++ b/build/config/clang/BUILD.gn
@@ -0,0 +1,72 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("clang.gni")
+import("//build/config/chromecast_build.gni")
+
+config("find_bad_constructs") {
+  if (clang_use_chrome_plugins) {
+    cflags = []
+
+    # On Windows, the plugin is built directly into clang, so there's
+    # no need to load it dynamically.
+
+    if (is_mac || is_ios) {
+      cflags += [
+        "-Xclang",
+        "-load",
+        "-Xclang",
+        rebase_path("${clang_base_path}/lib/libFindBadConstructs.dylib",
+                    root_build_dir),
+      ]
+    } else if (is_linux || is_android) {
+      cflags += [
+        "-Xclang",
+        "-load",
+        "-Xclang",
+        rebase_path("${clang_base_path}/lib/libFindBadConstructs.so",
+                    root_build_dir),
+      ]
+    }
+
+    cflags += [
+      "-Xclang",
+      "-add-plugin",
+      "-Xclang",
+      "find-bad-constructs",
+      "-Xclang",
+      "-plugin-arg-find-bad-constructs",
+      "-Xclang",
+      "check-templates",
+      "-Xclang",
+      "-plugin-arg-find-bad-constructs",
+      "-Xclang",
+      "follow-macro-expansion",
+      "-Xclang",
+      "-plugin-arg-find-bad-constructs",
+      "-Xclang",
+      "enforce-in-pdf",
+    ]
+
+    if ((is_linux || is_android) && !is_chromecast) {
+      cflags += [
+        "-Xclang",
+        "-plugin-arg-find-bad-constructs",
+        "-Xclang",
+        "check-ipc",
+      ]
+    }
+  }
+}
+
+# Enables some extra Clang-specific warnings. Some third-party code won't
+# compile with these, so targets that include such code may want to remove
+# this config.
+config("extra_warnings") {
+  cflags = [
+    "-Wheader-hygiene",
+
+    # Warns when a const char[] is converted to bool.
+    "-Wstring-conversion",
+  ]
+}
diff --git a/build/config/clang/clang.gni b/build/config/clang/clang.gni
new file mode 100644
index 00000000000..7026f65207f
--- /dev/null
+++ b/build/config/clang/clang.gni
@@ -0,0 +1,13 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Indicates if the build should use the Chrome-specific plugins for enforcing
+  # coding guidelines, etc. Only used when compiling with Clang.
+  clang_use_chrome_plugins = is_clang && !is_nacl && !use_xcode_clang
+
+  clang_base_path = "//third_party/llvm-build/Release+Asserts"
+}
diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
new file mode 100644
index 00000000000..69321a5f3fc
--- /dev/null
+++ b/build/config/compiler/BUILD.gn
@@ -0,0 +1,1590 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/nacl/config.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (current_cpu == "arm" || current_cpu == "arm64") {
+  import("//build/config/arm.gni")
+}
+if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+  import("//build/config/mips.gni")
+}
+if (is_win) {
+  import("//build/config/win/visual_studio_version.gni")
+}
+if (is_mac) {
+  import("//build/config/mac/symbols.gni")
+}
+
+declare_args() {
+  # Default to warnings as errors for the default workflow, where we catch
+  # warnings with known toolchains. Allow overriding this e.g. for Chromium
+  # builds on Linux that could use a different version of the compiler.
+  # With GCC, warnings in non-Chromium code are never treated as errors.
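+  # Like any declare_args() value, this can be overridden per build directory,
+  # e.g. by running "gn args <out_dir>" and adding:
+  #   treat_warnings_as_errors = false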
+  treat_warnings_as_errors = true
+
+  # Normally, Android builds are lightly optimized, even for debug builds, to
+  # keep binary size down. Setting this flag to true disables such
+  # optimization.
+  android_full_debug = false
+
+  # Whether to use the binary binutils checked into third_party/binutils.
+  # These are not multi-arch so cannot be used except on x86 and x86-64 (the
+  # only two architectures that are currently checked in). Turn this off when
+  # you are using a custom toolchain and need to control -B in cflags.
+  linux_use_bundled_binutils =
+      is_linux && (current_cpu == "x64" || current_cpu == "x86")
+  binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                              root_build_dir)
+
+  # Compile in such a way as to make it possible for the profiler to unwind
+  # full stack frames. Setting this flag has a larger effect on the
+  # performance of the generated code than just enabling profiling, but it
+  # gives the profiler more information to analyze.
+  # Requires profiling to be set to true.
+  enable_full_stack_frames_for_profiling = false
+
+  # When we are going to use gold we need to find it.
+  # This is initialized below, after use_gold might have been overridden.
+  gold_path = false
+
+  if (is_win) {
+    # Whether the VS xtree header has been patched to disable warning 4702. If
+    # it has, then we don't need to disable 4702 (unreachable code warning).
+    # The patch is preapplied to the internal toolchain and hence all bots.
+    msvs_xtree_patched = false
+  }
+
+  # Omit unwind support in official builds to save space.
+  # We can use breakpad for these builds.
+  exclude_unwind_tables = true  #is_chrome_branded && is_official_build
+
+  # If true, the gold linker will save the symbol table inside object files.
+  # This speeds up gdb startup by 60%.
+  gdb_index = false
+
+  # If true, optimize for size. Does not affect Windows builds.
+  # Linux & Mac favor speed over size.
+  # TODO(brettw) it's weird that Mac and desktop Linux are different. We should
+  # explore favoring size over speed in this case as well.
+  optimize_for_size = is_android || is_ios
+
+  # Enable fatal linker warnings. Building Chromium with certain versions
+  # of binutils can cause linker warnings.
+  # See: https://bugs.chromium.org/p/chromium/issues/detail?id=457359
+  fatal_linker_warnings = true
+}
+
+if (is_clang && !is_nacl) {
+  update_args = [ "--print-revision" ]
+  if (llvm_force_head_revision) {
+    update_args += [ "--llvm-force-head-revision" ]
+  }
+  clang_revision =
+      exec_script("//tools/clang/scripts/update.py", update_args, "trim string")
+}
+
+# Apply the default logic for these values if they were not set explicitly.
+if (gold_path == false) {
+  if (use_gold) {
+    gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                            root_build_dir)
+  } else {
+    gold_path = ""
+  }
+}
+
+if (use_debug_fission == "default") {
+  use_debug_fission = is_debug && !is_win && use_gold &&
+                      linux_use_bundled_binutils && cc_wrapper == ""
+}
+
+# default_include_dirs --------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
+config("default_include_dirs") {
+  include_dirs = [
+    "//",
+    root_gen_dir,
+  ]
+}
+
+# compiler --------------------------------------------------------------------
+#
+# Base compiler configuration.
+#
+# See also "runtime_library" below for related stuff and a discussion about
+# where stuff should go. Put warning-related stuff in the "warnings" config.
+
+config("compiler") {
+  asmflags = []
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  cflags_objc = []
+  cflags_objcc = []
+  ldflags = []
+  defines = []
+  configs = []
+
+  # System-specific flags. If your compiler flags apply to one of the
+  # categories here, add it to the associated file to keep this shared config
+  # smaller.
+  if (is_win) {
+    configs += [ "//build/config/win:compiler" ]
+  } else if (is_android) {
+    configs += [ "//build/config/android:compiler" ]
+  } else if (is_linux) {
+    configs += [ "//build/config/linux:compiler" ]
+  } else if (is_nacl) {
+    configs += [ "//build/config/nacl:compiler" ]
+  } else if (is_ios || is_mac) {
+    configs += [ "//build/config/mac:compiler" ]
+  }
+
+  # Applies to all Posix systems.
+  if (is_posix) {
+    configs += [ "//build/config/posix:compiler" ]
+  }
+
+  # See the definitions below.
+  configs += [
+    ":compiler_cpu_abi",
+    ":compiler_codegen",
+  ]
+
+  # In general, Windows is totally different, but all the other builds share
+  # some common GCC configuration.
+  if (!is_win) {
+    # Common GCC compiler flags setup.
+    # --------------------------------
+    cflags += [ "-fno-strict-aliasing" ]  # See http://crbug.com/32204
+    cflags_cc += [
+      # If this is removed then remove the corresponding /Zc:threadSafeInit- for
+      # Windows.
+      "-fno-threadsafe-statics",
+
+      # Not exporting C++ inline functions can generally be applied anywhere
+      # so we do so here. Normal function visibility is controlled by
+      # //build/config/gcc:symbol_visibility_hidden.
+      "-fvisibility-inlines-hidden",
+    ]
+
+    # Stack protection.
+    if (is_mac) {
+      # The strong variant of the stack protector significantly increases
+      # binary size, so only enable it in debug mode.
+      if (is_debug) {
+        cflags += [ "-fstack-protector-strong" ]
+      } else {
+        cflags += [ "-fstack-protector" ]
+      }
+    } else if (is_posix && !is_chromeos && !is_nacl) {
+      # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc supports it.
+      # See also https://crbug.com/533294
+      cflags += [ "--param=ssp-buffer-size=4" ]
+
+      # The x86 toolchain currently has problems with stack-protector.
+      if (is_android && current_cpu == "x86") {
+        cflags += [ "-fno-stack-protector" ]
+      } else {
+        cflags += [ "-fstack-protector" ]
+      }
+    }
+
+    # Linker warnings.
+    if (fatal_linker_warnings && !(is_chromeos && current_cpu == "arm") &&
+        !(is_android && use_order_profiling) && !is_mac && !is_ios) {
+      # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+      # TODO(lizeb,pasko): Fix link errors when linking with order_profiling=1
+      # crbug.com/485542
+      ldflags += [ "-Wl,--fatal-warnings" ]
+    }
+  }
+
+  if (is_clang && is_debug) {
+    # Allow comparing the address of references and 'this' against 0
+    # in debug builds. Technically, these can never be null in
+    # well-defined C/C++ and Clang can optimize such checks away in
+    # release builds, but they may be used in asserts in debug builds.
+    cflags_cc += [
+      "-Wno-undefined-bool-conversion",
+      "-Wno-tautological-undefined-compare",
+    ]
+  }
+
+  if (is_clang && !is_nacl) {
+    # This is here so that all files get recompiled after a clang roll and
+    # when turning clang on or off. (Defines are passed via the command line,
+    # and the build system rebuilds things when their command line changes.)
+    # Nothing should ever read this define.
+ defines += [ "CR_CLANG_REVISION=$clang_revision" ] + } + + # Non-Mac Posix compiler flags setup. + # ----------------------------------- + if (is_posix && !(is_mac || is_ios)) { + if (enable_profiling) { + # Explicitly ask for frame pointers. Otherwise they are omitted when + # any optimization level is used (and Android debug builds use -Os). + cflags += [ "-fno-omit-frame-pointer" ] + if (!is_debug) { + cflags += [ "-g" ] + + if (enable_full_stack_frames_for_profiling) { + cflags += [ + "-fno-inline", + "-fno-optimize-sibling-calls", + ] + } + } + } + + defines += [ "_FILE_OFFSET_BITS=64" ] + + if (!is_android) { + defines += [ + "_LARGEFILE_SOURCE", + "_LARGEFILE64_SOURCE", + ] + } + + if (!is_nacl) { + if (exclude_unwind_tables) { + cflags += [ + "-fno-unwind-tables", + "-fno-asynchronous-unwind-tables", + ] + defines += [ "NO_UNWIND_TABLES" ] + } else { + cflags += [ "-funwind-tables" ] + } + } + } + + # Linux/Android common flags setup. + # --------------------------------- + if (is_linux || is_android) { + cflags += [ + "-fPIC", + "-pipe", # Use pipes for communicating between sub-processes. Faster. + ] + + ldflags += [ + "-fPIC", + "-Wl,-z,noexecstack", + "-Wl,-z,now", + "-Wl,-z,relro", + ] + if (!using_sanitizer && !use_cfi_diag) { + ldflags += [ "-Wl,-z,defs" ] + } + } + + # Linux-specific compiler flags setup. + # ------------------------------------ + if (is_posix && use_lld && !is_nacl) { + ldflags += [ "-fuse-ld=lld" ] + } else if (use_gold) { + ldflags += [ "-fuse-ld=gold" ] + if (is_android) { + if (is_clang) { + _rebased_android_toolchain_root = + rebase_path(android_toolchain_root, root_build_dir) + + # Let clang find the ld.gold in the NDK. + ldflags += [ "--gcc-toolchain=$_rebased_android_toolchain_root" ] + } + + # Use -mstackrealign due to a bug on ia32 Jelly Bean. + # See crbug.com/521527 + if (current_cpu == "x86") { + cflags += [ "-mstackrealign" ] + } + } else { + ldflags += [ "-B$gold_path" ] + + if (linux_use_bundled_binutils) { + ldflags += [ + # Experimentation found that using four linking threads + # saved ~20% of link time. + # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36 + # Only apply this to the target linker, since the host + # linker might not be gold, but isn't used much anyway. + "-Wl,--threads", + "-Wl,--thread-count=4", + ] + } + } + + if (gdb_index) { + ldflags += [ "-Wl,--gdb-index" ] + } + + # TODO(thestig): Make this flag work with GN. + #if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) { + # ldflags += [ + # "-Wl,--detect-odr-violations", + # ] + #} + } else if (linux_use_bundled_binutils) { + # Gold is the default linker for the bundled binutils so we explicitly + # enable the bfd linker when use_gold is not set. + ldflags += [ "-fuse-ld=bfd" ] + } else if (is_android && is_clang && + (current_cpu == "mipsel" || current_cpu == "mips64el")) { + # Let clang find the ld.bfd in the NDK. + _rebased_android_toolchain_root = + rebase_path(android_toolchain_root, root_build_dir) + ldflags += [ "--gcc-toolchain=$_rebased_android_toolchain_root" ] + } + + if (is_posix && (use_gold || (use_lld && !is_nacl)) && !using_sanitizer && + !(is_android && use_order_profiling)) { + # TODO(crbug.com/576197) - gcc on x86 platforms + gold + icf=all + # doesn't currently work on non-chromeos platforms. + # Additionally, on Android x86 --icf=safe seems to cause issues as well. + # Additionally, on cast Android x86, --icf=all causes issues. 
+ if (is_android && current_cpu == "x86") { + ldflags += [ "-Wl,--icf=none" ] + } else if (is_clang || is_chromeos || + (current_cpu != "x86" && current_cpu != "x64")) { + ldflags += [ "-Wl,--icf=all" ] + } else if (!is_android) { + ldflags += [ "-Wl,--icf=safe" ] + } + } + + if (linux_use_bundled_binutils) { + cflags += [ "-B$binutils_path" ] + } + + # Clang-specific compiler flags setup. + # ------------------------------------ + if (is_clang) { + cflags += [ "-fcolor-diagnostics" ] + } + + # Makes builds independent of absolute file path. + # clang-cl (used if is_win) doesn't expose this flag. + # Currently disabled for nacl since its toolchain lacks this flag (too old). + # TODO(zforman): Once nacl's toolchain is updated, remove check. + if (is_clang && is_linux) { + absolute_path = rebase_path("//.") + cflags += [ "-fdebug-prefix-map=$absolute_path=." ] + } + + # C++11 compiler flags setup. + # --------------------------- + if (is_linux || is_android || (is_nacl && is_clang)) { + # gnu++11 instead of c++11 is needed because some code uses typeof() (a + # GNU extension). + # TODO(thakis): Eventually switch this to c++11 instead, + # http://crbug.com/427584 + cflags_cc += [ "-std=gnu++11" ] + } else if (!is_win && !is_nacl) { + # TODO(mcgrathr) - the NaCl GCC toolchain doesn't support either gnu++11 + # or c++11; we technically don't need this toolchain any more, but there + # are still a few buildbots using it, so until those are turned off + # we need the !is_nacl clause and the (is_nacl && is_clang) clause, above. + cflags_cc += [ "-std=c++11" ] + } + + if (is_mac) { + # Tell the compiler to use libc++'s headers and the linker to link + # against libc++. The latter part normally requires OS X 10.7, + # but we still support running on 10.6. How does this work? Two + # parts: + # 1. Chromium's clang doesn't error on -mmacosx-version-min=10.6 + # combined with -stdlib=libc++ (it normally silently produced a + # binary that doesn't run on 10.6) + # 2. Further down, library_dirs is set to + # third_party/libc++-static, which contains a static + # libc++.a library. The linker then links against that instead + # of against /usr/lib/libc++.dylib when it sees the -lc++ flag + # added by the driver. + # + # In component builds, just link to the system libc++. This has + # the effect of making everything depend on libc++, which means + # component-build binaries won't run on 10.6 (no libc++ there), + # but for a developer-only configuration that's ok. (We don't + # want to raise the deployment target yet so that official and + # dev builds have the same deployment target. This affects + # things like which functions are considered deprecated.) + cflags_cc += [ "-stdlib=libc++" ] + ldflags += [ "-stdlib=libc++" ] + if (!is_component_build && !is_asan) { + ldflags += [ + "-L", + rebase_path("//third_party/libc++-static", root_build_dir), + ] + } + } + + # Add flags for link-time optimization. These flags enable + # optimizations/transformations that require whole-program visibility at link + # time, so they need to be applied to all translation units, and we may end up + # with miscompiles if only part of the program is compiled with LTO flags. For + # that reason, we cannot allow targets to enable or disable these flags, for + # example by disabling the optimize configuration. + # TODO(pcc): Make this conditional on is_official_build rather than on gn + # flags for specific features. 
+ if (!is_debug && (allow_posix_link_time_opt || is_cfi) && !is_nacl) { + cflags += [ "-flto" ] + ldflags += [ "-flto" ] + + # Apply a lower LTO optimization level as the default is too slow. + if (is_linux) { + if (use_lld) { + ldflags += [ "-Wl,--lto-O1" ] + } else { + ldflags += [ "-Wl,-plugin-opt,O1" ] + } + } else if (is_mac) { + ldflags += [ "-Wl,-mllvm,-O1" ] + } + + # Work-around for http://openradar.appspot.com/20356002 + if (is_mac) { + ldflags += [ "-Wl,-all_load" ] + } + + # Allows the linker to apply ICF to the LTO object file. Also, when + # targeting ARM, without this flag, LTO produces a .text section that is + # larger than the maximum call displacement, preventing the linker from + # relocating calls (http://llvm.org/PR22999). + if (is_linux) { + ldflags += [ "-Wl,-plugin-opt,-function-sections" ] + } + + cflags += [ "-fwhole-program-vtables" ] + ldflags += [ "-fwhole-program-vtables" ] + } + + # Pass the same C/C++ flags to the objective C/C++ compiler. + cflags_objc += cflags_c + cflags_objcc += cflags_cc + + # Assign any flags set for the C compiler to asmflags so that they are sent + # to the assembler. The Windows assembler takes different types of flags + # so only do so for posix platforms. + if (is_posix) { + asmflags += cflags + asmflags += cflags_c + } +} + +# This provides the basic options to select the target CPU and ABI. +# It is factored out of "compiler" so that special cases can use this +# without using everything that "compiler" brings in. Options that +# tweak code generation for a particular CPU do not belong here! +# See "compiler_codegen", below. +config("compiler_cpu_abi") { + cflags = [] + ldflags = [] + + if (is_posix && !(is_mac || is_ios)) { + # CPU architecture. We may or may not be doing a cross compile now, so for + # simplicity we always explicitly set the architecture. + if (current_cpu == "x64") { + cflags += [ + "-m64", + "-march=x86-64", + ] + ldflags += [ "-m64" ] + } else if (current_cpu == "x86") { + cflags += [ "-m32" ] + ldflags += [ "-m32" ] + if (!is_nacl) { + cflags += [ + "-msse2", + "-mfpmath=sse", + "-mmmx", + ] + } + } else if (current_cpu == "arm") { + if (is_clang && !is_android && !is_nacl) { + cflags += [ "--target=arm-linux-gnueabihf" ] + ldflags += [ "--target=arm-linux-gnueabihf" ] + } + if (!is_nacl) { + cflags += [ + "-march=$arm_arch", + "-mfloat-abi=$arm_float_abi", + ] + if (arm_use_thumb) { + cflags += [ "-mthumb" ] + if (is_android && !is_clang) { + # Clang doesn't support this option. + cflags += [ "-mthumb-interwork" ] + } + } + } + if (arm_tune != "") { + cflags += [ "-mtune=$arm_tune" ] + } + } else if (current_cpu == "arm64") { + if (is_clang && !is_android && !is_nacl) { + cflags += [ "--target=aarch64-linux-gnu" ] + ldflags += [ "--target=aarch64-linux-gnu" ] + } + } else if (current_cpu == "mipsel" && !is_nacl) { + if (is_clang) { + # Temporarily needed due to unsupported MIPS macro instructions + # in inline asm code in third_party projects (libwebp and webrtc). + # TODO(lmilko) Enable integrated-as. 
+ cflags += [ "-fno-integrated-as" ] + } + if (mips_arch_variant == "r6") { + if (is_clang) { + cflags += [ + "--target=mipsel-linux-gnu", + "-march=mips32r6", + ] + ldflags += [ "--target=mipsel-linux-gnu" ] + } else { + cflags += [ + "-mips32r6", + "-Wa,-mips32r6", + ] + if (is_android) { + ldflags += [ + "-mips32r6", + "-Wl,-melf32ltsmip", + ] + } + } + if (mips_use_msa == true) { + cflags += [ + "-mmsa", + "-mfp64", + "-msched-weight", + "-mload-store-pairs", + ] + } + } else if (mips_arch_variant == "r2") { + if (is_clang) { + if (is_android) { + cflags += [ + "--target=mipsel-linux-android", + "-march=mipsel", + "-mcpu=mips32r2", + ] + ldflags += [ "--target=mipsel-linux-android" ] + } else { + cflags += [ + "--target=mipsel-linux-gnu", + "-march=mipsel", + "-mcpu=mips32r2", + ] + ldflags += [ "--target=mipsel-linux-gnu" ] + } + } else { + cflags += [ + "-mips32r2", + "-Wa,-mips32r2", + ] + if (mips_float_abi == "hard" && mips_fpu_mode != "") { + cflags += [ "-m$mips_fpu_mode" ] + } + } + } else if (mips_arch_variant == "r1") { + if (is_clang) { + if (is_android) { + cflags += [ + "--target=mipsel-linux-android", + "-march=mipsel", + "-mcpu=mips32", + ] + ldflags += [ "--target=mipsel-linux-android" ] + } else { + cflags += [ + "--target=mipsel-linux-gnu", + "-march=mipsel", + "-mcpu=mips32", + ] + ldflags += [ "--target=mipsel-linux-gnu" ] + } + } else { + cflags += [ + "-mips32", + "-Wa,-mips32", + ] + } + } + + if (mips_dsp_rev == 1) { + cflags += [ "-mdsp" ] + } else if (mips_dsp_rev == 2) { + cflags += [ "-mdspr2" ] + } + + cflags += [ "-m${mips_float_abi}-float" ] + } else if (current_cpu == "mips64el") { + if (mips_arch_variant == "r6") { + if (is_clang) { + if (is_android) { + cflags += [ + "--target=mips64el-linux-android", + "-march=mips64el", + "-mcpu=mips64r6", + ] + ldflags += [ "--target=mips64el-linux-android" ] + } + } else { + cflags += [ + "-mips64r6", + "-Wa,-mips64r6", + ] + ldflags += [ "-mips64r6" ] + } + if (mips_use_msa == true) { + cflags += [ + "-mmsa", + "-mfp64", + "-msched-weight", + "-mload-store-pairs", + ] + } + } else if (mips_arch_variant == "r2") { + cflags += [ + "-mips64r2", + "-Wa,-mips64r2", + ] + ldflags += [ "-mips64r2" ] + } + } else if (current_cpu == "pnacl" && is_nacl_nonsfi) { + if (target_cpu == "x86" || target_cpu == "x64") { + cflags += [ + "-arch", + "x86-32-nonsfi", + "--pnacl-bias=x86-32-nonsfi", + "--target=i686-unknown-nacl", + ] + ldflags += [ + "-arch", + "x86-32-nonsfi", + "--target=i686-unknown-nacl", + ] + } else if (target_cpu == "arm") { + cflags += [ + "-arch", + "arm-nonsfi", + "-mfloat-abi=hard", + "--pnacl-bias=arm-nonsfi", + "--target=armv7-unknown-nacl-gnueabihf", + ] + ldflags += [ + "-arch", + "arm-nonsfi", + "--target=armv7-unknown-nacl-gnueabihf", + ] + } + } + } + + asmflags = cflags +} + +# This provides options to tweak code generation that are necessary +# for particular Chromium code or for working around particular +# compiler bugs (or the combination of the two). +config("compiler_codegen") { + configs = [] + cflags = [] + + if (is_nacl) { + configs += [ "//build/config/nacl:compiler_codegen" ] + } else if (is_posix && !is_mac && !is_ios) { + if (current_cpu == "x86") { + if (is_clang) { + cflags += [ + # Else building libyuv gives clang's register allocator issues, + # see llvm.org/PR15798 / crbug.com/233709 + "-momit-leaf-frame-pointer", + ] + } + } else if (current_cpu == "arm") { + if (is_android && !is_clang) { + # Clang doesn't support these flags. 
+ cflags += [ + # The tree-sra optimization (scalar replacement for + # aggregates enabling subsequent optimizations) leads to + # invalid code generation when using the Android NDK's + # compiler (r5-r7). This can be verified using + # webkit_unit_tests' WTF.Checked_int8_t test. + "-fno-tree-sra", + + # The following option is disabled to improve binary + # size and performance in gcc 4.9. + "-fno-caller-saves", + ] + } + } + } + + asmflags = cflags +} + +# This is separate from :compiler_codegen (and not even a sub-config there) +# so that some targets can remove it from the list with: +# configs -= [ "//build/config/compiler:clang_stackrealign" ] +# See https://crbug.com/556393 for details of where it must be avoided. +config("clang_stackrealign") { + if (is_clang && current_cpu == "x86" && is_linux) { + cflags = [ + # Align the stack on 16-byte boundaries, http://crbug.com/418554. + "-mstack-alignment=16", + "-mstackrealign", + ] + } +} + +config("compiler_arm_fpu") { + if (current_cpu == "arm" && !is_ios && !is_nacl) { + cflags = [ "-mfpu=$arm_fpu" ] + asmflags = cflags + } +} + +# runtime_library ------------------------------------------------------------- +# +# Sets the runtime library and associated options. +# +# How do you determine what should go in here vs. "compiler" above? Consider if +# a target might choose to use a different runtime library (ignore for a moment +# if this is possible or reasonable on your system). If such a target would want +# to change or remove your option, put it in the runtime_library config. If a +# target wants the option regardless, put it in the compiler config. + +config("runtime_library") { + defines = [] + configs = [] + + # System-specific flags. If your compiler flags apply to one of the + # categories here, add it to the associated file to keep this shared config + # smaller. + if (is_win) { + configs += [ "//build/config/win:runtime_library" ] + } else if (is_linux) { + configs += [ "//build/config/linux:runtime_library" ] + } else if (is_ios) { + configs += [ "//build/config/ios:runtime_library" ] + } else if (is_mac) { + configs += [ "//build/config/mac:runtime_library" ] + } else if (is_android) { + configs += [ "//build/config/android:runtime_library" ] + } + + if (is_posix) { + configs += [ "//build/config/posix:runtime_library" ] + } + + if (is_component_build) { + defines += [ "COMPONENT_BUILD" ] + } +} + +# default_warnings ------------------------------------------------------------ +# +# Collects all warning flags that are used by default. This is used as a +# subconfig of both chromium_code and no_chromium_code. This way these +# flags are guaranteed to appear on the compile command line after -Wall. +config("default_warnings") { + cflags = [] + cflags_cc = [] + + if (is_win) { + if (treat_warnings_as_errors) { + cflags += [ "/WX" ] + } + + cflags += [ + # Warnings permanently disabled: + + # C4091: 'typedef ': ignored on left of 'X' when no variable is + # declared. + # This happens in a number of Windows headers. Dumb. + "/wd4091", + + # C4127: conditional expression is constant + # This warning can in theory catch dead code and other problems, but + # triggers in far too many desirable cases where the conditional + # expression is either set by macros or corresponds some legitimate + # compile-time constant expression (due to constant template args, + # conditionals comparing the sizes of different types, etc.). Some of + # these can be worked around, but it's not worth it. 
+ "/wd4127", + + # C4251: 'identifier' : class 'type' needs to have dll-interface to be + # used by clients of class 'type2' + # This is necessary for the shared library build. + "/wd4251", + + # C4351: new behavior: elements of array 'array' will be default + # initialized + # This is a silly "warning" that basically just alerts you that the + # compiler is going to actually follow the language spec like it's + # supposed to, instead of not following it like old buggy versions did. + # There's absolutely no reason to turn this on. + "/wd4351", + + # C4355: 'this': used in base member initializer list + # It's commonly useful to pass |this| to objects in a class' initializer + # list. While this warning can catch real bugs, most of the time the + # constructors in question don't attempt to call methods on the passed-in + # pointer (until later), and annotating every legit usage of this is + # simply more hassle than the warning is worth. + "/wd4355", + + # C4503: 'identifier': decorated name length exceeded, name was + # truncated + # This only means that some long error messages might have truncated + # identifiers in the presence of lots of templates. It has no effect on + # program correctness and there's no real reason to waste time trying to + # prevent it. + "/wd4503", + + # Warning C4589 says: "Constructor of abstract class ignores + # initializer for virtual base class." Disable this warning because it + # is flaky in VS 2015 RTM. It triggers on compiler generated + # copy-constructors in some cases. + "/wd4589", + + # C4611: interaction between 'function' and C++ object destruction is + # non-portable + # This warning is unavoidable when using e.g. setjmp/longjmp. MSDN + # suggests using exceptions instead of setjmp/longjmp for C++, but + # Chromium code compiles without exception support. We therefore have to + # use setjmp/longjmp for e.g. JPEG decode error handling, which means we + # have to turn off this warning (and be careful about how object + # destruction happens in such cases). + "/wd4611", + + # Warnings to evaluate and possibly fix/reenable later: + + "/wd4100", # Unreferenced formal function parameter. + "/wd4121", # Alignment of a member was sensitive to packing. + "/wd4244", # Conversion: possible loss of data. + "/wd4505", # Unreferenced local function has been removed. + "/wd4510", # Default constructor could not be generated. + "/wd4512", # Assignment operator could not be generated. + "/wd4610", # Class can never be instantiated, constructor required. + "/wd4838", # Narrowing conversion. Doesn't seem to be very useful. + "/wd4995", # 'X': name was marked as #pragma deprecated + "/wd4996", # Deprecated function warning. + + # These are variable shadowing warnings that are new in VS2015. We + # should work through these at some point -- they may be removed from + # the RTM release in the /W4 set. + "/wd4456", + "/wd4457", + "/wd4458", + "/wd4459", + ] + + if (visual_studio_version == "2015") { + cflags += [ + # C4312 is a VS 2015 64-bit warning for integer to larger pointer. + # TODO(brucedawson): fix warnings, crbug.com/554200 + "/wd4312", + + # TODO(brucedawson): http://crbug.com/593448 - C4595 is an 'illegal + # inline operator new' warning that is new in VS 2015 Update 2. + # This is equivalent to clang's no-inline-new-delete warning. + # See http://bugs.icu-project.org/trac/ticket/11122 + "/wd4595", + ] + + if (current_cpu == "x86") { + cflags += [ + # VC++ 2015 changes 32-bit size_t truncation warnings from 4244 to + # 4267. 
Example: short TruncTest(size_t x) { return x; } + # Since we disable 4244 we need to disable 4267 during migration. + # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. + "/wd4267", + ] + } + } + + # VS xtree header file needs to be patched or 4702 (unreachable code + # warning) is reported if _HAS_EXCEPTIONS=0. Disable the warning if xtree is + # not patched. + if (!msvs_xtree_patched && + exec_script("../../win_is_xtree_patched.py", [], "value") == 0) { + cflags += [ "/wd4702" ] # Unreachable code. + } + + # Building with Clang on Windows is a work in progress and very + # experimental. See crbug.com/82385. + # Keep this in sync with the similar block in build/common.gypi + if (is_clang) { + cflags += [ + # TODO(hans): Make this list shorter eventually, http://crbug.com/504657 + "-Wno-microsoft-enum-value", # http://crbug.com/505296 + "-Wno-unknown-pragmas", # http://crbug.com/505314 + "-Wno-microsoft-cast", # http://crbug.com/550065 + ] + } + } else { + if (is_mac && !is_nacl) { + # When compiling Objective-C, warns if a method is used whose + # availability is newer than the deployment target. This is not + # required when compiling Chrome for iOS. + cflags += [ "-Wpartial-availability" ] + } + + # Suppress warnings about ABI changes on ARM (Clang doesn't give this + # warning). + if (current_cpu == "arm" && !is_clang) { + cflags += [ "-Wno-psabi" ] + } + + if (!is_clang) { + cflags_cc += [ + # See comment for -Wno-c++11-narrowing. + "-Wno-narrowing", + ] + + # Don't warn about the "typedef 'foo' locally defined but not used" + # for gcc 4.8. + # TODO: remove this flag once all builds work. See crbug.com/227506 + cflags += [ "-Wno-unused-local-typedefs" ] + + # Don't warn about "maybe" uninitialized. Clang doesn't include this + # in -Wall but gcc does, and it gives false positives. + cflags += [ "-Wno-maybe-uninitialized" ] + } + } + + # Common Clang and GCC warning setup. + if (!is_win || is_clang) { + cflags += [ + # Disables. + "-Wno-missing-field-initializers", # "struct foo f = {0};" + "-Wno-unused-parameter", # Unused function parameters. + ] + } + + if (is_chromeos) { + # TODO(thakis): Remove, http://crbug.com/263960 + if (is_clang) { + cflags_cc += [ "-Wno-reserved-user-defined-literal" ] + } else { + cflags_cc += [ "-Wno-literal-suffix" ] + } + } + + if (is_clang) { + cflags += [ + # TODO(thakis): Consider -Wloop-analysis (turns on + # -Wrange-loop-analysis too). + + # This warns on using ints as initializers for floats in + # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|), + # which happens in several places in chrome code. Not sure if + # this is worth fixing. + "-Wno-c++11-narrowing", + + # Warns on switches on enums that cover all enum values but + # also contain a default: branch. Chrome is full of that. + "-Wno-covered-switch-default", + + # Clang considers the `register` keyword as deprecated, but e.g. + # code generated by flex (used in angle) contains that keyword. + # http://crbug.com/255186 + "-Wno-deprecated-register", + + # TODO(thakis): This used to be implied by -Wno-unused-function, + # which we no longer use. Check if it makes sense to remove + # this as well. http://crbug.com/316352 + "-Wno-unneeded-internal-declaration", + + # TODO(hans): Get this cleaned up, http://crbug.com/428099 + "-Wno-inconsistent-missing-override", + ] + + # Chrome's hermetic Clang compiler, NaCl's Clang compiler and Xcode's Clang + # compiler will almost always have different versions. Certain flags may not + # be recognized by one version or the other. 
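Outside of GN, the usual way to cope with version-dependent flags is a configure-style probe that asks the compiler whether it accepts a flag; clang's -Werror=unknown-warning-option turns unknown warning flags into hard errors, which makes the probe reliable. A minimal sketch with a hypothetical helper, not part of this change:

  import os
  import subprocess
  import tempfile

  def CompilerSupportsFlag(cc, flag):
    """Returns True if |cc| accepts |flag| when compiling an empty file."""
    with tempfile.NamedTemporaryFile(suffix='.cc') as empty:
      return subprocess.call(
          [cc, '-Werror=unknown-warning-option', flag,
           '-c', empty.name, '-o', os.devnull]) == 0

  print(CompilerSupportsFlag('clang++', '-Wno-undefined-var-template'))
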
+ if (!is_nacl) { + # Flags NaCl (Clang 3.7) does not recognize. + cflags += [ + # TODO(thakis): Enable this, crbug.com/507717 + "-Wno-shift-negative-value", + ] + } + if (!is_nacl && !use_xcode_clang) { + # Flags NaCl (Clang 3.7) and Xcode 7.3 (Clang clang-703.0.31) do not + # recognize. + cflags += [ + # TODO(thakis): https://crbug.com/604888 + "-Wno-undefined-var-template", + + # TODO(thakis): https://crbug.com/617318 + "-Wno-nonportable-include-path", + + # TODO(hans): https://crbug.com/637306 + "-Wno-address-of-packed-member", + ] + } + } +} + +# chromium_code --------------------------------------------------------------- +# +# Toggles between higher and lower warnings for code that is (or isn't) +# part of Chromium. + +config("chromium_code") { + if (is_win) { + cflags = [ "/W4" ] # Warning level 4. + } else { + cflags = [ "-Wall" ] + if (treat_warnings_as_errors) { + cflags += [ "-Werror" ] + } + if (is_clang) { + # Enable -Wextra for chromium_code when we control the compiler. + cflags += [ "-Wextra" ] + } + + # In Chromium code, we define __STDC_foo_MACROS in order to get the + # C99 macros on Mac and Linux. + defines = [ + "__STDC_CONSTANT_MACROS", + "__STDC_FORMAT_MACROS", + ] + + if (!is_debug && !using_sanitizer && + (!is_linux || !is_clang || is_official_build)) { + # _FORTIFY_SOURCE isn't really supported by Clang now, see + # http://llvm.org/bugs/show_bug.cgi?id=16821. + # It seems to work fine with Ubuntu 12 headers though, so use it in + # official builds. + # + # Non-chromium code is not guaranteed to compile cleanly with + # _FORTIFY_SOURCE. Also, fortified build may fail when optimizations are + # disabled, so only do that for Release build. + defines += [ "_FORTIFY_SOURCE=2" ] + } + + if (is_mac) { + # Warn if automatic synthesis is triggered. This triggers numerous + # warnings for internal iOS directories. + cflags_objc = [ "-Wobjc-missing-property-synthesis" ] + cflags_objcc = [ "-Wobjc-missing-property-synthesis" ] + } + } + + configs = [ ":default_warnings" ] +} + +config("no_chromium_code") { + cflags = [] + cflags_cc = [] + defines = [] + + if (is_win) { + cflags += [ + "/W3", # Warning level 3. + "/wd4800", # Disable warning when forcing value to bool. + "/wd4267", # TODO(jschuh): size_t to int. + "/wd4996", # Deprecated function warning. + ] + defines += [ + "_CRT_NONSTDC_NO_WARNINGS", + "_CRT_NONSTDC_NO_DEPRECATE", + ] + } else { + # GCC may emit unsuppressible warnings so don't add -Werror for no chromium + # code. crbug.com/589724 + if (treat_warnings_as_errors && is_clang) { + cflags += [ "-Werror" ] + } + if (is_clang && !is_nacl) { + # TODO(thakis): Remove !is_nacl once + # https://codereview.webrtc.org/1552863002/ made its way into chromium. + cflags += [ "-Wall" ] + } + } + + if (is_clang) { + cflags += [ + # Lots of third-party libraries have unused variables. Instead of + # suppressing them individually, we just blanket suppress them here. + "-Wno-unused-variable", + ] + } + + if (is_linux || is_android) { + cflags_cc += [ + # Don't warn about hash_map in third-party code. + "-Wno-deprecated", + ] + } + + configs = [ ":default_warnings" ] +} + +# rtti ------------------------------------------------------------------------ +# +# Allows turning Run-Time Type Identification on or off. + +config("rtti") { + if (is_win) { + cflags_cc = [ "/GR" ] + } else { + cflags_cc = [ "-frtti" ] + } +} +config("no_rtti") { + # CFI diagnostics and UBsan vptr require RTTI. 
+  if (!use_cfi_diag && !is_ubsan_vptr && !is_ubsan_security) {
+    if (is_win) {
+      cflags_cc = [ "/GR-" ]
+    } else {
+      cflags_cc = [ "-fno-rtti" ]
+      cflags_objcc = cflags_cc
+    }
+  }
+}
+
+# Warnings ---------------------------------------------------------------------
+
+# This will generate warnings when using Clang if code generates exit-time
+# destructors, which will slow down closing the program.
+# TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+config("wexit_time_destructors") {
+  # TODO: Enable on Windows too, http://crbug.com/404525
+  if (is_clang && !is_win) {
+    cflags = [ "-Wexit-time-destructors" ]
+  }
+}
+
+# On Windows compiling on x64, VC will issue a warning when converting
+# size_t to int because it will truncate the value. Our code should not have
+# these warnings and one should use a static_cast or a checked_cast for the
+# conversion depending on the case. However, a lot of code still needs to be
+# fixed. Apply this config to such targets to disable the warning.
+#
+# Note that this can be applied regardless of platform and architecture to
+# clean up the call sites. This will only apply the flag when necessary.
+#
+# TODO(jschuh): crbug.com/167187 fix this and delete this config.
+config("no_size_t_to_int_warning") {
+  if (is_win && current_cpu == "x64") {
+    cflags = [ "/wd4267" ]
+  }
+}
+
+# Some code presumes that pointers to structures/objects are compatible
+# regardless of whether what they point to is already known to be valid.
+# gcc 4.9 and earlier had no way of suppressing this warning without
+# suppressing the rest of them. Here we centralize the identification of
+# the gcc 4.9 toolchains.
+config("no_incompatible_pointer_warnings") {
+  cflags = []
+  if (is_clang) {
+    cflags += [ "-Wno-incompatible-pointer-types" ]
+  } else if (current_cpu == "mipsel") {
+    cflags += [ "-w" ]
+  } else if (is_chromeos && current_cpu == "arm") {
+    cflags += [ "-w" ]
+  }
+}
+
+# Optimization -----------------------------------------------------------------
+#
+# The BUILDCONFIG file sets the "default_optimization" config on targets by
+# default. It will be equivalent to either "optimize" (release) or
+# "no_optimize" (debug) optimization configs.
+#
+# You can override the optimization level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+#   configs -= [ "//build/config/compiler:default_optimization" ]
+#   configs += [ "//build/config/compiler:optimize_max" ]
+
+# Shared settings for both "optimize" and "optimize_max" configs.
+# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
+if (is_win) {
+  common_optimize_on_cflags = [
+    "/Ob2",  # Both explicit and auto inlining.
+    "/Oy-",  # Disable omitting frame pointers, must be after /O2.
+    "/d2Zi+",  # Improve debugging of optimized code.
+    "/Zc:inline",  # Remove unreferenced COMDAT (faster links).
+  ]
+  if (!is_asan) {
+    common_optimize_on_cflags += [
+      # Put data in separate COMDATs. This allows the linker
+      # to put bit-identical constants at the same address even if
+      # they're unrelated constants, which saves binary size.
+      # This optimization can't be used when ASan is enabled because
+      # it is not compatible with the ASan ODR checker.
+      "/Gw",
+    ]
+  }
+  common_optimize_on_ldflags = [ "/OPT:ICF" ]  # Redundant COMDAT folding.
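/OPT:ICF here (and --icf in the ELF link flags further up) folds sections whose contents are bit-identical into a single copy. A toy illustration of the folding idea, not part of this change and much simpler than what a real linker does (which also compares relocations):

  import hashlib

  # Pretend section name -> section bytes, as a linker would see them.
  sections = {
    'IsEnabledA': b'\x31\xc0\xc3',  # xor eax, eax; ret
    'IsEnabledB': b'\x31\xc0\xc3',  # identical body -> foldable
    'Compute':    b'\x8d\x04\x3f\xc3',
  }

  folded = {}
  for name, contents in sections.items():
    digest = hashlib.sha1(contents).hexdigest()
    folded.setdefault(digest, []).append(name)

  for names in folded.values():
    if len(names) > 1:
      print('folding %s into one copy' % ', '.join(sorted(names)))
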
+  if (is_official_build) {
+    common_optimize_on_ldflags += [
+      "/OPT:REF",  # Remove unreferenced data.
+      "/LTCG",  # Link-time code generation.
+
+      # Set the number of LTCG code-gen threads to eight. The default is four.
+      # This gives a 5-10% link speedup.
+      "/cgthreads:8",
+    ]
+    if (full_wpo_on_official) {
+      arflags = [ "/LTCG" ]
+    }
+  }
+} else {
+  common_optimize_on_cflags = []
+  common_optimize_on_ldflags = []
+
+  if (is_android) {
+    # We don't omit frame pointers on arm64 since they are required
+    # to correctly unwind stack frames which contain system library
+    # function frames (crbug.com/391706).
+    if (!using_sanitizer && !enable_profiling && current_cpu != "arm64") {
+      common_optimize_on_cflags += [ "-fomit-frame-pointer" ]
+    }
+
+    # TODO(jdduke) Re-enable on mips after resolving linking
+    # issues with libc++ (crbug.com/456380).
+    if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+      common_optimize_on_ldflags += [
+        # Warn in case of text relocations.
+        "-Wl,--warn-shared-textrel",
+      ]
+    }
+  }
+
+  if (is_mac || is_ios) {
+    if (symbol_level == 2) {
+      # Mac dead code stripping requires symbols.
+      common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
+    }
+  } else {
+    # Non-Mac Posix flags.
+
+    common_optimize_on_cflags += [
+      # Don't emit the GCC version ident directives, they just end up in the
+      # .comment section taking up binary size.
+      "-fno-ident",
+
+      # Put data and code in their own sections, so that unused symbols
+      # can be removed at link time with --gc-sections.
+      "-fdata-sections",
+      "-ffunction-sections",
+    ]
+
+    common_optimize_on_ldflags += [
+      # Specifically tell the linker to perform optimizations.
+      # See http://lwn.net/Articles/192624/ .
+      "-Wl,-O1",
+      "-Wl,--gc-sections",
+    ]
+
+    if (!using_sanitizer) {
+      # Functions interposed by the sanitizers can make ld think
+      # that some libraries aren't needed when they actually are,
+      # http://crbug.com/234010. As a workaround, disable --as-needed.
+      if (!is_nacl && !is_android) {
+        # TODO(pcc): Fix linker bug which requires us to link pthread
+        # unconditionally here (crbug.com/623236).
+        common_optimize_on_ldflags += [
+          "-Wl,--no-as-needed",
+          "-lpthread",
+        ]
+      }
+      common_optimize_on_ldflags += [ "-Wl,--as-needed" ]
+    }
+  }
+}
+
+# Default "optimization on" config.
+config("optimize") {
+  if (is_win) {
+    # TODO(thakis): Remove is_clang here, https://crbug.com/598772
+    if (is_official_build && full_wpo_on_official && !is_clang) {
+      common_optimize_on_cflags += [
+        "/GL",  # Whole program optimization.
+
+        # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+        # Probably anything that this would catch that wouldn't be caught in a
+        # normal build isn't going to actually be a bug, so the incremental
+        # value of C4702 for PGO builds is likely very small.
+        "/wd4702",
+      ]
+    }
+
+    # Favor size over speed, /O1 must be before the common flags. The GYP
+    # build also specifies /Os and /GF but these are implied by /O1.
+    cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+  } else if (optimize_for_size) {
+    # Favor size over speed.
+    cflags = [ "-Os" ] + common_optimize_on_cflags
+  } else {
+    cflags = [ "-O2" ] + common_optimize_on_cflags
+  }
+  ldflags = common_optimize_on_ldflags
+}
+
+# Same config as 'optimize' but without the WPO flag.
+config("optimize_no_wpo") {
+  if (is_win) {
+    # Favor size over speed, /O1 must be before the common flags. The GYP
+    # build also specifies /Os and /GF but these are implied by /O1.
+    cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+  } else if (optimize_for_size) {
+    # Favor size over speed.
+ cflags = [ "-Os" ] + common_optimize_on_cflags + } else { + cflags = [ "-O2" ] + common_optimize_on_cflags + } + ldflags = common_optimize_on_ldflags +} + +# Turn off optimizations. +config("no_optimize") { + if (is_win) { + cflags = [ + "/Od", # Disable optimization. + "/Ob0", # Disable all inlining (on by default). + ] + } else if (is_android && !android_full_debug) { + # On Android we kind of optimize some things that don't affect debugging + # much even when optimization is disabled to get the binary size down. + cflags = [ + "-Os", + "-fdata-sections", + "-ffunction-sections", + ] + + # We don't omit frame pointers on arm64 since they are required + # to correctly unwind stackframes which contain system library + # function frames (crbug.com/391706). + if (!using_sanitizer && !enable_profiling && current_cpu != "arm64") { + cflags += [ "-fomit-frame-pointer" ] + } + + # Don't use gc-sections since it can cause links to succeed when they + # actually shouldn't. http://crbug.com/159847 + ldflags = common_optimize_on_ldflags - [ "-Wl,--gc-sections" ] + } else { + cflags = [ "-O0" ] + ldflags = [] + } +} + +# Turns up the optimization level. On Windows, this implies whole program +# optimization and link-time code generation which is very expensive and should +# be used sparingly. +config("optimize_max") { + if (is_nacl_irt) { + # The NaCl IRT is a special case and always wants its own config. + # Various components do: + # if (!is_debug) { + # configs -= [ "//build/config/compiler:default_optimization" ] + # configs += [ "//build/config/compiler:optimize_max" ] + # } + # So this config has to have the selection logic just like + # "default_optimization", below. + configs = [ "//build/config/nacl:irt_optimize" ] + } else { + ldflags = common_optimize_on_ldflags + if (is_win) { + # Favor speed over size, /O2 must be before the common flags. The GYP + # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2. + cflags = [ "/O2" ] + common_optimize_on_cflags + + # TODO(thakis): Remove is_clang here, https://crbug.com/598772 + if (is_official_build && !is_clang) { + cflags += [ + "/GL", # Whole program optimization. + + # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds. + # Probably anything that this would catch that wouldn't be caught in a + # normal build isn't going to actually be a bug, so the incremental + # value of C4702 for PGO builds is likely very small. + "/wd4702", + ] + } + } else { + cflags = [ "-O2" ] + common_optimize_on_cflags + } + } +} + +# This config can be used to override the default settings for per-component +# and whole-program optimization, optimizing the particular target for speed +# instead of code size. This config is exactly the same as "optimize_max" +# except that we use -O3 instead of -O2 on non-win, non-IRT platforms. +# +# TODO(crbug.com/621335) - rework how all of these configs are related +# so that we don't need this disclaimer. +config("optimize_speed") { + if (is_nacl_irt) { + # The NaCl IRT is a special case and always wants its own config. + # Various components do: + # if (!is_debug) { + # configs -= [ "//build/config/compiler:default_optimization" ] + # configs += [ "//build/config/compiler:optimize_max" ] + # } + # So this config has to have the selection logic just like + # "default_optimization", below. + configs = [ "//build/config/nacl:irt_optimize" ] + } else { + ldflags = common_optimize_on_ldflags + if (is_win) { + # Favor speed over size, /O2 must be before the common flags. 
The GYP
+      # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+      cflags = [ "/O2" ] + common_optimize_on_cflags
+
+      # TODO(thakis): Remove is_clang here, https://crbug.com/598772
+      if (is_official_build && !is_clang) {
+        cflags += [
+          "/GL",  # Whole program optimization.
+
+          # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+          # Probably anything that this would catch that wouldn't be caught in
+          # a normal build isn't going to actually be a bug, so the incremental
+          # value of C4702 for PGO builds is likely very small.
+          "/wd4702",
+        ]
+      }
+    } else {
+      cflags = [ "-O3" ] + common_optimize_on_cflags
+    }
+  }
+}
+
+# The default optimization applied to all targets. This will be equivalent to
+# either "optimize" or "no_optimize", depending on the build flags.
+config("default_optimization") {
+  if (is_nacl_irt) {
+    # The NaCl IRT is a special case and always wants its own config.
+    # It gets optimized the same way regardless of the type of build.
+    configs = [ "//build/config/nacl:irt_optimize" ]
+  } else if (is_debug) {
+    configs = [ ":no_optimize" ]
+  } else {
+    configs = [ ":optimize" ]
+  }
+}
+
+# Symbols ----------------------------------------------------------------------
+
+# The BUILDCONFIG file sets the "default_symbols" config on targets by
+# default. It will be equivalent to one of the three specific symbol levels.
+#
+# You can override the symbol level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+#   configs -= [ "//build/config/compiler:default_symbols" ]
+#   configs += [ "//build/config/compiler:symbols" ]
+
+# Full symbols.
+config("symbols") {
+  if (is_win) {
+    import("//build/toolchain/goma.gni")
+    if (use_goma) {
+      # Disable symbols during goma compilation because otherwise the redundant
+      # debug information (repeated in every .obj file) makes linker memory
+      # consumption and link times unsustainable (crbug.com/630074).
+      cflags = []
+    } else {
+      cflags = [ "/Zi" ]  # Produce PDB file, no edit and continue.
+    }
+    if (is_win_fastlink && visual_studio_version != "2013") {
+      # Tell VS 2015+ to create a PDB that references debug
+      # information in .obj and .lib files instead of copying
+      # it all. This flag is incompatible with /PROFILE.
+      ldflags = [ "/DEBUG:FASTLINK" ]
+    } else {
+      ldflags = [ "/DEBUG" ]
+    }
+  } else {
+    if (is_mac || is_ios) {
+      cflags = [ "-gdwarf-2" ]
+      if (is_mac && enable_dsyms) {
+        # If generating dSYMs, specify -fno-standalone-debug. This was
+        # originally specified for https://crbug.com/479841 because dsymutil
+        # could not handle a 4GB dSYM file. But dsymutil from Xcode versions
+        # prior to 7 also produces debug data that is incompatible with
+        # Breakpad dump_syms, so this is still required
+        # (https://crbug.com/622406).
+        cflags += [ "-fno-standalone-debug" ]
+      }
+    } else {
+      cflags = [ "-g2" ]
+    }
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+    asmflags = cflags
+    ldflags = []
+  }
+}
+
+# Minimal symbols.
+config("minimal_symbols") {
+  if (is_win) {
+    # Linker symbols for backtraces only.
+    cflags = []
+    if (is_win_fastlink && visual_studio_version != "2013") {
+      # Tell VS 2015+ to create a PDB that references debug
+      # information in .obj and .lib files instead of copying
+      # it all.
This flag is incompatible with /PROFILE + ldflags = [ "/DEBUG:FASTLINK" ] + } else { + ldflags = [ "/DEBUG" ] + } + } else { + cflags = [ "-g1" ] + if (use_debug_fission) { + cflags += [ "-gsplit-dwarf" ] + } + asmflags = cflags + ldflags = [] + } +} + +# No symbols. +config("no_symbols") { + if (!is_win) { + cflags = [ "-g0" ] + asmflags = cflags + } +} + +# Default symbols. +config("default_symbols") { + if (symbol_level == 0) { + configs = [ ":no_symbols" ] + } else if (symbol_level == 1) { + configs = [ ":minimal_symbols" ] + } else if (symbol_level == 2) { + configs = [ ":symbols" ] + } else { + assert(false) + } +} + +if (is_ios || is_mac) { + # On Mac and iOS, this enables support for ARC (automatic ref-counting). + # See http://clang.llvm.org/docs/AutomaticReferenceCounting.html. + config("enable_arc") { + common_flags = [ "-fobjc-arc" ] + cflags_objc = common_flags + cflags_objcc = common_flags + } +} diff --git a/build/config/compiler/compiler.gni b/build/config/compiler/compiler.gni new file mode 100644 index 00000000000..2e5072e2589 --- /dev/null +++ b/build/config/compiler/compiler.gni @@ -0,0 +1,96 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/config.gni") +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/toolchain.gni") + +declare_args() { + # How many symbols to include in the build. This affects the performance of + # the build since the symbols are large and dealing with them is slow. + # 2 means regular build with symbols. + # 1 means minimal symbols, usually enough for backtraces only. + # 0 means no symbols. + # -1 means auto-set according to debug/release and platform. + symbol_level = -1 + + # Compile in such a way as to enable profiling of the generated code. For + # example, don't omit the frame pointer and leave in symbols. + enable_profiling = false + + # use_debug_fission: whether to use split DWARF debug info + # files. This can reduce link time significantly, but is incompatible + # with some utilities such as icecc and ccache. Requires gold and + # gcc >= 4.8 or clang. + # http://gcc.gnu.org/wiki/DebugFission + # + # This is a placeholder value indicating that the code below should set + # the default. This is necessary to delay the evaluation of the default + # value expression until after its input values such as use_gold have + # been set, e.g. by a toolchain_args() block. + use_debug_fission = "default" + + # Tell VS to create a PDB that references information in .obj files rather + # than copying it all. This should improve linker performance. mspdbcmf.exe + # can be used to convert a fastlink pdb to a normal one. + is_win_fastlink = false + + # Specify the current PGO phase, only used for the Windows MSVS build. Here's + # the different values that can be used: + # 0 : Means that PGO is turned off. + # 1 : Used during the PGI (instrumentation) phase. + # 2 : Used during the PGO (optimization) phase. + # + # TODO(sebmarchand): Add support for the PGU (update) phase. + chrome_pgo_phase = 0 +} + +declare_args() { + # Whether or not the official builds should be built with full WPO. Enabled by + # default for the PGO and the x64 builds. 
+ if (chrome_pgo_phase > 0) { + full_wpo_on_official = true + } else { + full_wpo_on_official = false + } +} + +declare_args() { + # Generate Syzygy optimized binaries. Syzygy optimize mode is a profile + # guided optimization that reorders code for better locality. + syzygy_optimize = is_win && target_cpu == "x86" && is_official_build && + !is_clang && !is_win_fastlink && !is_syzyasan +} + +declare_args() { + # Whether to use the gold linker from binutils instead of lld or bfd. + use_gold = !use_lld && !(is_chromecast && is_linux && + (current_cpu == "arm" || current_cpu == "mipsel")) && + ((is_linux && (current_cpu == "x64" || current_cpu == "x86" || + current_cpu == "arm" || current_cpu == "mipsel")) || + (is_android && (current_cpu == "x86" || current_cpu == "x64" || + current_cpu == "arm"))) +} + +# If it wasn't manually set, set to an appropriate default. +assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level") +if (symbol_level == -1) { + if (is_android && use_order_profiling) { + # With instrumentation enabled, debug info puts libchrome.so over 4gb, which + # causes the linker to produce an invalid ELF. http://crbug.com/574476 + symbol_level = 0 + } else if (!is_linux || is_debug || is_official_build || is_chromecast) { + # Linux is slowed by having symbols as part of the target binary, whereas + # Mac and Windows have them separate, so in Release Linux, default them off, + # but keep them on for Official builds and Chromecast builds. + symbol_level = 2 + } else if (using_sanitizer) { + # Sanitizers require symbols for filename suppressions to work. + symbol_level = 1 + } else { + symbol_level = 0 + } +} diff --git a/build/config/crypto.gni b/build/config/crypto.gni new file mode 100644 index 00000000000..0196e873e50 --- /dev/null +++ b/build/config/crypto.gni @@ -0,0 +1,23 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file declares build flags for the SSL library configuration. +# +# TODO(brettw) this should probably be moved to src/crypto or somewhere, and +# the global build dependency on it should be removed. +# +# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't +# required. See the declare_args block of BUILDCONFIG.gn for advice on how +# to set up feature flags. + +# True when we're using OpenSSL for representing certificates. When targeting +# Android, the platform certificate library is used for certificate +# verification. On NaCl, verification isn't supported. On other targets, this +# flag also enables OpenSSL for certificate verification, but this configuration +# is unsupported. +use_openssl_certs = is_android || is_nacl + +# True if NSS is used for certificate handling. It is possible to use OpenSSL +# for the crypto library, but NSS for the platform certificate library. +use_nss_certs = is_linux diff --git a/build/config/dcheck_always_on.gni b/build/config/dcheck_always_on.gni new file mode 100644 index 00000000000..60672cdf50e --- /dev/null +++ b/build/config/dcheck_always_on.gni @@ -0,0 +1,8 @@ +# Copyright (c) 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # Set to true to enable dcheck in Release builds. 
+ dcheck_always_on = false +} diff --git a/build/config/features.gni b/build/config/features.gni new file mode 100644 index 00000000000..d3e51f5a907 --- /dev/null +++ b/build/config/features.gni @@ -0,0 +1,189 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# ============================================= +# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE +# ============================================= +# +# These flags are effectively global. Your feature flag should go near the +# code it controls. Most of these items are here now because they control +# legacy global #defines passed to the compiler (now replaced with generated +# buildflag headers -- see //build/buildflag_header.gni). +# +# There is more advice on where to put build flags in the "Build flag" section +# of //build/config/BUILDCONFIG.gn. + +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +if (is_android) { + import("//build/config/android/config.gni") +} + +declare_args() { + # Multicast DNS. + enable_mdns = is_win || is_linux + + enable_extensions = !is_android && !is_ios + enable_plugins = (!is_android && !is_ios) || is_chromecast + enable_pdf = !is_android && !is_ios && !is_chromecast + + # Enables Native Client support. + # Temporarily disable nacl on arm64 linux to get rid of compilation errors. + # TODO(mcgrathr): When mipsel-nacl-clang is available, drop the exclusion. + enable_nacl = false + + # Non-SFI is not yet supported on mipsel + enable_nacl_nonsfi = current_cpu != "mipsel" + + # If debug_devtools is set to true, JavaScript files for DevTools are stored + # as is and loaded from disk. Otherwise, a concatenated file is stored in + # resources.pak. It is still possible to load JS files from disk by passing + # --debug-devtools cmdline switch. + debug_devtools = false + + # Enables WebRTC. + enable_webrtc = !is_ios + + # Enables the Media Router. + enable_media_router = !is_ios && !is_chromecast + + # Enables proprietary codecs and demuxers; e.g. H264, AAC, MP3, and MP4. + # We always build Google Chrome and Chromecast with proprietary codecs. + proprietary_codecs = true + + enable_captive_portal_detection = !is_android && !is_ios && !is_chromecast + + # Enables use of the session service, which is enabled by default. + # Android stores them separately on the Java side. + enable_session_service = !is_android && !is_ios && !is_chromecast + + enable_plugin_installation = false + + enable_app_list = is_chromeos + + enable_supervised_users = !is_ios && !is_chromecast + + enable_remoting = false + + # Enable hole punching for the protected video. + enable_video_hole = is_android && !is_chromecast + + # Enables browser side Content Decryption Modules. Required for embedders + # (e.g. Android and ChromeCast) that use a browser side CDM. + enable_browser_cdms = is_android || is_chromecast + + # Hangout services is an extension that adds extra features to Hangouts. + # For official GYP builds, this flag is set. + enable_hangout_services_extension = false + + # Variable safe_browsing is used to control the build time configuration for + # safe browsing feature. Safe browsing can be compiled in 3 different levels: + # 0 disables it, 1 enables it fully, and 2 enables mobile protection via an + # external API. 
+  if (is_android) {
+    safe_browsing_mode = 2
+  } else if (is_ios || is_chromecast) {
+    safe_browsing_mode = 0
+  } else {
+    safe_browsing_mode = 0
+  }
+
+  # Set to true to make a build that disables activation of field trial tests
+  # specified in testing/variations/fieldtrial_testing_config_*.json.
+  # Note: this setting is ignored if is_chrome_branded.
+  fieldtrial_testing_like_official_build = is_chrome_branded
+
+  use_cups = (is_desktop_linux || is_mac) && !is_chromecast
+
+  # Enables Wi-Fi Display functionality.
+  # WARNING: This enables MPEG Transport Stream (MPEG-TS) encoding!
+  enable_wifi_display = false
+
+  # libudev usage. This currently only affects the content layer.
+  use_udev = is_linux && !is_chromecast
+
+  use_dbus = is_linux && !is_chromecast
+
+  # Option controlling the use of GConf (the classic GNOME configuration
+  # system).
+  use_gconf = is_linux && !is_chromeos && !is_chromecast
+
+  use_gio = is_linux && !is_chromeos && !is_chromecast
+
+  # Enable basic printing support and UI.
+  enable_basic_printing = !is_chromecast && !is_ios
+
+  # Enable printing with print preview. It does not imply
+  # enable_basic_printing. It's possible to build Chrome with preview only.
+  enable_print_preview = !is_android && !is_chromecast && !is_ios
+
+  # Whether or not to use the external popup menu.
+  use_external_popup_menu = is_android || is_mac
+
+  # Enable WebVR support by default on Android.
+  # Still requires a command line flag to access the API.
+  enable_webvr = is_android && is_component_build &&
+                 (current_cpu == "x86" || current_cpu == "x64" ||
+                  current_cpu == "arm" || current_cpu == "arm64")
+}
+
+# Additional dependent variables -----------------------------------------------
+
+# Enable the spell checker.
+enable_spellcheck = !is_ios
+
+# Use the operating system's spellchecker rather than hunspell.
+use_browser_spellchecker = is_android || is_mac
+
+# Use Minikin hyphenation engine.
+use_minikin_hyphenation = is_android
+
+# Enables the use of CDMs in pepper plugins.
+enable_pepper_cdms =
+    enable_plugins && (is_linux || is_mac || is_win) && !is_chromecast
+
+# The seccomp-bpf sandbox is only supported on five architectures
+# currently.
+# Do not disable seccomp_bpf anywhere without talking to
+# security@chromium.org!
+use_seccomp_bpf =
+    (is_linux || is_android) &&
+    (current_cpu == "x86" || current_cpu == "x64" || current_cpu == "arm" ||
+     current_cpu == "arm64" || current_cpu == "mipsel")
+
+# Enable notifications everywhere except iOS.
+enable_notifications = !is_ios
+
+enable_web_speech = !is_android && !is_ios
+
+enable_task_manager = !is_ios && !is_android
+
+enable_themes = !is_android && !is_ios
+
+# Whether we are using the rlz library or not. Platforms like Android send
+# rlz codes for searches but do not use the library.
+enable_rlz_support = is_win || is_mac || is_ios || is_chromeos
+enable_rlz = is_chrome_branded && enable_rlz_support
+
+enable_service_discovery = enable_mdns || is_mac
+
+# Image loader extension is enabled on ChromeOS only.
+enable_image_loader_extension = is_chromeos
+
+# Chrome OS: whether to also build the upcoming version of
+# ChromeVox, which can then be enabled via a command-line switch.
+enable_chromevox_next = false
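These flags used to become global #defines; as the header comment notes, they are now surfaced to C++ through generated buildflag headers (see //build/buildflag_header.gni). A minimal sketch of the header shape such a generator produces, not part of this change (flag names are illustrative):

  # BUILDFLAG(ENABLE_MDNS) in C++ expands to BUILDFLAG_INTERNAL_ENABLE_MDNS(),
  # so the generated header only needs one #define per flag.
  flags = {'ENABLE_MDNS': True, 'ENABLE_WEBVR': False}

  lines = ['// Generated by the build. Do not edit.', '']
  for name, value in sorted(flags.items()):
    lines.append('#define BUILDFLAG_INTERNAL_%s() (%d)' % (name, int(value)))

  print('\n'.join(lines))
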
+
+# Use brlapi from brltty for braille display support.
+use_brlapi = is_chromeos
+
+enable_configuration_policy = !is_ios
+
+enable_mac_keystone = is_mac && is_chrome_branded && is_official_build
+#
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# See comment at the top.
diff --git a/build/config/gcc/BUILD.gn b/build/config/gcc/BUILD.gn
new file mode 100644
index 00000000000..422ec0e92af
--- /dev/null
+++ b/build/config/gcc/BUILD.gn
@@ -0,0 +1,123 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported so this is separated
+# into its own config so such libraries can remove this config to make symbols
+# public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
+config("symbol_visibility_hidden") {
+  # Note that -fvisibility-inlines-hidden is set globally in the compiler
+  # config since that can almost always be applied.
+  cflags = [ "-fvisibility=hidden" ]
+}
+
+# This config is usually set when :symbol_visibility_hidden is removed.
+# It's often a good idea to set visibility explicitly, as there are flags
+# that would error out otherwise (e.g. -fsanitize=cfi-unrelated-cast).
+config("symbol_visibility_default") {
+  cflags = [ "-fvisibility=default" ]
+}
+
+# The rpath is the dynamic library search path. Setting this config on a link
+# step will put the directory where the build generates shared libraries into
+# the rpath.
+#
+# It's important that this *not* be used for release builds we push out.
+# Chrome uses some setuid binaries, and hard links preserve setuid bits. An
+# unprivileged user could gain root privileges by hardlinking a setuid
+# executable and then adding in whatever binaries they want to run into the lib
+# directory.
+#
+# Example bug: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=520126
+#
+# This is required for component builds since the build generates many shared
+# libraries in the build directory that we expect to be automatically loaded.
+# It will be automatically applied in this case by :executable_ldconfig.
+#
+# In non-component builds, certain test binaries may expect to load dynamic
+# libraries from the current directory. As long as these aren't distributed,
+# this is OK. For these cases use something like this:
+#
+#  if (is_linux && !is_component_build) {
+#    configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
+#  }
+config("rpath_for_built_shared_libraries") {
+  if (!is_android) {
+    # Note: Android doesn't support rpath.
+    if (shlib_subdir != ".") {
+      rpath_link = "${shlib_subdir}/"
+    } else {
+      rpath_link = "."
+    }
+    ldflags = [
+      # Want to pass "\$". GN will re-escape as required for ninja.
+      "-Wl,-rpath=\$ORIGIN/${rpath_link}",
+      "-Wl,-rpath-link=${rpath_link}",
+    ]
+  }
+}
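A sketch of what the config above computes, not part of this change. $ORIGIN is expanded at load time by the dynamic loader to the directory containing the executable itself, which is what lets component-build binaries find their shared libraries next to themselves without LD_LIBRARY_PATH:

  def RpathLdflags(shlib_subdir='.'):
    """Mirrors the GN logic above; returns linker flags as a list."""
    rpath_link = shlib_subdir + '/' if shlib_subdir != '.' else '.'
    return [
      '-Wl,-rpath=$ORIGIN/' + rpath_link,
      '-Wl,-rpath-link=' + rpath_link,
    ]

  print(RpathLdflags())       # ['-Wl,-rpath=$ORIGIN/.', '-Wl,-rpath-link=.']
  print(RpathLdflags('lib'))  # ['-Wl,-rpath=$ORIGIN/lib/', '-Wl,-rpath-link=lib/']
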
+
+# Settings for executables.
+config("executable_ldconfig") {
+  # WARNING! //sandbox/linux:chrome_sandbox will not pick up this
+  # config, because it is a setuid binary that needs special flags.
+  # If you add things to this config, make sure you check to see
+  # if they should be added to that target as well.
+  ldflags = []
+  if (is_android) {
+    ldflags += [
+      "-Bdynamic",
+      "-Wl,-z,nocopyreloc",
+    ]
+  } else {
+    # See the rpath_for... config above for why this is necessary for component
+    # builds. Sanitizers use a custom libc++ where this is also necessary.
+    if (is_component_build || using_sanitizer) {
+      configs = [ ":rpath_for_built_shared_libraries" ]
+    }
+    if (current_cpu == "mipsel") {
+      ldflags += [ "-pie" ]
+    }
+  }
+
+  if (!is_android || !use_gold) {
+    # Find the path containing shared libraries for this toolchain
+    # relative to the build directory. ${root_out_dir} will be a
+    # subdirectory of ${root_build_dir} when cross compiling.
+    _rpath_link = rebase_path(root_out_dir, root_build_dir)
+    if (shlib_subdir != ".") {
+      _rpath_link += "/$shlib_subdir"
+    }
+    if (is_android) {
+      _rebased_sysroot = rebase_path(sysroot, root_build_dir)
+      _rpath_link += ":$_rebased_sysroot/usr/lib"
+    }
+
+    ldflags += [
+      "-Wl,-rpath-link=$_rpath_link",
+
+      # TODO(GYP): Do we need a check on the binutils version here?
+      #
+      # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+      # and the new DT_RUNPATH doesn't work without the --no-as-needed flag.
+      "-Wl,--disable-new-dtags",
+    ]
+  }
+}
+
+config("no_exceptions") {
+  cflags_cc = [ "-fno-exceptions" ]
+  cflags_objcc = cflags_cc
+}
diff --git a/build/config/ios/BUILD.gn b/build/config/ios/BUILD.gn
new file mode 100644
index 00000000000..0dda3e52631
--- /dev/null
+++ b/build/config/ios/BUILD.gn
@@ -0,0 +1,66 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is iOS-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  common_flags = [
+    "-isysroot",
+    sysroot,
+
+    "-stdlib=libc++",
+  ]
+
+  if (use_ios_simulator) {
+    common_flags += [ "-mios-simulator-version-min=$ios_deployment_target" ]
+  } else {
+    common_flags += [ "-miphoneos-version-min=$ios_deployment_target" ]
+  }
+
+  asmflags = common_flags
+  cflags = common_flags
+  ldflags = common_flags
+
+  # TODO(crbug.com/634373): Remove once Xcode's libc++ has LLVM r256325. Most
+  # likely this means once Xcode 8 is released and required.
+  if (use_xcode_clang && get_path_info(ios_sdk_version, "name") != "10") {
+    common_cc_flags = [
+      "-isystem",
+      rebase_path("//third_party/llvm-build/Release+Asserts/include/c++/v1",
+                  root_build_dir),
+    ]
+
+    cflags_cc = common_cc_flags
+    cflags_objcc = common_cc_flags
+  }
+}
+
+config("ios_dynamic_flags") {
+  ldflags = [ "-Wl,-ObjC" ]  # Always load Objective-C categories and classes.
+}
+
+config("xctest_config") {
+  common_flags = [
+    "-F",
+    "$ios_sdk_platform_path/Developer/Library/Frameworks",
+  ]
+
+  cflags = common_flags
+  ldflags = common_flags
+
+  libs = [
+    "Foundation.framework",
+    "XCTest.framework",
+  ]
+}
+
+group("xctest") {
+  public_configs = [ ":xctest_config" ]
+}
diff --git a/build/config/ios/BuildInfo.plist b/build/config/ios/BuildInfo.plist
new file mode 100644
index 00000000000..3595e5aefbb
--- /dev/null
+++ b/build/config/ios/BuildInfo.plist
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>BuildMachineOSBuild</key>
+  <string>${BUILD_MACHINE_OS_BUILD}</string>
+  <key>CFBundleSupportedPlatforms</key>
+  <array>
+    <string>${IOS_SUPPORTED_PLATFORM}</string>
+  </array>
+  <key>DTCompiler</key>
+  <string>${GCC_VERSION}</string>
+  <key>DTPlatformName</key>
+  <string>${IOS_PLATFORM_NAME}</string>
+  <key>DTPlatformVersion</key>
+  <string>${IOS_PLATFORM_VERSION}</string>
+  <key>DTPlatformBuild</key>
+  <string>${IOS_PLATFORM_BUILD}</string>
+  <key>DTSDKBuild</key>
+  <string>${IOS_SDK_BUILD}</string>
+  <key>DTSDKName</key>
+  <string>${IOS_SDK_NAME}</string>
+  <key>MinimumOSVersion</key>
+  <string>${IOS_DEPLOYMENT_TARGET}</string>
+  <key>DTXcode</key>
+  <string>${XCODE_VERSION}</string>
+  <key>DTXcodeBuild</key>
+  <string>${XCODE_BUILD}</string>
+  <key>UIDeviceFamily</key>
+  <array>
+    <integer>1</integer>
+    <integer>2</integer>
+  </array>
+</dict>
+</plist>
diff --git a/build/config/ios/Host-Info.plist b/build/config/ios/Host-Info.plist
new file mode 100644
index 00000000000..c306faad217
--- /dev/null
+++ b/build/config/ios/Host-Info.plist
@@ -0,0 +1,126 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>CFBundleDevelopmentRegion</key>
+  <string>en</string>
+  <key>CFBundleDisplayName</key>
+  <string>${PRODUCT_NAME}</string>
+  <key>CFBundleExecutable</key>
+  <string>${EXECUTABLE_NAME}</string>
+  <key>CFBundleIdentifier</key>
+  <string>org.chromium.test.ios_web_shell_test_host</string>
+  <key>CFBundleInfoDictionaryVersion</key>
+  <string>6.0</string>
+  <key>CFBundleName</key>
+  <string>${PRODUCT_NAME}</string>
+  <key>CFBundlePackageType</key>
+  <string>APPL</string>
+  <key>CFBundleShortVersionString</key>
+  <string>1.0</string>
+  <key>CFBundleSignature</key>
+  <string>????</string>
+  <key>CFBundleVersion</key>
+  <string>1.0</string>
+  <key>LSRequiresIPhoneOS</key>
+  <true/>
+  <key>NSAppTransportSecurity</key>
+  <dict>
+    <key>NSAllowsArbitraryLoads</key>
+    <true/>
+  </dict>
+  <key>UIRequiredDeviceCapabilities</key>
+  <array>
+    <string>armv7</string>
+  </array>
+  <key>UILaunchImages</key>
+  <array>
+    <dict>
+      <key>UILaunchImageMinimumOSVersion</key>
+      <string>7.0</string>
+      <key>UILaunchImageName</key>
+      <string>Default</string>
+      <key>UILaunchImageOrientation</key>
+      <string>Portrait</string>
+      <key>UILaunchImageSize</key>
+      <string>{320, 480}</string>
+    </dict>
+    <dict>
+      <key>UILaunchImageMinimumOSVersion</key>
+      <string>7.0</string>
+      <key>UILaunchImageName</key>
+      <string>Default</string>
+      <key>UILaunchImageOrientation</key>
+      <string>Portrait</string>
+      <key>UILaunchImageSize</key>
+      <string>{320, 568}</string>
+    </dict>
+    <dict>
+      <key>UILaunchImageMinimumOSVersion</key>
+      <string>8.0</string>
+      <key>UILaunchImageName</key>
+      <string>Default</string>
+      <key>UILaunchImageOrientation</key>
+      <string>Portrait</string>
+      <key>UILaunchImageSize</key>
+      <string>{375, 667}</string>
+    </dict>
+    <dict>
+      <key>UILaunchImageMinimumOSVersion</key>
+      <string>8.0</string>
+      <key>UILaunchImageName</key>
+      <string>Default</string>
+      <key>UILaunchImageOrientation</key>
+      <string>Portrait</string>
+      <key>UILaunchImageSize</key>
+      <string>{414, 736}</string>
+    </dict>
+    <dict>
+      <key>UILaunchImageMinimumOSVersion</key>
+      <string>8.0</string>
+      <key>UILaunchImageName</key>
+      <string>Default</string>
+      <key>UILaunchImageOrientation</key>
+      <string>Landscape</string>
+      <key>UILaunchImageSize</key>
+      <string>{414, 736}</string>
+    </dict>
+  </array>
+  <key>UILaunchImages~ipad</key>
+  <array>
+    <dict>
+      <key>UILaunchImageMinimumOSVersion</key>
+      <string>7.0</string>
+      <key>UILaunchImageName</key>
+      <string>Default</string>
+      <key>UILaunchImageOrientation</key>
+      <string>Portrait</string>
+      <key>UILaunchImageSize</key>
+      <string>{768, 1024}</string>
+    </dict>
+    <dict>
+      <key>UILaunchImageMinimumOSVersion</key>
+      <string>7.0</string>
+      <key>UILaunchImageName</key>
+      <string>Default</string>
+      <key>UILaunchImageOrientation</key>
+      <string>Landscape</string>
+      <key>UILaunchImageSize</key>
+      <string>{768, 1024}</string>
+    </dict>
+  </array>
+  <key>UISupportedInterfaceOrientations</key>
+  <array>
+    <string>UIInterfaceOrientationPortrait</string>
+    <string>UIInterfaceOrientationLandscapeLeft</string>
+    <string>UIInterfaceOrientationLandscapeRight</string>
+  </array>
+  <key>UISupportedInterfaceOrientations~ipad</key>
+  <array>
+    <string>UIInterfaceOrientationPortrait</string>
+    <string>UIInterfaceOrientationPortraitUpsideDown</string>
+    <string>UIInterfaceOrientationLandscapeLeft</string>
+    <string>UIInterfaceOrientationLandscapeRight</string>
+  </array>
+</dict>
+</plist>
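The ${VARIABLE} placeholders in these templates are substituted at build time before the plist is copied into the bundle. A minimal sketch of that substitution, not part of this change (helper name and values are illustrative; plistlib.readPlistFromString is the same Python 2 API codesign.py uses below):

  import plistlib
  import re

  def ExpandPlistTemplate(template_path, substitutions):
    """Returns the parsed plist with ${NAME} placeholders substituted."""
    with open(template_path) as f:
      text = f.read()
    # Replace every ${NAME} with its value; unknown names are left as-is.
    text = re.sub(r'\$\{(\w+)\}',
                  lambda m: substitutions.get(m.group(1), m.group(0)), text)
    return plistlib.readPlistFromString(text)

  info = ExpandPlistTemplate('Host-Info.plist',
                             {'PRODUCT_NAME': 'ios_web_shell_test_host',
                              'EXECUTABLE_NAME': 'ios_web_shell_test_host'})
  print(info['CFBundleName'])  # ios_web_shell_test_host
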
diff --git a/build/config/ios/Module-Info.plist b/build/config/ios/Module-Info.plist
new file mode 100644
index 00000000000..6131774765c
--- /dev/null
+++ b/build/config/ios/Module-Info.plist
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>CFBundleDevelopmentRegion</key>
+  <string>en</string>
+  <key>CFBundleExecutable</key>
+  <string>${EXECUTABLE_NAME}</string>
+  <key>CFBundleIdentifier</key>
+  <string>org.chromium.test.ios_web_shell_test_host.ios_web_shell_test</string>
+  <key>CFBundleInfoDictionaryVersion</key>
+  <string>6.0</string>
+  <key>CFBundleName</key>
+  <string>${PRODUCT_NAME}</string>
+  <key>CFBundlePackageType</key>
+  <string>BNDL</string>
+  <key>CFBundleShortVersionString</key>
+  <string>1.0</string>
+  <key>CFBundleSignature</key>
+  <string>????</string>
+  <key>CFBundleVersion</key>
+  <string>1</string>
+</dict>
+</plist>
diff --git a/build/config/ios/OWNERS b/build/config/ios/OWNERS
new file mode 100644
index 00000000000..0e726c6a2c2
--- /dev/null
+++ b/build/config/ios/OWNERS
@@ -0,0 +1 @@
+file://build/config/mac/OWNERS
diff --git a/build/config/ios/codesign.py b/build/config/ios/codesign.py
new file mode 100644
index 00000000000..3d66b246837
--- /dev/null
+++ b/build/config/ios/codesign.py
@@ -0,0 +1,364 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import fnmatch
+import glob
+import os
+import plistlib
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def GetProvisioningProfilesDir():
+  """Returns the location of the installed mobile provisioning profiles.
+
+  Returns:
+    The path to the directory containing the installed mobile provisioning
+    profiles as a string.
+  """
+  return os.path.join(
+      os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+
+
+def LoadPlistFile(plist_path):
+  """Loads the property list file at |plist_path|.
+
+  Args:
+    plist_path: path to the property list file to load.
+
+  Returns:
+    The content of the property list file as a python object.
+  """
+  return plistlib.readPlistFromString(subprocess.check_output([
+      'xcrun', 'plutil', '-convert', 'xml1', '-o', '-', plist_path]))
+
+
+class Bundle(object):
+  """Wraps a bundle."""
+
+  def __init__(self, bundle_path):
+    """Initializes the Bundle object with data from the bundle Info.plist."""
+    self._path = bundle_path
+    self._data = LoadPlistFile(os.path.join(self._path, 'Info.plist'))
+
+  @property
+  def path(self):
+    return self._path
+
+  @property
+  def identifier(self):
+    return self._data['CFBundleIdentifier']
+
+  @property
+  def binary_path(self):
+    return os.path.join(self._path, self._data['CFBundleExecutable'])
+
+
+class ProvisioningProfile(object):
+  """Wraps a mobile provisioning profile file."""
+
+  def __init__(self, provisioning_profile_path):
+    """Initializes the ProvisioningProfile with data from the profile file."""
+    self._path = provisioning_profile_path
+    self._data = plistlib.readPlistFromString(subprocess.check_output([
+        'xcrun', 'security', 'cms', '-D', '-i', provisioning_profile_path]))
+
+  @property
+  def path(self):
+    return self._path
+
+  @property
+  def application_identifier_pattern(self):
+    return self._data.get('Entitlements', {}).get('application-identifier', '')
+
+  @property
+  def team_identifier(self):
+    return self._data.get('TeamIdentifier', [''])[0]
+
+  @property
+  def entitlements(self):
+    return self._data.get('Entitlements', {})
+
+  def ValidToSignBundle(self, bundle_identifier):
+    """Checks whether the provisioning profile can sign bundle_identifier.
+
+    Args:
+      bundle_identifier: the identifier of the bundle that needs to be signed.
+
+    Returns:
+      True if the mobile provisioning profile can be used to sign a bundle
+      with the corresponding bundle_identifier, False otherwise.
+    """
+    return fnmatch.fnmatch(
+        '%s.%s' % (self.team_identifier, bundle_identifier),
+        self.application_identifier_pattern)
+
+  def Install(self, bundle):
+    """Copies the mobile provisioning profile into the bundle."""
+    installation_path = os.path.join(bundle.path, 'embedded.mobileprovision')
+    shutil.copy2(self.path, installation_path)
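The matching rule ValidToSignBundle implements above: the profile's application-identifier entitlement is an fnmatch pattern such as 'TEAMID.*', matched against '<team identifier>.<bundle identifier>'. A standalone illustration, not part of this change (team and bundle identifiers are made up):

  import fnmatch

  pattern = 'TEAMID123.org.chromium.*'      # from the profile's entitlements
  candidate = 'TEAMID123.org.chromium.test.ios_web_shell_test_host'
  print(fnmatch.fnmatch(candidate, pattern))  # True

  # A wildcard profile matches anything signed by the same team:
  print(fnmatch.fnmatch(candidate, 'TEAMID123.*'))  # True
  print(fnmatch.fnmatch(candidate, 'OTHERTEAM.*'))  # False
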
+    """
+    return fnmatch.fnmatch(
+        '%s.%s' % (self.team_identifier, bundle_identifier),
+        self.application_identifier_pattern)
+
+  def Install(self, bundle):
+    """Copies mobile provisioning profile into the bundle."""
+    installation_path = os.path.join(bundle.path, 'embedded.mobileprovision')
+    shutil.copy2(self.path, installation_path)
+
+
+class Entitlements(object):
+  """Wraps an Entitlements plist file."""
+
+  def __init__(self, entitlements_path):
+    """Initializes Entitlements object from entitlement file."""
+    self._path = entitlements_path
+    self._data = LoadPlistFile(self._path)
+
+  @property
+  def path(self):
+    return self._path
+
+  def ExpandVariables(self, substitutions):
+    self._data = self._ExpandVariables(self._data, substitutions)
+
+  def _ExpandVariables(self, data, substitutions):
+    if isinstance(data, str):
+      for key, substitution in substitutions.iteritems():
+        data = data.replace('$(%s)' % (key,), substitution)
+      return data
+
+    if isinstance(data, dict):
+      for key, value in data.iteritems():
+        data[key] = self._ExpandVariables(value, substitutions)
+      return data
+
+    if isinstance(data, list):
+      for i, value in enumerate(data):
+        data[i] = self._ExpandVariables(value, substitutions)
+
+    return data
+
+  def LoadDefaults(self, defaults):
+    for key, value in defaults.iteritems():
+      if key not in self._data:
+        self._data[key] = value
+
+  def WriteTo(self, target_path):
+    plistlib.writePlist(self._data, target_path)
+
+
+def FindProvisioningProfile(bundle_identifier, required):
+  """Finds mobile provisioning profile to use to sign bundle.
+
+  Args:
+    bundle_identifier: the identifier of the bundle to sign.
+    required: whether to fail with an error if no matching provisioning
+      profile is found.
+
+  Returns:
+    The ProvisioningProfile object that can be used to sign the Bundle
+    object or None if no matching provisioning profile was found.
+  """
+  provisioning_profile_paths = glob.glob(
+      os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision'))
+
+  # Iterate over all installed mobile provisioning profiles and filter those
+  # that can be used to sign the bundle.
+  valid_provisioning_profiles = []
+  for provisioning_profile_path in provisioning_profile_paths:
+    provisioning_profile = ProvisioningProfile(provisioning_profile_path)
+    if provisioning_profile.ValidToSignBundle(bundle_identifier):
+      valid_provisioning_profiles.append(provisioning_profile)
+
+  if not valid_provisioning_profiles:
+    if required:
+      sys.stderr.write(
+          'No mobile provisioning profile found for "%s".\n' %
+          bundle_identifier)
+      sys.exit(1)
+    return None
+
+  # Select the most specific mobile provisioning profile, i.e. the one with
+  # the longest application identifier pattern.
+  return max(
+      valid_provisioning_profiles,
+      key=lambda p: len(p.application_identifier_pattern))
+
+
+def CodeSignBundle(bundle_path, identity, extra_args):
+  process = subprocess.Popen(['xcrun', 'codesign', '--force', '--sign',
+      identity, '--timestamp=none'] + list(extra_args) + [bundle_path],
+      stderr=subprocess.PIPE)
+  _, stderr = process.communicate()
+  if process.returncode:
+    sys.stderr.write(stderr)
+    sys.exit(process.returncode)
+  for line in stderr.splitlines():
+    if line.endswith(': replacing existing signature'):
+      # Ignore warning about replacing existing signature as this should only
+      # happen when re-signing system frameworks (and then it is expected).
+      continue
+    sys.stderr.write(line)
+    sys.stderr.write('\n')
+
+
+def InstallSystemFramework(framework_path, bundle_path, args):
+  """Installs framework from |framework_path| into |bundle_path|, re-signed."""
+  installed_framework_path = os.path.join(
+      bundle_path, 'Frameworks', os.path.basename(framework_path))
+
+  if os.path.exists(installed_framework_path):
+    shutil.rmtree(installed_framework_path)
+
+  shutil.copytree(framework_path, installed_framework_path)
+  CodeSignBundle(installed_framework_path, args.identity,
+      ['--deep', '--preserve-metadata=identifier,entitlements'])
+
+
+def GenerateEntitlements(path, provisioning_profile, bundle_identifier):
+  """Generates an entitlements file.
+
+  Args:
+    path: path to the entitlements template file
+    provisioning_profile: ProvisioningProfile object to use, may be None
+    bundle_identifier: identifier of the bundle to sign.
+  """
+  entitlements = Entitlements(path)
+  if provisioning_profile:
+    entitlements.LoadDefaults(provisioning_profile.entitlements)
+    app_identifier_prefix = provisioning_profile.team_identifier + '.'
+  else:
+    app_identifier_prefix = '*.'
+  entitlements.ExpandVariables({
+      'CFBundleIdentifier': bundle_identifier,
+      'AppIdentifierPrefix': app_identifier_prefix,
+  })
+  return entitlements
+
+
+class Action(object):
+
+  """Class implementing one action supported by the script."""
+
+  @classmethod
+  def Register(cls, subparsers):
+    parser = subparsers.add_parser(cls.name, help=cls.help)
+    parser.set_defaults(func=cls._Execute)
+    cls._Register(parser)
+
+
+class CodeSignBundleAction(Action):
+
+  """Class implementing the code-sign-bundle action."""
+
+  name = 'code-sign-bundle'
+  help = 'perform code signature for a bundle'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        '--entitlements', '-e', dest='entitlements_path',
+        help='path to the entitlements file to use')
+    parser.add_argument(
+        'path', help='path to the iOS bundle to codesign')
+    parser.add_argument(
+        '--identity', '-i', required=True,
+        help='identity to use to codesign')
+    parser.add_argument(
+        '--binary', '-b', required=True,
+        help='path to the iOS bundle binary')
+    parser.add_argument(
+        '--framework', '-F', action='append', default=[], dest='frameworks',
+        help='install and resign system framework')
+    parser.add_argument(
+        '--disable-code-signature', action='store_false', dest='sign',
+        help='disable code signature')
+    parser.add_argument(
+        '--platform', '-t', required=True,
+        help='platform the signed bundle is targeting')
+    parser.set_defaults(sign=True)
+
+  @staticmethod
+  def _Execute(args):
+    if not args.identity:
+      args.identity = '-'
+
+    bundle = Bundle(args.path)
+
+    # Find the mobile provisioning profile and embed it into the bundle (if a
+    # code signing identity has been provided, fail if no valid mobile
+    # provisioning profile is found).
+    provisioning_profile_required = args.identity != '-'
+    provisioning_profile = FindProvisioningProfile(
+        bundle.identifier, provisioning_profile_required)
+    if provisioning_profile and args.platform != 'iphonesimulator':
+      provisioning_profile.Install(bundle)
+
+    # Delete existing code signature.
+    signature_file = os.path.join(args.path, '_CodeSignature', 'CodeResources')
+    if os.path.isfile(signature_file):
+      os.unlink(signature_file)
+
+    # Install system frameworks if requested.
+    for framework_path in args.frameworks:
+      InstallSystemFramework(framework_path, args.path, args)
+
+    # Copy main binary into bundle.
+    if os.path.isfile(bundle.binary_path):
+      os.unlink(bundle.binary_path)
+    shutil.copy(args.binary, bundle.binary_path)
+
+    if not args.sign:
+      return
+
+    # Embed entitlements into the code signature (if a code signing identity
+    # has been provided).
+    codesign_extra_args = []
+    if provisioning_profile and args.platform != 'iphonesimulator':
+      temporary_entitlements_file = tempfile.NamedTemporaryFile(suffix='.xcent')
+      codesign_extra_args.extend(
+          ['--entitlements', temporary_entitlements_file.name])
+
+      entitlements = GenerateEntitlements(
+          args.entitlements_path, provisioning_profile, bundle.identifier)
+      entitlements.WriteTo(temporary_entitlements_file.name)
+
+    CodeSignBundle(bundle.path, args.identity, codesign_extra_args)
+
+
+class GenerateEntitlementsAction(Action):
+
+  """Class implementing the generate-entitlements action."""
+
+  name = 'generate-entitlements'
+  help = 'generate entitlements file'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        '--entitlements', '-e', dest='entitlements_path',
+        help='path to the entitlements file to use')
+    parser.add_argument(
+        'path', help='path to the entitlements file to generate')
+    parser.add_argument(
+        '--info-plist', '-p', required=True,
+        help='path to the bundle Info.plist')
+
+  @staticmethod
+  def _Execute(args):
+    info_plist = LoadPlistFile(args.info_plist)
+    bundle_identifier = info_plist['CFBundleIdentifier']
+    provisioning_profile = FindProvisioningProfile(bundle_identifier, False)
+    entitlements = GenerateEntitlements(
+        args.entitlements_path, provisioning_profile, bundle_identifier)
+    entitlements.WriteTo(args.path)
+
+
+def Main():
+  parser = argparse.ArgumentParser('codesign iOS bundles')
+  subparsers = parser.add_subparsers()
+
+  for action in [ CodeSignBundleAction, GenerateEntitlementsAction ]:
+    action.Register(subparsers)
+
+  args = parser.parse_args()
+  args.func(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/build/config/ios/entitlements.plist b/build/config/ios/entitlements.plist
new file mode 100644
index 00000000000..429762e3a3f
--- /dev/null
+++ b/build/config/ios/entitlements.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
+          "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>application-identifier</key>
+  <string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+  <key>keychain-access-groups</key>
+  <array>
+    <string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+  </array>
+</dict>
+</plist>
diff --git a/build/config/ios/find_signing_identity.py b/build/config/ios/find_signing_identity.py
new file mode 100644
index 00000000000..9c3d493d541
--- /dev/null
+++ b/build/config/ios/find_signing_identity.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
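+
+# Prints the hash of the first valid "iPhone Developer" code signing
+# identity found in the user's keychain, or an empty string if none is
+# found (see FindValidIdentity below). Sample invocation; the output hash
+# shown here is illustrative:
+#
+#   $ python find_signing_identity.py
+#   AF8426CBF1E4F83FD1B73C9564F4B965A40EDE3C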
+
+import subprocess
+import sys
+import re
+
+def ListIdentities():
+  return subprocess.check_output([
+      '/usr/bin/env',
+      'xcrun',
+      'security',
+      'find-identity',
+      '-v',
+      '-p',
+      'codesigning',
+  ]).strip()
+
+
+def FindValidIdentity():
+  lines = ListIdentities().splitlines()
+  # Look for something like "2) XYZ "iPhone Developer: Name (ABC)""
+  exp = re.compile(r'[0-9]+\) ([A-F0-9]+) "([^"]*)"')
+  for line in lines:
+    res = exp.match(line)
+    if res is None:
+      continue
+    if "iPhone Developer" in res.group(2):
+      return res.group(1)
+  return ""
+
+
+if __name__ == '__main__':
+  print FindValidIdentity()
diff --git a/build/config/ios/ios_sdk.gni b/build/config/ios/ios_sdk.gni
new file mode 100644
index 00000000000..0a3cded59cf
--- /dev/null
+++ b/build/config/ios/ios_sdk.gni
@@ -0,0 +1,106 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # SDK path to use. When empty this will use the default SDK based on the
+  # value of use_ios_simulator.
+  ios_sdk_path = ""
+  ios_sdk_name = ""
+  ios_sdk_version = ""
+  ios_sdk_platform = ""
+  ios_sdk_platform_path = ""
+  xcode_version = ""
+  xcode_build = ""
+  machine_os_build = ""
+
+  use_ios_simulator = current_cpu == "x86" || current_cpu == "x64"
+
+  # Version of iOS that we're targeting.
+  ios_deployment_target = "9.0"
+
+  # The iOS code signing identity to use.
+  # TODO(GYP), TODO(sdfresne): Consider having a separate
+  # ios_enable_code_signing_flag= flag to make the invocation clearer.
+  ios_enable_code_signing = true
+  ios_code_signing_identity = ""
+
+  # If non-empty, this list must contain valid cpu architectures, and the
+  # final build will be a multi-architecture build (aka fat build) supporting
+  # the main $target_cpu architecture and all of $additional_target_cpus.
+  #
+  # For example, to build an application that will run on both arm64 and
+  # armv7 devices, you would use the following in the args.gn file when
+  # running "gn args":
+  #
+  #   target_os = "ios"
+  #   target_cpu = "arm64"
+  #   additional_target_cpus = [ "arm" ]
+  #
+  # You can also pass the value via the "--args" parameter of the "gn gen"
+  # command by using the syntax
+  # --args='additional_target_cpus=["arm"] target_cpu="arm64"'.
+  additional_target_cpus = []
+}
+
+assert(custom_toolchain == "" || additional_target_cpus == [],
+       "cannot define both custom_toolchain and additional_target_cpus")
+
+# Initialize additional_toolchains from additional_target_cpus. Assert here
+# that the list does not contain $target_cpu nor duplicates, as either would
+# cause weird errors during the build.
+additional_toolchains = []
+if (additional_target_cpus != []) {
+  foreach(_additional_target_cpu, additional_target_cpus) {
+    assert(_additional_target_cpu != target_cpu,
+           "target_cpu must not be listed in additional_target_cpus")
+
+    _toolchain = "//build/toolchain/mac:ios_clang_$_additional_target_cpu"
+    foreach(_additional_toolchain, additional_toolchains) {
+      assert(_toolchain != _additional_toolchain,
+             "additional_target_cpus must not contain duplicate values")
+    }
+
+    additional_toolchains += [ _toolchain ]
+  }
+}
+
+if (ios_sdk_path == "") {
+  # Compute default target.
+ if (use_ios_simulator) { + ios_sdk_name = "iphonesimulator" + ios_sdk_platform = "iPhoneSimulator" + } else { + ios_sdk_name = "iphoneos" + ios_sdk_platform = "iPhoneOS" + } + _ios_sdk_result = + exec_script("//build/config/mac/sdk_info.py", [ ios_sdk_name ], "scope") + ios_sdk_path = _ios_sdk_result.sdk_path + ios_sdk_version = _ios_sdk_result.sdk_version + ios_sdk_platform_path = _ios_sdk_result.sdk_platform_path + ios_sdk_build = _ios_sdk_result.sdk_build + xcode_version = _ios_sdk_result.xcode_version + xcode_build = _ios_sdk_result.xcode_build + machine_os_build = _ios_sdk_result.machine_os_build + if (use_ios_simulator) { + # This is weird, but Xcode sets DTPlatformBuild to an empty field for + # simulator builds. + ios_platform_build = "" + } else { + ios_platform_build = ios_sdk_build + } +} + +if (ios_enable_code_signing && !use_ios_simulator) { + # If an identity is not provided, look for one on the host + if (ios_code_signing_identity == "") { + _ios_identities = exec_script("find_signing_identity.py", [], "list lines") + ios_code_signing_identity = _ios_identities[0] + } + + if (ios_code_signing_identity == "") { + print("Tried to prepare a device build without specifying a code signing") + print("identity and could not detect one automatically either.") + print("TIP: Simulator builds don't require code signing...") + assert(false) + } +} diff --git a/build/config/ios/rules.gni b/build/config/ios/rules.gni new file mode 100644 index 00000000000..71cd75063c5 --- /dev/null +++ b/build/config/ios/rules.gni @@ -0,0 +1,1364 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/ios/ios_sdk.gni") +import("//build/config/mac/base_rules.gni") +import("//build/config/mac/symbols.gni") + +# Control whether an intermediate source_set is used when building executables +# and shared_libraries for ios_app_bundle and ios_framework_bundle. This is a +# temporary flag that will be removed once scoped_nsobject_unittest{_arc}.mm +# tests are passing with this flag set to true (see crbug.com/637065) +_use_intermediate_source_set = false + +# Invokes lipo on multiple arch-specific binaries to create a fat binary. +# +# Arguments +# +# arch_binary_target +# name of the target generating the arch-specific binaries, they must +# be named $target_out_dir/$toolchain_cpu/$arch_binary_output. +# +# arch_binary_output +# (optional, defaults to the name of $arch_binary_target) base name of +# the arch-specific binary generated by arch_binary_target. +# +# output_name +# (optional, defaults to $target_name) base name of the target output, +# the full path will be $target_out_dir/$output_name. +# +# configs +# (optional) a list of configurations, this is used to check whether +# the binary should be stripped, when "enable_stripping" is true. 
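+#
+# Example of use (target and output names below are illustrative, not part
+# of the real build):
+#
+#   lipo_binary("my_app_executable") {
+#     arch_binary_target = ":my_app_arch_executable"
+#     arch_binary_output = "my_app"
+#     output_name = "my_app"
+#   }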
+# +template("lipo_binary") { + assert(defined(invoker.arch_binary_target), + "arch_binary_target must be defined for $target_name") + + _target_name = target_name + _output_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + _all_target_cpu = [ current_cpu ] + additional_target_cpus + _all_toolchains = [ current_toolchain ] + additional_toolchains + + _arch_binary_target = invoker.arch_binary_target + _arch_binary_output = get_label_info(_arch_binary_target, "name") + if (defined(invoker.arch_binary_output)) { + _arch_binary_output = invoker.arch_binary_output + } + + action(_target_name) { + forward_variables_from(invoker, + "*", + [ + "arch_binary_output", + "arch_binary_target", + "configs", + "output_name", + ]) + + script = "//build/toolchain/mac/linker_driver.py" + + outputs = [ + "$target_out_dir/$_output_name", + ] + + deps = [] + _index = 0 + inputs = [] + foreach(_cpu, _all_target_cpu) { + _toolchain = _all_toolchains[_index] + _index = _index + 1 + + inputs += + [ get_label_info("$_arch_binary_target($_toolchain)", + "target_out_dir") + "/$_cpu/$_arch_binary_output" ] + + deps += [ "$_arch_binary_target($_toolchain)" ] + } + + args = [ + "xcrun", + "lipo", + "-create", + "-output", + rebase_path("$target_out_dir/$_output_name", root_build_dir), + ] + rebase_path(inputs, root_build_dir) + + if (enable_dsyms) { + _dsyms_output_dir = "$root_out_dir/$_output_name.dSYM" + outputs += [ + "$_dsyms_output_dir/", + "$_dsyms_output_dir/Contents/Info.plist", + "$_dsyms_output_dir/Contents/Resources/DWARF/$_output_name", + ] + args += [ "-Wcrl,dsym," + rebase_path("$root_out_dir/.", root_build_dir) ] + } + + if (enable_stripping) { + # Check whether //build/config/mac:strip_all has been removed from the + # configs variables (as this is how stripping is disabled for a single + # target). + _strip_all_in_config = false + if (defined(invoker.configs)) { + foreach(_config, invoker.configs) { + if (_config == "//build/config/mac:strip_all") { + _strip_all_in_config = true + } + } + } + + if (_strip_all_in_config) { + args += [ "-Wcrl,strip,-x,-S" ] + if (save_unstripped_output) { + outputs += [ "$root_out_dir/$_output_name.unstripped" ] + args += [ "-Wcrl,unstripped," + + rebase_path("$root_out_dir/.", root_build_dir) ] + } + } + } + } +} + +# Wrapper around create_bundle taking care of code signature settings. +# +# Arguments +# +# product_type +# string, product type for the generated Xcode project. +# +# bundle_deps +# (optional) list of additional dependencies +# +# bundle_deps_filter +# (optional) list of dependencies to filter (for more information +# see "gn help bundle_deps_filter") +# +# bundle_extension +# string, extension of the bundle, used to generate bundle name. +# +# bundle_binary_target +# string, label of the target generating the bundle main binary. +# +# bundle_binary_output +# (optional) string, base name of the binary generated by the +# bundle_binary_target target, defaults to the target name. +# +# extra_system_frameworks +# (optional) list of system framework to copy to the bundle. +# +# enable_code_signing +# (optional) boolean, control whether code signing is enabled or not, +# default to ios_enable_code_signing if not defined. 
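+#
+# Example of use (all names below are illustrative):
+#
+#   create_signed_bundle("my_bundle") {
+#     product_type = "com.apple.product-type.application"
+#     bundle_extension = ".app"
+#     bundle_binary_target = ":my_bundle_executable"
+#   }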
+# +template("create_signed_bundle") { + assert(defined(invoker.product_type), + "product_type must be defined for $target_name") + assert(defined(invoker.bundle_extension), + "bundle_extension must be defined for $target_name") + assert(defined(invoker.bundle_binary_target), + "bundle_binary_target must be defined for $target_name") + + _target_name = target_name + _output_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + _bundle_binary_target = invoker.bundle_binary_target + _bundle_binary_output = get_label_info(_bundle_binary_target, "name") + if (defined(invoker.bundle_binary_output)) { + _bundle_binary_output = invoker.bundle_binary_output + } + + _bundle_extension = invoker.bundle_extension + _bundle_root_dir = "$root_out_dir/$_output_name$_bundle_extension" + + _entitlements_path = "//build/config/ios/entitlements.plist" + if (defined(invoker.entitlements_path)) { + _entitlements_path = invoker.entitlements_path + } + + _enable_code_signing = ios_enable_code_signing + if (defined(invoker.enable_code_signing)) { + _enable_code_signing = invoker.enable_code_signing + } + + create_bundle(_target_name) { + forward_variables_from(invoker, + [ + "bundle_deps_filter", + "data_deps", + "deps", + "product_type", + "public_configs", + "public_deps", + "testonly", + "visibility", + ]) + + bundle_root_dir = _bundle_root_dir + bundle_resources_dir = _bundle_root_dir + bundle_executable_dir = _bundle_root_dir + bundle_plugins_dir = "$_bundle_root_dir/PlugIns" + + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ _bundle_binary_target ] + + if (defined(invoker.bundle_deps)) { + if (!defined(deps)) { + deps = [] + } + deps += invoker.bundle_deps + } + + code_signing_script = "//build/config/ios/codesign.py" + code_signing_sources = [ + _entitlements_path, + get_label_info(_bundle_binary_target, "target_out_dir") + + "/$_bundle_binary_output", + ] + code_signing_outputs = [ "$_bundle_root_dir/$_output_name" ] + if (_enable_code_signing) { + code_signing_outputs += + [ "$_bundle_root_dir/_CodeSignature/CodeResources" ] + } + if (ios_code_signing_identity != "" && ios_sdk_name != "iphonesimulator") { + code_signing_outputs += [ "$_bundle_root_dir/embedded.mobileprovision" ] + } + + if (defined(invoker.extra_system_frameworks)) { + foreach(_framework, invoker.extra_system_frameworks) { + code_signing_outputs += [ "$bundle_root_dir/Frameworks/" + + get_path_info(_framework, "file") ] + } + } + + code_signing_args = [ + "code-sign-bundle", + "-t=" + ios_sdk_name, + "-i=" + ios_code_signing_identity, + "-e=" + rebase_path(_entitlements_path, root_build_dir), + "-b=" + rebase_path("$target_out_dir/$_output_name", root_build_dir), + rebase_path(bundle_root_dir, root_build_dir), + ] + if (!_enable_code_signing) { + code_signing_args += [ "--disable-code-signature" ] + } + if (defined(invoker.extra_system_frameworks)) { + # All framework in extra_system_frameworks are expected to be + # system framework and the path to be already system absolute + # so do not use rebase_path here. + foreach(_framework, invoker.extra_system_frameworks) { + code_signing_args += [ "-F=" + _framework ] + } + } + } +} + +# Generates Info.plist files for Mac apps and frameworks. +# +# Arguments +# +# info_plist: +# (optional) string, path to the Info.plist file that will be used for +# the bundle. 
+#
+#   info_plist_target:
+#       (optional) string, if the info_plist is generated from an action,
+#       rather than a regular source file, specify the target name in lieu
+#       of info_plist. The two arguments are mutually exclusive.
+#
+#   executable_name:
+#       string, name of the generated target used for the product
+#       and executable name as specified in the output Info.plist.
+#
+#   extra_substitutions:
+#       (optional) string array, 'key=value' pairs for extra fields which are
+#       specified in a source Info.plist template.
+template("ios_info_plist") {
+  assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
+         "Only one of info_plist or info_plist_target may be specified in " +
+             target_name)
+
+  if (defined(invoker.info_plist)) {
+    _info_plist = invoker.info_plist
+  } else {
+    _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
+    _info_plist = _info_plist_target_output[0]
+  }
+
+  info_plist(target_name) {
+    format = "binary1"
+    extra_substitutions = []
+    if (defined(invoker.extra_substitutions)) {
+      extra_substitutions = invoker.extra_substitutions
+    }
+    extra_substitutions += [
+      "IOS_DEPLOYMENT_TARGET=$ios_deployment_target",
+      "IOS_PLATFORM_BUILD=$ios_platform_build",
+      "IOS_PLATFORM_NAME=$ios_sdk_name",
+      "IOS_PLATFORM_VERSION=$ios_sdk_version",
+      "IOS_SDK_BUILD=$ios_sdk_build",
+      "IOS_SDK_NAME=$ios_sdk_name$ios_sdk_version",
+      "IOS_SUPPORTED_PLATFORM=$ios_sdk_platform",
+    ]
+    plist_templates = [
+      "//build/config/ios/BuildInfo.plist",
+      _info_plist,
+    ]
+    if (defined(invoker.info_plist_target)) {
+      deps = [
+        invoker.info_plist_target,
+      ]
+    }
+    forward_variables_from(invoker,
+                           [
+                             "executable_name",
+                             "output_name",
+                             "visibility",
+                           ])
+  }
+}
+
+# Template to build an application bundle for iOS.
+#
+# This should be used instead of the "executable" built-in target type on
+# iOS. As the template forwards the generation of the application executable
+# to an "executable" target, all arguments supported by "executable" targets
+# are also supported by this template.
+#
+# Arguments
+#
+#   output_name:
+#       (optional) string, name of the generated application, if omitted,
+#       defaults to the target_name.
+#
+#   extra_substitutions:
+#       (optional) list of strings in "key=value" format, each value will
+#       be used as an additional variable substitution rule when generating
+#       the application Info.plist
+#
+#   info_plist:
+#       (optional) string, path to the Info.plist file that will be used for
+#       the bundle.
+#
+#   info_plist_target:
+#       (optional) string, if the info_plist is generated from an action,
+#       rather than a regular source file, specify the target name in lieu
+#       of info_plist. The two arguments are mutually exclusive.
+#
+#   entitlements_path:
+#       (optional) path to the template to use to generate the application
+#       entitlements by performing variable substitutions, defaults to
+#       //build/config/ios/entitlements.plist.
+#
+#   bundle_extension:
+#       (optional) bundle extension including the dot, defaults to ".app".
+#
+#   product_type
+#       (optional) string, product type for the generated Xcode project,
+#       defaults to "com.apple.product-type.application". Should generally
+#       not be overridden.
+#
+#   enable_code_signing
+#       (optional) boolean, control whether code signing is enabled or not,
+#       defaults to ios_enable_code_signing if not defined.
+#
+# For more information, see "gn help executable".
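+#
+# Example of use (target names and paths below are illustrative):
+#
+#   ios_app_bundle("my_app") {
+#     output_name = "MyApp"
+#     info_plist = "resources/Info.plist"
+#     sources = [ "main.mm" ]
+#     deps = [ ":my_app_lib" ]
+#   }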
+template("ios_app_bundle") { + _output_name = target_name + _target_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + _arch_executable_source = _target_name + "_arch_executable_sources" + _arch_executable_target = _target_name + "_arch_executable" + _lipo_executable_target = _target_name + "_executable" + + if (_use_intermediate_source_set) { + source_set(_arch_executable_source) { + forward_variables_from(invoker, + "*", + [ + "bundle_deps", + "bundle_deps_filter", + "bundle_extension", + "enable_code_signing", + "entitlements_path", + "extra_substitutions", + "extra_system_frameworks", + "info_plist", + "info_plist_target", + "output_name", + "product_type", + "visibility", + ]) + + visibility = [ ":$_arch_executable_target" ] + } + } else { + assert(_arch_executable_source != "", + "mark _arch_executable_source as used") + } + + if (use_ios_simulator) { + _generate_entitlements_target = _target_name + "_gen_entitlements" + _generate_entitlements_output = + get_label_info(":$_generate_entitlements_target($default_toolchain)", + "target_out_dir") + "/$_output_name.xcent" + } + + executable(_arch_executable_target) { + forward_variables_from(invoker, + "*", + [ + "bundle_deps", + "bundle_deps_filter", + "bundle_extension", + "enable_code_signing", + "entitlements_path", + "extra_substitutions", + "extra_system_frameworks", + "info_plist", + "info_plist_target", + "output_name", + "product_type", + "sources", + "visibility", + ]) + if (!_use_intermediate_source_set) { + forward_variables_from(invoker, [ "sources" ]) + } + + visibility = [ ":$_lipo_executable_target($default_toolchain)" ] + if (current_toolchain != default_toolchain) { + visibility += [ ":$_target_name" ] + } + + if (!defined(deps)) { + deps = [] + } + if (_use_intermediate_source_set) { + deps += [ ":$_arch_executable_source" ] + } + + if (!defined(libs)) { + libs = [] + } + libs += [ "UIKit.framework" ] + + if (use_ios_simulator) { + deps += [ ":$_generate_entitlements_target($default_toolchain)" ] + + if (!defined(inputs)) { + inputs = [] + } + inputs += [ _generate_entitlements_output ] + + if (!defined(ldflags)) { + ldflags = [] + } + ldflags += [ + "-Xlinker", + "-sectcreate", + "-Xlinker", + "__TEXT", + "-Xlinker", + "__entitlements", + "-Xlinker", + rebase_path(_generate_entitlements_output, root_build_dir), + ] + } + + output_name = _output_name + output_prefix_override = true + output_dir = "$target_out_dir/$current_cpu" + } + + if (current_toolchain != default_toolchain) { + # For fat builds, only the default toolchain will generate an application + # bundle. For the other toolchains, the template is only used for building + # the arch-specific binary, thus the default target is just a group(). 
+ + group(_target_name) { + forward_variables_from(invoker, + [ + "visibility", + "testonly", + ]) + public_deps = [ + ":$_arch_executable_target", + ] + } + } else { + lipo_binary(_lipo_executable_target) { + forward_variables_from(invoker, + [ + "configs", + "testonly", + ]) + + visibility = [ ":$_target_name" ] + output_name = _output_name + arch_binary_target = ":$_arch_executable_target" + arch_binary_output = _output_name + } + + _generate_info_plist = target_name + "_generate_info_plist" + ios_info_plist(_generate_info_plist) { + forward_variables_from(invoker, + [ + "extra_substitutions", + "info_plist", + "info_plist_target", + ]) + + executable_name = _output_name + } + + if (use_ios_simulator) { + _entitlements_path = "//build/config/ios/entitlements.plist" + if (defined(invoker.entitlements_path)) { + _entitlements_path = invoker.entitlements_path + } + + action(_generate_entitlements_target) { + _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist") + _info_plist_path = _gen_info_plist_outputs[0] + + script = "//build/config/ios/codesign.py" + deps = [ + ":$_generate_info_plist", + ] + sources = [ + _entitlements_path, + _info_plist_path, + ] + outputs = [ + _generate_entitlements_output, + ] + args = [ + "generate-entitlements", + "-e=" + rebase_path(_entitlements_path, root_build_dir), + "-p=" + rebase_path(_info_plist_path, root_build_dir), + ] + rebase_path(outputs, root_build_dir) + } + } + + _bundle_data_info_plist = target_name + "_bundle_data_info_plist" + bundle_data(_bundle_data_info_plist) { + forward_variables_from(invoker, [ "testonly" ]) + + sources = get_target_outputs(":$_generate_info_plist") + outputs = [ + "{{bundle_root_dir}}/Info.plist", + ] + public_deps = [ + ":$_generate_info_plist", + ] + } + + create_signed_bundle(_target_name) { + forward_variables_from(invoker, + [ + "bundle_deps", + "bundle_deps_filter", + "bundle_extension", + "data_deps", + "deps", + "enable_code_signing", + "entitlements_path", + "extra_system_frameworks", + "product_type", + "public_configs", + "public_deps", + "testonly", + "visibility", + ]) + + output_name = _output_name + bundle_binary_target = ":$_lipo_executable_target" + bundle_binary_output = _output_name + + if (!defined(bundle_deps)) { + bundle_deps = [] + } + bundle_deps += [ ":$_bundle_data_info_plist" ] + + if (use_ios_simulator) { + if (!defined(data_deps)) { + data_deps = [] + } + data_deps += [ "//testing/iossim" ] + } + + if (!defined(product_type)) { + product_type = "com.apple.product-type.application" + } + + if (!defined(bundle_extension)) { + bundle_extension = ".app" + } + } + } +} + +set_defaults("ios_app_bundle") { + configs = default_executable_configs +} + +# Template to build an application extension bundle for iOS. +# +# This should be used instead of "executable" built-in target type on iOS. +# As the template forward the generation of the application executable to +# an "executable" target, all arguments supported by "executable" targets +# are also supported by this template. +# +# Arguments +# +# output_name: +# (optional) string, name of the generated application, if omitted, +# defaults to the target_name. +# +# extra_substitutions: +# (optional) list of string in "key=value" format, each value will +# be used as an additional variable substitution rule when generating +# the application Info.plist +# +# info_plist: +# (optional) string, path to the Info.plist file that will be used for +# the bundle. 
+# +# info_plist_target: +# (optional) string, if the info_plist is generated from an action, +# rather than a regular source file, specify the target name in lieu +# of info_plist. The two arguments are mutually exclusive. +# +# For more information, see "gn help executable". +template("ios_appex_bundle") { + ios_app_bundle(target_name) { + forward_variables_from(invoker, + "*", + [ + "bundle_extension", + "product_type", + ]) + bundle_extension = ".appex" + product_type = "com.apple.product-type.app-extension" + + # Add linker flags required for an application extension (determined by + # inspecting the link command-line when using Xcode 9.0+). + if (!defined(ldflags)) { + ldflags = [] + } + ldflags += [ + "-e", + "_NSExtensionMain", + "-fapplication-extension", + ] + } +} + +set_defaults("ios_appex_bundle") { + configs = default_executable_configs +} + +# Compile a xib or storyboard file and add it to a bundle_data so that it is +# available at runtime in the bundle. +# +# Arguments +# +# source: +# string, path of the xib or storyboard to compile. +# +# Forwards all variables to the bundle_data target. +template("bundle_data_xib") { + assert(defined(invoker.source), "source needs to be defined for $target_name") + + _source_extension = get_path_info(invoker.source, "extension") + assert(_source_extension == "xib" || _source_extension == "storyboard", + "source must be a .xib or .storyboard for $target_name") + + _target_name = target_name + _compile_xib = target_name + "_compile_xib" + + compile_xibs(_compile_xib) { + sources = [ + invoker.source, + ] + visibility = [ ":$_target_name" ] + ibtool_flags = [ + "--minimum-deployment-target", + ios_deployment_target, + "--auto-activate-custom-fonts", + "--target-device", + "iphone", + "--target-device", + "ipad", + ] + } + + bundle_data(_target_name) { + forward_variables_from(invoker, "*", [ "source" ]) + + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ ":$_compile_xib" ] + + sources = get_target_outputs(":$_compile_xib") + + outputs = [ + "{{bundle_resources_dir}}/{{source_file_part}}", + ] + } +} + +# Compile a strings file and add it to a bundle_data so that it is available +# at runtime in the bundle. +# +# Arguments +# +# source: +# string, path of the strings file to compile. +# +# output: +# string, path of the compiled file in the final bundle. +# +# Forwards all variables to the bundle_data target. +template("bundle_data_strings") { + assert(defined(invoker.source), "source needs to be defined for $target_name") + assert(defined(invoker.output), "output needs to be defined for $target_name") + + _source_extension = get_path_info(invoker.source, "extension") + assert(_source_extension == "strings", + "source must be a .strings for $target_name") + + _target_name = target_name + _convert_target = target_name + "_compile_strings" + + convert_plist(_convert_target) { + visibility = [ ":$_target_name" ] + source = invoker.source + output = + "$target_gen_dir/$_target_name/" + get_path_info(invoker.source, "file") + format = "binary1" + } + + bundle_data(_target_name) { + forward_variables_from(invoker, + "*", + [ + "source", + "output", + ]) + + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ ":$_convert_target" ] + + sources = get_target_outputs(":$_convert_target") + + outputs = [ + invoker.output, + ] + } +} + +# Template to package a shared library into an iOS framework bundle. 
+# +# By default, the bundle target this template generates does not link the +# resulting framework into anything that depends on it. If a dependency wants +# a link-time (as well as build-time) dependency on the framework bundle, +# depend against "$target_name+link". If only the build-time dependency is +# required (e.g., for copying into another bundle), then use "$target_name". +# +# Arguments +# +# output_name: +# (optional) string, name of the generated framework without the +# .framework suffix. If omitted, defaults to target_name. +# +# public_headers: +# (optional) list of paths to header file that needs to be copied +# into the framework bundle Headers subdirectory. If omitted or +# empty then the Headers subdirectory is not created. +# +# sources +# (optional) list of files. Needs to be defined and non-empty if +# public_headers is defined and non-empty. +# +# enable_code_signing +# (optional) boolean, control whether code signing is enabled or not, +# default to ios_enable_code_signing if not defined. +# +# This template provides two targets for the resulting framework bundle. The +# link-time behavior varies depending on which of the two targets below is +# added as a dependency: +# - $target_name only adds a build-time dependency. Targets that depend on +# it will not link against the framework. +# - $target_name+link adds a build-time and link-time dependency. Targets +# that depend on it will link against the framework. +# +# The build-time-only dependency is used for when a target needs to use the +# framework either only for resources, or because the target loads it at run- +# time, via dlopen() or NSBundle. The link-time dependency will cause the +# dependee to have the framework loaded by dyld at launch. +# +# Example of build-time only dependency: +# +# framework_bundle("CoreTeleportation") { +# sources = [ ... ] +# } +# +# bundle_data("core_teleportation_bundle_data") { +# deps = [ ":CoreTeleportation" ] +# sources = [ "$root_out_dir/CoreTeleportation.framework" ] +# outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ] +# } +# +# app_bundle("GoatTeleporter") { +# sources = [ ... ] +# deps = [ +# ":core_teleportation_bundle_data", +# ] +# } +# +# The GoatTeleporter.app will not directly link against +# CoreTeleportation.framework, but it will be included in the bundle's +# Frameworks directory. +# +# Example of link-time dependency: +# +# framework_bundle("CoreTeleportation") { +# sources = [ ... ] +# ldflags = [ +# "-install_name", +# "@executable_path/../Frameworks/$target_name.framework" +# ] +# } +# +# bundle_data("core_teleportation_bundle_data") { +# deps = [ ":CoreTeleportation+link" ] +# sources = [ "$root_out_dir/CoreTeleportation.framework" ] +# outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ] +# } +# +# app_bundle("GoatTeleporter") { +# sources = [ ... ] +# deps = [ +# ":core_teleportation_bundle_data", +# ] +# } +# +# Note that the framework is still copied to the app's bundle, but dyld will +# load this library when the app is launched because it uses the "+link" +# target as a dependency. This also requires that the framework set its +# install_name so that dyld can locate it. +# +# See "gn help shared_library" for more information on arguments supported +# by shared library target. 
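+#
+# Example of a framework exposing public headers (names are illustrative):
+#
+#   ios_framework_bundle("MyFramework") {
+#     output_name = "MyFramework"
+#     info_plist = "Info.plist"
+#     public_headers = [ "my_framework.h" ]
+#     sources = [
+#       "my_framework.h",
+#       "my_framework.mm",
+#     ]
+#   }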
+template("ios_framework_bundle") { + _target_name = target_name + _output_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + _has_public_headers = + defined(invoker.public_headers) && invoker.public_headers != [] + + if (_has_public_headers) { + _framework_headers_target = _target_name + "_framework_headers" + _framework_headers_config = _target_name + "_framework_headers_config" + _headers_map_config = _target_name + "_headers_map" + } + + _arch_shared_library_source = _target_name + "_arch_shared_library_sources" + _arch_shared_library_target = _target_name + "_arch_shared_library" + _lipo_shared_library_target = _target_name + "_shared_library" + + if (_use_intermediate_source_set) { + source_set(_arch_shared_library_source) { + forward_variables_from(invoker, + "*", + [ + "bundle_deps", + "bundle_deps_filter", + "data_deps", + "enable_code_signing", + "info_plist", + "info_plist_target", + "output_name", + "visibility", + ]) + + visibility = [ ":$_arch_shared_library_target" ] + + if (_has_public_headers) { + configs += [ + ":$_framework_headers_config($default_toolchain)", + ":$_headers_map_config($default_toolchain)", + ] + + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_framework_headers_target($default_toolchain)" ] + } + } + } else { + assert(_arch_shared_library_source != "", + "mark _arch_shared_library_source as used") + } + + shared_library(_arch_shared_library_target) { + forward_variables_from(invoker, + "*", + [ + "bundle_deps", + "bundle_deps_filter", + "data_deps", + "enable_code_signing", + "info_plist", + "info_plist_target", + "output_name", + "sources", + "visibility", + ]) + if (!_use_intermediate_source_set) { + forward_variables_from(invoker, [ "sources" ]) + } + + visibility = [ ":$_lipo_shared_library_target($default_toolchain)" ] + if (current_toolchain != default_toolchain) { + visibility += [ ":$_target_name" ] + } + + if (!defined(deps)) { + deps = [] + } + if (_use_intermediate_source_set) { + deps += [ ":$_arch_shared_library_source" ] + } else { + if (_has_public_headers) { + configs += [ + ":$_framework_headers_config($default_toolchain)", + ":$_headers_map_config($default_toolchain)", + ] + + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_framework_headers_target($default_toolchain)" ] + } + } + + output_extension = "" + output_name = _output_name + output_prefix_override = true + output_dir = "$target_out_dir/$current_cpu" + } + + if (current_toolchain != default_toolchain) { + # For fat builds, only the default toolchain will generate a framework + # bundle. For the other toolchains, the template is only used for building + # the arch-specific binary, thus the default target is just a group(). 
+ + group(_target_name) { + forward_variables_from(invoker, + [ + "visibility", + "testonly", + ]) + public_deps = [ + ":$_arch_shared_library_target", + ] + } + + group(_target_name + "+link") { + forward_variables_from(invoker, + [ + "visibility", + "testonly", + ]) + public_deps = [ + ":$_target_name+link($default_toolchain)", + ] + } + + if (defined(invoker.bundle_deps)) { + assert(invoker.bundle_deps != [], "mark bundle_deps as used") + } + } else { + if (_has_public_headers) { + _public_headers = invoker.public_headers + _framework_root = "$root_out_dir/$_output_name.framework" + + _header_map_filename = "$target_gen_dir/$_output_name.headers.hmap" + + _compile_headers_map_target = _target_name + "_compile_headers_map" + action(_compile_headers_map_target) { + visibility = [ ":$_framework_headers_target" ] + script = "//build/config/ios/write_framework_hmap.py" + outputs = [ + _header_map_filename, + ] + + # The header map generation only wants the list of headers, not all of + # sources, so filter any non-header source files from "sources". It is + # less error prone that having the developer duplicate the list of all + # headers in addition to "sources". + set_sources_assignment_filter([ + "*.c", + "*.cc", + "*.cpp", + "*.m", + "*.mm", + ]) + sources = invoker.sources + set_sources_assignment_filter([]) + + args = [ + rebase_path(_header_map_filename), + rebase_path(_framework_root, root_build_dir), + ] + rebase_path(sources, root_build_dir) + } + + _create_module_map_target = _target_name + "_module_map" + action(_create_module_map_target) { + visibility = [ ":$_framework_headers_target" ] + script = "//build/config/ios/write_framework_modulemap.py" + outputs = [ + "$_framework_root/Modules/module.modulemap", + ] + args = [ rebase_path("$_framework_root", root_build_dir) ] + } + + _copy_public_headers_target = _target_name + "_copy_public_headers" + copy(_copy_public_headers_target) { + visibility = [ ":$_framework_headers_target" ] + sources = _public_headers + outputs = [ + "$_framework_root/Headers/{{source_file_part}}", + ] + } + + config(_headers_map_config) { + visibility = [ ":$_target_name" ] + include_dirs = [ _header_map_filename ] + ldflags = [ + "-install_name", + "@rpath/$_output_name.framework/$_output_name", + ] + } + + group(_framework_headers_target) { + deps = [ + ":$_compile_headers_map_target", + ":$_copy_public_headers_target", + ":$_create_module_map_target", + ] + } + + config(_framework_headers_config) { + # The link settings are inherited from the framework_bundle config. + cflags = [ + "-F", + rebase_path("$root_out_dir/.", root_build_dir), + ] + } + } + + lipo_binary(_lipo_shared_library_target) { + forward_variables_from(invoker, + [ + "configs", + "testonly", + ]) + + visibility = [ ":$_target_name" ] + output_name = _output_name + arch_binary_target = ":$_arch_shared_library_target" + arch_binary_output = _output_name + } + + _framework_public_config = _target_name + "_public_config" + config(_framework_public_config) { + # TODO(sdefresne): should we have a framework_dirs similar to lib_dirs + # and include_dirs to avoid duplicate values on the command-line. 
+ visibility = [ ":$_target_name" ] + ldflags = [ + "-F", + rebase_path("$root_out_dir/.", root_build_dir), + ] + lib_dirs = [ root_out_dir ] + libs = [ "$_output_name.framework" ] + } + + _info_plist_target = _target_name + "_info_plist" + _info_plist_bundle = _target_name + "_info_plist_bundle" + ios_info_plist(_info_plist_target) { + visibility = [ ":$_info_plist_bundle" ] + executable_name = _output_name + forward_variables_from(invoker, + [ + "extra_substitutions", + "info_plist", + "info_plist_target", + ]) + } + + bundle_data(_info_plist_bundle) { + visibility = [ ":$_target_name" ] + forward_variables_from(invoker, [ "testonly" ]) + sources = get_target_outputs(":$_info_plist_target") + outputs = [ + "{{bundle_root_dir}}/Info.plist", + ] + public_deps = [ + ":$_info_plist_target", + ] + } + + create_signed_bundle(_target_name) { + forward_variables_from(invoker, + [ + "bundle_deps", + "bundle_deps_filter", + "data_deps", + "deps", + "enable_code_signing", + "public_configs", + "public_deps", + "testonly", + "visibility", + ]) + + product_type = "com.apple.product-type.framework" + bundle_extension = ".framework" + + output_name = _output_name + bundle_binary_target = ":$_lipo_shared_library_target" + bundle_binary_output = _output_name + + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_info_plist_bundle" ] + } + + group(_target_name + "+link") { + forward_variables_from(invoker, + [ + "public_deps", + "public_configs", + "testonly", + "visibility", + ]) + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ ":$_target_name" ] + if (!defined(public_configs)) { + public_configs = [] + } + public_configs += [ ":$_framework_public_config" ] + + if (_has_public_headers) { + public_configs += [ ":$_framework_headers_config" ] + } + } + + bundle_data(_target_name + "+bundle") { + forward_variables_from(invoker, + [ + "testonly", + "visibility", + ]) + public_deps = [ + ":$_target_name", + ] + sources = [ + "$root_out_dir/$_output_name.framework", + ] + outputs = [ + "{{bundle_resources_dir}}/Frameworks/$_output_name.framework", + ] + } + } +} + +set_defaults("ios_framework_bundle") { + configs = default_shared_library_configs +} + +# For Chrome on iOS we want to run XCTests for all our build configurations +# (Debug, Release, ...). In addition, the symbols visibility is configured to +# private by default. To simplify testing with those constraints, our tests are +# compiled in the TEST_HOST target instead of the .xctest bundle. 
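+#
+# Example of use (target and file names below are illustrative):
+#
+#   ios_xctest_test("my_module_xctests") {
+#     sources = [ "my_test_case.mm" ]
+#     deps = [ ":test_support" ]
+#   }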
+template("ios_xctest_test") { + _target_name = target_name + _output_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + _xctest_target = _target_name + _xctest_output = _output_name + + _host_target = _target_name + "_host" + _host_output = _output_name + "_host" + + _xctest_arch_loadable_module_target = _xctest_target + "_arch_loadable_module" + _xctest_lipo_loadable_module_target = _xctest_target + "_loadable_module" + + loadable_module(_xctest_arch_loadable_module_target) { + visibility = [ ":$_xctest_lipo_loadable_module_target($default_toolchain)" ] + if (current_toolchain != default_toolchain) { + visibility += [ ":$_target_name" ] + } + + sources = [ + "//build/config/ios/xctest_shell.mm", + ] + configs += [ "//build/config/ios:xctest_config" ] + + output_dir = "$target_out_dir/$current_cpu" + output_name = _xctest_output + output_prefix_override = true + output_extension = "" + } + + if (current_toolchain != default_toolchain) { + # For fat builds, only the default toolchain will generate a test bundle. + # For the other toolchains, the template is only used for building the + # arch-specific binary, thus the default target is just a group(). + group(_target_name) { + forward_variables_from(invoker, + [ + "visibility", + "testonly", + ]) + public_deps = [ + ":$_xctest_arch_loadable_module_target", + ] + } + } else { + _xctest_info_plist_target = _xctest_target + "_info_plist" + _xctest_info_plist_bundle = _xctest_target + "_info_plist_bundle" + ios_info_plist(_xctest_info_plist_target) { + visibility = [ ":$_xctest_info_plist_bundle" ] + info_plist = "//build/config/ios/Module-Info.plist" + executable_name = _output_name + } + + bundle_data(_xctest_info_plist_bundle) { + visibility = [ ":$_xctest_target" ] + public_deps = [ + ":$_xctest_info_plist_target", + ] + sources = get_target_outputs(":$_xctest_info_plist_target") + outputs = [ + "{{bundle_root_dir}}/Info.plist", + ] + } + + lipo_binary(_xctest_lipo_loadable_module_target) { + forward_variables_from(invoker, + [ + "configs", + "testonly", + ]) + + visibility = [ ":$_xctest_target" ] + output_name = _xctest_output + arch_binary_target = ":$_xctest_arch_loadable_module_target" + arch_binary_output = _xctest_output + } + + _xctest_bundle = _xctest_target + "_bundle" + create_signed_bundle(_xctest_target) { + forward_variables_from(invoker, [ "enable_code_signing" ]) + visibility = [ ":$_xctest_bundle" ] + + product_type = "com.apple.product-type.bundle.unit-test" + bundle_extension = ".xctest" + + output_name = _xctest_output + bundle_binary_target = ":$_xctest_lipo_loadable_module_target" + bundle_binary_output = _xctest_output + + deps = [ + ":$_xctest_info_plist_bundle", + ] + } + + bundle_data(_xctest_bundle) { + visibility = [ ":$_host_target" ] + public_deps = [ + ":$_xctest_target", + ] + sources = [ + "$root_out_dir/$_xctest_output.xctest", + ] + outputs = [ + "{{bundle_plugins_dir}}/$_xctest_output.xctest", + ] + } + } + + ios_app_bundle(_host_target) { + forward_variables_from(invoker, "*", [ "testonly" ]) + + testonly = true + output_name = _host_output + configs += [ "//build/config/ios:xctest_config" ] + + if (!defined(invoker.info_plist) && !defined(invoker.info_plist_target)) { + info_plist = "//build/config/ios/Host-Info.plist" + } + + # Xcode needs those two framework installed in the application (and signed) + # for the XCTest to run, so install them using extra_system_frameworks. 
+    _ios_platform_library = "$ios_sdk_platform_path/Developer/Library"
+    extra_system_frameworks = [
+      "$_ios_platform_library/Frameworks/XCTest.framework",
+      "$_ios_platform_library/PrivateFrameworks/IDEBundleInjection.framework",
+    ]
+
+    if (current_toolchain == default_toolchain) {
+      if (!defined(bundle_deps)) {
+        bundle_deps = []
+      }
+      bundle_deps += [ ":$_xctest_bundle" ]
+    }
+
+    if (!defined(ldflags)) {
+      ldflags = []
+    }
+    ldflags += [
+      "-Xlinker",
+      "-rpath",
+      "-Xlinker",
+      "@executable_path/Frameworks",
+      "-Xlinker",
+      "-rpath",
+      "-Xlinker",
+      "@loader_path/Frameworks",
+    ]
+  }
+}
+
+set_defaults("ios_xctest_test") {
+  configs = default_executable_configs
+}
diff --git a/build/config/ios/write_framework_hmap.py b/build/config/ios/write_framework_hmap.py
new file mode 100644
index 00000000000..8f6b1439d0d
--- /dev/null
+++ b/build/config/ios/write_framework_hmap.py
@@ -0,0 +1,97 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import struct
+import sys
+
+def Main(args):
+  if len(args) < 4:
+    print >> sys.stderr, "Usage: %s output.hmap Foo.framework header1.h..." %\
+        (args[0])
+    return 1
+
+  (out, framework, all_headers) = args[1], args[2], args[3:]
+
+  framework_name = os.path.basename(framework).split('.')[0]
+  all_headers = map(os.path.abspath, all_headers)
+  filelist = {}
+  for header in all_headers:
+    filename = os.path.basename(header)
+    filelist[filename] = header
+    filelist[os.path.join(framework_name, filename)] = header
+  WriteHmap(out, filelist)
+  return 0
+
+
+def NextGreaterPowerOf2(x):
+  return 2**(x).bit_length()
+
+
+def WriteHmap(output_name, filelist):
+  """Generates a header map based on |filelist|.
+
+  Per Mark Mentovai:
+    A header map is structured essentially as a hash table, keyed by names used
+    in #includes, and providing pathnames to the actual files.
+
+  The implementation below and the comment above come from inspecting:
+    http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+  while also looking at the implementation in clang in:
+    https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+  """
+  magic = 1751998832
+  version = 1
+  _reserved = 0
+  count = len(filelist)
+  capacity = NextGreaterPowerOf2(count)
+  strings_offset = 24 + (12 * capacity)
+  max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
+
+  out = open(output_name, 'wb')
+  out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
+                        count, capacity, max_value_length))
diff --git a/build/config/ios/xctest_shell.mm b/build/config/ios/xctest_shell.mm
new file mode 100644
--- /dev/null
+++ b/build/config/ios/xctest_shell.mm
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+#import <XCTest/XCTest.h>
+
+// For Chrome on iOS we want to run EarlGrey tests (that are XCTests) for all
+// our build configurations (Debug, Release, ...). In addition, the symbols
+// visibility is configured to private by default. To simplify testing with
+// those constraints, our tests are compiled in the TEST_HOST target instead
+// of the .xctest bundle, which links against only this single file (just
+// there to ensure that the bundle is not empty).
+
+@interface XCTestShellEmptyClass : NSObject
+@end
+
+@implementation XCTestShellEmptyClass
+@end
diff --git a/build/config/linux/BUILD.gn b/build/config/linux/BUILD.gn
new file mode 100644
index 00000000000..c6867670026
--- /dev/null
+++ b/build/config/linux/BUILD.gn
@@ -0,0 +1,96 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
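+
+# Example of how a target opts into one of the configs defined below
+# (the target name is illustrative):
+#
+#   source_set("x11_utils") {
+#     configs += [ "//build/config/linux:x11" ]
+#   }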
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+group("linux") {
+  visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Linux-only. This is not applied to Android, but is applied to ChromeOS.
+config("compiler") {
+  cflags = [ "-pthread" ]
+  ldflags = [ "-pthread" ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Linux-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  # Set here because OS_CHROMEOS cannot be autodetected in build_config.h like
+  # OS_LINUX and the like.
+  if (is_chromeos) {
+    defines = [ "OS_CHROMEOS" ]
+  }
+}
+
+config("fontconfig") {
+  visibility = [ "//build/linux:fontconfig" ]
+  libs = [ "fontconfig" ]
+}
+
+config("x11") {
+  libs = [
+    "X11",
+    "Xcomposite",
+    "Xcursor",
+    "Xdamage",
+    "Xext",
+    "Xfixes",
+    "Xi",
+    "Xrender",
+    "Xtst",
+  ]
+}
+
+config("xcomposite") {
+  libs = [ "Xcomposite" ]
+}
+
+config("xext") {
+  libs = [ "Xext" ]
+}
+
+config("xrandr") {
+  libs = [ "Xrandr" ]
+}
+
+config("xscrnsaver") {
+  libs = [ "Xss" ]
+}
+
+config("xfixes") {
+  libs = [ "Xfixes" ]
+}
+
+config("libcap") {
+  libs = [ "cap" ]
+}
+
+config("xi") {
+  libs = [ "Xi" ]
+}
+
+config("xtst") {
+  libs = [ "Xtst" ]
+}
+
+config("libresolv") {
+  libs = [ "resolv" ]
+}
+
+if (use_glib) {
+  pkg_config("glib") {
+    packages = [
+      "glib-2.0",
+      "gmodule-2.0",
+      "gobject-2.0",
+      "gthread-2.0",
+    ]
+  }
+}
diff --git a/build/config/linux/atk/BUILD.gn b/build/config/linux/atk/BUILD.gn
new file mode 100644
index 00000000000..27e8f41a524
--- /dev/null
+++ b/build/config/linux/atk/BUILD.gn
@@ -0,0 +1,47 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+# CrOS doesn't install GTK, gconf or any gnome packages.
+assert(!is_chromeos)
+
+# These packages should _only_ be expected when building for a target.
+# If these extra checks are not run, gconf is required when building host
+# tools for a CrOS build.
+assert(current_toolchain == default_toolchain)
+
+if (use_atk) {
+  assert(use_glib, "use_atk=true requires that use_glib=true")
+}
+
+pkg_config("atk_base") {
+  packages = [ "atk" ]
+  atk_lib_dir = exec_script(pkg_config_script,
+                            pkg_config_args + [
+                              "--libdir",
+                              "atk",
+                            ],
+                            "string")
+  defines = [ "ATK_LIB_DIR=\"$atk_lib_dir\"" ]
+}
+
+# gn orders flags on a target before flags from configs. The default config
+# adds -Wall, and these flags have to be after -Wall -- so they need to
+# come from a config and can't be on the target directly.
+config("atk") {
+  configs = [ ":atk_base" ]
+
+  cflags = [
+    # glib uses the pre-c++11 typedef-as-static_assert hack.
+    "-Wno-unused-local-typedef",
+
+    # G_DEFINE_TYPE automatically generates a *get_instance_private inline
+    # function after glib 2.37. That function is unused, so prevent the
+    # compiler from complaining about it.
+ "-Wno-unused-function", + ] +} diff --git a/build/config/linux/dbus/BUILD.gn b/build/config/linux/dbus/BUILD.gn new file mode 100644 index 00000000000..f11cf7101cb --- /dev/null +++ b/build/config/linux/dbus/BUILD.gn @@ -0,0 +1,14 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/features.gni") +import("//build/config/linux/pkg_config.gni") + +assert(use_dbus) + +# Note: if your target also depends on //dbus, you don't need to add this +# config (it will get added automatically if you depend on //dbus). +pkg_config("dbus") { + packages = [ "dbus-1" ] +} diff --git a/build/config/linux/gconf/BUILD.gn b/build/config/linux/gconf/BUILD.gn new file mode 100644 index 00000000000..262e96aa8e4 --- /dev/null +++ b/build/config/linux/gconf/BUILD.gn @@ -0,0 +1,19 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/features.gni") +import("//build/config/linux/pkg_config.gni") + +# CrOS doesn't install GTK, gconf or any gnome packages. +assert(!is_chromeos && use_gconf) + +# These packages should _only_ be expected when building for a target. +# If these extra checks are not run, gconf is required when building host +# tools for a CrOS build. +assert(current_toolchain == default_toolchain) + +pkg_config("gconf") { + packages = [ "gconf-2.0" ] + defines = [ "USE_GCONF" ] +} diff --git a/build/config/linux/gtk2/BUILD.gn b/build/config/linux/gtk2/BUILD.gn new file mode 100644 index 00000000000..513588bff19 --- /dev/null +++ b/build/config/linux/gtk2/BUILD.gn @@ -0,0 +1,46 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") + +assert(is_linux, "This file should only be referenced on Linux") + +# Depend on //build/config/linux/gtk2 to use GTKv2. +# +# GN doesn't check visibility for configs so we give this an obviously internal +# name to discourage random targets from accidentally depending on this and +# bypassing the GTK target's visibility. +pkg_config("gtk2_internal_config") { + # Gtk requires gmodule, but it does not list it as a dependency in some + # misconfigured systems. + packages = [ + "gmodule-2.0", + "gtk+-2.0", + "gthread-2.0", + ] +} + +# Basically no parts of Chrome should depend on GTK. To prevent accidents, the +# parts that explicitly need GTK are whitelisted on this target. +group("gtk2") { + visibility = [ + "//chrome/browser/ui/libgtk2ui", + "//gpu/gles2_conform_support:gles2_conform_test_windowless", + "//remoting/host", + "//remoting/host/it2me:remote_assistance_host", + "//remoting/host:remoting_me2me_host_static", + "//remoting/test:it2me_standalone_host_main", + ] + public_configs = [ ":gtk2_internal_config" ] +} + +# Depend on "gtkprint" to get this. +pkg_config("gtkprint2_internal_config") { + packages = [ "gtk+-unix-print-2.0" ] +} + +group("gtkprint2") { + visibility = [ "//chrome/browser/ui/libgtk2ui" ] + public_configs = [ ":gtkprint2_internal_config" ] +} diff --git a/build/config/linux/gtk3/BUILD.gn b/build/config/linux/gtk3/BUILD.gn new file mode 100644 index 00000000000..b61ef94b1be --- /dev/null +++ b/build/config/linux/gtk3/BUILD.gn @@ -0,0 +1,45 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+# Depend on //build/config/linux/gtk3 to use GTKv3.
+#
+# GN doesn't check visibility for configs so we give this an obviously internal
+# name to discourage random targets from accidentally depending on this and
+# bypassing the GTK target's visibility.
+pkg_config("gtk3_internal_config") {
+  # Gtk requires gmodule, but it does not list it as a dependency in some
+  # misconfigured systems.
+  packages = [
+    "gmodule-2.0",
+    "gtk+-3.0",
+    "gthread-2.0",
+  ]
+}
+
+# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
+# parts that explicitly need GTK are whitelisted on this target.
+group("gtk3") {
+  visibility = [
+    "//chrome/browser/ui/libgtk2ui",
+    "//gpu/gles2_conform_support:gles2_conform_test_windowless",
+    "//remoting/host",
+    "//remoting/host/it2me:remote_assistance_host",
+    "//remoting/host:remoting_me2me_host_static",
+  ]
+  public_configs = [ ":gtk3_internal_config" ]
+}
+
+# Depend on ":gtkprint3" to get this.
+pkg_config("gtkprint3_internal_config") {
+  packages = [ "gtk+-unix-print-3.0" ]
+}
+
+group("gtkprint3") {
+  visibility = [ "//chrome/browser/ui/libgtk2ui" ]
+  public_configs = [ ":gtkprint3_internal_config" ]
+}
diff --git a/build/config/linux/libffi/BUILD.gn b/build/config/linux/libffi/BUILD.gn
new file mode 100644
index 00000000000..a4041727b1c
--- /dev/null
+++ b/build/config/linux/libffi/BUILD.gn
@@ -0,0 +1,9 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("libffi") {
+  packages = [ "libffi" ]
+}
diff --git a/build/config/linux/pangocairo/BUILD.gn b/build/config/linux/pangocairo/BUILD.gn
new file mode 100644
index 00000000000..727b52d5333
--- /dev/null
+++ b/build/config/linux/pangocairo/BUILD.gn
@@ -0,0 +1,9 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("pangocairo") {
+  packages = [ "pangocairo" ]
+}
diff --git a/build/config/linux/pkg-config.py b/build/config/linux/pkg-config.py
new file mode 100644
index 00000000000..d63b2d65b91
--- /dev/null
+++ b/build/config/linux/pkg-config.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs>, <ldflags> ]
+# where each member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkgconfig matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+#
+# CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+# and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
+# depending on whether the systemroot is for a 32 or 64 bit architecture. They
+# specify the 'lib' or 'lib64' of the pkgconfig path by defining the
+# 'system_libdir' variable in the args.gn file. pkg_config.gni communicates
+# this variable to this script with the "--system_libdir <system_libdir>"
+# flag. If no flag is provided, then pkgconfig files are assumed to come from
+# <systemroot>/usr/lib/pkgconfig.
+#
+# Additionally, you can specify the option --atleast-version. This will skip
+# the normal outputting of a dictionary and instead print true or false,
+# depending on the return value of pkg-config for the given package.
+
+
+def SetConfigPath(options):
+  """Set the PKG_CONFIG_LIBDIR environment variable.
+
+  This takes into account any sysroot and architecture specification from the
+  options on the given command line.
+  """
+
+  sysroot = options.sysroot
+  assert sysroot
+
+  # Compute the library path name based on the architecture.
+  arch = options.arch
+  if sysroot and not arch:
+    print "You must specify an architecture via -a if using a sysroot."
+    sys.exit(1)
+
+  libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig'
+  libdir += ':' + sysroot + '/usr/share/pkgconfig'
+  os.environ['PKG_CONFIG_LIBDIR'] = libdir
+  return libdir
+
+
+def GetPkgConfigPrefixToStrip(args):
+  """Returns the prefix from pkg-config where packages are installed.
+
+  This returned prefix is the one that should be stripped from the beginning of
+  directory names to take into account sysroots.
+  """
+  # Some sysroots, like the Chromium OS ones, may generate paths that are not
+  # relative to the sysroot. For example,
+  # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+  # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+  # instead of relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr).
+  # To support this correctly, it's necessary to extract the prefix to strip
+  # from pkg-config's |prefix| variable.
+  prefix = subprocess.check_output(["pkg-config", "--variable=prefix"] + args,
+                                   env=os.environ).strip()
+  if prefix[-4:] == '/usr':
+    return prefix[:-4]
+  return prefix
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+  """Returns true if the first argument matches any regular expression in the
+  given list."""
+  for regexp in list_of_regexps:
+    if regexp.search(flag) is not None:
+      return True
+  return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(strip_prefix):
+      path = path[len(strip_prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
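To make the sysroot rewriting concrete, here is RewritePath()'s behavior on a hypothetical CrOS-style path (the paths are invented for illustration):

import os

def rewrite_path(path, strip_prefix, sysroot):
  # Same logic as RewritePath() above.
  if os.path.isabs(path) and not path.startswith(sysroot):
    if path.startswith(strip_prefix):
      path = path[len(strip_prefix):]
    path = path.lstrip('/')
    return os.path.join(sysroot, path)
  return path

assert (rewrite_path('/build/x86-generic/usr/include', '/build/x86-generic',
                     '/chroot/build/x86-generic')
        == '/chroot/build/x86-generic/usr/include')

Relative paths and paths already under the sysroot are returned unchanged.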
+ if "linux" not in sys.platform: + print "[[],[],[],[],[]]" + return 0 + + parser = OptionParser() + parser.add_option('-d', '--debug', action='store_true') + parser.add_option('-p', action='store', dest='pkg_config', type='string', + default='pkg-config') + parser.add_option('-v', action='append', dest='strip_out', type='string') + parser.add_option('-s', action='store', dest='sysroot', type='string') + parser.add_option('-a', action='store', dest='arch', type='string') + parser.add_option('--system_libdir', action='store', dest='system_libdir', + type='string', default='lib') + parser.add_option('--atleast-version', action='store', + dest='atleast_version', type='string') + parser.add_option('--libdir', action='store_true', dest='libdir') + (options, args) = parser.parse_args() + + # Make a list of regular expressions to strip out. + strip_out = [] + if options.strip_out != None: + for regexp in options.strip_out: + strip_out.append(re.compile(regexp)) + + if options.sysroot: + libdir = SetConfigPath(options) + if options.debug: + sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir) + prefix = GetPkgConfigPrefixToStrip(args) + else: + prefix = '' + + if options.atleast_version: + # When asking for the return value, just run pkg-config and print the return + # value, no need to do other work. + if not subprocess.call([options.pkg_config, + "--atleast-version=" + options.atleast_version] + + args): + print "true" + else: + print "false" + return 0 + + if options.libdir: + cmd = [options.pkg_config, "--variable=libdir"] + args + if options.debug: + sys.stderr.write('Running: %s\n' % cmd) + try: + libdir = subprocess.check_output(cmd) + except: + print "Error from pkg-config." + return 1 + sys.stdout.write(libdir.strip()) + return 0 + + cmd = [options.pkg_config, "--cflags", "--libs"] + args + if options.debug: + sys.stderr.write('Running: %s\n' % ' '.join(cmd)) + + try: + flag_string = subprocess.check_output(cmd) + except: + sys.stderr.write('Could not run pkg-config.\n') + return 1 + + # For now just split on spaces to get the args out. This will break if + # pkgconfig returns quoted things with spaces in them, but that doesn't seem + # to happen in practice. + all_flags = flag_string.strip().split(' ') + + + sysroot = options.sysroot + if not sysroot: + sysroot = '' + + includes = [] + cflags = [] + libs = [] + lib_dirs = [] + ldflags = [] + + for flag in all_flags[:]: + if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out): + continue; + + if flag[:2] == '-l': + libs.append(RewritePath(flag[2:], prefix, sysroot)) + elif flag[:2] == '-L': + lib_dirs.append(RewritePath(flag[2:], prefix, sysroot)) + elif flag[:2] == '-I': + includes.append(RewritePath(flag[2:], prefix, sysroot)) + elif flag[:3] == '-Wl': + ldflags.append(flag) + elif flag == '-pthread': + # Many libs specify "-pthread" which we don't need since we always include + # this anyway. Removing it here prevents a bunch of duplicate inclusions + # on the command line. + pass + else: + cflags.append(flag) + + # Output a GN array, the first one is the cflags, the second are the libs. The + # JSON formatter prints GN compatible lists when everything is a list of + # strings. 
+
+  # Output a GN array: [ includes, cflags, libs, lib_dirs, ldflags ]. The
+  # JSON formatter prints GN-compatible lists when everything is a list of
+  # strings.
+  print json.dumps([includes, cflags, libs, lib_dirs, ldflags])
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/config/linux/pkg_config.gni b/build/config/linux/pkg_config.gni
new file mode 100644
index 00000000000..004405854b4
--- /dev/null
+++ b/build/config/linux/pkg_config.gni
@@ -0,0 +1,101 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. These are added to the
+# config so that users of the library's headers get the defines the library
+# expects.
+#
+# Example:
+#   pkg_config("mything") {
+#     packages = [ "mything1", "mything2" ]
+#     defines = [ "ENABLE_AWESOME" ]
+#   }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+#   extra_args = [ "-v", "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+#   ignore_libs = true
+
+declare_args() {
+  # A pkg-config wrapper to call instead of trying to find and call the right
+  # pkg-config directly. Wrappers like this are common in cross-compilation
+  # environments.
+  # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
+  # the sysroot mechanism to find the right .pc files.
+  pkg_config = ""
+
+  # CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+  # and one of <systemroot>/usr/lib/pkgconfig or
+  # <systemroot>/usr/lib64/pkgconfig depending on whether the systemroot is
+  # for a 32 or 64 bit architecture.
+  #
+  # When built under GYP, CrOS board builds specify the 'system_libdir'
+  # variable as part of the GYP_DEFINES provided by the CrOS emerge build or
+  # simple chrome build scheme. This variable permits controlling this for GN
+  # builds in similar fashion by setting the `system_libdir` variable in the
+  # build's args.gn file to 'lib' or 'lib64' as appropriate for the target
+  # architecture.
+  system_libdir = "lib"
+}
+
+pkg_config_script = "//build/config/linux/pkg-config.py"
+
+# Define the args we pass to the pkg-config script for other build files that
+# need to invoke it manually.
+if (sysroot != "") {
+  # Pass the sysroot if we're using one (it requires the CPU arch also).
+  pkg_config_args = [
+    "-s",
+    rebase_path(sysroot),
+    "-a",
+    current_cpu,
+  ]
+} else if (pkg_config != "") {
+  pkg_config_args = [
+    "-p",
+    pkg_config,
+  ]
+} else {
+  pkg_config_args = []
+}
+
+# Only use the custom libdir when building with the target sysroot.
+if (target_sysroot != "" && sysroot == target_sysroot) {
+  pkg_config_args += [
+    "--system_libdir",
+    system_libdir,
+  ]
+}
+
+template("pkg_config") {
+  assert(defined(invoker.packages),
+         "Variable |packages| must be defined to be a list in pkg_config.")
+  config(target_name) {
+    args = pkg_config_args + invoker.packages
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+
+    pkgresult = exec_script(pkg_config_script, args, "value")
+    include_dirs = pkgresult[0]
+    cflags = pkgresult[1]
+
+    if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+      libs = pkgresult[2]
+      lib_dirs = pkgresult[3]
+      ldflags = pkgresult[4]
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "defines",
+                             "visibility",
+                           ])
+  }
+}
diff --git a/build/config/locales.gni b/build/config/locales.gni
new file mode 100644
index 00000000000..588613283d8
--- /dev/null
+++ b/build/config/locales.gni
@@ -0,0 +1,171 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Chrome on iOS only ships with a subset of the locales supported by other
+# versions of Chrome, as the corresponding locales are not supported by the
+# operating system (but for simplicity, the corresponding .pak files are
+# still generated).
+if (is_ios) {
+  ios_unsupported_locales = [
+    "am",
+    "bn",
+    "et",
+    "fil",
+    "gu",
+    "kn",
+    "lv",
+    "ml",
+    "mr",
+    "sl",
+    "sw",
+    "ta",
+    "te",
+  ]
+}
+
+# Note: keep in sync with below.
+locales = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en-GB",
+  "en-US",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt-PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh-CN",
+  "zh-TW",
+]
+
+# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
+# respectively "es-MX" and "pt" on iOS).
+if (!is_ios) {
+  locales += [
+    "es-419",
+    "pt-BR",
+  ]
+} else {
+  locales += [
+    "es-MX",
+    "pt",
+  ]
+
+  ios_packed_locales = locales - ios_unsupported_locales
+}
+
+locales_with_fake_bidi = locales + [ "fake-bidi" ]
+
+# Same as the locales list but in the format Mac expects for output files:
+# it uses underscores instead of hyphens, and "en" instead of "en-US".
+locales_as_mac_outputs = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en_GB",
+  "en",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt_PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh_CN",
+  "zh_TW",
+]
+
+# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
+# respectively "es-MX" and "pt" on iOS).
+if (!is_ios) {
+  locales_as_mac_outputs += [
+    "es_419",
+    "pt_BR",
+  ]
+} else {
+  locales_as_mac_outputs += [
+    "es_MX",
+    "pt",
+  ]
+
+  ios_packed_locales_as_mac_outputs =
+      locales_as_mac_outputs - ios_unsupported_locales
+}
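As a quick cross-check of the Mac output-name mapping maintained by hand in locales.gni above, the rule is mechanical; a minimal sketch, with the helper name invented here for illustration:

def mac_output_locale(locale):
  # 'en-US' is special-cased to 'en'; everything else just swaps '-' for '_'
  # (e.g. 'en-GB' -> 'en_GB', 'es-419' -> 'es_419', 'zh-TW' -> 'zh_TW').
  if locale == 'en-US':
    return 'en'
  return locale.replace('-', '_')

assert mac_output_locale('pt-BR') == 'pt_BR'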
diff --git a/build/config/mac/BUILD.gn b/build/config/mac/BUILD.gn
new file mode 100644
index 00000000000..84180e6a65a
--- /dev/null
+++ b/build/config/mac/BUILD.gn
@@ -0,0 +1,136 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/mac/symbols.gni")
+
+if (is_ios) {
+  # This needs to be imported after mac_sdk.gni as it overrides some of the
+  # variables defined by it.
+  import("//build/config/ios/ios_sdk.gni")
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+#
+# This is applied to BOTH desktop Mac and iOS targets.
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  common_mac_flags = []
+
+  # CPU architecture.
+  if (current_cpu == "x64") {
+    common_mac_flags += [
+      "-arch",
+      "x86_64",
+    ]
+  } else if (current_cpu == "x86") {
+    common_mac_flags += [
+      "-arch",
+      "i386",
+    ]
+  } else if (current_cpu == "armv7" || current_cpu == "arm") {
+    common_mac_flags += [
+      "-arch",
+      "armv7",
+    ]
+  } else if (current_cpu == "arm64") {
+    common_mac_flags += [
+      "-arch",
+      "arm64",
+    ]
+  }
+
+  # This is here so that all files get recompiled after an Xcode update.
+  # (Defines are passed via the command line, and build systems rebuild
+  # things when their command line changes.) Nothing should ever read this
+  # define.
+  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+  asmflags = common_mac_flags
+  cflags = common_mac_flags
+
+  # Without this, the constructors and destructors of a C++ object inside
+  # an Objective C struct won't be called, which is very bad.
+  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+  cflags_c = [ "-std=c99" ]
+  cflags_objc = cflags_c
+
+  ldflags = common_mac_flags
+
+  if (is_ios && additional_toolchains != []) {
+    # For fat builds, the dSYM is generated after the fat binary has been
+    # created with "lipo", so the stripping cannot happen at link time; it
+    # has to run after "lipo" as well.
+    _save_unstripped_output = false
+  } else {
+    _save_unstripped_output = save_unstripped_output
+  }
+
+  if (_save_unstripped_output) {
+    ldflags += [ "-Wcrl,unstripped," + rebase_path(root_out_dir) ]
+  }
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Mac-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  common_flags = [
+    "-isysroot",
+    sysroot,
+    "-mmacosx-version-min=$mac_deployment_target",
+  ]
+
+  asmflags = common_flags
+  cflags = common_flags
+  ldflags = common_flags
+
+  if (is_mac) {
+    # Prevent Mac OS X AssertMacros.h from defining macros that collide
+    # with common names, like 'check', 'require', and 'verify'.
+    # (Included by system header. Also exists on iOS but not included.)
+    # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
+    defines = [ "__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0" ]
+  }
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+  ldflags = [ "-Wl,-ObjC" ]  # Always load Objective-C categories and classes.
+
+  if (is_component_build) {
+    ldflags += [
+      # Path for loading shared libraries for unbundled binaries.
+      "-Wl,-rpath,@loader_path/.",
+
+      # Path for loading shared libraries for bundled binaries. Get back from
+      # Binary.app/Contents/MacOS.
+      "-Wl,-rpath,@loader_path/../../..",
+    ]
+  }
+}
+
+# On Mac, this is used only for executables.
+config("mac_executable_flags") { + # Remove this when targeting >=10.7 since it is the default in that config. + ldflags = [ "-Wl,-pie" ] # Position independent. +} + +# The ldflags referenced below are handled by +# //build/toolchain/mac/linker_driver.py. +# Remove this config if a target wishes to change the arguments passed to the +# strip command during linking. This config by default strips all symbols +# from a binary, but some targets may wish to specify a saves file to preserve +# specific symbols. +config("strip_all") { + # On iOS, the final applications are assembled using lipo (to support fat + # builds). This configuration is thus always empty and the correct flags + # are passed to the linker_driver.py script directly during the lipo call. + if (enable_stripping && !is_ios) { + ldflags = [ "-Wcrl,strip,-x,-S" ] + } +} diff --git a/build/config/mac/BuildInfo.plist b/build/config/mac/BuildInfo.plist new file mode 100644 index 00000000000..b0856e3591d --- /dev/null +++ b/build/config/mac/BuildInfo.plist @@ -0,0 +1,26 @@ + + + + + BuildMachineOSBuild + ${BUILD_MACHINE_OS_BUILD} + DTCompiler + ${GCC_VERSION} + DTSDKBuild + ${MAC_SDK_BUILD} + DTSDKName + ${MAC_SDK_NAME} + DTXcode + ${XCODE_VERSION} + DTXcodeBuild + ${XCODE_BUILD} + CFBundleShortVersionString + ${VERSION} + CFBundleVersion + ${VERSION_BUILD} + CFBundleIdentifier + org.chromium.${PRODUCT_NAME:rfc1034identifier} + SCM_REVISION + ${COMMIT_HASH} + + diff --git a/build/config/mac/OWNERS b/build/config/mac/OWNERS new file mode 100644 index 00000000000..0ed2e154d83 --- /dev/null +++ b/build/config/mac/OWNERS @@ -0,0 +1,2 @@ +rsesek@chromium.org +sdefresne@chromium.org diff --git a/build/config/mac/base_rules.gni b/build/config/mac/base_rules.gni new file mode 100644 index 00000000000..4e34cb53c80 --- /dev/null +++ b/build/config/mac/base_rules.gni @@ -0,0 +1,166 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file contains rules that are shared between Mac and iOS. + +import("//build/toolchain/toolchain.gni") +import("//build/config/mac/symbols.gni") + +if (is_mac) { + import("//build/config/mac/mac_sdk.gni") +} else if (is_ios) { + import("//build/config/ios/ios_sdk.gni") +} + +# Convert plist file to given format. +# +# Arguments +# +# source: +# string, path to the plist file to convert +# +# output: +# string, path to the converted plist, must be under $root_build_dir +# +# format: +# string, the format to `plutil -convert` the plist to. +template("convert_plist") { + assert(defined(invoker.source), "source must be defined for $target_name") + assert(defined(invoker.output), "output must be defined for $target_name") + assert(defined(invoker.format), "format must be defined for $target_name") + + action(target_name) { + forward_variables_from(invoker, + [ + "visibility", + "testonly", + "deps", + ]) + + script = "//build/config/mac/xcrun.py" + sources = [ + invoker.source, + ] + outputs = [ + invoker.output, + ] + args = [ + "plutil", + "-convert", + invoker.format, + "-o", + rebase_path(invoker.output, root_build_dir), + rebase_path(invoker.source, root_build_dir), + ] + } +} + +# The base template used to generate Info.plist files for iOS and Mac apps and +# frameworks. +# +# Arguments +# +# plist_templates: +# string array, paths to plist files which will be used for the bundle. 
+#
+#   executable_name:
+#     string, name of the generated target used for the product
+#     and executable name as specified in the output Info.plist.
+#
+#   format:
+#     string, the format to `plutil -convert` the plist to when
+#     generating the output.
+#
+#   extra_substitutions:
+#     (optional) string array, 'key=value' pairs for extra fields which are
+#     specified in a source Info.plist template.
+#
+#   output_name:
+#     (optional) string, name of the generated plist file, defaults to
+#     "$target_gen_dir/$target_name.plist".
+template("info_plist") {
+  assert(defined(invoker.plist_templates),
+         "A list of template plist files must be specified for $target_name")
+  assert(defined(invoker.executable_name),
+         "The executable_name must be specified for $target_name")
+  assert(defined(invoker.format),
+         "The plist format must be specified for $target_name")
+  executable_name = invoker.executable_name
+
+  _output_name = "$target_gen_dir/$target_name.plist"
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  action(target_name) {
+    script = "//build/config/mac/gen_plist.py"
+    sources = invoker.plist_templates
+    outputs = [
+      _output_name,
+    ]
+    extra_args = []
+    if (defined(invoker.extra_substitutions)) {
+      foreach(substitution, invoker.extra_substitutions) {
+        extra_args += [ "-s=$substitution" ]
+      }
+    }
+    response_file_contents =
+        extra_args + [
+          "-s=BUILD_MACHINE_OS_BUILD=$machine_os_build",
+          "-s=EXECUTABLE_NAME=$executable_name",
+          "-s=GCC_VERSION=com.apple.compilers.llvm.clang.1_0",
+          "-s=PRODUCT_NAME=$executable_name",
+          "-s=XCODE_BUILD=$xcode_build",
+          "-s=XCODE_VERSION=$xcode_version",
+          "-o=" + rebase_path(_output_name, root_build_dir),
+          "-f=" + invoker.format,
+        ] + rebase_path(sources, root_build_dir)
+    args = [ "@{{response_file_name}}" ]
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "testonly",
+                             "visibility",
+                           ])
+  }
+}
+
+# Template to compile .xib or .storyboard files.
+#
+# Arguments
+#
+#   sources:
+#     list of string, sources to compile
+#
+#   ibtool_flags:
+#     (optional) list of string, additional flags to pass to the ibtool
+template("compile_xibs") {
+  action_foreach(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    assert(defined(invoker.sources),
+           "Sources must be specified for $target_name")
+
+    ibtool_flags = []
+    if (defined(invoker.ibtool_flags)) {
+      ibtool_flags = invoker.ibtool_flags
+    }
+
+    script = "//build/config/mac/compile_xib.py"
+    sources = invoker.sources
+    outputs = [
+      "$target_gen_dir/$target_name/{{source_name_part}}.nib",
+    ]
+    args =
+        [
+          "--input",
+          "{{source}}",
+          "--output",
+          rebase_path("$target_gen_dir/$target_name/{{source_name_part}}.nib"),
+        ] + ibtool_flags
+  }
+}
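The convert_plist() template in base_rules.gni above reduces to a single plutil invocation; a minimal Python equivalent of that action, assuming //build/config/mac/xcrun.py simply forwards its arguments to the xcrun tool:

import subprocess

def convert_plist(source, output, fmt):
  # Same command the template's args list describes:
  #   xcrun plutil -convert <fmt> -o <output> <source>
  subprocess.check_call(
      ['xcrun', 'plutil', '-convert', fmt, '-o', output, source])

# e.g. convert_plist('Info.plist', 'Info-bin.plist', 'binary1')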
diff --git a/build/config/mac/compile_xib.py b/build/config/mac/compile_xib.py
new file mode 100644
index 00000000000..845f8c17955
--- /dev/null
+++ b/build/config/mac/compile_xib.py
@@ -0,0 +1,51 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='A script to compile xib and storyboard files.',
+      fromfile_prefix_chars='@')
+  parser.add_argument('-o', '--output', required=True,
+                      help='Path to output bundle.')
+  parser.add_argument('-i', '--input', required=True,
+                      help='Path to input xib or storyboard.')
+  args, unknown_args = parser.parse_known_args()
+
+  ibtool_args = [
+      'xcrun', 'ibtool',
+      '--errors', '--warnings', '--notices',
+      '--output-format', 'human-readable-text'
+  ]
+  ibtool_args += unknown_args
+  ibtool_args += [
+      '--compile',
+      os.path.abspath(args.output),
+      os.path.abspath(args.input)
+  ]
+
+  ibtool_section_re = re.compile(r'/\*.*\*/')
+  ibtool_re = re.compile(r'.*note:.*is clipping its content')
+  ibtoolout = subprocess.Popen(ibtool_args, stdout=subprocess.PIPE)
+  current_section_header = None
+  for line in ibtoolout.stdout:
+    if ibtool_section_re.match(line):
+      current_section_header = line
+    elif not ibtool_re.match(line):
+      if current_section_header:
+        sys.stdout.write(current_section_header)
+        current_section_header = None
+      sys.stdout.write(line)
+  # Wait for ibtool to exit so that its return code is valid.
+  return ibtoolout.wait()
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/config/mac/gen_plist.py b/build/config/mac/gen_plist.py
new file mode 100644
index 00000000000..0004179505e
--- /dev/null
+++ b/build/config/mac/gen_plist.py
@@ -0,0 +1,209 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import plistlib
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import shlex
+
+
+# Xcode substitutes variables like ${PRODUCT_NAME} when compiling Info.plist.
+# It also supports modifiers like :identifier or :rfc1034identifier.
+# SUBST_RE matches a variable substitution pattern with an optional modifier,
+# while IDENT_RE matches all characters that are not valid in an "identifier"
+# value (used when applying the modifier).
+SUBST_RE = re.compile(r'\$\{(?P<id>[^}]*?)(?P<modifier>:[^}]*)?\}')
+IDENT_RE = re.compile(r'[_/\s]')
+
+
+class ArgumentParser(argparse.ArgumentParser):
+  """Subclass of argparse.ArgumentParser to work with GN response files.
+
+  GN response files write all the arguments on a single line and assume
+  that the python script uses shlex.split() to extract them. Since the
+  default ArgumentParser expects a single argument per line, we need to
+  provide a subclass to have the correct support for @{{response_file_name}}.
+  """
+
+  def convert_arg_line_to_args(self, arg_line):
+    return shlex.split(arg_line)
+
+
+def InterpolateList(values, substitutions):
+  """Interpolates variable references into |values| using |substitutions|.
+
+  Inputs:
+    values: a list of values
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new list of values with all variable references ${VARIABLE} replaced
+    by their value in |substitutions|, or None if any of the variables has
+    no substitution.
+  """
+  result = []
+  for value in values:
+    interpolated = InterpolateValue(value, substitutions)
+    if interpolated is None:
+      return None
+    result.append(interpolated)
+  return result
+
+
+def InterpolateString(value, substitutions):
+  """Interpolates variable references into |value| using |substitutions|.
+
+  Inputs:
+    value: a string
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new string with all variable references ${VARIABLES} replaced by their
+    value in |substitutions|, or None if any of the variables has no
+    substitution.
+  """
+  result = value
+  for match in reversed(list(SUBST_RE.finditer(value))):
+    variable = match.group('id')
+    if variable not in substitutions:
+      return None
+    # Some values need to be identifiers and thus the variable references
+    # may contain :modifier attributes indicating how they should be
+    # converted to identifiers ("identifier" replaces all invalid characters
+    # with '_' and "rfc1034identifier" replaces them with '-', which also
+    # makes them valid in URIs).
+    modifier = match.group('modifier')
+    if modifier == ':identifier':
+      interpolated = IDENT_RE.sub('_', substitutions[variable])
+    elif modifier == ':rfc1034identifier':
+      interpolated = IDENT_RE.sub('-', substitutions[variable])
+    else:
+      interpolated = substitutions[variable]
+    result = result[:match.start()] + interpolated + result[match.end():]
+  return result
+
+
+def InterpolateValue(value, substitutions):
+  """Interpolates variable references into |value| using |substitutions|.
+
+  Inputs:
+    value: a value, can be a dictionary, list, string or other
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new value with all variable references ${VARIABLES} replaced by their
+    value in |substitutions|, or None if any of the variables has no
+    substitution.
+  """
+  if isinstance(value, dict):
+    return Interpolate(value, substitutions)
+  if isinstance(value, list):
+    return InterpolateList(value, substitutions)
+  if isinstance(value, str):
+    return InterpolateString(value, substitutions)
+  return value
+
+
+def Interpolate(plist, substitutions):
+  """Interpolates variable references into |plist| using |substitutions|.
+
+  Inputs:
+    plist: a dictionary representing a Property List (.plist) file
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new plist with all variable references ${VARIABLES} replaced by their
+    value in |substitutions|. All values that contain references with no
+    substitution are removed, and the corresponding keys are dropped from
+    the plist (not recursively).
+  """
+  result = {}
+  for key in plist:
+    value = InterpolateValue(plist[key], substitutions)
+    if value is not None:
+      result[key] = value
+  return result
+
+
+def LoadPList(path):
+  """Loads the Plist at |path| and returns it as a dictionary."""
+  fd, name = tempfile.mkstemp()
+  try:
+    subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path])
+    with os.fdopen(fd, 'r') as f:
+      return plistlib.readPlist(f)
+  finally:
+    os.unlink(name)
+
+
+def SavePList(path, format, data):
+  """Saves |data| as a Plist to |path| in the specified |format|."""
+  fd, name = tempfile.mkstemp()
+  try:
+    with os.fdopen(fd, 'w') as f:
+      plistlib.writePlist(data, f)
+    subprocess.check_call(['plutil', '-convert', format, '-o', path, name])
+  finally:
+    os.unlink(name)
+
+
+def MergePList(plist1, plist2):
+  """Merges |plist1| with |plist2| recursively.
+
+  Creates a new dictionary representing a Property List (.plist) file by
+  merging the two dictionaries |plist1| and |plist2| recursively (only for
+  dictionary values).
+
+  Args:
+    plist1: a dictionary representing a Property List (.plist) file
+    plist2: a dictionary representing a Property List (.plist) file
+
+  Returns:
+    A new dictionary representing a Property List (.plist) file by merging
+    |plist1| with |plist2|. If a value is a dictionary in both plists, the
+    two are merged recursively; otherwise the value from |plist2| is used.
+  """
+  if not isinstance(plist1, dict) or not isinstance(plist2, dict):
+    if plist2 is not None:
+      return plist2
+    else:
+      return plist1
+  result = {}
+  for key in set(plist1) | set(plist2):
+    if key in plist2:
+      value = plist2[key]
+    else:
+      value = plist1[key]
+    if isinstance(value, dict):
+      value = MergePList(plist1.get(key, None), plist2.get(key, None))
+    result[key] = value
+  return result
+
+
+def main():
+  parser = ArgumentParser(
+      description='A script to generate iOS application Info.plist.',
+      fromfile_prefix_chars='@')
+  parser.add_argument('-o', '--output', required=True,
+                      help='Path to output plist file.')
+  parser.add_argument('-s', '--subst', action='append', default=[],
+                      help='Substitution rule in the format "key=value".')
+  parser.add_argument('-f', '--format', required=True,
+                      help='Plist format (e.g. binary1, xml1) to output.')
+  parser.add_argument('path', nargs="+", help='Path to input plist files.')
+  args = parser.parse_args()
+  substitutions = {}
+  for subst in args.subst:
+    key, value = subst.split('=', 1)
+    substitutions[key] = value
+  data = {}
+  for filename in args.path:
+    data = MergePList(data, LoadPList(filename))
+  data = Interpolate(data, substitutions)
+  SavePList(args.output, args.format, data)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
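To make the ${VARIABLE:modifier} handling in gen_plist.py above concrete, here is a condensed, self-contained version of InterpolateString() with a worked example (the substitution value is hypothetical):

import re

SUBST_RE = re.compile(r'\$\{(?P<id>[^}]*?)(?P<modifier>:[^}]*)?\}')
IDENT_RE = re.compile(r'[_/\s]')

def interpolate(value, subs):
  # Same algorithm as InterpolateString() above.
  for m in reversed(list(SUBST_RE.finditer(value))):
    if m.group('id') not in subs:
      return None  # Interpolate() then drops the enclosing plist key.
    sub = subs[m.group('id')]
    if m.group('modifier') == ':identifier':
      sub = IDENT_RE.sub('_', sub)
    elif m.group('modifier') == ':rfc1034identifier':
      sub = IDENT_RE.sub('-', sub)
    value = value[:m.start()] + sub + value[m.end():]
  return value

assert (interpolate('org.chromium.${PRODUCT_NAME:rfc1034identifier}',
                    {'PRODUCT_NAME': 'Content Shell'})
        == 'org.chromium.Content-Shell')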
diff --git a/build/config/mac/mac_sdk.gni b/build/config/mac/mac_sdk.gni
new file mode 100644
index 00000000000..587f43ddbe3
--- /dev/null
+++ b/build/config/mac/mac_sdk.gni
@@ -0,0 +1,90 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+
+# See https://bugs.chromium.org/p/webrtc/issues/detail?id=5453.
+# We can drop the rtc_require_mac_10_7_deployment flag when Chromium
+# also requires a 10.7 deployment target.
+import("//build_overrides/build.gni")
+
+declare_args() {
+  # Minimum supported version of the Mac SDK.
+  mac_sdk_min = mac_sdk_min_build_override
+
+  # Minimum supported version of OSX.
+  mac_deployment_target = mac_deployment_target_build_override
+
+  # Path to a specific version of the Mac SDK, not including a slash at
+  # the end. If empty, the path to the lowest version greater than or equal
+  # to mac_sdk_min is used.
+  mac_sdk_path = ""
+
+  # The SDK name as accepted by xcodebuild.
+  mac_sdk_name = "macosx"
+}
+
+# Check that the version of the macOS SDK used is the one requested when
+# building a version of Chrome shipped to users. Disable the check when
+# building for iOS, as the macOS SDK version is not relevant for the tools
+# built for the host (they are not shipped); this is required because Chrome
+# on iOS is usually built with the latest version of Xcode, which may not
+# ship with the version of the macOS SDK used to build Chrome on Mac.
+# TODO(crbug.com/635745): the check for target_os should be replaced by a
+# check that current_toolchain is default_toolchain, and the file should
+# assert that current_os is "mac" once this file is no longer included by
+# iOS toolchains.
+_verify_sdk = is_chrome_branded && is_official_build && target_os != "ios"
+
+find_sdk_args = [ "--print_sdk_path" ]
+if (_verify_sdk) {
+  find_sdk_args += [
+    "--verify",
+    mac_sdk_min,
+    "--sdk_path=" + mac_sdk_path,
+  ]
+} else {
+  find_sdk_args += [ mac_sdk_min ]
+}
+
+# The tool will print the SDK path on the first line, and the version on the
+# second line.
+find_sdk_lines =
+    exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
+mac_sdk_version = find_sdk_lines[1]
+if (mac_sdk_path == "") {
+  mac_sdk_path = find_sdk_lines[0]
+}
+
+script_name = "//build/config/mac/sdk_info.py"
+_mac_sdk_result = exec_script(script_name, [ mac_sdk_name ], "scope")
+mac_sdk_build = _mac_sdk_result.sdk_build
+xcode_version = _mac_sdk_result.xcode_version
+xcode_build = _mac_sdk_result.xcode_build
+machine_os_build = _mac_sdk_result.machine_os_build
+
+if (mac_sdk_version != mac_sdk_min_build_override &&
+    exec_script("//build/check_return_value.py",
+                [
+                  "test",
+                  xcode_version,
+                  "-ge",
+                  "0730",
+                ],
+                "value") != 1) {
+  print(
+      "********************************************************************************")
+  print(
+      " WARNING: The Mac OS X SDK is incompatible with the version of Xcode. To fix,")
+  print(
+      " either upgrade Xcode to the latest version or install the Mac OS X")
+  print(
+      " $mac_sdk_min_build_override SDK. For more information, see https://crbug.com/620127.")
+  print()
+  print(" Current SDK Version: $mac_sdk_version")
+  print(" Current Xcode Version: $xcode_version ($xcode_build)")
+  print(
+      "********************************************************************************")
+  assert(false, "SDK is incompatible with Xcode")
+}
diff --git a/build/config/mac/package_framework.py b/build/config/mac/package_framework.py
new file mode 100644
index 00000000000..981bc8b60e9
--- /dev/null
+++ b/build/config/mac/package_framework.py
@@ -0,0 +1,65 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+# Packages a framework bundle by setting up symlinks for the "Current" version.
+# Usage: python package_framework.py /path/to/Foo.framework current_version
+
+def Main(args):
+  if len(args) != 3:
+    print >> sys.stderr, "Usage: %s /path/to/Something.framework A" % (args[0],)
+    return 1
+
+  (framework, version) = args[1:]
+
+  # Find the name of the binary based on the part before the ".framework".
+  binary = os.path.splitext(os.path.basename(framework))[0]
+
+  CURRENT = 'Current'
+  RESOURCES = 'Resources'
+  VERSIONS = 'Versions'
+
+  if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
+    # Binary-less frameworks don't seem to contain symlinks (see e.g.
+    # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
+    return 0
+
+  # Move into the framework directory to set the symlinks correctly.
+  os.chdir(framework)
+
+  # Set up the Current version.
+  _Relink(version, os.path.join(VERSIONS, CURRENT))
+
+  # Set up the root symlinks.
+  _Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
+  _Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
+
+  # The following directories are optional but should also be symlinked
+  # in the root.
+  EXTRA_DIRS = [
+    'Helpers',
+    'Internet Plug-Ins',
+    'Libraries',
+    'XPCServices',
+  ]
+  for extra_dir in EXTRA_DIRS:
+    extra_dir_target = os.path.join(VERSIONS, version, extra_dir)
+    if os.path.exists(extra_dir_target):
+      _Relink(extra_dir_target, extra_dir)
+
+  return 0
+
+
+def _Relink(dest, link):
+  """Creates a symlink to |dest| named |link|. If |link| already exists,
+  it is overwritten."""
+  if os.path.lexists(link):
+    os.remove(link)
+  os.symlink(dest, link)
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
diff --git a/build/config/mac/rules.gni b/build/config/mac/rules.gni
new file mode 100644
index 00000000000..832f635955f
--- /dev/null
+++ b/build/config/mac/rules.gni
@@ -0,0 +1,612 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/mac/base_rules.gni")
+
+# Generates Info.plist files for Mac apps and frameworks.
+#
+# Arguments
+#
+#   info_plist:
+#     (optional) string, path to the Info.plist file that will be used for
+#     the bundle.
+#
+#   info_plist_target:
+#     (optional) string, if the info_plist is generated from an action,
+#     rather than a regular source file, specify the target name in lieu
+#     of info_plist. The two arguments are mutually exclusive.
+#
+#   executable_name:
+#     string, name of the generated target used for the product
+#     and executable name as specified in the output Info.plist.
+#
+#   extra_substitutions:
+#     (optional) string array, 'key=value' pairs for extra fields which are
+#     specified in a source Info.plist template.
template("mac_info_plist") {
+  assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
+         "Only one of info_plist or info_plist_target may be specified in " +
+             target_name)
+
+  if (defined(invoker.info_plist)) {
+    _info_plist = invoker.info_plist
+  } else {
+    _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
+    _info_plist = _info_plist_target_output[0]
+  }
+
+  info_plist(target_name) {
+    format = "xml1"
+    extra_substitutions = []
+    if (defined(invoker.extra_substitutions)) {
+      extra_substitutions = invoker.extra_substitutions
+    }
+    extra_substitutions += [
+      "MAC_SDK_BUILD=$mac_sdk_build",
+      "MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version",
+    ]
+    plist_templates = [
+      "//build/config/mac/BuildInfo.plist",
+      _info_plist,
+    ]
+    if (defined(invoker.info_plist_target)) {
+      deps = [
+        invoker.info_plist_target,
+      ]
+    }
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "executable_name",
+                           ])
+  }
+}
+
+# Template to compile and package Mac XIB files as bundle data.
+#
+# Arguments
+#
+#   sources:
+#     list of string, sources to compile
+#
+#   output_path:
+#     (optional) string, the path to use for the outputs list in the
+#     bundle_data step. If unspecified, defaults to bundle_resources_dir.
template("mac_xib_bundle_data") {
+  _target_name = target_name
+  _compile_target_name = _target_name + "_compile_ibtool"
+
+  compile_xibs(_compile_target_name) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = invoker.sources
+    ibtool_flags = [
+      "--minimum-deployment-target",
+      mac_deployment_target,
+
+      # TODO(rsesek): Enable this once all the bots are on Xcode 7+.
+ # "--target-device", + # "mac", + ] + } + + bundle_data(_target_name) { + forward_variables_from(invoker, + [ + "testonly", + "visibility", + ]) + + public_deps = [ + ":$_compile_target_name", + ] + sources = get_target_outputs(":$_compile_target_name") + + _output_path = "{{bundle_resources_dir}}" + if (defined(invoker.output_path)) { + _output_path = invoker.output_path + } + + outputs = [ + "$_output_path/{{source_file_part}}", + ] + } +} + +# Template to package a shared library into a Mac framework bundle. +# +# By default, the bundle target this template generates does not link the +# resulting framework into anything that depends on it. If a dependency wants +# a link-time (as well as build-time) dependency on the framework bundle, +# depend against "$target_name+link". If only the build-time dependency is +# required (e.g., for copying into another bundle), then use "$target_name". +# +# Arguments +# +# info_plist: +# (optional) string, path to the Info.plist file that will be used for +# the bundle. +# +# info_plist_target: +# (optional) string, if the info_plist is generated from an action, +# rather than a regular source file, specify the target name in lieu +# of info_plist. The two arguments are mutually exclusive. +# +# output_name: +# (optional) string, name of the generated framework without the +# .framework suffix. If omitted, defaults to target_name. +# +# framework_version: +# (optional) string, version of the framework. Typically this is a +# single letter, like "A". If omitted, the Versions/ subdirectory +# structure will not be created, and build output will go directly +# into the framework subdirectory. +# +# extra_substitutions: +# (optional) string array, 'key=value' pairs for extra fields which are +# specified in a source Info.plist template. +# +# This template provides two targets for the resulting framework bundle. The +# link-time behavior varies depending on which of the two targets below is +# added as a dependency: +# - $target_name only adds a build-time dependency. Targets that depend on +# it will not link against the framework. +# - $target_name+link adds a build-time and link-time dependency. Targets +# that depend on it will link against the framework. +# +# The build-time-only dependency is used for when a target needs to use the +# framework either only for resources, or because the target loads it at run- +# time, via dlopen() or NSBundle. The link-time dependency will cause the +# dependee to have the framework loaded by dyld at launch. +# +# Example of build-time only dependency: +# +# mac_framework_bundle("CoreTeleportation") { +# sources = [ ... ] +# } +# +# bundle_data("core_teleportation_bundle_data") { +# deps = [ ":CoreTeleportation" ] +# sources = [ "$root_out_dir/CoreTeleportation.framework" ] +# outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ] +# } +# +# app_bundle("GoatTeleporter") { +# sources = [ ... ] +# deps = [ +# ":core_teleportation_bundle_data", +# ] +# } +# +# The GoatTeleporter.app will not directly link against +# CoreTeleportation.framework, but it will be included in the bundle's +# Frameworks directory. +# +# Example of link-time dependency: +# +# mac_framework_bundle("CoreTeleportation") { +# sources = [ ... 
+#     ldflags = [
+#       "-install_name",
+#       "@executable_path/../Frameworks/$target_name.framework"
+#     ]
+#   }
+#
+#   bundle_data("core_teleportation_bundle_data") {
+#     deps = [ ":CoreTeleportation+link" ]
+#     sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#     outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ]
+#   }
+#
+#   app_bundle("GoatTeleporter") {
+#     sources = [ ... ]
+#     deps = [
+#       ":core_teleportation_bundle_data",
+#     ]
+#   }
+#
+# Note that the framework is still copied to the app's bundle, but dyld will
+# load this library when the app is launched because it uses the "+link"
+# target as a dependency. This also requires that the framework set its
+# install_name so that dyld can locate it.
+#
+# See "gn help shared_library" for more information on arguments supported
+# by the shared_library target.
template("mac_framework_bundle") {
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+
+  _info_plist_target = target_name + "_info_plist"
+
+  mac_info_plist(_info_plist_target) {
+    executable_name = target_name
+    if (defined(invoker.output_name)) {
+      executable_name = invoker.output_name
+    }
+    forward_variables_from(invoker,
+                           [
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "testonly",
+                           ])
+  }
+
+  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+  bundle_data(_info_plist_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [
+      "{{bundle_resources_dir}}/Info.plist",
+    ]
+    public_deps = [
+      ":$_info_plist_target",
+    ]
+  }
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  # If the framework is unversioned, the final _target_name will be the
+  # create_bundle(_framework_target), otherwise an action with the name
+  # _target_name will depend on the create_bundle() in order to prepare
+  # the versioned directory structure.
+  _framework_target = _target_name
+  _framework_name = _output_name + ".framework"
+  _framework_root_dir = "$root_out_dir/$_framework_name"
+  if (defined(invoker.framework_version) && invoker.framework_version != "") {
+    _framework_version = invoker.framework_version
+    _framework_root_dir += "/Versions/$_framework_version"
+    _framework_target = _target_name + "_create_bundle"
+  }
+
+  _link_shared_library_target = target_name + "_shared_library"
+  _shared_library_bundle_data = target_name + "_shared_library_bundle_data"
+
+  shared_library(_link_shared_library_target) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "bundle_deps",
+                             "code_signing_enabled",
+                             "data_deps",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "visibility",
+                           ])
+    visibility = [ ":$_shared_library_bundle_data" ]
+    output_name = _output_name
+    output_prefix_override = true
+    output_extension = ""
+    output_dir = "$target_out_dir/$_link_shared_library_target"
+  }
+
+  bundle_data(_shared_library_bundle_data) {
+    visibility = [ ":$_framework_target" ]
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = [
+      "$target_out_dir/$_link_shared_library_target/$_output_name",
+    ]
+    outputs = [
+      "{{bundle_executable_dir}}/$_output_name",
+    ]
+    public_deps = [
+      ":$_link_shared_library_target",
+    ]
+  }
+
+  _framework_public_config = _target_name + "_public_config"
+  config(_framework_public_config) {
+    # TODO(sdefresne): should we have a framework_dirs variable similar to
+    # lib_dirs and include_dirs to avoid duplicate values on the command line?
+    visibility = [ ":$_framework_target" ]
+    ldflags = [
+      "-F",
+      rebase_path("$root_out_dir/.", root_build_dir),
+    ]
+    lib_dirs = [ root_out_dir ]
+    libs = [ _framework_name ]
+  }
+
+  create_bundle(_framework_target) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+
+    if (defined(_framework_version)) {
+      visibility = [ ":$_target_name" ]
+    } else {
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+        visibility += [ ":$_target_name+link" ]
+      }
+    }
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_info_plist_bundle_data" ]
+
+    if (defined(invoker.bundle_deps)) {
+      deps += invoker.bundle_deps
+    }
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [ ":$_shared_library_bundle_data" ]
+
+    bundle_root_dir = _framework_root_dir
+    bundle_resources_dir = "$bundle_root_dir/Resources"
+    bundle_executable_dir = "$bundle_root_dir"
+  }
+
+  if (defined(_framework_version)) {
+    action(_target_name) {
+      forward_variables_from(invoker, [ "testonly" ])
+
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+        visibility += [ ":$_target_name+link" ]
+      }
+
+      script = "//build/config/mac/package_framework.py"
+      outputs = [
+        "$root_out_dir/$_framework_name/Versions/Current",
+      ]
+      args = [
+        "$_framework_name",
+        "$_framework_version",
+      ]
+      public_deps = [
+        ":$_framework_target",
+      ]
+    }
+  }
+
+  group(_target_name + "+link") {
+    forward_variables_from(invoker,
+                           [
+                             "public_configs",
+                             "testonly",
+                             "visibility",
+                           ])
+    public_deps = [
+      ":$_target_name",
+    ]
+    if (!defined(public_configs)) {
+      public_configs = []
+    }
+    public_configs += [ ":$_framework_public_config" ]
+  }
+}
+
+set_defaults("mac_framework_bundle") {
+  configs = default_shared_library_configs
+}
+
+# Template to create a Mac executable application bundle.
+#
+# Arguments
+#
+#   info_plist:
+#     (optional) string, path to the Info.plist file that will be used for
+#     the bundle.
+#
+#   info_plist_target:
+#     (optional) string, if the info_plist is generated from an action,
+#     rather than a regular source file, specify the target name in lieu
+#     of info_plist. The two arguments are mutually exclusive.
+#
+#   output_name:
+#     (optional) string, name of the generated app without the
+#     .app suffix. If omitted, defaults to target_name.
+#
+#   extra_configs:
+#     (optional) list of label, additional configs to apply to the
+#     executable target.
+#
+#   remove_configs:
+#     (optional) list of label, default configs to remove from the target.
+#
+#   extra_substitutions:
+#     (optional) string array, 'key=value' pairs for extra fields which are
+#     specified in a source Info.plist template.
template("mac_app_bundle") {
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _executable_target = target_name + "_executable"
+  _executable_bundle_data = _executable_target + "_bundle_data"
+
+  _info_plist_target = target_name + "_info_plist"
+
+  mac_info_plist(_info_plist_target) {
+    executable_name = _output_name
+    forward_variables_from(invoker,
+                           [
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "testonly",
+                           ])
+  }
+
+  _pkg_info_target = target_name + "_pkg_info"
+
+  action(_pkg_info_target) {
+    forward_variables_from(invoker, [ "testonly" ])
+    script = "//build/config/mac/write_pkg_info.py"
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [
+      "$target_gen_dir/$_pkg_info_target",
+    ]
+    args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+           [ "--output" ] + rebase_path(outputs, root_build_dir)
+    deps = [
+      ":$_info_plist_target",
+    ]
+  }
+
+  executable(_executable_target) {
+    visibility = [ ":$_executable_bundle_data" ]
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "data_deps",
+                             "info_plist",
+                             "output_name",
+                             "visibility",
+                           ])
+    if (defined(extra_configs)) {
+      configs += extra_configs
+    }
+    if (defined(remove_configs)) {
+      configs -= remove_configs
+    }
+    output_name = _output_name
+    output_dir = "$target_out_dir/$_executable_target"
+  }
+
+  bundle_data(_executable_bundle_data) {
+    visibility = [ ":$_target_name" ]
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = [
+      "$target_out_dir/$_executable_target/$_output_name",
+    ]
+    outputs = [
+      "{{bundle_executable_dir}}/$_output_name",
+    ]
+    public_deps = [
+      ":$_executable_target",
+    ]
+  }
+
+  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+  bundle_data(_info_plist_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [
+      "{{bundle_root_dir}}/Info.plist",
+    ]
+    public_deps = [
+      ":$_info_plist_target",
+    ]
+  }
+
+  _pkg_info_bundle_data = _pkg_info_target + "_bundle_data"
+
+  bundle_data(_pkg_info_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = get_target_outputs(":$_pkg_info_target")
+    outputs = [
+      "{{bundle_root_dir}}/PkgInfo",
+    ]
+    public_deps = [
+      ":$_pkg_info_target",
+    ]
+  }
+
+  create_bundle(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [
+      ":$_executable_bundle_data",
+      ":$_info_plist_bundle_data",
+      ":$_pkg_info_bundle_data",
+    ]
+    product_type = "com.apple.product-type.application"
+    bundle_root_dir = "$root_out_dir/${_output_name}.app/Contents"
bundle_resources_dir = "$bundle_root_dir/Resources" + bundle_executable_dir = "$bundle_root_dir/MacOS" + } +} + +# Template to package a loadable_module into a .plugin bundle. +# +# This takes no extra arguments that differ from a loadable_module. +template("mac_plugin_bundle") { + assert(defined(invoker.deps), + "Dependencies must be specified for $target_name") + + _target_name = target_name + _loadable_module_target = _target_name + "_loadable_module" + _loadable_module_bundle_data = _loadable_module_target + "_bundle_data" + + _output_name = _target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + loadable_module(_loadable_module_target) { + visibility = [ ":$_loadable_module_bundle_data" ] + forward_variables_from(invoker, + "*", + [ + "assert_no_deps", + "data_deps", + "output_name", + "visibility", + ]) + output_dir = "$target_out_dir" + output_name = _output_name + } + + bundle_data(_loadable_module_bundle_data) { + forward_variables_from(invoker, [ "testonly" ]) + visibility = [ ":$_target_name" ] + sources = [ + "$target_out_dir/${_output_name}.so", + ] + outputs = [ + "{{bundle_executable_dir}}/$_output_name", + ] + public_deps = [ + ":$_loadable_module_target", + ] + } + + create_bundle(_target_name) { + forward_variables_from(invoker, + [ + "data_deps", + "deps", + "public_deps", + "testonly", + "visibility", + ]) + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_loadable_module_bundle_data" ] + + bundle_root_dir = "$root_out_dir/$_output_name.plugin/Contents" + bundle_executable_dir = "$bundle_root_dir/MacOS" + } +} diff --git a/build/config/mac/sdk_info.py b/build/config/mac/sdk_info.py new file mode 100644 index 00000000000..52b252449aa --- /dev/null +++ b/build/config/mac/sdk_info.py @@ -0,0 +1,72 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import subprocess +import sys + +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) +import mac_toolchain + +# This script prints information about the build system, the operating +# system and the iOS or Mac SDK (depending on the platform "iphonesimulator", +# "iphoneos" or "macosx" generally). +# +# In the GYP build, this is done inside GYP itself based on the SDKROOT +# variable. 
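
The key="value" lines printed by this script are themselves valid GN
assignments, so a .gni file can consume them with exec_script() and the
"scope" output conversion. A minimal consumer sketch (hypothetical variable
names, not part of this change):

  # Runs sdk_info.py for the macOS SDK and parses each key="value" line of
  # its output into a GN scope.
  _sdk_result =
      exec_script("//build/config/mac/sdk_info.py", [ "macosx" ], "scope")
  mac_sdk_path = _sdk_result.sdk_path
  mac_sdk_version = _sdk_result.sdk_version
  mac_xcode_version = _sdk_result.xcode_version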
+ +def FormatVersion(version): + """Converts Xcode version to a format required for Info.plist.""" + version = version.replace('.', '') + version = version + '0' * (3 - len(version)) + return version.zfill(4) + + +def FillXcodeVersion(settings): + """Fills the Xcode version and build number into |settings|.""" + lines = subprocess.check_output(['xcodebuild', '-version']).splitlines() + settings['xcode_version'] = FormatVersion(lines[0].split()[-1]) + settings['xcode_build'] = lines[-1].split()[-1] + + +def FillMachineOSBuild(settings): + """Fills OS build number into |settings|.""" + settings['machine_os_build'] = subprocess.check_output( + ['sw_vers', '-buildVersion']).strip() + + +def FillSDKPathAndVersion(settings, platform, xcode_version): + """Fills the SDK path and version for |platform| into |settings|.""" + settings['sdk_path'] = subprocess.check_output([ + 'xcrun', '-sdk', platform, '--show-sdk-path']).strip() + settings['sdk_version'] = subprocess.check_output([ + 'xcrun', '-sdk', platform, '--show-sdk-version']).strip() + settings['sdk_platform_path'] = subprocess.check_output([ + 'xcrun', '-sdk', platform, '--show-sdk-platform-path']).strip() + # TODO: unconditionally use --show-sdk-build-version once Xcode 7.2 or + # higher is required to build Chrome for iOS or OS X. + if xcode_version >= '0720': + settings['sdk_build'] = subprocess.check_output([ + 'xcrun', '-sdk', platform, '--show-sdk-build-version']).strip() + else: + settings['sdk_build'] = settings['sdk_version'] + + +if __name__ == '__main__': + if len(sys.argv) != 2: + sys.stderr.write( + 'usage: %s [iphoneos|iphonesimulator|macosx]\n' % + os.path.basename(sys.argv[0])) + sys.exit(1) + + # Try using the toolchain in mac_files. + mac_toolchain.SetToolchainEnvironment() + + settings = {} + FillMachineOSBuild(settings) + FillXcodeVersion(settings) + FillSDKPathAndVersion(settings, sys.argv[1], settings['xcode_version']) + + for key in sorted(settings): + print '%s="%s"' % (key, settings[key]) diff --git a/build/config/mac/symbols.gni b/build/config/mac/symbols.gni new file mode 100644 index 00000000000..6166b123d1b --- /dev/null +++ b/build/config/mac/symbols.gni @@ -0,0 +1,30 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") +import("//build/config/sanitizers/sanitizers.gni") + +# This file declares arguments and configs that control whether dSYM debug +# info is produced and whether build products are stripped. + +declare_args() { + # Produce dSYM files for targets that are configured to do so. dSYM + # generation is controlled globally as it is a linker output (produced via + # the //build/toolchain/mac/linker_driver.py. Enabling this will result in + # all shared library, loadable module, and executable targets having a dSYM + # generated. + enable_dsyms = is_official_build || using_sanitizer + + # Strip symbols from linked targets by default. If this is enabled, the + # //build/config/mac:strip_all config will be applied to all linked targets. + # If custom stripping parameters are required, remove that config from a + # linked target and apply custom -Wcrl,strip flags. See + # //build/toolchain/mac/linker_driver.py for more information. + enable_stripping = is_official_build +} + +# Save unstripped copies of targets with a ".unstripped" suffix. 
This is
+# useful to preserve the original output when enable_stripping=true but
+# we're not actually generating real dSYMs.
+save_unstripped_output = enable_stripping && !enable_dsyms
diff --git a/build/config/mac/write_pkg_info.py b/build/config/mac/write_pkg_info.py
new file mode 100644
index 00000000000..b03e7792805
--- /dev/null
+++ b/build/config/mac/write_pkg_info.py
@@ -0,0 +1,47 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import plistlib
+import sys
+
+# This script creates a PkgInfo file for an OS X .app bundle's plist.
+# Usage: python write_pkg_info.py --plist Foo.app/Contents/Info.plist \
+#            --output Foo.app/Contents/PkgInfo
+
+def Main():
+  parser = argparse.ArgumentParser(
+      description='A script to write PkgInfo files for .app bundles.')
+  parser.add_argument('--plist', required=True,
+                      help='Path to the Info.plist for the .app.')
+  parser.add_argument('--output', required=True,
+                      help='Path to the desired output file.')
+  args = parser.parse_args()
+
+  # Remove the output if it exists already.
+  if os.path.exists(args.output):
+    os.unlink(args.output)
+
+  plist = plistlib.readPlist(args.plist)
+  package_type = plist['CFBundlePackageType']
+  if package_type != 'APPL':
+    raise ValueError('Expected CFBundlePackageType to be %s, got %s' %
+                     ('APPL', package_type))
+
+  # The format of PkgInfo is eight characters: the four-character bundle type
+  # followed by the four-character bundle signature. If CFBundleSignature is
+  # missing, four '?' characters are used in its place.
+  signature_code = plist.get('CFBundleSignature', '????')
+  if len(signature_code) != 4:
+    raise ValueError('CFBundleSignature should be exactly four characters, '
+                     'got %s' % signature_code)
+
+  with open(args.output, 'w') as fp:
+    fp.write('%s%s' % (package_type, signature_code))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/build/config/mac/xcrun.py b/build/config/mac/xcrun.py
new file mode 100644
index 00000000000..e2a775e5158
--- /dev/null
+++ b/build/config/mac/xcrun.py
@@ -0,0 +1,23 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+      description='A script to execute a command via xcrun.')
+  parser.add_argument('--stamp', action='store', type=str,
+                      help='Write a stamp file to this path on success.')
+  args, unknown_args = parser.parse_known_args()
+
+  # check_call raises on failure, so rv is always 0 if we get here.
+  rv = subprocess.check_call(['xcrun'] + unknown_args)
+  if rv == 0 and args.stamp:
+    if os.path.exists(args.stamp):
+      os.unlink(args.stamp)
+    open(args.stamp, 'w+').close()
+
+  sys.exit(rv)
diff --git a/build/config/mips.gni b/build/config/mips.gni
new file mode 100644
index 00000000000..646a55ba722
--- /dev/null
+++ b/build/config/mips.gni
@@ -0,0 +1,55 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+# These are primarily relevant in current_cpu == "mips*" contexts, where
+# MIPS code is being compiled. But they can also be relevant in other
+# contexts, where code changes its behavior based on the CPU it is
+# generating code for.
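
A compiler config can branch on the arguments declared below to pick the
matching codegen flags. A minimal sketch, assuming the usual GCC/Clang MIPS
flag spellings (hypothetical config name, not part of this change):

  import("//build/config/mips.gni")

  config("mips_codegen_example") {
    cflags = []
    if (current_cpu == "mipsel") {
      if (mips_arch_variant == "r6") {
        cflags += [ "-mips32r6" ]
      } else if (mips_arch_variant == "r2") {
        cflags += [ "-mips32r2" ]
      } else if (mips_arch_variant == "r1") {
        cflags += [ "-mips32" ]
      }
      if (mips_dsp_rev == 1) {
        cflags += [ "-mdsp" ]
      } else if (mips_dsp_rev == 2) {
        cflags += [ "-mdspr2" ]
      }
      if (mips_float_abi == "hard") {
        cflags += [ "-mhard-float" ]
      } else {
        cflags += [ "-msoft-float" ]
      }
    }
  }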
+if (current_cpu == "mipsel" || v8_current_cpu == "mipsel") { + declare_args() { + # MIPS arch variant. Possible values are: + # "r1" + # "r2" + # "r6" + mips_arch_variant = "r1" + + # MIPS DSP ASE revision. Possible values are: + # 0: unavailable + # 1: revision 1 + # 2: revision 2 + mips_dsp_rev = 0 + + # MIPS SIMD Arch compilation flag. + mips_use_msa = true + + # MIPS floating-point ABI. Possible values are: + # "hard": sets the GCC -mhard-float option. + # "soft": sets the GCC -msoft-float option. + mips_float_abi = "hard" + + # MIPS32 floating-point register width. Possible values are: + # "fp32": sets the GCC -mfp32 option. + # "fp64": sets the GCC -mfp64 option. + # "fpxx": sets the GCC -mfpxx option. + mips_fpu_mode = "fp32" + } +} else if (current_cpu == "mips64el" || v8_current_cpu == "mips64el") { + # MIPS arch variant. Possible values are: + # "r2" + # "r6" + if (current_os == "android" || target_os == "android") { + declare_args() { + mips_arch_variant = "r6" + + # MIPS SIMD Arch compilation flag. + mips_use_msa = true + } + } else { + declare_args() { + mips_arch_variant = "r2" + } + } +} diff --git a/build/config/nacl/BUILD.gn b/build/config/nacl/BUILD.gn new file mode 100644 index 00000000000..d7b22ecf2cb --- /dev/null +++ b/build/config/nacl/BUILD.gn @@ -0,0 +1,143 @@ +# Copyright (c) 2014 The Native Client Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/nacl/config.gni") + +# Native Client Definitions +config("nacl_defines") { + if (is_linux || is_android || is_nacl) { + defines = [ + "_POSIX_C_SOURCE=199506", + "_XOPEN_SOURCE=600", + "_GNU_SOURCE=1", + "__STDC_LIMIT_MACROS=1", + ] + } else if (is_win) { + defines = [ "__STDC_LIMIT_MACROS=1" ] + } + + if (current_cpu == "pnacl" && !is_nacl_nonsfi) { + # TODO: Remove the following definition once NACL_BUILD_ARCH and + # NACL_BUILD_SUBARCH are defined by the PNaCl toolchain. + defines += [ "NACL_BUILD_ARCH=pnacl" ] + } +} + +config("nexe_defines") { + defines = [ + "DYNAMIC_ANNOTATIONS_ENABLED=1", + "DYNAMIC_ANNOTATIONS_PREFIX=NACL_", + ] +} + +config("nacl_warnings") { + if (is_win) { + # Some NaCl code uses forward declarations of static const variables, + # with initialized definitions later on. (The alternative would be + # many, many more forward declarations of everything used in that + # const variable's initializer before the definition.) The Windows + # compiler is too stupid to notice that there is an initializer later + # in the file, and warns about the forward declaration. + cflags = [ "/wd4132" ] + } +} + +# The base target that all targets in the NaCl build should depend on. +# This allows configs to be modified for everything in the NaCl build, even when +# the NaCl build is composed into the Chrome build. (GN has no functionality to +# add flags to everything in //native_client, having a base target works around +# that limitation.) +source_set("nacl_base") { + public_configs = [ + ":nacl_defines", + ":nacl_warnings", + ] + if (current_os == "nacl") { + public_configs += [ ":nexe_defines" ] + } +} + +config("compiler") { + configs = [] + cflags = [] + ldflags = [] + libs = [] + + if (is_clang && current_cpu != "pnacl") { + # -no-integrated-as is the default in nacl-clang for historical + # compatibility with inline assembly code and so forth. But there + # are no such cases in Chromium code, and -integrated-as is nicer in + # general. 
Moreover, the IRT must be built using LLVM's assembler + # on x86-64 to preserve sandbox base address hiding. Use it + # everywhere for consistency (and possibly quicker builds). + cflags += [ "-integrated-as" ] + } + if (is_nacl_nonsfi) { + cflags += [ "--pnacl-allow-translate" ] + ldflags += [ + "--pnacl-allow-translate", + "--pnacl-allow-native", + "-Wl,--noirt", + "-Wt,--noirt", + "-Wt,--noirtshim", + + # The clang driver automatically injects -lpthread when using libc++, but + # the toolchain doesn't have it yet. To get around this, use + # -nodefaultlibs and make each executable target depend on + # "//native_client/src/nonsfi/irt:nacl_sys_private". + "-nodefaultlibs", + ] + libs += [ + "c++", + "m", + "c", + "pnaclmm", + ] + include_dirs = [ "//native_client/src/public/linux_syscalls" ] + } + + asmflags = cflags +} + +config("compiler_codegen") { + cflags = [] + + if (is_nacl_irt) { + cflags += [ + # A debugger should be able to unwind IRT call frames. This is + # the default behavior on x86-64 and when compiling C++ with + # exceptions enabled; the change is for the benefit of x86-32 C. + # The frame pointer is unnecessary when unwind tables are used. + "-fasynchronous-unwind-tables", + "-fomit-frame-pointer", + ] + + if (current_cpu == "x86") { + # The x86-32 IRT needs to be callable with an under-aligned + # stack; so we disable SSE instructions, which can fault on + # misaligned addresses. See + # https://code.google.com/p/nativeclient/issues/detail?id=3935 + cflags += [ + "-mstackrealign", + "-mno-sse", + ] + } + } + + asmflags = cflags +} + +config("irt_optimize") { + cflags = [ + # Optimize for space, keep the IRT nexe small. + "-Os", + + # These are omitted from non-IRT libraries to keep the libraries + # themselves small. + "-ffunction-sections", + "-fdata-sections", + ] + + ldflags = [ "-Wl,--gc-sections" ] +} diff --git a/build/config/nacl/config.gni b/build/config/nacl/config.gni new file mode 100644 index 00000000000..ad8936ed985 --- /dev/null +++ b/build/config/nacl/config.gni @@ -0,0 +1,52 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/features.gni") + +declare_args() { + # Native Client supports both Newlib and Glibc C libraries where Newlib + # is assumed to be the default one; use this to determine whether Glibc + # is being used instead. 
+ is_nacl_glibc = false +} + +is_nacl_irt = false +is_nacl_nonsfi = false + +if (enable_nacl) { + nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86" + + if (is_nacl_glibc) { + if (current_cpu == "x86" || current_cpu == "x64") { + nacl_toolchain_package = "nacl_x86_glibc" + } else if (current_cpu == "arm") { + nacl_toolchain_package = "nacl_arm_glibc" + } + } else { + nacl_toolchain_package = "pnacl_newlib" + } + + if (current_cpu == "pnacl") { + _nacl_tuple = "pnacl" + } else if (current_cpu == "x86" || current_cpu == "x64") { + _nacl_tuple = "x86_64-nacl" + } else if (current_cpu == "arm") { + _nacl_tuple = "arm-nacl" + } else if (current_cpu == "mipsel") { + _nacl_tuple = "mipsel-nacl" + } + + nacl_toolchain_bindir = "${nacl_toolchain_dir}/${nacl_toolchain_package}/bin" + nacl_toolchain_tooldir = + "${nacl_toolchain_dir}/${nacl_toolchain_package}/${_nacl_tuple}" + nacl_toolprefix = "${nacl_toolchain_bindir}/${_nacl_tuple}-" + + nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + target_cpu + is_nacl_irt = current_toolchain == nacl_irt_toolchain + + # Non-SFI mode is a lightweight sandbox used by Chrome OS for running ARC + # applications. + nacl_nonsfi_toolchain = "//build/toolchain/nacl:newlib_pnacl_nonsfi" + is_nacl_nonsfi = current_toolchain == nacl_nonsfi_toolchain +} diff --git a/build/config/nacl/rules.gni b/build/config/nacl/rules.gni new file mode 100644 index 00000000000..0d6d03640a3 --- /dev/null +++ b/build/config/nacl/rules.gni @@ -0,0 +1,181 @@ +# Copyright 2015 The Native Client Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/features.gni") +import("//build/config/nacl/config.gni") + +# Generate a nmf file +# +# Native Client Manifest (nmf) is a JSON file that tells the browser where to +# download and load Native Client application files and libraries. +# +# Variables: +# executables: .nexe/.pexe/.bc executables to generate nmf for +# lib_prefix: path to prepend to shared libraries in the nmf +# nmf: the name and the path of the output file +# nmfflags: additional flags for the nmf generator +# stage_dependencies: directory for staging libraries +template("generate_nmf") { + assert(defined(invoker.executables), "Must define executables") + assert(defined(invoker.nmf), "Must define nmf") + + action(target_name) { + forward_variables_from(invoker, + [ + "deps", + "data_deps", + "executables", + "lib_prefix", + "nmf", + "nmfflags", + "public_deps", + "stage_dependencies", + "testonly", + "visibility", + ]) + if (!defined(nmfflags)) { + nmfflags = [] + } + + # TODO(phosek): Remove this conditional once + # https://bugs.chromium.org/p/nativeclient/issues/detail?id=4339 is + # resolved. + if (current_cpu == "pnacl") { + objdump = rebase_path("${nacl_toolchain_bindir}/x86_64-nacl-objdump") + } else { + objdump = rebase_path("${nacl_toolprefix}objdump") + } + if (host_os == "win") { + objdump += ".exe" + } + + script = "//native_client_sdk/src/tools/create_nmf.py" + inputs = [ + objdump, + ] + sources = executables + outputs = [ + nmf, + ] + if (is_nacl_glibc) { + if (defined(stage_dependencies)) { + nmfflags += [ "--stage-dependencies=" + + rebase_path(stage_dependencies, root_build_dir) ] + lib_path = stage_dependencies + } else { + lib_path = root_build_dir + } + if (defined(lib_prefix)) { + nmfflags += [ "--lib-prefix=" + lib_prefix ] + lib_path += "/${lib_prefix}" + } + + # Starts empty so the code below can use += everywhere. 
+ data = [] + + nmfflags += [ "--library-path=" + rebase_path(root_out_dir) ] + + # NOTE: There is no explicit dependency for the lib directory + # (lib32 and lib64 for x86/x64) created in the product directory. + # They are created as a side-effect of nmf creation. + if (current_cpu != "x86" && current_cpu != "x64") { + nmfflags += + [ "--library-path=" + rebase_path("${nacl_toolchain_tooldir}/lib") ] + data += [ "${lib_path}/lib/" ] + } else { + # For x86-32, the lib/ directory is called lib32/ instead. + if (current_cpu == "x86") { + nmfflags += [ "--library-path=" + + rebase_path("${nacl_toolchain_tooldir}/lib32") ] + data += [ "${lib_path}/lib32/" ] + } + + # x86-32 Windows needs to build both x86-32 and x86-64 NaCl + # binaries into the same nmf covering both architectures. That + # gets handled at a higher level (see the nacl_test_data template), + # so a single generate_nmf invocation gets both x86-32 and x86-64 + # nexes listed in executables. + if (current_cpu == "x64" || target_os == "win") { + # For x86-64, the lib/ directory is called lib64/ instead + # when copied by create_nmf.py. + glibc_tc = "//build/toolchain/nacl:glibc" + assert(current_toolchain == "${glibc_tc}_${current_cpu}") + if (current_cpu == "x64") { + x64_out_dir = root_out_dir + } else { + x64_out_dir = get_label_info(":${target_name}(${glibc_tc}_x64)", + "root_out_dir") + } + nmfflags += [ + "--library-path=" + rebase_path(x64_out_dir), + "--library-path=" + rebase_path("${nacl_toolchain_tooldir}/lib"), + ] + data += [ "${lib_path}/lib64/" ] + } + } + } + args = [ + "--no-default-libpath", + "--objdump=" + objdump, + "--output=" + rebase_path(nmf, root_build_dir), + ] + nmfflags + rebase_path(sources, root_build_dir) + if (is_nacl_glibc && current_cpu == "arm") { + deps += [ "//native_client/src/untrusted/elf_loader:elf_loader" ] + } + } +} + +# Generate a nmf file for Non-SFI tests +# +# Non-SFI tests use a different manifest format from regular Native Client and +# as such requires a different generator. +# +# Variables: +# executable: Non-SFI .nexe executable to generate nmf for +# nmf: the name and the path of the output file +# nmfflags: additional flags for the nmf generator +template("generate_nonsfi_test_nmf") { + assert(defined(invoker.executable), "Must define executable") + assert(defined(invoker.nmf), "Must define nmf") + + action(target_name) { + forward_variables_from(invoker, + [ + "deps", + "data_deps", + "executable", + "nmf", + "testonly", + "public_deps", + "visibility", + ]) + + script = "//ppapi/tests/create_nonsfi_test_nmf.py" + sources = [ + executable, + ] + outputs = [ + nmf, + ] + + # NOTE: We use target_cpu rather than current_cpu on purpose because + # current_cpu is always going to be pnacl for Non-SFI, but the Non-SFI + # .nexe executable is always translated to run on the target machine. + if (target_cpu == "x86") { + arch = "x86-32" + } else if (target_cpu == "x64") { + arch = "x86-64" + } else { + arch = target_cpu + } + args = [ + "--program=" + rebase_path(executable, root_build_dir), + "--arch=${arch}", + "--output=" + rebase_path(nmf, root_build_dir), + ] + if (defined(invoker.nmfflags)) { + args += invoker.nmfflags + } + } +} diff --git a/build/config/posix/BUILD.gn b/build/config/posix/BUILD.gn new file mode 100644 index 00000000000..d7e917af441 --- /dev/null +++ b/build/config/posix/BUILD.gn @@ -0,0 +1,49 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
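
For reference, the generate_nmf template defined in rules.gni above might be
invoked like this (hypothetical target and file names, not part of this
change):

  import("//build/config/nacl/rules.gni")

  generate_nmf("hello_world_nmf") {
    nmf = "$root_build_dir/hello_world.nmf"
    executables = [ "$root_build_dir/hello_world.nexe" ]
    deps = [ ":hello_world_nexe" ]
  }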
+ +import("//build/config/clang/clang.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/sysroot.gni") +import("//build/toolchain/toolchain.gni") + +assert(is_posix) + +group("posix") { + visibility = [ "//:optimize_gn_gen" ] +} + +# This is included by reference in the //build/config/compiler config that +# is applied to all Posix targets. It is here to separate out the logic that is +# Posix-only. Note that this is in addition to an OS-specific variant of this +# config. +config("compiler") { + if ((allow_posix_link_time_opt || is_cfi) && !is_nacl) { + arflags = [ + "--plugin", + rebase_path("$clang_base_path/lib/LLVMgold.so", root_build_dir), + ] + } +} + +# This is included by reference in the //build/config/compiler:runtime_library +# config that is applied to all targets. It is here to separate out the logic +# that is Posix-only. Please see that target for advice on what should go in +# :runtime_library vs. :compiler. +config("runtime_library") { + if (!is_mac && !is_ios && sysroot != "") { + # Pass the sysroot to all C compiler variants, the assembler, and linker. + cflags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ] + asmflags = cflags + ldflags = cflags + + # Need to get some linker flags out of the sysroot. + ldflags += exec_script("sysroot_ld_path.py", + [ + rebase_path("//build/linux/sysroot_ld_path.sh", + root_build_dir), + rebase_path(sysroot), + ], + "list lines") + } +} diff --git a/build/config/posix/sysroot_ld_path.py b/build/config/posix/sysroot_ld_path.py new file mode 100644 index 00000000000..a90e54f236d --- /dev/null +++ b/build/config/posix/sysroot_ld_path.py @@ -0,0 +1,20 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file takes two arguments, the relative location of the shell script that +# does the checking, and the name of the sysroot. + +# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in +# Python in this file. + +import subprocess +import sys + +if len(sys.argv) != 3: + print "Need two arguments" + sys.exit(1) + +result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip() + +print result.replace(" ", "\n") diff --git a/build/config/sanitizers/BUILD.gn b/build/config/sanitizers/BUILD.gn new file mode 100644 index 00000000000..4b96dd446c1 --- /dev/null +++ b/build/config/sanitizers/BUILD.gn @@ -0,0 +1,503 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build_overrides/build.gni") +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +import("//build/config/clang/clang.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/toolchain.gni") + +# Contains the dependencies needed for sanitizers to link into executables and +# shared_libraries. Unconditionally depend upon this target as it is empty if +# |is_asan|, |is_lsan|, |is_tsan|, |is_msan| and |use_custom_libcxx| are false. +group("deps") { + public_deps = [ + ":deps_no_options", + ] + if (using_sanitizer) { + public_configs = [ + ":sanitizer_options_link_helper", + + # Even when a target removes default_sanitizer_flags, it may be depending + # on a library that did not remove default_sanitizer_flags. Thus, we need + # to add the ldflags here as well as in default_sanitizer_flags. 
+ ":default_sanitizer_ldflags", + ] + deps = [ + ":options_sources", + ] + } + if (use_afl) { + deps += [ "//third_party/afl" ] + } +} + +group("deps_no_options") { + if (using_sanitizer) { + public_configs = [ + # Even when a target removes default_sanitizer_flags, it may be depending + # on a library that did not remove default_sanitizer_flags. Thus, we need + # to add the ldflags here as well as in default_sanitizer_flags. + ":default_sanitizer_ldflags", + ] + deps = [] + public_deps = [] + + data = [ + "//tools/valgrind/asan/", + ] + if (is_win) { + exe = ".exe" + } else { + exe = "" + } + data += [ "$clang_base_path/bin/llvm-symbolizer${exe}" ] + if (is_linux) { + # llvm-symbolizer needs this. + data += [ "$clang_base_path/lib/libstdc++.so.6" ] + } + + if (use_prebuilt_instrumented_libraries || + use_locally_built_instrumented_libraries) { + deps += [ "//third_party/instrumented_libraries:deps" ] + } + if (use_custom_libcxx) { + public_deps += [ "//buildtools/third_party/libc++:libcxx_proxy" ] + data += [ "$root_out_dir/libc++.so" ] + } + if (is_mac || is_win) { + data_deps = [ + ":copy_asan_runtime", + ] + } + if (is_mac) { + public_deps += [ ":asan_runtime_bundle_data" ] + } + } +} + +if ((is_mac || is_win) && using_sanitizer) { + copy("copy_asan_runtime") { + if (is_mac) { + clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib" + } else if (is_win && target_cpu == "x86") { + clang_rt_dso_path = "windows/clang_rt.asan_dynamic-i386.dll" + } else if (is_win && target_cpu == "x64") { + clang_rt_dso_path = "windows/clang_rt.asan_dynamic-x86_64.dll" + } + sources = [ + "$clang_base_path/lib/clang/$clang_version/lib/$clang_rt_dso_path", + ] + outputs = [ + "$root_out_dir/{{source_file_part}}", + ] + } + + if (is_mac) { + bundle_data("asan_runtime_bundle_data") { + sources = get_target_outputs(":copy_asan_runtime") + outputs = [ + "{{bundle_executable_dir}}/{{source_file_part}}", + ] + public_deps = [ + ":copy_asan_runtime", + ] + } + } +} + +config("sanitizer_options_link_helper") { + if (is_mac) { + ldflags = [ "-Wl,-U,_sanitizer_options_link_helper" ] + } else if (!is_win) { + ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ] + } +} + +static_library("options_sources") { + # This is a static_library instead of a source_set, as it shouldn't be + # unconditionally linked into targets. + visibility = [ + ":deps", + "//:gn_visibility", + ] + sources = [ + "//build/sanitizers/sanitizer_options.cc", + ] + + # Don't compile this target with any sanitizer code. It can be called from + # the sanitizer runtimes, so instrumenting these functions could cause + # recursive calls into the runtime if there is an error. + configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ] + + if (is_asan) { + sources += [ asan_suppressions_file ] + } + + if (is_lsan) { + sources += [ lsan_suppressions_file ] + } + + if (is_tsan) { + sources += [ tsan_suppressions_file ] + } +} + +# Applies linker flags necessary when either :deps or :default_sanitizer_flags +# are used. 
+config("default_sanitizer_ldflags") { + visibility = [ + ":default_sanitizer_flags", + ":deps", + ] + + if (is_posix) { + ldflags = [] + if (is_asan) { + ldflags += [ "-fsanitize=address" ] + } + if (is_lsan) { + ldflags += [ "-fsanitize=leak" ] + } + if (is_tsan) { + ldflags += [ "-fsanitize=thread" ] + } + if (is_msan) { + ldflags += [ "-fsanitize=memory" ] + } + if (is_ubsan || is_ubsan_security) { + ldflags += [ "-fsanitize=undefined" ] + } + if (is_ubsan_null) { + ldflags += [ "-fsanitize=null" ] + } + if (is_ubsan_vptr) { + ldflags += [ "-fsanitize=vptr" ] + } + + if (use_sanitizer_coverage) { + ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ] + } + + if (is_cfi && !is_nacl) { + ldflags += [ "-fsanitize=cfi-vcall" ] + if (use_cfi_cast) { + ldflags += [ + "-fsanitize=cfi-derived-cast", + "-fsanitize=cfi-unrelated-cast", + ] + } + if (use_cfi_diag) { + ldflags += [ + "-fno-sanitize-trap=cfi", + "-fsanitize-recover=cfi", + ] + } + } + } else if (is_win && is_asan) { + # Windows directly calls link.exe instead of the compiler driver when + # linking. Hence, pass the runtime libraries instead of -fsanitize=address. + # In the static-library build, libraries are different for executables + # and dlls, see link_executable and link_shared_library below. + # This here handles only the component build. + if (target_cpu == "x64") { + # Windows 64-bit. TODO(etienneb): Remove the assert when this is ready. + if (is_component_build) { + assert(false, "win/asan does not work in 64-bit yet") + libs = [ + "clang_rt.asan_dynamic-x86_64.lib", + "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib", + ] + } + } else { + assert(target_cpu == "x86", "WinASan unsupported architecture") + if (is_component_build) { + libs = [ + "clang_rt.asan_dynamic-i386.lib", + "clang_rt.asan_dynamic_runtime_thunk-i386.lib", + ] + } + } + } +} + +config("common_sanitizer_flags") { + cflags = [] + cflags_cc = [] + + # Sanitizers need line table info for stack traces. They don't need type info + # or variable info, so we can leave that out to speed up the build. + if (using_sanitizer) { + assert(is_clang, "sanitizers only supported with clang") + cflags += [ "-gline-tables-only" ] + } + + # Common options for AddressSanitizer, LeakSanitizer, ThreadSanitizer, + # MemorySanitizer and non-official CFI builds. + if (using_sanitizer || (is_cfi && !is_official_build)) { + if (is_posix) { + cflags += [ "-fno-omit-frame-pointer" ] + } else { + cflags += [ "/Oy-" ] + } + } + + if (use_custom_libcxx) { + prefix = "//buildtools/third_party" + include = "trunk/include" + cflags_cc += [ + "-nostdinc++", + "-isystem" + rebase_path("$prefix/libc++/$include", root_build_dir), + "-isystem" + rebase_path("$prefix/libc++abi/$include", root_build_dir), + ] + } +} + +config("asan_flags") { + cflags = [] + if (is_asan) { + cflags += [ "-fsanitize=address" ] + if (!asan_globals) { + cflags += [ + "-mllvm", + "-asan-globals=0", + ] + } + if (is_win) { + cflags += [ "-fsanitize-blacklist=" + + rebase_path("//tools/memory/asan/blacklist_win.txt", + root_build_dir) ] + } else { + # TODO(rnk): Remove this as discussed in http://crbug.com/427202. + cflags += + [ "-fsanitize-blacklist=" + + rebase_path("//tools/memory/asan/blacklist.txt", root_build_dir) ] + } + } +} + +config("link_executable") { + if (is_asan && is_win && !is_component_build) { + if (target_cpu == "x64") { + # Windows 64-bit. TODO(etienneb): Remove the assert when this is ready. 
+ assert(false, "win/asan does not work in 64-bit yet") + libs = [ "clang_rt.asan-x86_64.lib" ] + } else { + assert(target_cpu == "x86", "WinASan unsupported architecture") + libs = [ "clang_rt.asan-i386.lib" ] + } + } +} + +config("link_shared_library") { + if (is_asan && is_win && !is_component_build) { + if (target_cpu == "x64") { + # Windows 64-bit. TODO(etienneb): Remove the assert when this is ready. + assert(false, "win/asan does not work in 64-bit yet") + libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ] + } else { + assert(target_cpu == "x86", "WinASan unsupported architecture") + libs = [ "clang_rt.asan_dll_thunk-i386.lib" ] + } + } +} + +config("cfi_flags") { + cflags = [] + if (is_cfi && !is_nacl) { + cfi_blacklist_path = + rebase_path("//tools/cfi/blacklist.txt", root_build_dir) + cflags += [ + "-fsanitize=cfi-vcall", + "-fsanitize-blacklist=$cfi_blacklist_path", + ] + + if (use_cfi_cast) { + cflags += [ + "-fsanitize=cfi-derived-cast", + "-fsanitize=cfi-unrelated-cast", + ] + } + + if (use_cfi_diag) { + cflags += [ + "-fno-sanitize-trap=cfi", + "-fsanitize-recover=cfi", + "-fno-inline-functions", + "-fno-inline", + "-fno-omit-frame-pointer", + "-O1", + ] + } else { + defines = [ "CFI_ENFORCEMENT" ] + } + } +} + +config("coverage_flags") { + cflags = [] + + if (use_sanitizer_coverage) { + cflags += [ + "-fsanitize-coverage=$sanitizer_coverage_flags", + "-mllvm", + "-sanitizer-coverage-prune-blocks=1", + ] + if (current_cpu == "arm") { + # http://crbug.com/517105 + cflags += [ + "-mllvm", + "-sanitizer-coverage-block-threshold=0", + ] + } + defines = [ "SANITIZER_COVERAGE" ] + } +} + +config("lsan_flags") { + if (is_lsan) { + cflags = [ "-fsanitize=leak" ] + } +} + +config("msan_flags") { + if (is_msan) { + assert(is_linux, "msan only supported on linux x86_64") + msan_blacklist_path = + rebase_path("//tools/msan/blacklist.txt", root_build_dir) + cflags = [ + "-fsanitize=memory", + "-fsanitize-memory-track-origins=$msan_track_origins", + "-fsanitize-blacklist=$msan_blacklist_path", + ] + } +} + +config("tsan_flags") { + if (is_tsan) { + assert(is_linux, "tsan only supported on linux x86_64") + tsan_blacklist_path = + rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir) + cflags = [ + "-fsanitize=thread", + "-fsanitize-blacklist=$tsan_blacklist_path", + ] + } +} + +config("ubsan_flags") { + cflags = [] + if (is_ubsan) { + ubsan_blacklist_path = + rebase_path("//tools/ubsan/blacklist.txt", root_build_dir) + cflags += [ + # Yasm dies with an "Illegal instruction" error when bounds checking is + # enabled. See http://crbug.com/489901 + # "-fsanitize=bounds", + "-fsanitize=float-divide-by-zero", + "-fsanitize=integer-divide-by-zero", + "-fsanitize=null", + "-fsanitize=object-size", + "-fsanitize=return", + "-fsanitize=returns-nonnull-attribute", + "-fsanitize=shift-exponent", + "-fsanitize=signed-integer-overflow", + "-fsanitize=unreachable", + "-fsanitize=vla-bound", + "-fsanitize-blacklist=$ubsan_blacklist_path", + ] + + # Chromecast ubsan builds fail to compile with these + # experimental flags, so only add them to non-chromecast ubsan builds. + if (!is_chromecast) { + cflags += [ + # Employ the experimental PBQP register allocator to avoid slow + # compilation on files with too many basic blocks. + # See http://crbug.com/426271. + "-mllvm", + "-regalloc=pbqp", + + # Speculatively use coalescing to slightly improve the code generated + # by PBQP regallocator. May increase compile time. 
+ "-mllvm", + "-pbqp-coalescing", + ] + } + } +} + +config("ubsan_no_recover") { + if (is_ubsan_no_recover) { + cflags = [ "-fno-sanitize-recover=undefined" ] + } +} + +config("ubsan_security_flags") { + if (is_ubsan_security) { + ubsan_security_blacklist_path = + rebase_path("//tools/ubsan/security_blacklist.txt", root_build_dir) + cflags = [ + "-fsanitize=signed-integer-overflow,shift,vptr", + "-fsanitize-blacklist=$ubsan_security_blacklist_path", + ] + } +} + +config("ubsan_null_flags") { + if (is_ubsan_null) { + cflags = [ "-fsanitize=null" ] + } +} + +config("ubsan_vptr_flags") { + if (is_ubsan_vptr) { + ubsan_vptr_blacklist_path = + rebase_path("//tools/ubsan/vptr_blacklist.txt", root_build_dir) + cflags = [ + "-fsanitize=vptr", + "-fsanitize-blacklist=$ubsan_vptr_blacklist_path", + ] + } +} + +all_sanitizer_configs = [ + ":common_sanitizer_flags", + ":coverage_flags", + ":default_sanitizer_ldflags", + ":asan_flags", + ":cfi_flags", + ":lsan_flags", + ":msan_flags", + ":tsan_flags", + ":ubsan_flags", + ":ubsan_no_recover", + ":ubsan_null_flags", + ":ubsan_security_flags", + ":ubsan_vptr_flags", +] + +# This config is applied by default to all targets. It sets the compiler flags +# for sanitizer usage, or, if no sanitizer is set, does nothing. +# +# This needs to be in a separate config so that targets can opt out of +# sanitizers (by removing the config) if they desire. Even if a target +# removes this config, executables & shared libraries should still depend on +# :deps if any of their dependencies have not opted out of sanitizers. +# Keep this list in sync with default_sanitizer_flags_but_ubsan_vptr. +config("default_sanitizer_flags") { + configs = all_sanitizer_configs +} + +# This config is equivalent to default_sanitizer_flags, but excludes ubsan_vptr. +# This allows to selectively disable ubsan_vptr, when needed. In particular, +# if some third_party code is required to be compiled without rtti, which +# is a requirement for ubsan_vptr. +config("default_sanitizer_flags_but_ubsan_vptr") { + configs = all_sanitizer_configs - [ ":ubsan_vptr_flags" ] +} + +config("default_sanitizer_flags_but_coverage") { + configs = all_sanitizer_configs - [ ":coverage_flags" ] +} diff --git a/build/config/sanitizers/sanitizers.gni b/build/config/sanitizers/sanitizers.gni new file mode 100644 index 00000000000..132512d2529 --- /dev/null +++ b/build/config/sanitizers/sanitizers.gni @@ -0,0 +1,179 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") + +declare_args() { + # Compile for Address Sanitizer to find memory bugs. + is_asan = false + + # Compile for Leak Sanitizer to find leaks. + is_lsan = false + + # Compile for Memory Sanitizer to find uninitialized reads. + is_msan = false + + # Compile for Thread Sanitizer to find threading bugs. + is_tsan = false + + # Compile for Undefined Behaviour Sanitizer to find various types of + # undefined behaviour (excludes vptr checks). + is_ubsan = false + + # Halt the program if a problem is detected. + is_ubsan_no_recover = false + + # Compile for Undefined Behaviour Sanitizer's null pointer checks. + is_ubsan_null = false + + # Compile for Undefined Behaviour Sanitizer's vptr checks. + is_ubsan_vptr = false + + # Track where uninitialized memory originates from. 
From fastest to slowest: + # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the + # chain of stores leading from allocation site to use site. + msan_track_origins = 2 + + # Use dynamic libraries instrumented by one of the sanitizers instead of the + # standard system libraries. Set this flag to download prebuilt binaries from + # GCS. + use_prebuilt_instrumented_libraries = false + + # Use dynamic libraries instrumented by one of the sanitizers instead of the + # standard system libraries. Set this flag to build the libraries from source. + use_locally_built_instrumented_libraries = false + + # Enable building with SyzyAsan which can find certain types of memory + # errors. Only works on Windows. See + # https://github.com/google/syzygy/wiki/SyzyASanHowTo + is_syzyasan = false + + # Compile with Control Flow Integrity to protect virtual calls and casts. + # See http://clang.llvm.org/docs/ControlFlowIntegrity.html + # + # TODO(pcc): Remove this flag if/when CFI is enabled in all official builds. + is_cfi = target_os == "linux" && !is_chromeos && target_cpu == "x64" && + is_chrome_branded && is_official_build + + # Enable checks for bad casts: derived cast and unrelated cast. + # TODO(krasin): remove this, when we're ready to add these checks by default. + # https://crbug.com/626794 + use_cfi_cast = false + + # By default, Control Flow Integrity will crash the program if it detects a + # violation. Set this to true to print detailed diagnostics instead. + use_cfi_diag = false + + # Compile for fuzzing with LLVM LibFuzzer. + # See http://www.chromium.org/developers/testing/libfuzzer + use_libfuzzer = false + + # Compile for fuzzing with AFL. + use_afl = false + + # Enables core ubsan security features. Will later be removed once it matches + # is_ubsan. + is_ubsan_security = false + + # Compile for fuzzing with Dr. Fuzz + # See http://www.chromium.org/developers/testing/dr-fuzz + use_drfuzz = false + + # Helper variable for testing builds with disabled libfuzzer. + # Not for client use. + disable_libfuzzer = false + + # Value for -fsanitize-coverage flag. Setting this causes + # use_sanitizer_coverage to be enabled. + # Default value when unset and use_afl=true: + # trace-pc + # Default value when unset and use_sanitizer_coverage=true: + # edge,indirect-calls,8bit-counters + sanitizer_coverage_flags = "" +} + +# Disable sanitizers for non-default toolchains. +if (current_toolchain != default_toolchain) { + is_asan = false + is_cfi = false + is_lsan = false + is_msan = false + is_syzyasan = false + is_tsan = false + is_ubsan = false + is_ubsan_null = false + is_ubsan_no_recover = false + is_ubsan_security = false + is_ubsan_vptr = false + msan_track_origins = 0 + sanitizer_coverage_flags = "" + use_cfi_diag = false + use_custom_libcxx = false + use_drfuzz = false + use_libfuzzer = false + use_prebuilt_instrumented_libraries = false + use_locally_built_instrumented_libraries = false + use_sanitizer_coverage = false +} + +# Args that are in turn dependent on other args must be in a separate +# declare_args block. User overrides are only applied at the end of a +# declare_args block. +declare_args() { + # Use libc++ (buildtools/third_party/libc++ and + # buildtools/third_party/libc++abi) instead of stdlibc++ as standard library. + # This is intended to be used for instrumented builds. + use_custom_libcxx = + (is_asan && is_linux && !is_chromeos) || is_tsan || is_msan || is_ubsan || + is_ubsan_security || use_libfuzzer || use_afl + + # Enable -fsanitize-coverage. 
+  use_sanitizer_coverage =
+      use_libfuzzer || use_afl || sanitizer_coverage_flags != ""
+
+  # Detect overflow/underflow for global objects.
+  #
+  # Android build relies on -Wl,--gc-sections removing unreachable code.
+  # ASan instrumentation for globals inhibits this and results in a
+  # library with unresolvable relocations.
+  # TODO(eugenis): find a way to reenable this.
+  #
+  # Mac: http://crbug.com/352073
+  asan_globals = !is_android && !is_mac
+}
+
+if (use_afl && sanitizer_coverage_flags == "") {
+  sanitizer_coverage_flags = "trace-pc"
+} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
+  sanitizer_coverage_flags = "edge,indirect-calls,8bit-counters"
+}
+
+using_sanitizer =
+    is_asan || is_lsan || is_tsan || is_msan || is_ubsan || is_ubsan_null ||
+    is_ubsan_vptr || is_ubsan_security || use_sanitizer_coverage
+
+assert(!using_sanitizer || is_clang,
+       "Sanitizers (is_*san) require setting is_clang = true in 'gn args'")
+
+prebuilt_instrumented_libraries_available =
+    is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
+
+# MSan only links Chrome properly in release builds (brettw -- 9/1/2015). The
+# same is possibly true for the other non-ASan sanitizers. But regardless of
+# whether it links, one would normally never run a sanitizer in debug mode.
+# Running in debug mode probably indicates you forgot to set the "is_debug =
+# false" flag in the build args. ASan seems to run fine in debug mode.
+#
+# If you find a use case where you want to compile a sanitizer in debug mode
+# and have verified it works, ask brettw and we can consider removing it from
+# this condition. We may also be able to find another way to enable your case
+# without having people accidentally get broken builds by compiling
+# unsupported or inadvisable configurations.
+#
+# For one-off testing, just comment this assertion out.
+assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr),
+       "Sanitizers should generally be used in release (set is_debug=false).")
+
+assert(!is_msan || (is_linux && current_cpu == "x64"),
+       "MSan currently only works on 64-bit Linux and ChromeOS builds.")
diff --git a/build/config/sysroot.gni b/build/config/sysroot.gni
new file mode 100644
index 00000000000..b815d2ac650
--- /dev/null
+++ b/build/config/sysroot.gni
@@ -0,0 +1,92 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "sysroot" variable which is the absolute path
+# of the sysroot. If no sysroot applies, the variable will be an empty string.
+
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+  # The absolute path of the sysroot that is applied when compiling using
+  # the target toolchain.
+  target_sysroot = ""
+
+  # The absolute path to the directory containing sysroots for 32-bit and
+  # 64-bit Linux.
+  target_sysroot_dir = ""
+
+  use_sysroot = true
+}
+
+if (is_linux && target_sysroot_dir != "") {
+  if (current_cpu == "x64") {
+    sysroot = target_sysroot_dir + "/wheezy-x64"
+  } else if (current_cpu == "x86") {
+    sysroot = target_sysroot_dir + "/wheezy-ia32"
+  }
+} else if (current_os == target_os && current_cpu == target_cpu &&
+           target_sysroot != "") {
+  sysroot = target_sysroot
+} else if (is_android) {
+  import("//build/config/android/config.gni")
+  if (current_cpu == "x86") {
+    sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
+  } else if (current_cpu == "arm") {
+    sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
+  } else if (current_cpu == "mipsel") {
+    sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
+  } else if (current_cpu == "x64") {
+    sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
+  } else if (current_cpu == "arm64") {
+    sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
+  } else if (current_cpu == "mips64el") {
+    sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
+  } else {
+    sysroot = ""
+  }
+} else if (is_linux && !is_chromeos && use_sysroot) {
+  # By default build against a sysroot image downloaded from Cloud Storage
+  # during gclient runhooks.
+  if (current_cpu == "x64") {
+    sysroot = "//build/linux/debian_wheezy_amd64-sysroot"
+  } else if (current_cpu == "x86") {
+    sysroot = "//build/linux/debian_wheezy_i386-sysroot"
+  } else if (current_cpu == "mipsel") {
+    sysroot = "//build/linux/debian_wheezy_mips-sysroot"
+  } else if (current_cpu == "arm") {
+    sysroot = "//build/linux/debian_wheezy_arm-sysroot"
+  } else if (current_cpu == "arm64") {
+    sysroot = "//build/linux/debian_jessie_arm64-sysroot"
+  } else {
+    # Any other builds don't use a sysroot.
+    sysroot = ""
+  }
+
+  if (sysroot != "") {
+    # Our sysroot images only contain gcc 4.6 headers, but chromium requires
+    # gcc 4.9. Clang is able to detect and work with the 4.6 headers while
+    # gcc is not. This check can be removed if we ever update to a more modern
+    # sysroot.
+    assert(is_clang, "sysroot images require clang (try use_sysroot=false)")
+
+    _script_arch = current_cpu
+    if (_script_arch == "x86") {
+      _script_arch = "i386"
+    } else if (_script_arch == "x64") {
+      _script_arch = "amd64"
+    }
+    assert(
+        exec_script("//build/dir_exists.py",
+                    [ rebase_path(sysroot) ],
+                    "string") == "True",
+        "Missing sysroot ($sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=$_script_arch")
+  }
+} else if (is_mac) {
+  import("//build/config/mac/mac_sdk.gni")
+  sysroot = mac_sdk_path
+} else if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+  sysroot = ios_sdk_path
+} else {
+  sysroot = ""
+}
diff --git a/build/config/ui.gni b/build/config/ui.gni
new file mode 100644
index 00000000000..1212ad2cc8b
--- /dev/null
+++ b/build/config/ui.gni
@@ -0,0 +1,97 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================
+# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# These flags are effectively global. Your feature flag should go near the
+# code it controls.
Most of these items are here now because they control +# legacy global #defines passed to the compiler (now replaced with generated +# buildflag headers -- see //build/buildflag_header.gni). +# +# These flags are ui-related so should eventually be moved to various places +# in //ui/*. +# +# There is more advice on where to put build flags in the "Build flag" section +# of //build/config/BUILDCONFIG.gn. + +import("//build/config/chromecast_build.gni") + +declare_args() { + # Indicates if Ash is enabled. Ash is the Aura Shell which provides a + # desktop-like environment for Aura. Requires use_aura = true + use_ash = is_chromeos && !is_chromecast + + # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux + # that does not require X11. Enabling this feature disables use of glib, x11, + # Pango, and Cairo. Default to false on non-Chromecast builds. + use_ozone = is_chromecast && !is_android + + # Indicates if Aura is enabled. Aura is a low-level windowing library, sort + # of a replacement for GDI or GTK. + use_aura = is_win || is_linux + + # True means the UI is built using the "views" framework. + toolkit_views = + (is_mac || is_win || is_chromeos || use_aura) && !is_chromecast + + # Whether the entire browser uses toolkit-views on Mac instead of Cocoa. + mac_views_browser = false + + # Whether we should use GTKv3 instead of GTKv2. + use_gtk3 = false + + # Optional system libraries. + use_xkbcommon = false + + # Whether we should use glib, a low level C utility library. + use_glib = is_linux + + # Indicates if Wayland display server support is enabled. + enable_wayland_server = is_chromeos + + # Enable experimental vulkan backend. + enable_vulkan = false + + # Allow aura to access x11 clipboard. + enable_clipboard_aurax11 = false +} + +# Additional dependent variables ----------------------------------------------- +# +# These variables depend on other variables and can't be set externally. + +# Use GPU accelerated cross process image transport by default on linux builds +# with the Aura window manager. +ui_compositor_image_transport = use_aura && is_linux + +use_default_render_theme = use_aura && !is_android + +# Indicates if the UI toolkit depends on X11. +use_x11 = is_linux && !use_ozone + +# Turn off glib if Ozone is enabled. +if (use_ozone) { + use_glib = false +} + +if (is_linux && !use_ozone) { + use_cairo = true + use_pango = true +} else { + use_cairo = false + use_pango = false +} + +# Whether to use atk, the Accessibility ToolKit library +use_atk = is_desktop_linux && use_x11 + +use_clipboard_aurax11 = + (is_desktop_linux && use_aura && use_x11) || enable_clipboard_aurax11 +# ============================================= +# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE +# ============================================= +# +# See comment at the top. diff --git a/build/config/v8_target_cpu.gni b/build/config/v8_target_cpu.gni new file mode 100644 index 00000000000..13755d2be50 --- /dev/null +++ b/build/config/v8_target_cpu.gni @@ -0,0 +1,61 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sanitizers/sanitizers.gni") + +declare_args() { + # This arg is used when we want to tell the JIT-generating v8 code + # that we want to have it generate for an architecture that is different + # than the architecture that v8 will actually run on; we then run the + # code under an emulator. 
For example, we might run v8 on x86, but
+  # generate arm code and run that under emulation.
+  #
+  # This arg is defined here rather than in the v8 project because we want
+  # some of the common architecture-specific args (like arm_float_abi or
+  # mips_arch_variant) to be set to their defaults either if the current_cpu
+  # applies *or* if the v8_current_cpu applies.
+  #
+  # As described below, you can also specify the v8_target_cpu to use
+  # indirectly by specifying a `custom_toolchain` that contains v8_$cpu in the
+  # name after the normal toolchain.
+  #
+  # For example, `gn gen --args="custom_toolchain=...:clang_x64_v8_arm64"`
+  # is equivalent to setting --args=`v8_target_cpu="arm64"`. Setting
+  # `custom_toolchain` is more verbose but makes the toolchain that is
+  # (effectively) being used explicit.
+  #
+  # v8_target_cpu can only be used to target one architecture in a build,
+  # so if you wish to build multiple copies of v8 that are targeting
+  # different architectures, you will need to do something more
+  # complicated involving multiple toolchains along the lines of
+  # custom_toolchain, above.
+  v8_target_cpu = ""
+}
+
+if (v8_target_cpu == "") {
+  if (current_toolchain == "//build/toolchain/linux:clang_x64_v8_arm64") {
+    v8_target_cpu = "arm64"
+  } else if (current_toolchain == "//build/toolchain/linux:clang_x86_v8_arm") {
+    v8_target_cpu = "arm"
+  } else if (current_toolchain ==
+             "//build/toolchain/linux:clang_x86_v8_mips64el") {
+    v8_target_cpu = "mips64el"
+  } else if (current_toolchain ==
+             "//build/toolchain/linux:clang_x86_v8_mipsel") {
+    v8_target_cpu = "mipsel"
+  } else if (is_msan) {
+    # If we're running under a sanitizer and we configure v8 to generate
+    # code that will be run under a simulator, then the generated code
+    # also gets the benefits of the sanitizer.
+    v8_target_cpu = "arm64"
+  } else {
+    v8_target_cpu = target_cpu
+  }
+}
+
+declare_args() {
+  # This argument is declared here so that it can be overridden in toolchains.
+  # It should never be explicitly set by the user.
+  v8_current_cpu = v8_target_cpu
+}
diff --git a/build/config/win/BUILD.gn b/build/config/win/BUILD.gn
new file mode 100644
index 00000000000..c778bf2ab4f
--- /dev/null
+++ b/build/config/win/BUILD.gn
@@ -0,0 +1,414 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(is_win)
+
+declare_args() {
+  # Set this to true to enable static analysis through Visual Studio's
+  # /analyze. This dramatically slows compiles and reports thousands of
+  # warnings, so normally this is done on a build machine and only the new
+  # warnings are examined.
+  use_vs_code_analysis = false
+
+  # Turn this on to have the linker output extra timing information.
+  win_linker_timing = false
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Windows-only.
+config("compiler") {
+  if (current_cpu == "x86") {
+    asmflags = [
+      # When /safeseh is specified, the linker will only produce an image if it
+      # can also produce a table of the image's safe exception handlers. This
This + # table specifies for the operating system which exception handlers are + # valid for the image. Note that /SAFESEH isn't accepted on the command + # line, only /safeseh. This is only accepted by ml.exe, not ml64.exe. + "/safeseh", + ] + } + + cflags = [ + "/Gy", # Enable function-level linking. + "/FS", # Preserve previous PDB behavior. + "/bigobj", # Some of our files are bigger than the regular limits. + ] + + # Force C/C++ mode for the given GN detected file type. This is necessary + # for precompiled headers where the same source file is compiled in both + # modes. + cflags_c = [ "/TC" ] + cflags_cc = [ "/TP" ] + + # Flags not supported in version 2013. + if (visual_studio_version != "2013" && visual_studio_version != "2013e") { + cflags += [ + # Tell the compiler to crash on failures. This is undocumented + # and unsupported but very handy. + "/d2FastFail", + ] + } + + if (visual_studio_version == "2015") { + cflags += [ + # Work around crbug.com/526851, bug in VS 2015 RTM compiler. + "/Zc:sizedDealloc-", + + # Disable thread-safe statics to avoid overhead and because + # they are disabled on other platforms. See crbug.com/587210 + # and -fno-threadsafe-statics. + "/Zc:threadSafeInit-", + ] + } + + # Building with Clang on Windows is a work in progress and very + # experimental. See crbug.com/82385. + # Keep this in sync with the similar block in build/common.gypi + if (is_clang) { + if (visual_studio_version == "2013") { + cflags += [ "-fmsc-version=1800" ] + } else if (visual_studio_version == "2015") { + cflags += [ "-fmsc-version=1900" ] + } + + if (current_cpu == "x86") { + cflags += [ "-m32" ] + } else { + cflags += [ "-m64" ] + } + + if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") == + "True") { + cflags += [ + # cmd.exe doesn't understand ANSI escape codes by default, + # so only enable them if something emulating them is around. + "-fansi-escape-codes", + ] + } + + # Clang runtime libraries, such as the sanitizer runtimes, live here. + lib_dirs = [ "$clang_base_path/lib/clang/$clang_version/lib/windows" ] + } + + # Ensures that the PDB file contains FIXUP information (growing the PDB file + # by about 5%) but does not otherwise alter the output binary. This + # information is used by the Syzygy optimization tool when decomposing the + # release image. + if (!is_debug && !is_win_fastlink && !is_clang) { + ldflags = [ "/PROFILE" ] + } + + # arflags apply only to static_libraries. The normal linker configs are only + # set for executable and shared library targets so arflags must be set + # elsewhere. Since this is relatively contained, we just apply them in this + # more general config and they will only have an effect on static libraries. + arflags = [ + # "No public symbols found; archive member will be inaccessible." This + # means that one or more object files in the library can never be + # pulled in to targets that link to this library. It's just a warning that + # the source file is a no-op. + "/ignore:4221", + ] +} + +config("vs_code_analysis") { + if (use_vs_code_analysis) { + # When use_vs_code_analysis is specified add the /analyze switch to enable + # static analysis. Specifying /analyze:WX- says that /analyze warnings + # should not be treated as errors. + cflags = [ "/analyze:WX-" ] + + # Also, disable various noisy warnings that have low value. 
+ cflags += [ + "/wd6011", # Dereferencing NULL pointer + + # C6285 is ~16% of raw warnings and has low value + "/wd6285", # non-zero constant || non-zero constant + "/wd6308", # realloc might return null pointer + + # Possible infinite loop: use of the constant + # EXCEPTION_CONTINUE_EXECUTION in the exception-filter + "/wd6312", + + "/wd6322", # Empty _except block + "/wd6330", # 'char' used instead of 'unsigned char' for istype() call + + # C6334 is ~80% of raw warnings and has low value + "/wd6334", # sizeof applied to an expression with an operator + "/wd6326", # Potential comparison of constant with constant + "/wd6340", # Sign mismatch in function parameter + "/wd28159", # Consider using 'GetTickCount64' + "/wd28196", # The precondition is not satisfied + "/wd28204", # Inconsistent SAL annotations + "/wd28251", # Inconsistent SAL annotations + "/wd28252", # Inconsistent SAL annotations + "/wd28253", # Inconsistent SAL annotations + "/wd28278", # Function appears with no prototype in scope + "/wd28285", # syntax error in SAL annotation (in algorithm) + "/wd28301", # Inconsistent SAL annotations + "/wd28182", # Dereferencing NULL pointer + ] + } +} + +# This is included by reference in the //build/config/compiler:runtime_library +# config that is applied to all targets. It is here to separate out the logic +# that is Windows-only. Please see that target for advice on what should go in +# :runtime_library vs. :compiler. +config("runtime_library") { + cflags = [] + + # Defines that set up the CRT. + defines = [ + "__STD_C", + "_CRT_RAND_S", + "_CRT_SECURE_NO_DEPRECATE", + "_HAS_EXCEPTIONS=0", + "_SCL_SECURE_NO_DEPRECATE", + ] + + # Defines that set up the Windows SDK. + defines += [ + "_ATL_NO_OPENGL", + "_WINDOWS", + "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS", + "PSAPI_VERSION=1", + "WIN32", + "_SECURE_ATL", + ] + + if (!use_vs_code_analysis) { + # This is required for ATL to use XP-safe versions of its functions. + # However it is prohibited when using /analyze + defines += [ "_USING_V110_SDK71_" ] + } + + if (is_component_build) { + # Component mode: dynamic CRT. Since the library is shared, it requires + # exceptions or will give errors about things not matching, so keep + # exceptions on. + if (is_debug) { + cflags += [ "/MDd" ] + } else { + cflags += [ "/MD" ] + } + } else { + if (current_os != "win") { + # WindowsRT: use the dynamic CRT. + if (is_debug) { + cflags += [ "/MDd" ] + } else { + cflags += [ "/MD" ] + } + } else { + # Desktop Windows: static CRT. + if (is_debug) { + cflags += [ "/MTd" ] + } else { + cflags += [ "/MT" ] + } + } + } +} + +# Sets the default Windows build version. This is separated because some +# targets need to manually override it for their compiles. +config("winver") { + defines = [ + "NTDDI_VERSION=0x0A000000", + "_WIN32_WINNT=0x0A00", + "WINVER=0x0A00", + ] +} + +# Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs. +config("sdk_link") { + if (current_cpu == "x64") { + ldflags = [ "/MACHINE:X64" ] + lib_dirs = [ + "$windows_sdk_path\Lib\winv6.3\um\x64", + "$visual_studio_path\VC\lib\amd64", + "$visual_studio_path\VC\atlmfc\lib\amd64", + ] + } else { + ldflags = [ + "/MACHINE:X86", + "/SAFESEH", # Not compatible with x64 so use only for x86. 
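+      # /largeaddressaware lets a 32-bit binary use up to 4 GB of address
+      # space when run on 64-bit Windows (instead of the default 2 GB).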
+      "/largeaddressaware",
+    ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\winv6.3\um\x86",
+      "$visual_studio_path\VC\lib",
+      "$visual_studio_path\VC\atlmfc\lib",
+    ]
+  }
+}
+
+# This default linker setup is provided separately from the SDK setup so
+# targets that want different library configurations can remove this and
+# specify their own.
+config("common_linker_setup") {
+  ldflags = [
+    "/FIXED:NO",
+    "/ignore:4199",
+    "/ignore:4221",
+    "/NXCOMPAT",
+
+    # Suggested by Microsoft Devrel to avoid
+    #   LINK : fatal error LNK1248: image size (80000000)
+    #   exceeds maximum allowable size (80000000)
+    # which started happening more regularly after VS2013 Update 4.
+    # Needs to be a bit lower for VS2015, or else errors out.
+    "/maxilksize:0x7ff00000",
+  ]
+
+  ldflags += [
+    # Tell the linker to crash on failures.
+    "/fastfail",
+  ]
+
+  # ASLR makes debugging with windbg difficult because Chrome.exe and
+  # Chrome.dll share the same base name. As a result, windbg will name the
+  # Chrome.dll module like chrome_<base address>, where <base address>
+  # typically changes with each launch. This in turn means that breakpoints in
+  # Chrome.dll don't stick from one launch to the next. For this reason, we
+  # turn ASLR off in debug builds.
+  if (is_debug) {
+    ldflags += [ "/DYNAMICBASE:NO" ]
+  } else {
+    ldflags += [ "/DYNAMICBASE" ]
+  }
+
+  if (win_linker_timing) {
+    ldflags += [
+      "/time",
+      "/verbose:incr",
+    ]
+  }
+}
+
+# Subsystem --------------------------------------------------------------------
+
+# This is appended to the subsystem to specify a minimum version.
+if (current_cpu == "x64") {
+  # The number after the comma is the minimum required OS version.
+  # 5.02 = Windows Server 2003.
+  subsystem_version_suffix = ",5.02"
+} else {
+  # 5.01 = Windows XP.
+  subsystem_version_suffix = ",5.01"
+}
+
+config("console") {
+  ldflags = [ "/SUBSYSTEM:CONSOLE$subsystem_version_suffix" ]
+}
+config("windowed") {
+  ldflags = [ "/SUBSYSTEM:WINDOWS$subsystem_version_suffix" ]
+}
+
+# Incremental linking ----------------------------------------------------------
+
+incremental_linking_on_switch = [ "/INCREMENTAL" ]
+incremental_linking_off_switch = [ "/INCREMENTAL:NO" ]
+
+# Disable incremental linking for syzyasan.
+if (is_debug && !is_syzyasan) {
+  default_incremental_linking_switch = incremental_linking_on_switch
+} else {
+  default_incremental_linking_switch = incremental_linking_off_switch
+}
+
+# Applies incremental linking or not depending on the current configuration.
+config("default_incremental_linking") {
+  ldflags = default_incremental_linking_switch
+}
+
+# Explicitly turn incremental linking on or off.
+config("incremental_linking") {
+  ldflags = incremental_linking_on_switch
+}
+config("no_incremental_linking") {
+  ldflags = incremental_linking_off_switch
+}
+
+# Some large modules can't handle incremental linking in some situations. This
+# config should be applied to large modules to turn off incremental linking
+# when it won't work.
+config("default_large_module_incremental_linking") {
+  if (symbol_level > 0 && (current_cpu == "x86" || !is_component_build)) {
+    # When symbols are on, things get so large that the tools fail due to the
+    # size of the .ilk files.
+    ldflags = incremental_linking_off_switch
+  } else {
+    # Otherwise just do the default incremental linking for this build type.
+    ldflags = default_incremental_linking_switch
+  }
+}
+
+# Character set ----------------------------------------------------------------
+
+# Not including this config means "ansi" (8-bit system codepage).
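+# Defining UNICODE and _UNICODE makes TCHAR expand to wchar_t and maps the
+# generic-text Win32 APIs (e.g. CreateFile) to their wide W variants rather
+# than the ANSI A ones.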
+config("unicode") {
+  defines = [
+    "_UNICODE",
+    "UNICODE",
+  ]
+}
+
+# Lean and mean ----------------------------------------------------------------
+
+# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have
+# to have a separate config for it. Remove this config from your target to
+# get the "bloaty and accommodating" version of windows.h.
+config("lean_and_mean") {
+  defines = [ "WIN32_LEAN_AND_MEAN" ]
+}
+
+# Nominmax ----------------------------------------------------------------------
+
+# Some third party code defines NOMINMAX before including windows.h, which
+# then causes warnings when it's been previously defined on the command line.
+# For such targets, this config can be removed.
+
+config("nominmax") {
+  defines = [ "NOMINMAX" ]
+}
+
+# Target WinRT ------------------------------------------------------------------
+
+# When targeting Windows Runtime, certain compiler/linker flags are necessary.
+
+config("target_winrt") {
+  defines = [
+    "WINRT",
+    "WINAPI_FAMILY=WINAPI_FAMILY_PC_APP",
+  ]
+  cflags_cc = [
+    "/ZW",
+    "/EHsc",
+  ]
+}
+
+# Internal stuff ----------------------------------------------------------------
+
+# Config used by the MIDL template to disable warnings.
+config("midl_warnings") {
+  if (is_clang) {
+    cflags = [
+      # MIDL generates code like "#endif !_MIDL_USE_GUIDDEF_".
+      "-Wno-extra-tokens",
+
+      # TODO(thakis): Remove this once clang is rolled past r279116,
+      # https://crbug.com/637456
+      "-Wno-extern-initializer",
+    ]
+  }
+}
diff --git a/build/config/win/console_app.gni b/build/config/win/console_app.gni
new file mode 100644
index 00000000000..cac2ef5d731
--- /dev/null
+++ b/build/config/win/console_app.gni
@@ -0,0 +1,18 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # If true, builds as a console app (rather than a windowed app), which allows
+  # logging to be printed to the user. This will cause a terminal window to pop
+  # up when the executable is not run from the command line, so should only be
+  # used for development. Only has an effect on Windows builds.
+  win_console_app = false
+}
+
+if (is_win && is_asan) {
+  # AddressSanitizer build should be a console app since it writes to stderr.
+  win_console_app = true
+}
diff --git a/build/config/win/manifest.gni b/build/config/win/manifest.gni
new file mode 100644
index 00000000000..3dcaddfb3bc
--- /dev/null
+++ b/build/config/win/manifest.gni
@@ -0,0 +1,191 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# HOW MANIFESTS WORK IN THE GN BUILD
+#
+# Use the windows_manifest template to declare a manifest generation step.
+# This will combine all listed .manifest files and generate a resource file
+# referencing the resulting manifest. To link this manifest, just depend on
+# the manifest target from your executable or shared library.
+#
+# This will define an empty placeholder target on non-Windows platforms so
+# the manifest declarations and dependencies do not need to be inside of OS
+# conditionals.
+#
+# Manifests use different resource IDs for EXE and DLL targets. You will need
+# to specify this in the manifest target declaration and only use that manifest
+# target from the correct type of binary target.
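+#
+# (As the template below shows, an EXE's manifest is embedded as resource ID 1
+# and a DLL's as resource ID 2, which is why the two kinds cannot be mixed.)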
+#
+# A binary can depend on only one manifest target, but the manifest target
+# can depend on many individual .manifest files which will be merged. As a
+# result, only executables and shared libraries should depend on manifest
+# targets. If you want to add a manifest to a component, put the dependency
+# behind an "if (is_component_build)" conditional.
+#
+# Generally you will just want the defaults for the Chrome build. In this case
+# the binary should just depend on one of the targets in //build/win/. There
+# are also individual manifest files in that directory you can reference via
+# the *_manifest variables defined below to pick and choose only some defaults.
+# You might combine these with a custom manifest file to get specific behavior.
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome OS compatibility list.
+default_compatibility_manifest = "//build/win/compatibility.manifest"
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome common controls compatibility.
+common_controls_manifest = "//build/win/common_controls.manifest"
+
+# Reference this manifest to request that Windows not perform any elevation
+# when running your program. Otherwise, it might do some autodetection and
+# request elevated privileges from the user. This is normally what you want.
+as_invoker_manifest = "//build/win/as_invoker.manifest"
+
+# An alternative to as_invoker_manifest when you want the application to always
+# elevate.
+require_administrator_manifest = "//build/win/require_administrator.manifest"
+
+# Construct a target to combine the given manifest files into a .rc file.
+#
+# Variables for the windows_manifest template:
+#
+#   sources: (required)
+#     List of source .manifest files to add.
+#
+#   type: "dll" or "exe" (required)
+#     Indicates the type of target that this manifest will be used for.
+#     DLLs and EXEs have different manifest resource IDs.
+#
+#   deps: (optional)
+#   visibility: (optional)
+#     Normal meaning.
+#
+# Example:
+#
+#   windows_manifest("doom_melon_manifest") {
+#     sources = [
+#       "doom_melon.manifest",  # Custom values in here.
+#       default_compatibility_manifest,  # Want the normal OS compat list.
+#     ]
+#     type = "exe"
+#   }
+#
+#   executable("doom_melon") {
+#     deps = [ ":doom_melon_manifest" ]
+#     ...
+#   }
+
+if (is_win) {
+  # This is the environment file that gyp-win-tool will use for the current
+  # toolchain. It is placed in root_build_dir by the toolchain setup. This
+  # variable is the path relative to the root_build_dir which is what
+  # gyp-win-tool expects as an argument.
+  _environment_file = "environment.$current_cpu"
+
+  template("windows_manifest") {
+    manifest_action_name = "${target_name}__gen_manifest"
+    rc_action_name = "${target_name}__gen_rc"
+    source_set_name = target_name
+
+    output_manifest = "$target_gen_dir/$source_set_name.manifest"
+    rcfile = "$output_manifest.rc"
+
+    # Make the final .manifest file.
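+    # The merge is performed by mt.exe from the Windows SDK, invoked through
+    # gyp-win-tool so that it runs with the toolchain environment file above.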
+ action(manifest_action_name) { + visibility = [ + ":$source_set_name", + ":$rc_action_name", + ] + + script = "$root_build_dir/gyp-win-tool" + + assert(defined(invoker.sources), + "\"sources\" must be defined for a windows_manifest target") + inputs = invoker.sources + + outputs = [ + output_manifest, + ] + + args = [ + "manifest-wrapper", + _environment_file, + "mt.exe", + "-nologo", + "-manifest", + ] + args += rebase_path(invoker.sources, root_build_dir) + args += [ "-out:" + rebase_path(output_manifest, root_build_dir) ] + + # Apply any dependencies from the invoker to this target, since those + # dependencies may have created the input manifest files. + forward_variables_from(invoker, [ "deps" ]) + } + + # Make the .rc file that references the final manifest file. + # + # This could easily be combined into one step, but this current separation + # of .manifest and .rc matches GYP and allows us to re-use gyp-win-tool. + action(rc_action_name) { + visibility = [ ":$source_set_name" ] + + script = "$root_build_dir/gyp-win-tool" + + outputs = [ + rcfile, + ] + + # EXEs have a resource ID of 1 for their manifest, DLLs use 2. + assert(defined(invoker.type), + "\"type\" must be defined for a windows_manifest") + if (invoker.type == "exe") { + manifest_resource_id = "1" + } else if (invoker.type == "dll") { + manifest_resource_id = "2" + } else { + assert(false, "Bad value of \"type\", Must be \"exe\" or \"dll\"") + } + + args = [ + "manifest-to-rc", + "$_environment_file", + rebase_path(output_manifest), + rebase_path(rcfile, root_build_dir), + manifest_resource_id, + ] + + # Although generating this file doesn't technically depend on the + # generated manifest, this dependency causes the .rc timestamp to be + # updated every time the manifest is updated. Otherwise, updating the + # manifest will not cause a recompilation of the .rc file. + deps = [ + ":$manifest_action_name", + ] + } + + # This source set only exists to compile and link the resource file. + source_set(source_set_name) { + forward_variables_from(invoker, [ "visibility" ]) + sources = [ + rcfile, + ] + deps = [ + ":$manifest_action_name", + ":$rc_action_name", + ] + } + } +} else { + # Make a no-op group on non-Windows platforms so windows_manifest + # instantiations don't need to be inside windows blocks. + template("windows_manifest") { + group(target_name) { + # Prevent unused variable warnings on non-Windows platforms. + assert(invoker.type == "exe" || invoker.type == "dll") + assert(invoker.sources != "") + assert(!defined(invoker.deps) || invoker.deps != "") + assert(!defined(invoker.visibility) || invoker.visibility != "") + } + } +} diff --git a/build/config/win/msvs_dependencies.isolate b/build/config/win/msvs_dependencies.isolate new file mode 100644 index 00000000000..d33aec4950b --- /dev/null +++ b/build/config/win/msvs_dependencies.isolate @@ -0,0 +1,179 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +{ + 'conditions': [ + # Copy the VS runtime DLLs into the isolate so that they + # don't have to be preinstalled on the target machine. 
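+    # Debug configurations need the d-suffixed debug runtimes (e.g.
+    # msvcp120d.dll), while Release configurations use the plain versions.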
+ # + # VS2013 runtimes + ['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', { + 'variables': { + 'files': [ + '<(PRODUCT_DIR)/x64/msvcp120d.dll', + '<(PRODUCT_DIR)/x64/msvcr120d.dll', + ], + }, + }], + ['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Release"', { + 'variables': { + 'files': [ + '<(PRODUCT_DIR)/x64/msvcp120.dll', + '<(PRODUCT_DIR)/x64/msvcr120.dll', + ], + }, + }], + ['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', { + 'variables': { + 'files': [ + '<(PRODUCT_DIR)/msvcp120d.dll', + '<(PRODUCT_DIR)/msvcr120d.dll', + ], + }, + }], + ['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', { + 'variables': { + 'files': [ + '<(PRODUCT_DIR)/msvcp120.dll', + '<(PRODUCT_DIR)/msvcr120.dll', + ], + }, + }], + # VS2015 runtimes + ['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', { + 'variables': { + 'files': [ + '<(PRODUCT_DIR)/x64/msvcp140d.dll', + '<(PRODUCT_DIR)/x64/vccorlib140d.dll', + '<(PRODUCT_DIR)/x64/vcruntime140d.dll', + '<(PRODUCT_DIR)/x64/ucrtbased.dll', + ], + }, + }], + ['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Release"', { + 'variables': { + 'files': [ + '<(PRODUCT_DIR)/x64/msvcp140.dll', + '<(PRODUCT_DIR)/x64/vccorlib140.dll', + '<(PRODUCT_DIR)/x64/vcruntime140.dll', + '<(PRODUCT_DIR)/x64/ucrtbase.dll', + ], + }, + }], + ['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', { + 'variables': { + 'files': [ + '<(PRODUCT_DIR)/msvcp140d.dll', + '<(PRODUCT_DIR)/vccorlib140d.dll', + '<(PRODUCT_DIR)/vcruntime140d.dll', + '<(PRODUCT_DIR)/ucrtbased.dll', + ], + }, + }], + ['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', { + 'variables': { + 'files': [ + '<(PRODUCT_DIR)/msvcp140.dll', + '<(PRODUCT_DIR)/vccorlib140.dll', + '<(PRODUCT_DIR)/vcruntime140.dll', + '<(PRODUCT_DIR)/ucrtbase.dll', + ], + }, + }], + ['OS=="win" and msvs_version==2015 and component=="shared_library" and target_arch=="ia32"', { + # 32-bit builds have an x64 directory which also needs Windows 10 + # Universal C Runtime binaries copied over. 
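+      # The api-ms-win-*.dll files below are the "API set" forwarder stubs
+      # that, together with ucrtbase(d).dll, make up the Universal CRT.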
+ 'variables': { + 'files': [ + '<(PRODUCT_DIR)/x64/api-ms-win-core-console-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-datetime-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-debug-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-errorhandling-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-file-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-file-l1-2-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-file-l2-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-handle-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-heap-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-interlocked-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-libraryloader-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-localization-l1-2-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-memory-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-namedpipe-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-processenvironment-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-processthreads-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-processthreads-l1-1-1.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-profile-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-rtlsupport-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-string-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-synch-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-synch-l1-2-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-sysinfo-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-timezone-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-core-util-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-conio-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-convert-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-environment-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-filesystem-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-heap-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-locale-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-math-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-multibyte-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-private-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-process-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-runtime-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-stdio-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-string-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-time-l1-1-0.dll', + '<(PRODUCT_DIR)/x64/api-ms-win-crt-utility-l1-1-0.dll', + ], + }, + }], + ['OS=="win" and msvs_version==2015 and component=="shared_library"', { + # Windows 10 Universal C Runtime binaries. 
+ 'variables': { + 'files': [ + '<(PRODUCT_DIR)/api-ms-win-core-console-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-datetime-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-debug-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-errorhandling-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-file-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-file-l1-2-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-file-l2-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-handle-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-heap-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-interlocked-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-libraryloader-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-localization-l1-2-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-memory-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-namedpipe-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-processenvironment-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-processthreads-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-processthreads-l1-1-1.dll', + '<(PRODUCT_DIR)/api-ms-win-core-profile-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-rtlsupport-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-string-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-synch-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-synch-l1-2-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-sysinfo-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-timezone-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-core-util-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-conio-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-convert-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-environment-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-filesystem-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-heap-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-locale-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-math-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-multibyte-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-private-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-process-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-runtime-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-stdio-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-string-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-time-l1-1-0.dll', + '<(PRODUCT_DIR)/api-ms-win-crt-utility-l1-1-0.dll', + ], + }, + }], + ], +} diff --git a/build/config/win/visual_studio_version.gni b/build/config/win/visual_studio_version.gni new file mode 100644 index 00000000000..5bfa9a76202 --- /dev/null +++ b/build/config/win/visual_studio_version.gni @@ -0,0 +1,39 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # Path to Visual Studio. If empty, the default is used which is to use the + # automatic toolchain in depot_tools. If set, you must also set the + # visual_studio_version and wdk_path. + visual_studio_path = "" + + # Version of Visual Studio pointed to by the visual_studio_path. + # Use "2013" for Visual Studio 2013, or "2013e" for the Express version. + visual_studio_version = "" + + # Directory of the Windows driver kit. If visual_studio_path is empty, this + # will be auto-filled. + wdk_path = "" + + # Full path to the Windows SDK, not including a backslash at the end. + # This value is the default location, override if you have a different + # installation location. 
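+  #
+  # For example (hypothetical location):
+  #   gn gen out/Default --args='windows_sdk_path="D:\Windows Kits\10"'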
+ windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10" +} + +if (visual_studio_path == "") { + toolchain_data = + exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope") + visual_studio_path = toolchain_data.vs_path + windows_sdk_path = toolchain_data.sdk_path + visual_studio_version = toolchain_data.vs_version + wdk_path = toolchain_data.wdk_dir + visual_studio_runtime_dirs = toolchain_data.runtime_dirs +} else { + assert(visual_studio_version != "", + "You must set the visual_studio_version if you set the path") + assert(wdk_path != "", + "You must set the wdk_path if you set the visual studio path") + visual_studio_runtime_dirs = [] +} diff --git a/build/config/zip.gni b/build/config/zip.gni new file mode 100644 index 00000000000..a81af4d0bc7 --- /dev/null +++ b/build/config/zip.gni @@ -0,0 +1,56 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Creates a zip archive of the inputs. +# +# inputs (required) +# List of input files relative to the current directory. +# +# output (required) +# File name to write. +# +# base_dir (optional) +# If provided, the archive paths will be relative to this directory. +# +# deps, public_deps, data_deps, testonly, visibility (optional) +# Normal meaning. +template("zip") { + action(target_name) { + script = "//build/android/gn/zip.py" + depfile = "$target_gen_dir/$target_name.d" + inputs = invoker.inputs + outputs = [ + depfile, + invoker.output, + ] + + assert(defined(invoker.inputs)) + rebase_inputs = rebase_path(invoker.inputs, root_build_dir) + + assert(defined(invoker.output)) + rebase_output = rebase_path(invoker.output, root_build_dir) + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--inputs=$rebase_inputs", + "--output=$rebase_output", + ] + if (defined(invoker.base_dir)) { + args += [ + "--base-dir", + rebase_path(invoker.base_dir, root_build_dir), + ] + } + + forward_variables_from(invoker, + [ + "testonly", + "deps", + "public_deps", + "data_deps", + "visibility", + ]) + } +} diff --git a/build/copy_test_data_ios.gypi b/build/copy_test_data_ios.gypi new file mode 100644 index 00000000000..56a222f9f84 --- /dev/null +++ b/build/copy_test_data_ios.gypi @@ -0,0 +1,48 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to copy test data files into +# an iOS app bundle. To use this the following variables need to be defined: +# test_data_files: list: paths to test data files or directories +# test_data_prefix: string: a directory prefix that will be prepended to each +# output path. Generally, this should be the base +# directory of the gypi file containing the unittest +# target (e.g. "base" or "chrome"). 
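+# The prefix is prepended to each file's output path, so data from different
+# test suites lands in its own subdirectory of the .app bundle and identically
+# named files do not collide.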
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_unittests',
+#   'conditions': [
+#     ['OS == "ios"', {
+#       'actions': [
+#         {
+#           'action_name': 'copy_test_data',
+#           'variables': {
+#             'test_data_files': [
+#               'path/to/datafile.txt',
+#               'path/to/data/directory/',
+#             ],
+#             'test_data_prefix' : 'prefix',
+#           },
+#           'includes': ['path/to/this/gypi/file'],
+#         },
+#       ],
+#     }],
+#   ],
+# }
+#
+
+{
+  'inputs': [
+    '<!@pymod_do_main(copy_test_data_ios --inputs <(test_data_files))',
+  ],
diff --git a/build/copy_test_data_ios.py b/build/copy_test_data_ios.py
new file mode 100644
--- /dev/null
+++ b/build/copy_test_data_ios.py
+def DoMain(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <file1> <file2>'
+  parser.set_usage(usage)
+  parser.add_option('-o', dest='output_dir')
+  parser.add_option('--inputs', action='store_true', dest='list_inputs')
+  parser.add_option('--outputs', action='store_true', dest='list_outputs')
+  options, arglist = parser.parse_args(argv)
+
+  if len(arglist) == 0:
+    raise WrongNumberOfArgumentsException('<input_files> required.')
+
+  files_to_copy = CalcInputs(arglist)
+  escaped_files = [EscapePath(x) for x in files_to_copy]
+  if options.list_inputs:
+    return '\n'.join(escaped_files)
+
+  if not options.output_dir:
+    raise WrongNumberOfArgumentsException('-o required.')
+
+  if options.list_outputs:
+    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
+    return '\n'.join(outputs)
+
+  CopyFiles(files_to_copy, options.output_dir)
+  return
+
+def main(argv):
+  try:
+    result = DoMain(argv[1:])
+  except WrongNumberOfArgumentsException, e:
+    print >>sys.stderr, e
+    return 1
+  if result:
+    print result
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/cp.py b/build/cp.py
new file mode 100644
index 00000000000..0f32536b624
--- /dev/null
+++ b/build/cp.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import os
+import shutil
+import sys
+
+
+def Main(src, dst):
+  # Use copy instead of copyfile to ensure the executable bit is copied.
+  return shutil.copy(src, os.path.normpath(dst))
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/build/detect_host_arch.py b/build/detect_host_arch.py
new file mode 100644
index 00000000000..ccfbb6e5703
--- /dev/null
+++ b/build/detect_host_arch.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Outputs host CPU architecture in format recognized by gyp."""
+
+import platform
+import re
+import sys
+
+
+def HostArch():
+  """Returns the host architecture with a predictable string."""
+  host_arch = platform.machine()
+
+  # Convert machine type to format recognized by gyp.
+  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+    host_arch = 'ia32'
+  elif host_arch in ['x86_64', 'amd64']:
+    host_arch = 'x64'
+  elif host_arch.startswith('arm'):
+    host_arch = 'arm'
+  elif host_arch.startswith('mips'):
+    host_arch = 'mips'
+
+  # platform.machine is based on the running kernel. It's possible to use a
+  # 64-bit kernel with a 32-bit userland, e.g. to give the linker slightly
+  # more memory. Distinguish between different userland bitness by querying
+  # the python binary.
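+  # For example, a 64-bit kernel with a 32-bit userland reports
+  # platform.machine() == 'x86_64' while platform.architecture()[0] is
+  # '32bit'.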
+ if host_arch == 'x64' and platform.architecture()[0] == '32bit': + host_arch = 'ia32' + + return host_arch + +def DoMain(_): + """Hook to be called from gyp without starting a separate python + interpreter.""" + return HostArch() + +if __name__ == '__main__': + print DoMain([]) diff --git a/build/dir_exists.py b/build/dir_exists.py new file mode 100644 index 00000000000..70d367ec269 --- /dev/null +++ b/build/dir_exists.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Writes True if the argument is a directory.""" + +import os.path +import sys + +def main(): + sys.stdout.write(_is_dir(sys.argv[1])) + return 0 + +def _is_dir(dir_name): + return str(os.path.isdir(dir_name)) + +def DoMain(args): + """Hook to be called from gyp without starting a separate python + interpreter.""" + return _is_dir(args[0]) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/download_gold_plugin.py b/build/download_gold_plugin.py new file mode 100644 index 00000000000..8123167806d --- /dev/null +++ b/build/download_gold_plugin.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Script to download LLVM gold plugin from google storage.""" + +import find_depot_tools +import json +import os +import shutil +import subprocess +import sys +import zipfile + +SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) +CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir)) + + +DEPOT_PATH = find_depot_tools.add_depot_tools_to_path() +GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py') + +LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build', + 'Release+Asserts') +CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts', + 'update.py') +CLANG_REVISION = os.popen(CLANG_UPDATE_PY + ' --print-revision').read().rstrip() + +CLANG_BUCKET = 'gs://chromium-browser-clang/Linux_x64' + +def main(): + targz_name = 'llvmgold-%s.tgz' % CLANG_REVISION + remote_path = '%s/%s' % (CLANG_BUCKET, targz_name) + + os.chdir(LLVM_BUILD_PATH) + + subprocess.check_call(['python', GSUTIL_PATH, + 'cp', remote_path, targz_name]) + subprocess.check_call(['tar', 'xzf', targz_name]) + os.remove(targz_name) + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py new file mode 100644 index 00000000000..cccecce9ef9 --- /dev/null +++ b/build/download_nacl_toolchains.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Shim to run nacl toolchain download script only if there is a nacl dir.""" + +import os +import shutil +import sys + + +def Main(args): + # Exit early if disable_nacl=1. 
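+  # GYP_DEFINES holds space-separated key=value pairs, so a plain substring
+  # check is sufficient here.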
+  if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
+    return 0
+  if 'OS=android' in os.environ.get('GYP_DEFINES', ''):
+    return 0
+  script_dir = os.path.dirname(os.path.abspath(__file__))
+  src_dir = os.path.dirname(script_dir)
+  nacl_dir = os.path.join(src_dir, 'native_client')
+  nacl_build_dir = os.path.join(nacl_dir, 'build')
+  package_version_dir = os.path.join(nacl_build_dir, 'package_version')
+  package_version = os.path.join(package_version_dir, 'package_version.py')
+  if not os.path.exists(package_version):
+    print "Can't find '%s'" % package_version
+    print 'Presumably you are intentionally building without NativeClient.'
+    print 'Skipping NativeClient toolchain download.'
+    sys.exit(0)
+  sys.path.insert(0, package_version_dir)
+  import package_version
+
+  # BUG:
+  # We remove the --optional-pnacl argument and by default exclude the
+  # pnacl_newlib package. However, if the bot name is an sdk bot then we
+  # will go ahead and download it. This prevents increasing the gclient
+  # sync time for developers, or standard Chrome bots.
+  if '--optional-pnacl' in args:
+    args.remove('--optional-pnacl')
+    use_pnacl = False
+    buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
+    if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
+      use_pnacl = True
+    if use_pnacl:
+      print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
+    else:
+      args = ['--exclude', 'pnacl_newlib'] + args
+
+  # Only download the ARM gcc toolchain if we are building for ARM.
+  # TODO(olonho): we need to invent a more reliable way to get build
+  # configuration info, to know if we're building for ARM.
+  if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
+    args = ['--exclude', 'nacl_arm_newlib'] + args
+
+  package_version.main(args)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/env_dump.py b/build/env_dump.py
new file mode 100644
index 00000000000..21edfe633c7
--- /dev/null
+++ b/build/env_dump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can either source a file and dump the environment changes done
+# by it, or simply dump the current environment as JSON into a file.
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--output-json',
+                    help='File to dump the environment as JSON into.')
+  parser.add_option(
+      '-d', '--dump-mode', action='store_true',
+      help='Dump the environment to sys.stdout and exit immediately.')
+
+  parser.disable_interspersed_args()
+  options, args = parser.parse_args()
+  if options.dump_mode:
+    if args or options.output_json:
+      parser.error('Cannot specify args or --output-json with --dump-mode.')
+    json.dump(dict(os.environ), sys.stdout)
+  else:
+    if not options.output_json:
+      parser.error('Requires --output-json option.')
+
+    envsetup_cmd = ' '.join(map(pipes.quote, args))
+    full_cmd = [
+        'bash', '-c',
+        '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
+    ]
+    try:
+      output = subprocess.check_output(full_cmd)
+    except Exception as e:
+      sys.exit('Error running %s and dumping environment.'
% envsetup_cmd) + + env_diff = {} + new_env = json.loads(output) + for k, val in new_env.items(): + if k == '_' or (k in os.environ and os.environ[k] == val): + continue + env_diff[k] = val + with open(options.output_json, 'w') as f: + json.dump(env_diff, f) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/experimental/install-build-deps.py b/build/experimental/install-build-deps.py new file mode 100644 index 00000000000..e5a2e80d8be --- /dev/null +++ b/build/experimental/install-build-deps.py @@ -0,0 +1,436 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import operator +import os +import platform +import re +import subprocess +import sys + + +SUPPORTED_UBUNTU_VERSIONS = ( + {'number': '12.04', 'codename': 'precise'}, + {'number': '14.04', 'codename': 'trusty'}, + {'number': '14.10', 'codename': 'utopic'}, + {'number': '15.04', 'codename': 'vivid'}, + {'number': '15.10', 'codename': 'wily'}, +) + + +# Packages needed for chromeos only. +_packages_chromeos_dev = ( + 'libbluetooth-dev', + 'libxkbcommon-dev', + 'realpath', +) + + +# Packages needed for development. +_packages_dev = ( + 'bison', + 'cdbs', + 'curl', + 'devscripts', + 'dpkg-dev', + 'elfutils', + 'fakeroot', + 'flex', + 'fonts-thai-tlwg', + 'g++', + 'git-core', + 'git-svn', + 'gperf', + 'language-pack-da', + 'language-pack-fr', + 'language-pack-he', + 'language-pack-zh-hant', + 'libapache2-mod-php5', + 'libasound2-dev', + 'libav-tools', + 'libbrlapi-dev', + 'libbz2-dev', + 'libcairo2-dev', + 'libcap-dev', + 'libcups2-dev', + 'libcurl4-gnutls-dev', + 'libdrm-dev', + 'libelf-dev', + 'libgconf2-dev', + 'libglib2.0-dev', + 'libglu1-mesa-dev', + 'libgnome-keyring-dev', + 'libgtk2.0-dev', + 'libkrb5-dev', + 'libnspr4-dev', + 'libnss3-dev', + 'libpam0g-dev', + 'libpci-dev', + 'libpulse-dev', + 'libsctp-dev', + 'libspeechd-dev', + 'libsqlite3-dev', + 'libssl-dev', + 'libudev-dev', + 'libwww-perl', + 'libxslt1-dev', + 'libxss-dev', + 'libxt-dev', + 'libxtst-dev', + 'openbox', + 'patch', + 'perl', + 'php5-cgi', + 'pkg-config', + 'python', + 'python-cherrypy3', + 'python-crypto', + 'python-dev', + 'python-numpy', + 'python-opencv', + 'python-openssl', + 'python-psutil', + 'python-yaml', + 'rpm', + 'ruby', + 'subversion', + 'ttf-dejavu-core', + 'ttf-indic-fonts', + 'ttf-kochi-gothic', + 'ttf-kochi-mincho', + 'wdiff', + 'zip', +) + + +# Run-time libraries required by chromeos only. +_packages_chromeos_lib = ( + 'libbz2-1.0', + 'libpulse0', +) + + +# Full list of required run-time libraries. +_packages_lib = ( + 'libasound2', + 'libatk1.0-0', + 'libc6', + 'libcairo2', + 'libcap2', + 'libcups2', + 'libexpat1', + 'libfontconfig1', + 'libfreetype6', + 'libglib2.0-0', + 'libgnome-keyring0', + 'libgtk2.0-0', + 'libpam0g', + 'libpango1.0-0', + 'libpci3', + 'libpcre3', + 'libpixman-1-0', + 'libpng12-0', + 'libspeechd2', + 'libsqlite3-0', + 'libstdc++6', + 'libx11-6', + 'libx11-xcb1', + 'libxau6', + 'libxcb1', + 'libxcomposite1', + 'libxcursor1', + 'libxdamage1', + 'libxdmcp6', + 'libxext6', + 'libxfixes3', + 'libxi6', + 'libxinerama1', + 'libxrandr2', + 'libxrender1', + 'libxtst6', + 'zlib1g', +) + + +# Debugging symbols for all of the run-time libraries. 
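+# (Each entry is the -dbg variant of a run-time package listed above.)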
+_packages_dbg = ( + 'libatk1.0-dbg', + 'libc6-dbg', + 'libcairo2-dbg', + 'libfontconfig1-dbg', + 'libglib2.0-0-dbg', + 'libgtk2.0-0-dbg', + 'libpango1.0-0-dbg', + 'libpcre3-dbg', + 'libpixman-1-0-dbg', + 'libsqlite3-0-dbg', + 'libx11-6-dbg', + 'libx11-xcb1-dbg', + 'libxau6-dbg', + 'libxcb1-dbg', + 'libxcomposite1-dbg', + 'libxcursor1-dbg', + 'libxdamage1-dbg', + 'libxdmcp6-dbg', + 'libxext6-dbg', + 'libxfixes3-dbg', + 'libxi6-dbg', + 'libxinerama1-dbg', + 'libxrandr2-dbg', + 'libxrender1-dbg', + 'libxtst6-dbg', + 'zlib1g-dbg', +) + + +# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf. +_packages_lib32 = ( + 'linux-libc-dev:i386', +) + + +# arm cross toolchain packages needed to build chrome on armhf. +_packages_arm = ( + 'g++-arm-linux-gnueabihf', + 'libc6-dev-armhf-cross', + 'linux-libc-dev-armhf-cross', +) + + +# Packages to build NaCl, its toolchains, and its ports. +_packages_naclports = ( + 'ant', + 'autoconf', + 'bison', + 'cmake', + 'gawk', + 'intltool', + 'xsltproc', + 'xutils-dev', +) +_packages_nacl = ( + 'g++-mingw-w64-i686', + 'lib32ncurses5-dev', + 'lib32z1-dev', + 'libasound2:i386', + 'libcap2:i386', + 'libelf-dev:i386', + 'libfontconfig1:i386', + 'libgconf-2-4:i386', + 'libglib2.0-0:i386', + 'libgpm2:i386', + 'libgtk2.0-0:i386', + 'libncurses5:i386', + 'libnss3:i386', + 'libpango1.0-0:i386', + 'libssl1.0.0:i386', + 'libtinfo-dev', + 'libtinfo-dev:i386', + 'libtool', + 'libxcomposite1:i386', + 'libxcursor1:i386', + 'libxdamage1:i386', + 'libxi6:i386', + 'libxrandr2:i386', + 'libxss1:i386', + 'libxtst6:i386', + 'texinfo', + 'xvfb', +) + + +def is_userland_64_bit(): + return platform.architecture()[0] == '64bit' + + +def package_exists(pkg): + return pkg in subprocess.check_output(['apt-cache', 'pkgnames']).splitlines() + + +def lsb_release_short_codename(): + return subprocess.check_output( + ['lsb_release', '--codename', '--short']).strip() + + +def write_error(message): + sys.stderr.write('ERROR: %s\n' % message) + sys.stderr.flush() + + +def nonfatal_get_output(*popenargs, **kwargs): + process = subprocess.Popen( + stdout=subprocess.PIPE, stderr=subprocess.PIPE, *popenargs, **kwargs) + stdout, stderr = process.communicate() + retcode = process.poll() + return retcode, stdout, stderr + + +def compute_dynamic_package_lists(): + global _packages_arm + global _packages_dbg + global _packages_dev + global _packages_lib + global _packages_lib32 + global _packages_nacl + + if is_userland_64_bit(): + # 64-bit systems need a minimum set of 32-bit compat packages + # for the pre-built NaCl binaries. + _packages_dev += ( + 'lib32gcc1', + 'lib32stdc++6', + 'libc6-i386', + ) + + # When cross building for arm/Android on 64-bit systems the host binaries + # that are part of v8 need to be compiled with -m32 which means + # that basic multilib support is needed. + # gcc-multilib conflicts with the arm cross compiler (at least in trusty) + # but g++-X.Y-multilib gives us the 32-bit support that we need. Find out + # the appropriate value of X and Y by seeing what version the current + # distribution's g++-multilib package depends on. + output = subprocess.check_output(['apt-cache', 'depends', 'g++-multilib']) + multilib_package = re.search(r'g\+\+-[0-9.]+-multilib', output).group() + _packages_lib32 += (multilib_package,) + + lsb_codename = lsb_release_short_codename() + + # Find the proper version of libstdc++6-4.x-dbg. 
+  if lsb_codename == 'precise':
+    _packages_dbg += ('libstdc++6-4.6-dbg',)
+  elif lsb_codename == 'trusty':
+    _packages_dbg += ('libstdc++6-4.8-dbg',)
+  else:
+    _packages_dbg += ('libstdc++6-4.9-dbg',)
+
+  # Workaround for a dependency issue on Ubuntu/Trusty: http://crbug.com/435056
+  if lsb_codename == 'trusty':
+    _packages_arm += (
+        'g++-4.8-multilib-arm-linux-gnueabihf',
+        'gcc-4.8-multilib-arm-linux-gnueabihf',
+    )
+
+  # Find the proper version of libgbm-dev. We can't just install libgbm-dev as
+  # it depends on mesa, and only one version of mesa can exist on the system.
+  # Hence we must match the same version or this entire script will fail.
+  mesa_variant = ''
+  for variant in ('-lts-trusty', '-lts-utopic'):
+    rc, stdout, stderr = nonfatal_get_output(
+        ['dpkg-query', '-Wf\'{Status}\'', 'libgl1-mesa-glx' + variant])
+    if 'ok installed' in stdout:
+      mesa_variant = variant
+  _packages_dev += (
+      'libgbm-dev' + mesa_variant,
+      'libgl1-mesa-dev' + mesa_variant,
+      'libgles2-mesa-dev' + mesa_variant,
+      'mesa-common-dev' + mesa_variant,
+  )
+
+  if package_exists('ttf-mscorefonts-installer'):
+    _packages_dev += ('ttf-mscorefonts-installer',)
+  else:
+    _packages_dev += ('msttcorefonts',)
+
+  if package_exists('libnspr4-dbg'):
+    _packages_dbg += ('libnspr4-dbg', 'libnss3-dbg')
+    _packages_lib += ('libnspr4', 'libnss3')
+  else:
+    _packages_dbg += ('libnspr4-0d-dbg', 'libnss3-1d-dbg')
+    _packages_lib += ('libnspr4-0d', 'libnss3-1d')
+
+  if package_exists('libjpeg-dev'):
+    _packages_dev += ('libjpeg-dev',)
+  else:
+    _packages_dev += ('libjpeg62-dev',)
+
+  if package_exists('libudev1'):
+    _packages_dev += ('libudev1',)
+    _packages_nacl += ('libudev1:i386',)
+  else:
+    _packages_dev += ('libudev0',)
+    _packages_nacl += ('libudev0:i386',)
+
+  if package_exists('libbrlapi0.6'):
+    _packages_dev += ('libbrlapi0.6',)
+  else:
+    _packages_dev += ('libbrlapi0.5',)
+
+  if package_exists('apache2-bin'):
+    _packages_dev += ('apache2-bin',)
+  else:
+    _packages_dev += ('apache2.2-bin',)
+
+  if package_exists('xfonts-mathml'):
+    _packages_dev += ('xfonts-mathml',)
+
+  # Some packages are only needed if the distribution actually supports
+  # installing them.
+  if package_exists('appmenu-gtk'):
+    _packages_lib += ('appmenu-gtk',)
+
+  _packages_dev += _packages_chromeos_dev
+  _packages_lib += _packages_chromeos_lib
+  _packages_nacl += _packages_naclports
+
+
+def quick_check(packages):
+  rc, stdout, stderr = nonfatal_get_output([
+      'dpkg-query', '-W', '-f', '${PackageSpec}:${Status}\n'] + list(packages))
+  if rc == 0 and not stderr:
+    return 0
+  print stderr
+  return 1
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--quick-check', action='store_true',
+                      help='quickly try to determine if dependencies are '
+                           'installed (this avoids interactive prompts and '
+                           'sudo commands so might not be 100% accurate)')
+  parser.add_argument('--unsupported', action='store_true',
+                      help='attempt installation even on unsupported systems')
+  args = parser.parse_args(argv)
+
+  lsb_codename = lsb_release_short_codename()
+  if not args.unsupported and not args.quick_check:
+    if lsb_codename not in map(
+        operator.itemgetter('codename'), SUPPORTED_UBUNTU_VERSIONS):
+      supported_ubuntus = ['%(number)s (%(codename)s)' % v
+                           for v in SUPPORTED_UBUNTU_VERSIONS]
+      write_error('Only Ubuntu %s are currently supported.'
% + ', '.join(supported_ubuntus)) + return 1 + + if platform.machine() not in ('i686', 'x86_64'): + write_error('Only x86 architectures are currently supported.') + return 1 + + if os.geteuid() != 0 and not args.quick_check: + print 'Running as non-root user.' + print 'You might have to enter your password one or more times' + print 'for \'sudo\'.' + print + + compute_dynamic_package_lists() + + packages = (_packages_dev + _packages_lib + _packages_dbg + _packages_lib32 + + _packages_arm + _packages_nacl) + def packages_key(pkg): + s = pkg.rsplit(':', 1) + if len(s) == 1: + return (s, '') + return s + packages = sorted(set(packages), key=packages_key) + + if args.quick_check: + return quick_check(packages) + + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py new file mode 100644 index 00000000000..080370ca9ad --- /dev/null +++ b/build/extract_from_cab.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Extracts a single file from a CAB archive.""" + +import os +import shutil +import subprocess +import sys +import tempfile + +def run_quiet(*args): + """Run 'expand' suppressing noisy output. Returns returncode from process.""" + popen = subprocess.Popen(args, stdout=subprocess.PIPE) + out, _ = popen.communicate() + if popen.returncode: + # expand emits errors to stdout, so if we fail, then print that out. + print out + return popen.returncode + +def main(): + if len(sys.argv) != 4: + print 'Usage: extract_from_cab.py cab_path archived_file output_dir' + return 1 + + [cab_path, archived_file, output_dir] = sys.argv[1:] + + # Expand.exe does its work in a fixed-named temporary directory created within + # the given output directory. This is a problem for concurrent extractions, so + # create a unique temp dir within the desired output directory to work around + # this limitation. + temp_dir = tempfile.mkdtemp(dir=output_dir) + + try: + # Invoke the Windows expand utility to extract the file. + level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir) + if level == 0: + # Move the output file into place, preserving expand.exe's behavior of + # paving over any preexisting file. + output_file = os.path.join(output_dir, archived_file) + try: + os.remove(output_file) + except OSError: + pass + os.rename(os.path.join(temp_dir, archived_file), output_file) + finally: + shutil.rmtree(temp_dir, True) + + if level != 0: + return level + + # The expand utility preserves the modification date and time of the archived + # file. Touch the extracted file. This helps build systems that compare the + # modification times of input and output files to determine whether to do an + # action. + os.utime(os.path.join(output_dir, archived_file), None) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/filename_rules.gypi b/build/filename_rules.gypi new file mode 100644 index 00000000000..5cff4c359f7 --- /dev/null +++ b/build/filename_rules.gypi @@ -0,0 +1,102 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This gypi file defines the patterns used for determining whether a +# file is excluded from the build on a given platform. It is +# included by common.gypi for chromium_code. 
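+#
+# For example, on a Linux build the rules below exclude foo_win.cc, win/foo.cc
+# and foo_mac.mm from the build, while foo_linux.cc and linux/foo.cc stay in.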
+ +{ + 'target_conditions': [ + ['OS!="win" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_win(_browsertest|_unittest|_test)?\\.(h|cc)$'], + ['exclude', '(^|/)win/'], + ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ], + }], + ['OS!="mac" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_(cocoa|mac|mach)(_unittest|_test)?\\.(h|cc|c|mm?)$'], + ['exclude', '(^|/)(cocoa|mac|mach)/'] ], + }], + ['OS!="ios" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_ios(_unittest|_test)?\\.(h|cc|mm?)$'], + ['exclude', '(^|/)ios/'] ], + }], + ['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '\\.mm?$' ] ], + }], + # Do not exclude the linux files on *BSD since most of them can be + # shared at this point. + # In case a file is not needed, it is going to be excluded later on. + # TODO(evan): the above is not correct; we shouldn't build _linux + # files on non-linux. + ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', { + 'sources/': [ + ['exclude', '_linux(_unittest|_test)?\\.(h|cc)$'], + ['exclude', '(^|/)linux/'], + ], + }], + ['OS!="android" or _toolset=="host" or >(nacl_untrusted_build)==1', { + 'sources/': [ + ['exclude', '_android(_unittest|_test)?\\.(h|cc)$'], + ['exclude', '(^|/)android/'], + ], + }], + ['OS=="win" and >(nacl_untrusted_build)==0', { + 'sources/': [ + ['exclude', '_posix(_unittest|_test)?\\.(h|cc)$'], + ['exclude', '(^|/)posix/'], + ], + }], + ['<(chromeos)!=1 or >(nacl_untrusted_build)==1', { + 'sources/': [ + ['exclude', '_chromeos(_unittest|_test)?\\.(h|cc)$'], + ['exclude', '(^|/)chromeos/'], + ], + }], + ['>(nacl_untrusted_build)==0', { + 'sources/': [ + ['exclude', '_nacl(_unittest)?\\.(h|cc)$'], + ], + }], + ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', { + 'sources/': [ + ['exclude', '_xdg(_unittest)?\\.(h|cc)$'], + ], + }], + ['<(use_x11)!=1 or >(nacl_untrusted_build)==1', { + 'sources/': [ + ['exclude', '_(x|x11)(_interactive_uitest|_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'], + ['exclude', '(^|/)x11/'], + ['exclude', '(^|/)x/'], + ], + }], + ['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_views(_browsertest|_unittest)?\\.(h|cc)$'] ] + }], + ['<(use_aura)==0 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_aura(_browsertest|_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)aura/'], + ['exclude', '_ash(_browsertest|_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)ash/'], + ] + }], + ['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_aurax11(_browsertest|_unittest)?\\.(h|cc)$'] ] + }], + ['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'], + ['exclude', '_ashwin\\.(h|cc)$'] + ] + }], + ['<(use_aura)==0 or OS!="linux" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_auralinux\\.(h|cc)$'] ] + }], + ['<(use_ozone)==0 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_ozone(_browsertest|_unittest)?\\.(h|cc)$'] ] + }], + ['<(use_pango)==0', { + 'sources/': [ ['exclude', '(^|_)pango(_util|_browsertest|_unittest)?\\.(h|cc)$'], ], + }], + ] +} diff --git a/build/find_depot_tools.py b/build/find_depot_tools.py new file mode 100644 index 00000000000..1c34fea6f6c --- /dev/null +++ b/build/find_depot_tools.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +# Copyright (c) 2011 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Small utility function to find depot_tools and add it to the python path. + +Will throw an ImportError exception if depot_tools can't be found since it +imports breakpad. + +This can also be used as a standalone script to print out the depot_tools +directory location. +""" + +import os +import sys + + +def IsRealDepotTools(path): + return os.path.isfile(os.path.join(path, 'gclient.py')) + + +def add_depot_tools_to_path(): + """Search for depot_tools and add it to sys.path.""" + # First look if depot_tools is already in PYTHONPATH. + for i in sys.path: + if i.rstrip(os.sep).endswith('depot_tools') and IsRealDepotTools(i): + return i + # Then look if depot_tools is in PATH, common case. + for i in os.environ['PATH'].split(os.pathsep): + if IsRealDepotTools(i): + sys.path.append(i.rstrip(os.sep)) + return i + # Rare case, it's not even in PATH, look upward up to root. + root_dir = os.path.dirname(os.path.abspath(__file__)) + previous_dir = os.path.abspath(__file__) + while root_dir and root_dir != previous_dir: + i = os.path.join(root_dir, 'depot_tools') + if IsRealDepotTools(i): + sys.path.append(i) + return i + previous_dir = root_dir + root_dir = os.path.dirname(root_dir) + print >> sys.stderr, 'Failed to find depot_tools' + return None + +DEPOT_TOOLS_PATH = add_depot_tools_to_path() + +# pylint: disable=W0611 +import breakpad + + +def main(): + if DEPOT_TOOLS_PATH is None: + return 1 + print DEPOT_TOOLS_PATH + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/find_isolated_tests.py b/build/find_isolated_tests.py new file mode 100644 index 00000000000..c5b3ab77a90 --- /dev/null +++ b/build/find_isolated_tests.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Scans build output directory for .isolated files, calculates their SHA1 +hashes, stores final list in JSON document and then removes *.isolated files +found (to ensure no stale *.isolated stay around on the next build). + +Used to figure out what tests were build in isolated mode to trigger these +tests to run on swarming. + +For more info see: +https://sites.google.com/a/chromium.org/dev/developers/testing/isolated-testing +""" + +import glob +import hashlib +import json +import optparse +import os +import re +import sys + + +def hash_file(filepath): + """Calculates the hash of a file without reading it all in memory at once.""" + digest = hashlib.sha1() + with open(filepath, 'rb') as f: + while True: + chunk = f.read(1024*1024) + if not chunk: + break + digest.update(chunk) + return digest.hexdigest() + + +def main(): + parser = optparse.OptionParser( + usage='%prog --build-dir --output-json ', + description=sys.modules[__name__].__doc__) + parser.add_option( + '--build-dir', + help='Path to a directory to search for *.isolated files.') + parser.add_option( + '--output-json', + help='File to dump JSON results into.') + + options, _ = parser.parse_args() + if not options.build_dir: + parser.error('--build-dir option is required') + if not options.output_json: + parser.error('--output-json option is required') + + result = {} + + # Get the file hash values and output the pair. 
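+  # Each .isolated file is hashed and then deleted so a stale file cannot be
+  # picked up by a subsequent build.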
+  pattern = os.path.join(options.build_dir, '*.isolated')
+  for filepath in sorted(glob.glob(pattern)):
+    test_name = os.path.splitext(os.path.basename(filepath))[0]
+    if re.match(r'^.+?\.\d$', test_name):
+      # It's a split .isolated file, e.g. foo.0.isolated. Ignore these.
+      continue
+
+    # TODO(csharp): Remove deletion once the isolate tracked dependencies are
+    # inputs for the isolated files.
+    sha1_hash = hash_file(filepath)
+    os.remove(filepath)
+    result[test_name] = sha1_hash
+
+  with open(options.output_json, 'wb') as f:
+    json.dump(result, f)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/gdb-add-index b/build/gdb-add-index
new file mode 100644
index 00000000000..73367c83503
--- /dev/null
+++ b/build/gdb-add-index
@@ -0,0 +1,184 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Saves the gdb index for a given binary and its shared library dependencies.
+#
+# This will run gdb indexing in parallel on a number of binaries using SIGUSR1
+# as the communication mechanism to simulate a semaphore. Because of the
+# nature of this technique, using "set -e" is very difficult. The SIGUSR1
+# terminates a "wait" with an error which we need to interpret.
+#
+# When modifying this code, most of the real logic is in the index_one_file
+# function. The rest is cleanup + semaphore plumbing.
+
+function usage_exit {
+  echo "Usage: $0 [-f] [-r] [-n] <paths-to-binaries>..."
+  echo "  -f forces replacement of an existing index."
+  echo "  -r removes the index section."
+  echo "  -n don't extract the dependencies of each binary with ldd."
+  echo "     e.g., $0 -n out/Debug/lib.unstripped/lib*"
+  echo
+  echo "  Set TOOLCHAIN_PREFIX to use a non-default set of binutils."
+  exit 1
+}
+
+# Clean up the temp directory and ensure all child jobs are dead-dead.
+function on_exit {
+  trap "" EXIT USR1  # Avoid reentrancy.
+
+  local jobs=$(jobs -p)
+  if [ -n "$jobs" ]; then
+    echo -n "Killing outstanding index jobs..."
+    kill -KILL $(jobs -p)
+    wait
+    echo "done"
+  fi
+
+  if [ -d "$directory" ]; then
+    echo -n "Removing temp directory $directory..."
+    rm -rf "$directory"
+    echo done
+  fi
+}
+
+# Add index to one binary.
+function index_one_file {
+  local file=$1
+  local basename=$(basename "$file")
+  local should_index_this_file="${should_index}"
+
+  local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file")
+  if [[ $readelf_out =~ "gdb_index" ]]; then
+    if $remove_index; then
+      ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file"
+      echo "Removed index from $basename."
+    else
+      echo "Skipped $basename -- already contains index."
+      should_index_this_file=false
+    fi
+  fi
+
+  if $should_index_this_file; then
+    local start=$(date +"%s%N")
+    echo "Adding index to $basename..."
+
+    ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $directory" \
+      -ex "quit"
+    local index_file="$directory/$basename.gdb-index"
+    if [ -f "$index_file" ]; then
+      ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \
+        --set-section-flags .gdb_index=readonly "$file" "$file"
+      local finish=$(date +"%s%N")
+      local elapsed=$(((finish - start) / 1000000))
+      echo "   ...$basename indexed. [${elapsed}ms]"
+    else
+      echo "   ...$basename unindexable."
+    fi
+  fi
+}
+
+# Functions that, when combined, concurrently index all files in the
+# files_to_index array. The global files_to_index is declared in the main body
+# of the script.
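+#
+# A sketch of the pattern (illustrative, mirrors the code below): the parent
+# traps USR1 to launch the next job, and every background job signals the
+# parent when it finishes:
+#
+#   trap index_next USR1                            # parent
+#   { index_one_file "$f"; kill -SIGUSR1 $$; } &    # each child job
+#
+# so at most $INDEX_TASKS indexing jobs are in flight at once, like a
+# counting semaphore.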
+function async_index {
+  # Start a background subshell to run the index command.
+  {
+    index_one_file $1
+    kill -SIGUSR1 $$  # $$ resolves to the parent script.
+    exit 129  # See comment above wait loop at bottom.
+  } &
+}
+
+cur_file_num=0
+function index_next {
+  if ((cur_file_num >= ${#files_to_index[@]})); then
+    return
+  fi
+
+  async_index "${files_to_index[cur_file_num]}"
+  ((cur_file_num += 1)) || true
+}
+
+########
+### Main body of the script.
+
+remove_index=false
+should_index=true
+should_index_deps=true
+files_to_index=()
+while (($# > 0)); do
+  case "$1" in
+    -h)
+      usage_exit
+      ;;
+    -f)
+      remove_index=true
+      ;;
+    -r)
+      remove_index=true
+      should_index=false
+      ;;
+    -n)
+      should_index_deps=false
+      ;;
+    -*)
+      echo "Invalid option: $1" >&2
+      usage_exit
+      ;;
+    *)
+      if [[ ! -f "$1" ]]; then
+        echo "Path $1 does not exist."
+        exit 1
+      fi
+      files_to_index+=("$1")
+      ;;
+  esac
+  shift
+done
+
+if ((${#files_to_index[@]} == 0)); then
+  usage_exit
+fi
+
+dependencies=()
+if $should_index_deps; then
+  for file in "${files_to_index[@]}"; do
+    # Append the shared library dependencies of this file that
+    # have the same dirname. The dirname is a signal that these
+    # shared libraries were part of the same build as the binary.
+    dependencies+=( \
+      $(ldd "$file" 2>/dev/null \
+        | grep $(dirname "$file") \
+        | sed "s/.*[ \t]\(.*\) (.*/\1/") \
+    )
+  done
+fi
+files_to_index+=("${dependencies[@]}")
+
+# Ensure we clean up on exit.
+trap on_exit EXIT INT
+
+# We're good to go! Create temp directory for index files.
+directory=$(mktemp -d)
+echo "Made temp directory $directory."
+
+# Start concurrent indexing.
+trap index_next USR1
+
+# 4 is an arbitrary default. When changing, remember we are likely IO bound
+# so basing this off the number of cores is not sensible.
+index_tasks=${INDEX_TASKS:-4}
+for ((i = 0; i < index_tasks; i++)); do
+  index_next
+done
+
+# Do a wait loop. Bash waits that terminate due to a trap have an exit
+# code > 128. We also ensure that our subshell's "normal" exit occurs with
+# an exit code > 128. This allows us to consider a > 128 exit code as
+# an indication that the loop should continue. Unfortunately, it also means
+# we cannot use set -e since technically the "wait" is failing.
+wait
+while (($? > 128)); do
+  wait
+done
diff --git a/build/get_landmines.py b/build/get_landmines.py
new file mode 100644
index 00000000000..268cfb937fe
--- /dev/null
+++ b/build/get_landmines.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be
+clobbered (or a list of 'landmines').
+"""
+
+import sys
+
+import landmine_utils
+
+
+distributor = landmine_utils.distributor
+gyp_defines = landmine_utils.gyp_defines
+gyp_msvs_version = landmine_utils.gyp_msvs_version
+platform = landmine_utils.platform
+
+
+def print_landmines():
+  """
+  ALL LANDMINES ARE EMITTED FROM HERE.
+  """
+  # DO NOT add landmines as part of a regular CL. Landmines are a last-effort
+  # bandaid fix if a CL that got landed has a build dependency bug and all bots
+  # need to be cleaned up. If you're writing a new CL that causes build
+  # dependency problems, fix the dependency problems instead of adding a
+  # landmine.
+
+  if distributor() == 'goma' and platform() == 'win32':
+    print 'Need to clobber winja goma due to backend cwd cache fix.'
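+  # (Context, not original to this file: each print below emits one clobber
+  # reason; the landmines machinery that consumes this output, which is not
+  # part of this diff, presumably triggers a clobber when the emitted list
+  # changes.)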
+ if platform() == 'android': + print 'Clobber: to handle new way of suppressing findbugs failures.' + print 'Clobber to fix gyp not rename package name (crbug.com/457038)' + if platform() == 'win': + print 'Compile on cc_unittests fails due to symbols removed in r185063.' + if platform() == 'linux': + print 'Builders switching from make to ninja will clobber on this.' + if platform() == 'mac': + print 'Switching from bundle to unbundled dylib (issue 14743002).' + if platform() in ('win', 'mac'): + print ('Improper dependency for create_nmf.py broke in r240802, ' + 'fixed in r240860.') + if (platform() == 'win' and gyp_msvs_version().startswith('2015')): + print 'Switch to VS2015 Update 2' + print 'Need to clobber everything due to an IDL change in r154579 (blink)' + print 'Need to clobber everything due to gen file moves in r175513 (Blink)' + if (platform() != 'ios'): + print 'Clobber to get rid of obselete test plugin after r248358' + print 'Clobber to rebuild GN files for V8' + print 'Clobber to get rid of stale generated mojom.h files' + print 'Need to clobber everything due to build_nexe change in nacl r13424' + print '[chromium-dev] PSA: clobber build needed for IDR_INSPECTOR_* compil...' + print 'blink_resources.grd changed: crbug.com/400860' + print 'ninja dependency cycle: crbug.com/408192' + print 'Clobber to fix missing NaCl gyp dependencies (crbug.com/427427).' + print 'Another clobber for missing NaCl gyp deps (crbug.com/427427).' + print 'Clobber to fix GN not picking up increased ID range (crbug.com/444902)' + print 'Remove NaCl toolchains from the output dir (crbug.com/456902)' + if platform() == 'ios': + print 'Clobber iOS to workaround Xcode deps bug (crbug.com/485435)' + if platform() == 'win': + print 'Clobber to delete stale generated files (crbug.com/510086)' + if platform() == 'android' and gyp_defines().get('target_arch') == 'arm64': + print 'Clobber to support new location/infra for chrome_sync_shell_apk' + if platform() == 'mac': + print 'Clobber to get rid of evil libsqlite3.dylib (crbug.com/526208)' + if platform() == 'mac': + print 'Clobber to remove libsystem.dylib. See crbug.com/620075' + + +def main(): + print_landmines() + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/get_syzygy_binaries.py b/build/get_syzygy_binaries.py new file mode 100644 index 00000000000..1cab3fcf48d --- /dev/null +++ b/build/get_syzygy_binaries.py @@ -0,0 +1,487 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""A utility script for downloading versioned Syzygy binaries.""" + +import hashlib +import errno +import json +import logging +import optparse +import os +import re +import shutil +import stat +import sys +import subprocess +import tempfile +import time +import zipfile + + +_LOGGER = logging.getLogger(os.path.basename(__file__)) + +# The relative path where official builds are archived in their GS bucket. +_SYZYGY_ARCHIVE_PATH = ('/builds/official/%(revision)s') + +# A JSON file containing the state of the download directory. If this file and +# directory state do not agree, then the binaries will be downloaded and +# installed again. +_STATE = '.state' + +# This matches an integer (an SVN revision number) or a SHA1 value (a GIT hash). +# The archive exclusively uses lowercase GIT hashes. +_REVISION_RE = re.compile('^(?:\d+|[a-f0-9]{40})$') + +# This matches an MD5 hash. 
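+# (For example, 'd41d8cd98f00b204e9800998ecf8427e', the MD5 digest of the
+# empty string, is the kind of value this matches; example only.)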
+_MD5_RE = re.compile('^[a-f0-9]{32}$')
+
+# List of resources to be downloaded and installed. These are tuples with the
+# following format:
+# (basename, logging name, relative installation path, extraction filter)
+_RESOURCES = [
+  ('benchmark.zip', 'benchmark', '', None),
+  ('binaries.zip', 'binaries', 'exe', None),
+  ('symbols.zip', 'symbols', 'exe',
+      lambda x: x.filename.endswith('.dll.pdb'))]
+
+
+def _LoadState(output_dir):
+  """Loads the contents of the state file for a given |output_dir|, returning
+  None if it doesn't exist.
+  """
+  path = os.path.join(output_dir, _STATE)
+  if not os.path.exists(path):
+    _LOGGER.debug('No state file found.')
+    return None
+  with open(path, 'rb') as f:
+    _LOGGER.debug('Reading state file: %s', path)
+    try:
+      return json.load(f)
+    except ValueError:
+      _LOGGER.debug('Invalid state file.')
+      return None
+
+
+def _SaveState(output_dir, state, dry_run=False):
+  """Saves the |state| dictionary to the given |output_dir| as a JSON file."""
+  path = os.path.join(output_dir, _STATE)
+  _LOGGER.debug('Writing state file: %s', path)
+  if dry_run:
+    return
+  with open(path, 'wb') as f:
+    f.write(json.dumps(state, sort_keys=True, indent=2))
+
+
+def _Md5(path):
+  """Returns the MD5 hash of the file at |path|, which must exist."""
+  return hashlib.md5(open(path, 'rb').read()).hexdigest()
+
+
+def _StateIsValid(state):
+  """Returns true if the given state structure is valid."""
+  if not isinstance(state, dict):
+    _LOGGER.debug('State must be a dict.')
+    return False
+  r = state.get('revision', None)
+  if not isinstance(r, basestring) or not _REVISION_RE.match(r):
+    _LOGGER.debug('State contains an invalid revision.')
+    return False
+  c = state.get('contents', None)
+  if not isinstance(c, dict):
+    _LOGGER.debug('State must contain a contents dict.')
+    return False
+  for (relpath, md5) in c.iteritems():
+    if not isinstance(relpath, basestring) or len(relpath) == 0:
+      _LOGGER.debug('State contents dict contains an invalid path.')
+      return False
+    if not isinstance(md5, basestring) or not _MD5_RE.match(md5):
+      _LOGGER.debug('State contents dict contains an invalid MD5 digest.')
+      return False
+  return True
+
+
+def _BuildActualState(stored, revision, output_dir):
+  """Builds the actual state using the provided |stored| state as a template.
+  Only examines files listed in the stored state, causing the script to ignore
+  files that have been added to the directories locally. |stored| must be a
+  valid state dictionary.
+  """
+  contents = {}
+  state = { 'revision': revision, 'contents': contents }
+  for relpath, md5 in stored['contents'].iteritems():
+    abspath = os.path.abspath(os.path.join(output_dir, relpath))
+    if os.path.isfile(abspath):
+      m = _Md5(abspath)
+      contents[relpath] = m
+
+  return state
+
+
+def _StatesAreConsistent(stored, actual):
+  """Validates whether two state dictionaries are consistent. Both must be
+  valid state dictionaries. Additional entries in |actual| are ignored.
+  """
+  if stored['revision'] != actual['revision']:
+    _LOGGER.debug('Mismatched revision number.')
+    return False
+  cont_stored = stored['contents']
+  cont_actual = actual['contents']
+  for relpath, md5 in cont_stored.iteritems():
+    if relpath not in cont_actual:
+      _LOGGER.debug('Missing content: %s', relpath)
+      return False
+    if md5 != cont_actual[relpath]:
+      _LOGGER.debug('Modified content: %s', relpath)
+      return False
+  return True
+
+
+def _GetCurrentState(revision, output_dir):
+  """Loads the current state and checks to see if it is consistent. Returns
+  a tuple (state, bool).
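+  The bool is True when the on-disk contents are consistent with the stored
+  state (see _StatesAreConsistent).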
The returned state will always be valid, even if an + invalid state is present on disk. + """ + stored = _LoadState(output_dir) + if not _StateIsValid(stored): + _LOGGER.debug('State is invalid.') + # Return a valid but empty state. + return ({'revision': '0', 'contents': {}}, False) + actual = _BuildActualState(stored, revision, output_dir) + # If the script has been modified consider the state invalid. + path = os.path.join(output_dir, _STATE) + if os.path.getmtime(__file__) > os.path.getmtime(path): + return (stored, False) + # Otherwise, explicitly validate the state. + if not _StatesAreConsistent(stored, actual): + return (stored, False) + return (stored, True) + + +def _DirIsEmpty(path): + """Returns true if the given directory is empty, false otherwise.""" + for root, dirs, files in os.walk(path): + return not dirs and not files + + +def _RmTreeHandleReadOnly(func, path, exc): + """An error handling function for use with shutil.rmtree. This will + detect failures to remove read-only files, and will change their properties + prior to removing them. This is necessary on Windows as os.remove will return + an access error for read-only files, and git repos contain read-only + pack/index files. + """ + excvalue = exc[1] + if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: + _LOGGER.debug('Removing read-only path: %s', path) + os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) + func(path) + else: + raise + + +def _RmTree(path): + """A wrapper of shutil.rmtree that handles read-only files.""" + shutil.rmtree(path, ignore_errors=False, onerror=_RmTreeHandleReadOnly) + + +def _CleanState(output_dir, state, dry_run=False): + """Cleans up files/directories in |output_dir| that are referenced by + the given |state|. Raises an error if there are local changes. Returns a + dictionary of files that were deleted. + """ + _LOGGER.debug('Deleting files from previous installation.') + deleted = {} + + # Generate a list of files to delete, relative to |output_dir|. + contents = state['contents'] + files = sorted(contents.keys()) + + # Try to delete the files. Keep track of directories to delete as well. + dirs = {} + for relpath in files: + fullpath = os.path.join(output_dir, relpath) + fulldir = os.path.dirname(fullpath) + dirs[fulldir] = True + if os.path.exists(fullpath): + # If somehow the file has become a directory complain about it. + if os.path.isdir(fullpath): + raise Exception('Directory exists where file expected: %s' % fullpath) + + # Double check that the file doesn't have local changes. If it does + # then refuse to delete it. + if relpath in contents: + stored_md5 = contents[relpath] + actual_md5 = _Md5(fullpath) + if actual_md5 != stored_md5: + raise Exception('File has local changes: %s' % fullpath) + + # The file is unchanged so it can safely be deleted. + _LOGGER.debug('Deleting file "%s".', fullpath) + deleted[relpath] = True + if not dry_run: + os.unlink(fullpath) + + # Sort directories from longest name to shortest. This lets us remove empty + # directories from the most nested paths first. 
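+  # (For instance, 'exe/foo/bar' sorts before 'exe/foo', which sorts before
+  # 'exe', so children are always removed before their parents.)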
+ dirs = sorted(dirs.keys(), key=lambda x: len(x), reverse=True) + for p in dirs: + if os.path.exists(p) and _DirIsEmpty(p): + _LOGGER.debug('Deleting empty directory "%s".', p) + if not dry_run: + _RmTree(p) + + return deleted + + +def _FindGsUtil(): + """Looks for depot_tools and returns the absolute path to gsutil.py.""" + for path in os.environ['PATH'].split(os.pathsep): + path = os.path.abspath(path) + git_cl = os.path.join(path, 'git_cl.py') + gs_util = os.path.join(path, 'gsutil.py') + if os.path.exists(git_cl) and os.path.exists(gs_util): + return gs_util + return None + + +def _GsUtil(*cmd): + """Runs the given command in gsutil with exponential backoff and retries.""" + gs_util = _FindGsUtil() + cmd = [sys.executable, gs_util] + list(cmd) + + retries = 3 + timeout = 4 # Seconds. + while True: + _LOGGER.debug('Running %s', cmd) + prog = subprocess.Popen(cmd, shell=False) + prog.communicate() + + # Stop retrying on success. + if prog.returncode == 0: + return + + # Raise a permanent failure if retries have been exhausted. + if retries == 0: + raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode)) + + _LOGGER.debug('Sleeping %d seconds and trying again.', timeout) + time.sleep(timeout) + retries -= 1 + timeout *= 2 + + +def _Download(resource): + """Downloads the given GS resource to a temporary file, returning its path.""" + tmp = tempfile.mkstemp(suffix='syzygy_archive') + os.close(tmp[0]) + url = 'gs://syzygy-archive' + resource + _GsUtil('cp', url, tmp[1]) + return tmp[1] + + +def _InstallBinaries(options, deleted={}): + """Installs Syzygy binaries. This assumes that the output directory has + already been cleaned, as it will refuse to overwrite existing files.""" + contents = {} + state = { 'revision': options.revision, 'contents': contents } + archive_path = _SYZYGY_ARCHIVE_PATH % { 'revision': options.revision } + if options.resources: + resources = [(resource, resource, '', None) + for resource in options.resources] + else: + resources = _RESOURCES + for (base, name, subdir, filt) in resources: + # Create the output directory if it doesn't exist. + fulldir = os.path.join(options.output_dir, subdir) + if os.path.isfile(fulldir): + raise Exception('File exists where a directory needs to be created: %s' % + fulldir) + if not os.path.exists(fulldir): + _LOGGER.debug('Creating directory: %s', fulldir) + if not options.dry_run: + os.makedirs(fulldir) + + # Download and read the archive. + resource = archive_path + '/' + base + _LOGGER.debug('Retrieving %s archive at "%s".', name, resource) + path = _Download(resource) + + _LOGGER.debug('Unzipping %s archive.', name) + with open(path, 'rb') as data: + archive = zipfile.ZipFile(data) + for entry in archive.infolist(): + if not filt or filt(entry): + fullpath = os.path.normpath(os.path.join(fulldir, entry.filename)) + relpath = os.path.relpath(fullpath, options.output_dir) + if os.path.exists(fullpath): + # If in a dry-run take into account the fact that the file *would* + # have been deleted. + if options.dry_run and relpath in deleted: + pass + else: + raise Exception('Path already exists: %s' % fullpath) + + # Extract the file and update the state dictionary. 
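+          # (Illustrative: |contents| ends up mapping output-relative paths
+          # such as 'exe/foo.dll' to their MD5 digests.)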
+ _LOGGER.debug('Extracting "%s".', fullpath) + if not options.dry_run: + archive.extract(entry.filename, fulldir) + md5 = _Md5(fullpath) + contents[relpath] = md5 + if sys.platform == 'cygwin': + os.chmod(fullpath, os.stat(fullpath).st_mode | stat.S_IXUSR) + + _LOGGER.debug('Removing temporary file "%s".', path) + os.remove(path) + + return state + + +def _ParseCommandLine(): + """Parses the command-line and returns an options structure.""" + option_parser = optparse.OptionParser() + option_parser.add_option('--dry-run', action='store_true', default=False, + help='If true then will simply list actions that would be performed.') + option_parser.add_option('--force', action='store_true', default=False, + help='Force an installation even if the binaries are up to date.') + option_parser.add_option('--no-cleanup', action='store_true', default=False, + help='Allow installation on non-Windows platforms, and skip the forced ' + 'cleanup step.') + option_parser.add_option('--output-dir', type='string', + help='The path where the binaries will be replaced. Existing binaries ' + 'will only be overwritten if not up to date.') + option_parser.add_option('--overwrite', action='store_true', default=False, + help='If specified then the installation will happily delete and rewrite ' + 'the entire output directory, blasting any local changes.') + option_parser.add_option('--revision', type='string', + help='The SVN revision or GIT hash associated with the required version.') + option_parser.add_option('--revision-file', type='string', + help='A text file containing an SVN revision or GIT hash.') + option_parser.add_option('--resource', type='string', action='append', + dest='resources', help='A resource to be downloaded.') + option_parser.add_option('--verbose', dest='log_level', action='store_const', + default=logging.INFO, const=logging.DEBUG, + help='Enables verbose logging.') + option_parser.add_option('--quiet', dest='log_level', action='store_const', + default=logging.INFO, const=logging.ERROR, + help='Disables all output except for errors.') + options, args = option_parser.parse_args() + if args: + option_parser.error('Unexpected arguments: %s' % args) + if not options.output_dir: + option_parser.error('Must specify --output-dir.') + if not options.revision and not options.revision_file: + option_parser.error('Must specify one of --revision or --revision-file.') + if options.revision and options.revision_file: + option_parser.error('Must not specify both --revision and --revision-file.') + + # Configure logging. + logging.basicConfig(level=options.log_level) + + # If a revision file has been specified then read it. + if options.revision_file: + options.revision = open(options.revision_file, 'rb').read().strip() + _LOGGER.debug('Parsed revision "%s" from file "%s".', + options.revision, options.revision_file) + + # Ensure that the specified SVN revision or GIT hash is valid. + if not _REVISION_RE.match(options.revision): + option_parser.error('Must specify a valid SVN or GIT revision.') + + # This just makes output prettier to read. + options.output_dir = os.path.normpath(options.output_dir) + + return options + + +def _RemoveOrphanedFiles(options): + """This is run on non-Windows systems to remove orphaned files that may have + been downloaded by a previous version of this script. + """ + # Reconfigure logging to output info messages. This will allow inspection of + # cleanup status on non-Windows buildbots. 
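+  # (Note: this overrides a --quiet/--verbose choice made in
+  # _ParseCommandLine for this module's logger, so the cleanup messages below
+  # are emitted regardless.)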
+  _LOGGER.setLevel(logging.INFO)
+
+  output_dir = os.path.abspath(options.output_dir)
+
+  # We only want to clean up the folder in 'src/third_party/syzygy', and we
+  # expect to be called with that as an output directory. This is an attempt to
+  # not start deleting random things if the script is run from an alternate
+  # location, or not called from the gclient hooks.
+  expected_syzygy_dir = os.path.abspath(os.path.join(
+      os.path.dirname(__file__), '..', 'third_party', 'syzygy'))
+  expected_output_dir = os.path.join(expected_syzygy_dir, 'binaries')
+  if expected_output_dir != output_dir:
+    _LOGGER.info('Unexpected output directory, skipping cleanup.')
+    return
+
+  if not os.path.isdir(expected_syzygy_dir):
+    _LOGGER.info('Output directory does not exist, skipping cleanup.')
+    return
+
+  def OnError(function, path, excinfo):
+    """Logs error encountered by shutil.rmtree."""
+    _LOGGER.error('Error when running %s(%s)', function, path, exc_info=excinfo)
+
+  _LOGGER.info('Removing orphaned files from %s', expected_syzygy_dir)
+  if not options.dry_run:
+    shutil.rmtree(expected_syzygy_dir, True, OnError)
+
+
+def main():
+  options = _ParseCommandLine()
+
+  if options.dry_run:
+    _LOGGER.debug('Performing a dry-run.')
+
+  # We only care about Windows platforms, as the Syzygy binaries aren't used
+  # elsewhere. However, there was a short period of time where this script
+  # wasn't gated on OS types, and those OSes downloaded and installed binaries.
+  # This will clean up orphaned files on those operating systems.
+  if sys.platform not in ('win32', 'cygwin'):
+    if options.no_cleanup:
+      _LOGGER.debug('Skipping usual cleanup for non-Windows platforms.')
+    else:
+      return _RemoveOrphanedFiles(options)
+
+  # Load the current installation state, and validate it against the
+  # requested installation.
+  state, is_consistent = _GetCurrentState(options.revision, options.output_dir)
+
+  # Decide whether or not an install is necessary.
+  if options.force:
+    _LOGGER.debug('Forcing reinstall of binaries.')
+  elif is_consistent:
+    # Avoid doing any work if the contents of the directory are consistent.
+    _LOGGER.debug('State unchanged, no reinstall necessary.')
+    return
+
+  # Under normal logging this is the only message that will be reported.
+  _LOGGER.info('Installing revision %s Syzygy binaries.',
+               options.revision[0:12])
+
+  # Clean up the old state to begin with.
+  deleted = []
+  if options.overwrite:
+    if os.path.exists(options.output_dir):
+      # If overwrite was specified then take a heavy-handed approach.
+      _LOGGER.debug('Deleting entire installation directory.')
+      if not options.dry_run:
+        _RmTree(options.output_dir)
+  else:
+    # Otherwise only delete things that the previous installation put in place,
+    # and take care to preserve any local changes.
+    deleted = _CleanState(options.output_dir, state, options.dry_run)
+
+  # Install the new binaries. In a dry-run this will actually download the
+  # archives, but it won't write anything to disk.
+  state = _InstallBinaries(options, deleted)
+
+  # Build and save the state for the directory.
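+  # (Illustrative shape, values hypothetical:
+  #   {"revision": "<40-char hash>", "contents": {"exe/foo.dll": "<md5>"}}.)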
+  _SaveState(options.output_dir, state, options.dry_run)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/git-hooks/OWNERS b/build/git-hooks/OWNERS
new file mode 100644
index 00000000000..3e327dc7113
--- /dev/null
+++ b/build/git-hooks/OWNERS
@@ -0,0 +1,3 @@
+set noparent
+szager@chromium.org
+cmp@chromium.org
diff --git a/build/git-hooks/pre-commit b/build/git-hooks/pre-commit
new file mode 100644
index 00000000000..41b596344c4
--- /dev/null
+++ b/build/git-hooks/pre-commit
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+submodule_diff() {
+  if test -n "$2"; then
+    git diff-tree -r --ignore-submodules=dirty "$1" "$2" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  else
+    git diff-index --cached --ignore-submodules=dirty "$1" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  fi
+}
+
+if git rev-parse --verify --quiet --no-revs MERGE_HEAD; then
+  merge_base=$(git merge-base HEAD MERGE_HEAD)
+  if test -z "$(submodule_diff $merge_base HEAD)"; then
+    # Most up-to-date submodules are in MERGE_HEAD.
+    head_ref=MERGE_HEAD
+  else
+    # Most up-to-date submodules are in HEAD.
+    head_ref=HEAD
+  fi
+else
+  # No merge in progress. Submodules must match HEAD.
+  head_ref=HEAD
+fi
+
+submods=$(submodule_diff $head_ref)
+if test "$submods"; then
+  echo "You are trying to commit changes to the following submodules:" 1>&2
+  echo 1>&2
+  echo $submods | cut -d ' ' -f 6 | sed 's/^/  /g' 1>&2
+  cat <<EOF 1>&2
+
+Submodule commits are not allowed. Please run:
+
+  git status --ignore-submodules=dirty
+
+and/or:
+
+  git diff-index --cached --ignore-submodules=dirty HEAD
+
+... to see what's in your index.
+
+If you're really and truly trying to roll the version of a submodule, you should
+commit the new version to DEPS, instead.
+EOF
+  exit 1
+fi
+
+gitmodules_diff() {
+  git diff-index --cached "$1" .gitmodules
+}
+
+if [ "$(git ls-files .gitmodules)" ] && [ "$(gitmodules_diff $head_ref)" ]; then
+  cat <<EOF 1>&2
+You are trying to commit a change to .gitmodules. That is not allowed.
+To make changes to submodule names/paths, edit DEPS.
+EOF
+  exit 1
+fi
+
+exit 0
diff --git a/build/gn_helpers.py b/build/gn_helpers.py
new file mode 100644
index 00000000000..2d5d9863b94
--- /dev/null
+++ b/build/gn_helpers.py
@@ -0,0 +1,351 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that integrate with GN.
+
+The main functions are ToGNString and FromGNString, which convert between
+serialized GN variables and Python variables.
+
+To use in a random python file in the build:
+
+  import os
+  import sys
+
+  sys.path.append(os.path.join(os.path.dirname(__file__),
+                               os.pardir, os.pardir, "build"))
+  import gn_helpers
+
+Where the sequence of parameters to join is the relative path from your source
+file to the build directory."""
+
+class GNException(Exception):
+  pass
+
+
+def ToGNString(value, allow_dicts = True):
+  """Returns a stringified GN equivalent of the Python value.
+
+  allow_dicts indicates if this function will allow converting dictionaries
+  to GN scopes. This is only possible at the top level; you can't nest a
+  GN scope in a list, so this should be set to False for recursive calls."""
+  if isinstance(value, basestring):
+    if value.find('\n') >= 0:
+      raise GNException("Trying to print a string with a newline in it.")
+    return '"' + \
+        value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \
+        '"'
+
+  if isinstance(value, unicode):
+    return ToGNString(value.encode('utf-8'))
+
+  if isinstance(value, bool):
+    if value:
+      return "true"
+    return "false"
+
+  if isinstance(value, list):
+    return '[ %s ]' % ', '.join(ToGNString(v) for v in value)
+
+  if isinstance(value, dict):
+    if not allow_dicts:
+      raise GNException("Attempting to recursively print a dictionary.")
+    result = ""
+    for key in sorted(value):
+      if not isinstance(key, basestring):
+        raise GNException("Dictionary key is not a string.")
+      result += "%s = %s\n" % (key, ToGNString(value[key], False))
+    return result
+
+  if isinstance(value, int):
+    return str(value)
+
+  raise GNException("Unsupported type when printing to GN.")
+
+
+def FromGNString(input):
+  """Converts the input string from a GN serialized value to Python values.
+
+  For details on supported types see GNValueParser.Parse() below.
+
+  If your GN script did:
+    something = [ "file1", "file2" ]
+    args = [ "--values=$something" ]
+  The command line would look something like:
+    --values="[ \"file1\", \"file2\" ]"
+  Which when interpreted as a command line gives the value:
+    [ "file1", "file2" ]
+
+  You can parse this into a Python list using GN rules with:
+    input_values = FromGNString(options.values)
+  Although the Python 'ast' module will parse many forms of such input, it
+  will not handle GN escaping properly, nor GN booleans. You should use this
+  function instead.
+
+
+  A NOTE ON STRING HANDLING:
+
+  If you just pass a string on the command line to your Python script, or use
+  string interpolation on a string variable, the strings will not be quoted:
+    str = "asdf"
+    args = [ str, "--value=$str" ]
+  Will yield the command line:
+    asdf --value=asdf
+  The unquoted asdf string will not be valid input to this function, which
+  accepts only quoted strings like GN scripts. In such cases, you can just use
+  the Python string literal directly.
+
+  The main use case for this is for other types, in particular lists. When
+  using string interpolation on a list (as in the top example) the embedded
+  strings will be quoted and escaped according to GN rules so the list can be
+  re-parsed to get the same result."""
+  parser = GNValueParser(input)
+  return parser.Parse()
+
+
+def FromGNArgs(input):
+  """Converts a string with a bunch of gn arg assignments into a Python dict.
+
+  Given a whitespace-separated list of
+
+    <ident> = (integer | string | boolean | <list of the above>)
+
+  gn assignments, this returns a Python dict, i.e.:
+
+    FromGNArgs("foo=true\nbar=1\n") -> { 'foo': True, 'bar': 1 }.
+
+  Only simple types and lists are supported; variables, structs, calls
+  and other, more complicated things are not.
+
+  This routine is meant to handle only the simple sorts of values that
+  arise in parsing --args.
+  """
+  parser = GNValueParser(input)
+  return parser.ParseArgs()
+
+
+def UnescapeGNString(value):
+  """Given a string with GN escaping, returns the unescaped string.
+
+  Be careful not to feed it input from a Python parsing function like
+  'ast' because it will do Python unescaping, which will be incorrect when
+  fed into the GN unescaper."""
+  result = ''
+  i = 0
+  while i < len(value):
+    if value[i] == '\\':
+      if i < len(value) - 1:
+        next_char = value[i + 1]
+        if next_char in ('$', '"', '\\'):
+          # These are the escaped characters GN supports.
+          result += next_char
+          i += 1
+        else:
+          # Any other backslash is a literal.
+          result += '\\'
+    else:
+      result += value[i]
+    i += 1
+  return result
+
+
+def _IsDigitOrMinus(char):
+  return char in "-0123456789"
+
+
+class GNValueParser(object):
+  """Duplicates GN parsing of values and converts to Python types.
+
+  Normally you would use the wrapper function FromGNString(), above.
+
+  If you expect input as a specific type, you can also call one of the Parse*
+  functions directly. All functions throw GNException on invalid input.
+  """
+  def __init__(self, string):
+    self.input = string
+    self.cur = 0
+
+  def IsDone(self):
+    return self.cur == len(self.input)
+
+  def ConsumeWhitespace(self):
+    while not self.IsDone() and self.input[self.cur] in ' \t\n':
+      self.cur += 1
+
+  def Parse(self):
+    """Converts a string representing a printed GN value to the Python type.
+
+    See additional usage notes on FromGNString above.
+
+    - GN booleans ('true', 'false') will be converted to Python booleans.
+
+    - GN numbers ('123') will be converted to Python numbers.
+
+    - GN strings (double-quoted as in '"asdf"') will be converted to Python
+      strings with GN escaping rules. GN string interpolation (embedded
+      variables preceded by $) is not supported; such text is returned as a
+      literal.
+
+    - GN lists ('[1, "asdf", 3]') will be converted to Python lists.
+
+    - GN scopes ('{ ... }') are not supported."""
+    result = self._ParseAllowTrailing()
+    self.ConsumeWhitespace()
+    if not self.IsDone():
+      raise GNException("Trailing input after parsing:\n  " +
+                        self.input[self.cur:])
+    return result
+
+  def ParseArgs(self):
+    """Converts a whitespace-separated list of ident=literals to a dict.
+
+    See additional usage notes on FromGNArgs, above.
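+
+    For example (illustrative): 'is_debug=false foo="bar"' yields
+    {'is_debug': False, 'foo': 'bar'}.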
+ """ + d = {} + + self.ConsumeWhitespace() + while not self.IsDone(): + ident = self._ParseIdent() + self.ConsumeWhitespace() + if self.input[self.cur] != '=': + raise GNException("Unexpected token: " + self.input[self.cur:]) + self.cur += 1 + self.ConsumeWhitespace() + val = self._ParseAllowTrailing() + self.ConsumeWhitespace() + d[ident] = val + + return d + + def _ParseAllowTrailing(self): + """Internal version of Parse that doesn't check for trailing stuff.""" + self.ConsumeWhitespace() + if self.IsDone(): + raise GNException("Expected input to parse.") + + next_char = self.input[self.cur] + if next_char == '[': + return self.ParseList() + elif _IsDigitOrMinus(next_char): + return self.ParseNumber() + elif next_char == '"': + return self.ParseString() + elif self._ConstantFollows('true'): + return True + elif self._ConstantFollows('false'): + return False + else: + raise GNException("Unexpected token: " + self.input[self.cur:]) + + def _ParseIdent(self): + id = '' + + next_char = self.input[self.cur] + if not next_char.isalpha() and not next_char=='_': + raise GNException("Expected an identifier: " + self.input[self.cur:]) + + id += next_char + self.cur += 1 + + next_char = self.input[self.cur] + while next_char.isalpha() or next_char.isdigit() or next_char=='_': + id += next_char + self.cur += 1 + next_char = self.input[self.cur] + + return id + + def ParseNumber(self): + self.ConsumeWhitespace() + if self.IsDone(): + raise GNException('Expected number but got nothing.') + + begin = self.cur + + # The first character can include a negative sign. + if not self.IsDone() and _IsDigitOrMinus(self.input[self.cur]): + self.cur += 1 + while not self.IsDone() and self.input[self.cur].isdigit(): + self.cur += 1 + + number_string = self.input[begin:self.cur] + if not len(number_string) or number_string == '-': + raise GNException("Not a valid number.") + return int(number_string) + + def ParseString(self): + self.ConsumeWhitespace() + if self.IsDone(): + raise GNException('Expected string but got nothing.') + + if self.input[self.cur] != '"': + raise GNException('Expected string beginning in a " but got:\n ' + + self.input[self.cur:]) + self.cur += 1 # Skip over quote. + + begin = self.cur + while not self.IsDone() and self.input[self.cur] != '"': + if self.input[self.cur] == '\\': + self.cur += 1 # Skip over the backslash. + if self.IsDone(): + raise GNException("String ends in a backslash in:\n " + + self.input) + self.cur += 1 + + if self.IsDone(): + raise GNException('Unterminated string:\n ' + self.input[begin:]) + + end = self.cur + self.cur += 1 # Consume trailing ". + + return UnescapeGNString(self.input[begin:end]) + + def ParseList(self): + self.ConsumeWhitespace() + if self.IsDone(): + raise GNException('Expected list but got nothing.') + + # Skip over opening '['. + if self.input[self.cur] != '[': + raise GNException("Expected [ for list but got:\n " + + self.input[self.cur:]) + self.cur += 1 + self.ConsumeWhitespace() + if self.IsDone(): + raise GNException("Unterminated list:\n " + self.input) + + list_result = [] + previous_had_trailing_comma = True + while not self.IsDone(): + if self.input[self.cur] == ']': + self.cur += 1 # Skip over ']'. + return list_result + + if not previous_had_trailing_comma: + raise GNException("List items not separated by comma.") + + list_result += [ self._ParseAllowTrailing() ] + self.ConsumeWhitespace() + if self.IsDone(): + break + + # Consume comma if there is one. 
+      previous_had_trailing_comma = self.input[self.cur] == ','
+      if previous_had_trailing_comma:
+        # Consume comma.
+        self.cur += 1
+        self.ConsumeWhitespace()
+
+    raise GNException("Unterminated list:\n  " + self.input)
+
+  def _ConstantFollows(self, constant):
+    """Returns true if the given constant follows immediately at the current
+    location in the input. If it does, the text is consumed and the function
+    returns true. Otherwise, returns false and the current position is
+    unchanged."""
+    end = self.cur + len(constant)
+    if end > len(self.input):
+      return False  # Not enough room.
+    if self.input[self.cur:end] == constant:
+      self.cur = end
+      return True
+    return False
diff --git a/build/gn_helpers_unittest.py b/build/gn_helpers_unittest.py
new file mode 100644
index 00000000000..cc6018a1721
--- /dev/null
+++ b/build/gn_helpers_unittest.py
@@ -0,0 +1,117 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import gn_helpers
+import unittest
+
+class UnitTest(unittest.TestCase):
+  def test_ToGNString(self):
+    self.assertEqual(
+        gn_helpers.ToGNString([1, 'two', [ '"thr$\\', True, False, [] ]]),
+        '[ 1, "two", [ "\\"thr\\$\\\\", true, false, [  ] ] ]')
+
+  def test_UnescapeGNString(self):
+    # Backslash followed by a \, $, or " means the following character without
+    # the special meaning. Backslash followed by everything else is a literal.
+    self.assertEqual(
+        gn_helpers.UnescapeGNString('\\as\\$\\\\asd\\"'),
+        '\\as$\\asd"')
+
+  def test_FromGNString(self):
+    self.assertEqual(
+        gn_helpers.FromGNString('[1, -20, true, false,["as\\"", []]]'),
+        [ 1, -20, True, False, [ 'as"', [] ] ])
+
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('123 456')
+      parser.Parse()
+
+  def test_ParseBool(self):
+    parser = gn_helpers.GNValueParser('true')
+    self.assertEqual(parser.Parse(), True)
+
+    parser = gn_helpers.GNValueParser('false')
+    self.assertEqual(parser.Parse(), False)
+
+  def test_ParseNumber(self):
+    parser = gn_helpers.GNValueParser('123')
+    self.assertEqual(parser.ParseNumber(), 123)
+
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('')
+      parser.ParseNumber()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('a123')
+      parser.ParseNumber()
+
+  def test_ParseString(self):
+    parser = gn_helpers.GNValueParser('"asdf"')
+    self.assertEqual(parser.ParseString(), 'asdf')
+
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseString()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('asdf')  # Unquoted.
+      parser.ParseString()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('"trailing')  # Unterminated.
+      parser.ParseString()
+
+  def test_ParseList(self):
+    parser = gn_helpers.GNValueParser('[1,]')  # Optional end comma OK.
+    self.assertEqual(parser.ParseList(), [ 1 ])
+
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('asdf')  # No [].
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('[1, 2')  # Unterminated
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('[1 2]')  # No separating comma.
+      parser.ParseList()
+
+  def test_FromGNArgs(self):
+    # Booleans and numbers should work; whitespace is allowed.
+    self.assertEqual(gn_helpers.FromGNArgs('foo = true\nbar = 1\n'),
+                     {'foo': True, 'bar': 1})
+
+    # Whitespace is not required; strings should also work.
+    self.assertEqual(gn_helpers.FromGNArgs('foo="bar baz"'),
+                     {'foo': 'bar baz'})
+
+    # Lists should work.
+    self.assertEqual(gn_helpers.FromGNArgs('foo=[1, 2, 3]'),
+                     {'foo': [1, 2, 3]})
+
+    # Empty strings should return an empty dict.
+    self.assertEqual(gn_helpers.FromGNArgs(''), {})
+    self.assertEqual(gn_helpers.FromGNArgs('  \n  '), {})
+
+    # Non-identifiers should raise an exception.
+    with self.assertRaises(gn_helpers.GNException):
+      gn_helpers.FromGNArgs('123 = true')
+
+    # References to other variables should raise an exception.
+    with self.assertRaises(gn_helpers.GNException):
+      gn_helpers.FromGNArgs('foo = bar')
+
+    # References to functions should raise an exception.
+    with self.assertRaises(gn_helpers.GNException):
+      gn_helpers.FromGNArgs('foo = exec_script("//build/baz.py")')
+
+    # Underscores in identifiers should work.
+    self.assertEqual(gn_helpers.FromGNArgs('_foo = true'),
+                     {'_foo': True})
+    self.assertEqual(gn_helpers.FromGNArgs('foo_bar = true'),
+                     {'foo_bar': True})
+    self.assertEqual(gn_helpers.FromGNArgs('foo_=true'),
+                     {'foo_': True})
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/gn_migration.gypi b/build/gn_migration.gypi
new file mode 100644
index 00000000000..7535ff394f4
--- /dev/null
+++ b/build/gn_migration.gypi
@@ -0,0 +1,764 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines five targets that we are using to track the progress of the
+# GYP->GN migration:
+#
+# 'both_gn_and_gyp' lists what GN is currently capable of building and should
+# match the 'both_gn_and_gyp' target in //BUILD.gn.
+#
+# 'gyp_all' should include everything built when building "all"; i.e., if you
+# type 'ninja gyp_all' and then 'ninja all', the second build should do
+# nothing. 'gyp_all' should just depend on the other four targets.
+#
+# 'gyp_remaining' lists all of the targets that still need to be converted,
+# i.e., all of the other (non-empty) targets that a GYP build will build.
+
+{
+  'includes': [
+    '../media/media_variables.gypi'
+  ],
+  'targets': [
+    {
+      'target_name': 'gyp_all',
+      'type': 'none',
+      'dependencies': [
+        'both_gn_and_gyp',
+        'gyp_remaining',
+      ]
+    },
+    {
+      # This target should mirror the structure of //:both_gn_and_gyp
+      # in src/BUILD.gn as closely as possible, for ease of comparison.
+ 'target_name': 'both_gn_and_gyp', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_i18n_perftests', + '../base/base.gyp:base_perftests', + '../base/base.gyp:base_unittests', + '../base/base.gyp:check_example', + '../cc/cc_tests.gyp:cc_perftests', + '../cc/cc_tests.gyp:cc_unittests', + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../chrome/chrome.gyp:load_library_perf_tests', + '../chrome/chrome.gyp:unit_tests', + '../components/components.gyp:network_hints_browser', + '../components/components.gyp:policy_templates', + '../components/components_tests.gyp:components_browsertests', + '../components/components_tests.gyp:components_perftests', + '../components/components_tests.gyp:components_unittests', + '../content/content.gyp:content_app_browser', + '../content/content.gyp:content_app_child', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_perftests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../device/device_tests.gyp:device_unittests', + '../gin/gin.gyp:gin_v8_snapshot_fingerprint', + '../gpu/gpu.gyp:angle_unittests', + '../gpu/gpu.gyp:gl_tests', + '../gpu/gpu.gyp:gpu_perftests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../media/cast/cast.gyp:cast_unittests', + '../media/media.gyp:media_perftests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:dump_cache', + '../net/net.gyp:net_perftests', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../testing/gmock.gyp:gmock_main', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests', + '../third_party/WebKit/Source/web/web_tests.gyp:webkit_unit_tests', + '../third_party/WebKit/Source/wtf/wtf_tests.gyp:wtf_unittests', + '../third_party/boringssl/boringssl_tests.gyp:boringssl_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/smhasher/smhasher.gyp:pmurhash', + '../third_party/catapult/telemetry/telemetry.gyp:bitmaptools#host', + '../ui/accessibility/accessibility.gyp:accessibility_unittests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/display/display.gyp:display_unittests', + '../ui/events/events_unittests.gyp:events_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/snapshot/snapshot.gyp:snapshot_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../url/url.gyp:url_unittests', + '../v8/src/v8.gyp:v8_snapshot', + '../v8/src/v8.gyp:postmortem-metadata', + ], + 'conditions': [ + ['clang==1', { + 'dependencies': [ + '../build/sanitizers/sanitizers.gyp:llvm-symbolizer', + ], + }], + ['disable_nacl==0 and disable_nacl_untrusted==0', { + 'dependencies': [ + '../components/nacl.gyp:nacl_loader_unittests', + ] + }], + ['enable_extensions==1', { + 'dependencies': [ + '../extensions/shell/app_shell.gyp:app_shell', + '../extensions/shell/app_shell.gyp:app_shell_unittests', + ], + }], + ['enable_mdns==1', { + 'dependencies': [ + '../chrome/chrome.gyp:service_discovery_sniffer', + ] + }], + ['remoting==1', { + 'dependencies': [ + 
'../remoting/remoting_all.gyp:remoting_all', + ], + }], + ['remoting==1 and chromeos==0 and use_x11==1', { + 'dependencies': [ + '../remoting/remoting.gyp:remoting_me2me_host', + '../remoting/remoting.gyp:remoting_me2me_native_messaging_host', + ], + }], + ['toolkit_views==1', { + 'dependencies': [ + '../ui/views/views.gyp:views_unittests', + ], + }], + ['enable_app_list==1', { + 'dependencies': [ + '../ui/app_list/app_list.gyp:app_list_demo', + ], + }], + ['use_ash==1', { + 'dependencies': [ + '../ash/ash.gyp:ash_shell_with_content', + '../ash/ash.gyp:ash_unittests', + ], + }], + ['use_ash==1 or chromeos== 1', { + 'dependencies': [ + '../components/components.gyp:session_manager_component', + ] + }], + ['use_aura==1', { + 'dependencies': [ + '../ui/aura/aura.gyp:aura_demo', + '../ui/aura/aura.gyp:aura_unittests', + '../ui/keyboard/keyboard.gyp:keyboard_unittests', + '../ui/wm/wm.gyp:wm_unittests', + ], + }], + ['use_ozone==1', { + 'dependencies': [ + '../ui/ozone/ozone.gyp:ozone', + ], + }], + ['use_x11==1', { + 'dependencies': [ + '../tools/xdisplaycheck/xdisplaycheck.gyp:xdisplaycheck', + ], + 'conditions': [ + ['target_arch!="arm"', { + 'dependencies': [ + '../gpu/tools/tools.gyp:compositor_model_bench', + ], + }], + ], + }], + ['OS=="android"', { + 'dependencies': [ + '../base/base.gyp:base_junit_tests', + '../base/base.gyp:base_perftests_apk', + '../base/base.gyp:base_unittests_apk', + '../base/base.gyp:chromium_android_linker', + '../breakpad/breakpad.gyp:breakpad_unittests_deps', + '../breakpad/breakpad.gyp:symupload#host', + '../breakpad/breakpad.gyp:microdump_stackwalk#host', + '../breakpad/breakpad.gyp:minidump_dump#host', + '../breakpad/breakpad.gyp:minidump_stackwalk#host', + '../build/android/rezip.gyp:rezip_apk_jar', + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests_apk', + '../cc/cc_tests.gyp:cc_perftests_apk', + '../cc/cc_tests.gyp:cc_unittests_apk', + #"//clank" TODO(GYP) - conditional somehow? 
+ '../components/components.gyp:cronet_package', + '../components/components.gyp:cronet_sample_apk', + '../components/components.gyp:cronet_sample_test_apk', + '../components/components.gyp:cronet_test_apk', + '../components/components.gyp:cronet_test_instrumentation_apk', + '../components/components.gyp:cronet_unittests_apk', + '../components/components_tests.gyp:components_browsertests_apk', + '../components/components_tests.gyp:components_junit_tests', + '../components/components_tests.gyp:components_unittests_apk', + '../content/content_shell_and_tests.gyp:chromium_linker_test_apk', + '../content/content_shell_and_tests.gyp:content_browsertests_apk', + '../content/content_shell_and_tests.gyp:content_junit_tests', + '../content/content_shell_and_tests.gyp:content_shell_apk', + '../content/content_shell_and_tests.gyp:content_shell_test_apk', + '../content/content_shell_and_tests.gyp:content_unittests_apk', + '../device/device_tests.gyp:device_unittests_apk', + '../gpu/gpu.gyp:command_buffer_gles2_tests_apk', + '../gpu/gpu.gyp:gl_tests_apk', + '../gpu/gpu.gyp:gpu_perftests_apk', + '../gpu/gpu.gyp:gpu_unittests_apk', + '../ipc/ipc.gyp:ipc_tests_apk', + '../media/cast/cast.gyp:cast_unittests_apk', + '../media/media.gyp:media_perftests_apk', + '../media/media.gyp:media_unittests_apk', + '../media/media.gyp:video_decode_accelerator_unittest_apk', + '../media/midi/midi.gyp:midi_unittests_apk', + '../net/net.gyp:net_junit_tests', + '../net/net.gyp:net_unittests_apk', + '../sandbox/sandbox.gyp:sandbox_linux_unittests_deps', + '../skia/skia_tests.gyp:skia_unittests_apk', + '../sql/sql.gyp:sql_unittests_apk', + '../testing/android/junit/junit_test.gyp:junit_unit_tests', + '../third_party/smhasher/smhasher.gyp:murmurhash3', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests_apk', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests_apk', + '../third_party/WebKit/Source/web/web_tests.gyp:webkit_unit_tests_apk', + '../third_party/WebKit/Source/wtf/wtf_tests.gyp:wtf_unittests_apk', + '../tools/android/heap_profiler/heap_profiler.gyp:heap_profiler_unittests_apk', + '../tools/android/android_tools.gyp:memconsumer', + '../tools/android/android_tools.gyp:push_apps_to_background', + '../tools/imagediff/image_diff.gyp:image_diff#host', + '../third_party/catapult/telemetry/telemetry.gyp:bitmaptools#host', + '../ui/android/ui_android.gyp:ui_android_unittests_apk', + '../ui/android/ui_android.gyp:ui_junit_tests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests_apk', + '../ui/events/events_unittests.gyp:events_unittests_apk', + '../ui/gfx/gfx_tests.gyp:gfx_unittests_apk', + '../ui/gl/gl_tests.gyp:gl_unittests_apk', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests_apk', + + #"//third_party/android_tools:uiautomator_java", + ], + 'dependencies!': [ + # TODO(GYP): All of these targets need to be ported over. 
+ '../url/url.gyp:url_unittests', + ], + }], + ['OS=="android" and chromecast==0', { + 'dependencies': [ + '../android_webview/android_webview_shell.gyp:system_webview_shell_apk', + '../android_webview/android_webview_shell.gyp:system_webview_shell_layout_test_apk', + '../android_webview/android_webview_shell.gyp:system_webview_shell_page_cycler_apk', + '../chrome/android/chrome_apk.gyp:chrome_public_apk', + '../chrome/android/chrome_apk.gyp:chrome_public_test_apk', + '../chrome/android/chrome_apk.gyp:chrome_sync_shell_apk', + '../chrome/android/chrome_apk.gyp:chrome_sync_shell_test_apk', + '../chrome/chrome.gyp:chromedriver_webview_shell_apk', + '../chrome/chrome.gyp:chrome_junit_tests', + '../chrome/chrome.gyp:unit_tests_apk', + '../third_party/custom_tabs_client/custom_tabs_client.gyp:custom_tabs_client_example_apk', + ], + }], + ['OS!="android"', { + 'dependencies': [ + '../base/base.gyp:build_utf8_validator_tables#host', + '../chrome/chrome.gyp:chrome_app_unittests', + '../chrome/chrome.gyp:chromedriver', + '../chrome/chrome.gyp:chromedriver_tests', + '../chrome/chrome.gyp:chromedriver_unittests', + '../content/content_shell_and_tests.gyp:content_shell', + '../courgette/courgette.gyp:courgette', + '../courgette/courgette.gyp:courgette_fuzz', + '../courgette/courgette.gyp:courgette_minimal_tool', + '../courgette/courgette.gyp:courgette_unittests', + '../gin/gin.gyp:gin_unittests', + '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support', # TODO(GYP) crbug.com/471920 + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../google_apis/gcm/gcm.gyp:mcs_probe', + '../google_apis/google_apis.gyp:google_apis_unittests', + '../jingle/jingle.gyp:jingle_unittests', + '../mojo/mojo.gyp:mojo', + '../mojo/mojo_edk_tests.gyp:mojo_system_unittests', + '../mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests', + '../mojo/mojo_edk_tests.gyp:mojo_public_system_perftests', + '../mojo/mojo_edk_tests.gyp:mojo_public_system_unittests', + '../net/net.gyp:crash_cache', + '../net/net.gyp:crl_set_dump', + '../net/net.gyp:dns_fuzz_stub', + '../net/net.gyp:gdig', + '../net/net.gyp:get_server_time', + '../net/net.gyp:hpack_example_generator', + '../net/net.gyp:hpack_fuzz_mutator', + '../net/net.gyp:hpack_fuzz_wrapper', + '../net/net.gyp:net_watcher', # TODO(GYP): This should be conditional on use_v8_in_net + '../net/net.gyp:run_testserver', + '../net/net.gyp:stress_cache', + '../net/net.gyp:tld_cleanup', + '../ppapi/ppapi_internal.gyp:ppapi_example_audio', + '../ppapi/ppapi_internal.gyp:ppapi_example_audio_input', + '../ppapi/ppapi_internal.gyp:ppapi_example_c_stub', + '../ppapi/ppapi_internal.gyp:ppapi_example_cc_stub', + '../ppapi/ppapi_internal.gyp:ppapi_example_compositor', + '../ppapi/ppapi_internal.gyp:ppapi_example_crxfs', + '../ppapi/ppapi_internal.gyp:ppapi_example_enumerate_devices', + '../ppapi/ppapi_internal.gyp:ppapi_example_file_chooser', + '../ppapi/ppapi_internal.gyp:ppapi_example_flash_topmost', + '../ppapi/ppapi_internal.gyp:ppapi_example_gamepad', + '../ppapi/ppapi_internal.gyp:ppapi_example_gles2', + '../ppapi/ppapi_internal.gyp:ppapi_example_gles2_spinning_cube', + '../ppapi/ppapi_internal.gyp:ppapi_example_graphics_2d', + '../ppapi/ppapi_internal.gyp:ppapi_example_ime', + '../ppapi/ppapi_internal.gyp:ppapi_example_input', + '../ppapi/ppapi_internal.gyp:ppapi_example_media_stream_audio', + '../ppapi/ppapi_internal.gyp:ppapi_example_media_stream_video', + '../ppapi/ppapi_internal.gyp:ppapi_example_mouse_cursor', + '../ppapi/ppapi_internal.gyp:ppapi_example_mouse_lock', + 
'../ppapi/ppapi_internal.gyp:ppapi_example_paint_manager', + '../ppapi/ppapi_internal.gyp:ppapi_example_post_message', + '../ppapi/ppapi_internal.gyp:ppapi_example_printing', + '../ppapi/ppapi_internal.gyp:ppapi_example_scaling', + '../ppapi/ppapi_internal.gyp:ppapi_example_scroll', + '../ppapi/ppapi_internal.gyp:ppapi_example_simple_font', + '../ppapi/ppapi_internal.gyp:ppapi_example_threading', + '../ppapi/ppapi_internal.gyp:ppapi_example_url_loader', + '../ppapi/ppapi_internal.gyp:ppapi_example_url_loader_file', + '../ppapi/ppapi_internal.gyp:ppapi_example_vc', + '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode', + '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode_dev', + '../ppapi/ppapi_internal.gyp:ppapi_example_video_effects', + '../ppapi/ppapi_internal.gyp:ppapi_example_video_encode', + '../ppapi/ppapi_internal.gyp:ppapi_tests', + '../ppapi/ppapi_internal.gyp:ppapi_perftests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../components/sync.gyp:run_sync_testserver', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../tools/imagediff/image_diff.gyp:image_diff', + '../ui/compositor/compositor.gyp:compositor_unittests', + ], + }], + ['enable_app_list==1', { + 'dependencies': [ + '../ui/app_list/app_list.gyp:app_list_unittests', + ], + }], + ['OS!="android" and chromecast==0', { + 'dependencies': [ + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:performance_browser_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:sync_performance_tests', + '../extensions/extensions_tests.gyp:extensions_browsertests', + '../extensions/extensions_tests.gyp:extensions_unittests', + '../gin/gin.gyp:gin_shell', + '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test', # TODO(GYP) crbug.com/471920 + '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test', # TODO(GYP) crbug.com/471903 to make this complete. 
+ '../ipc/ipc.gyp:ipc_perftests', + '../mojo/mojo_base.gyp:mojo_common_unittests', + '../ppapi/tools/ppapi_tools.gyp:pepper_hash_for_uma', + '../skia/skia.gyp:filter_fuzz_stub', + '../skia/skia.gyp:image_operations_bench', + '../components/sync/tools/sync_tools.gyp:sync_client', + '../components/sync/tools/sync_tools.gyp:sync_listen_notifications', + '../third_party/codesighs/codesighs.gyp:codesighs', + '../third_party/codesighs/codesighs.gyp:maptsvdifftool', + '../third_party/pdfium/samples/samples.gyp:pdfium_diff', + '../third_party/pdfium/samples/samples.gyp:pdfium_test', + '../tools/battor_agent/battor_agent.gyp:battor_agent', + '../tools/battor_agent/battor_agent.gyp:battor_agent_unittests', + '../tools/gn/gn.gyp:gn', + '../tools/gn/gn.gyp:gn_unittests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:clear_system_cache', + '../ui/message_center/message_center.gyp:message_center_unittests', + '../ui/views/examples/examples.gyp:views_examples_with_content_exe', + ], + }], + ['media_use_ffmpeg==1 and OS!="android"', { + 'dependencies': [ + '../media/media.gyp:ffmpeg_regression_tests', + ], + }], + ['OS=="android" or OS=="linux"', { + 'dependencies': [ + '../net/net.gyp:disk_cache_memory_test', + ], + }], + ['chromeos==1', { + 'dependencies': [ + '../chromeos/chromeos.gyp:chromeos_unittests', + '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests', + '../ui/arc/arc.gyp:ui_arc_unittests', + ] + }], + ['chromeos==1 or OS=="win" or OS=="mac"', { + 'dependencies': [ + '../rlz/rlz.gyp:rlz_id', + '../rlz/rlz.gyp:rlz_lib', + '../rlz/rlz.gyp:rlz_unittests', + ], + }], + ['OS=="linux" or OS=="android" or os_bsd==1', { + 'dependencies': [ + '../breakpad/breakpad.gyp:breakpad_unittests', + '../breakpad/breakpad.gyp:core-2-minidump', + '../breakpad/breakpad.gyp:dump_syms#host', + '../breakpad/breakpad.gyp:generate_test_dump', + '../breakpad/breakpad.gyp:minidump-2-core', + ], + }], + ['OS=="linux" or os_bsd==1', { + 'dependencies': [ + '../breakpad/breakpad.gyp:microdump_stackwalk', + '../breakpad/breakpad.gyp:minidump_dump', + '../breakpad/breakpad.gyp:minidump_stackwalk', + '../breakpad/breakpad.gyp:symupload', + '../third_party/codesighs/codesighs.gyp:nm2tsv', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../dbus/dbus.gyp:dbus_test_server', + '../dbus/dbus.gyp:dbus_unittests', + '../media/cast/cast.gyp:cast_testing_tools', + '../media/cast/cast.gyp:tap_proxy', + '../net/net.gyp:disk_cache_memory_test', + '../net/net.gyp:epoll_quic_client', + '../net/net.gyp:epoll_quic_server', + '../net/net.gyp:hpack_example_generator', + '../net/net.gyp:hpack_fuzz_mutator', + '../net/net.gyp:hpack_fuzz_wrapper', + '../net/net.gyp:net_perftests', + '../net/net.gyp:quic_client', + '../net/net.gyp:quic_server', + '../sandbox/sandbox.gyp:chrome_sandbox', + '../sandbox/sandbox.gyp:sandbox_linux_unittests', + '../third_party/sqlite/sqlite.gyp:sqlite_shell', + ], + }], + ['OS=="mac"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:crash_inspector', + '../breakpad/breakpad.gyp:dump_syms', + '../breakpad/breakpad.gyp:symupload', + '../third_party/apple_sample_code/apple_sample_code.gyp:apple_sample_code', + '../third_party/molokocacao/molokocacao.gyp:molokocacao', + + # TODO(GYP): remove these when the corresponding root targets work. 
+ #"//cc/blink", + #"//components/ui/zoom", + #"//content", + #"//content/test:test_support", + #"//device/battery", + #"//device/bluetooth", + #"//device/nfc", + #"//device/usb", + #"//device/vibration", + #"//media/blink", + #"//pdf", + #"//storage/browser", + #"//third_party/brotli", + #"//third_party/flac", + #"//third_party/hunspell", + #//third_party/iccjpeg", + #"//third_party/libphonenumber", + #"//third_party/ots", + #"//third_party/qcms", + #"//third_party/smhasher:murmurhash3", + #"//third_party/webrtc/system_wrappers", + #"//ui/native_theme", + #"//ui/snapshot", + #"//ui/surface", + ], + 'dependencies!': [ + #"//chrome", # TODO(GYP) + #"//chrome/test:browser_tests", # TODO(GYP) + #"//chrome/test:interactive_ui_tests", # TODO(GYP) + #"//chrome/test:sync_integration_tests", # TODO(GYP) + #"//chrome/test:unit_tests", # TODO(GYP) + #"//components:components_unittests", # TODO(GYP) + #"//extensions:extensions_browsertests", # TODO(GYP) + #"//extensions:extensions_unittests", # TODO(GYP) + #"//net:net_unittests", # TODO(GYP) + #"//third_party/usrsctp", # TODO(GYP) + #"//ui/app_list:app_list_unittests", # TODO(GYP) + #"//ui/gfx:gfx_unittests", # TODO(GYP) + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../base/base.gyp:pe_image_test', + '../chrome/chrome.gyp:installer_util_unittests', + '../chrome/chrome.gyp:install_static_unittests', + '../chrome/chrome.gyp:setup', + '../chrome/chrome.gyp:setup_unittests', + '../chrome/installer/mini_installer.gyp:mini_installer', + '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests', + '../chrome_elf/chrome_elf.gyp:dll_hash_main', + '../components/components.gyp:wifi_test', + '../net/net.gyp:quic_client', + '../net/net.gyp:quic_server', + '../rlz/rlz.gyp:rlz', + '../sandbox/sandbox.gyp:pocdll', + '../sandbox/sandbox.gyp:sandbox_poc', + '../sandbox/sandbox.gyp:sbox_integration_tests', + '../sandbox/sandbox.gyp:sbox_unittests', + '../sandbox/sandbox.gyp:sbox_validation_tests', + '../testing/gtest.gyp:gtest_main', + '../third_party/codesighs/codesighs.gyp:msdump2symdb', + '../third_party/codesighs/codesighs.gyp:msmap2tsv', + '../third_party/pdfium/samples/samples.gyp:pdfium_diff', + ], + 'conditions': [ + ['component!="shared_library" or target_arch!="ia32"', { + 'dependencies': [ + '../chrome/installer/mini_installer.gyp:next_version_mini_installer', + ], + }], + ], + }], + ['chromecast==1', { + 'dependencies': [ + '../chromecast/chromecast.gyp:cast_shell', + ], + }], + ['use_openh264==1', { + 'dependencies': [ + '../third_party/openh264/openh264.gyp:openh264_common', + '../third_party/openh264/openh264.gyp:openh264_processing', + '../third_party/openh264/openh264.gyp:openh264_encoder', + ], + }], + ], + }, + { + 'target_name': 'gyp_remaining', + 'type': 'none', + 'conditions': [ + ['test_isolation_mode!="noop"', { + 'dependencies': [ + '../base/base.gyp:base_unittests_run', + '../cc/cc_tests.gyp:cc_unittests_run', + '../chrome/chrome.gyp:browser_tests_run', + '../chrome/chrome.gyp:chrome_app_unittests_run', + '../chrome/chrome.gyp:chrome_run', + '../chrome/chrome.gyp:interactive_ui_tests_run', + '../chrome/chrome.gyp:sync_integration_tests_run', + '../chrome/chrome.gyp:unit_tests_run', + '../device/device_tests.gyp:device_unittests_run', + '../components/components_tests.gyp:components_browsertests_run', + '../components/components_tests.gyp:components_unittests_run', + '../content/content_shell_and_tests.gyp:content_browsertests_run', + '../content/content_shell_and_tests.gyp:content_unittests_run', + 
'../courgette/courgette.gyp:courgette_unittests_run', + '../crypto/crypto.gyp:crypto_unittests_run', + '../extensions/extensions_tests.gyp:extensions_browsertests_run', + '../extensions/extensions_tests.gyp:extensions_unittests_run', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests_run', + '../google_apis/google_apis.gyp:google_apis_unittests_run', + '../gpu/gpu.gyp:gpu_unittests_run', + '../media/blink/media_blink.gyp:media_blink_unittests_run', + '../media/cast/cast.gyp:cast_unittests_run', + '../media/media.gyp:media_unittests_run', + '../media/midi/midi.gyp:midi_unittests_run', + '../mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests_run', + '../mojo/mojo_edk_tests.gyp:mojo_public_system_unittests_run', + '../net/net.gyp:net_unittests_run', + '../ppapi/ppapi_internal.gyp:ppapi_unittests_run', + '../printing/printing.gyp:printing_unittests_run', + '../remoting/remoting.gyp:remoting_unittests_run', + '../skia/skia_tests.gyp:skia_unittests_run', + '../sql/sql.gyp:sql_unittests_run', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests_run', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests_run', + '../third_party/WebKit/Source/web/web_tests.gyp:webkit_unit_tests_run', + '../third_party/WebKit/Source/wtf/wtf_tests.gyp:wtf_unittests_run', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests_run', + '../tools/battor_agent/battor_agent.gyp:battor_agent_unittests_run', + '../tools/gn/gn.gyp:gn_unittests_run', + '../ui/accessibility/accessibility.gyp:accessibility_unittests_run', + '../ui/compositor/compositor.gyp:compositor_unittests_run', + '../ui/display/display.gyp:display_unittests_run', + '../ui/events/events_unittests.gyp:events_unittests_run', + '../ui/gfx/gfx_tests.gyp:gfx_unittests_run', + '../ui/message_center/message_center.gyp:message_center_unittests_run', + '../url/url.gyp:url_unittests_run', + ], + 'conditions': [ + ['OS=="linux"', { + 'dependencies': [ + '../sandbox/sandbox.gyp:sandbox_linux_unittests_run', + ], + }], + ['OS=="mac"', { + 'dependencies': [ + '../sandbox/sandbox.gyp:sandbox_mac_unittests_run', + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../chrome/chrome.gyp:installer_util_unittests_run', + '../chrome/chrome.gyp:setup_unittests_run', + '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests_run', + '../sandbox/sandbox.gyp:sbox_integration_tests_run', + '../sandbox/sandbox.gyp:sbox_unittests_run', + '../sandbox/sandbox.gyp:sbox_validation_tests_run', + ], + }], + ['OS!="android"', { + 'dependencies': [ + '../ipc/ipc.gyp:ipc_tests_run', + '../ui/base/ui_base_tests.gyp:ui_base_unittests_run', + '../ui/gl/gl_tests.gyp:gl_unittests_run', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests_run', + ], + }], + ['OS!="android" and OS!="ios" and chromecast==0', { + 'dependencies': [ + '../mojo/mojo_edk_tests.gyp:mojo_js_unittests_run', + '../mojo/mojo_edk_tests.gyp:mojo_js_integration_tests_run', + '../mojo/mojo_edk_tests.gyp:mojo_system_unittests_run', + '../services/shell/shell.gyp:mojo_shell_unittests_run', + ], + }], + ['use_ash==1', { + 'dependencies': [ + '../ash/ash.gyp:ash_unittests_run', + ], + }], + ['enable_app_list==1', { + 'dependencies': [ + '../ui/app_list/app_list.gyp:app_list_unittests_run', + ], + }], + ['use_aura==1', { + 'dependencies': [ + '../ui/app_list/presenter/app_list_presenter.gyp:app_list_presenter_unittests_run', + ], + }], + ['use_aura==1', { + 'dependencies': [ + '../ui/aura/aura.gyp:aura_unittests_run', + 
'../ui/wm/wm.gyp:wm_unittests_run', + ], + }], + ['enable_webrtc==1 or OS!="android"', { + 'dependencies': [ + '../jingle/jingle.gyp:jingle_unittests_run', + ], + }], + ['disable_nacl==0 and disable_nacl_untrusted==0', { + 'dependencies': [ + '../components/nacl.gyp:nacl_loader_unittests_run', + ] + }], + ['toolkit_views==1', { + 'dependencies': [ + '../ui/views/views.gyp:views_unittests_run', + ], + }], + ], + }], + ['chromeos==1', { + 'dependencies': [ + '../media/media.gyp:jpeg_decode_accelerator_unittest', + ], + }], + ['chromeos==1 or OS=="mac"', { + 'dependencies': [ + '../media/media.gyp:video_encode_accelerator_unittest', + ], + }], + ['chromeos==1 and target_arch != "arm"', { + 'dependencies': [ + '../media/media.gyp:vaapi_jpeg_decoder_unittest', + ], + }], + ['chromeos==1 or OS=="win" or OS=="android"', { + 'dependencies': [ + '../media/media.gyp:video_decode_accelerator_unittest', + ], + }], + ['OS=="linux" or OS=="win"', { + 'dependencies': [ + # TODO(GYP): Figure out which of these run on android/mac/win/ios/etc. + '../remoting/remoting.gyp:ar_sample_test_driver', + + # TODO(GYP): in progress - see tfarina. + '../third_party/webrtc/tools/tools.gyp:frame_analyzer', + '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter', + ], + }], + ['OS=="win"', { + 'dependencies': [ + # TODO(GYP): All of these targets still need to be converted. + '../chrome/chrome.gyp:gcapi_dll', + '../chrome/chrome.gyp:gcapi_test', + '../chrome/chrome.gyp:pack_policy_templates', + + # This is a safe browsing utility only necessary for developers. + # For now, we can skip this and anybody that needs this can add it + # to the GN build. + '../chrome/chrome.gyp:sb_sigutil', + + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../content/content_shell_and_tests.gyp:content_shell_crash_service', + '../gpu/gpu.gyp:angle_end2end_tests', + '../gpu/gpu.gyp:angle_perftests', + '../ppapi/ppapi_internal.gyp:ppapi_perftests', + '../remoting/remoting.gyp:ar_sample_test_driver', + '../remoting/remoting.gyp:remoting_breakpad_tester', + '../remoting/remoting.gyp:remoting_console', + '../remoting/remoting.gyp:remoting_desktop', + '../tools/win/static_initializers/static_initializers.gyp:static_initializers', + ], + }], + ['OS=="win" and target_arch=="ia32"', { + 'dependencies': [ + # TODO(GYP): All of these targets need to be ported over. + '../base/base.gyp:base_win64', + '../base/base.gyp:base_i18n_nacl_win64', + '../chrome/chrome.gyp:launcher_support64', + '../components/components.gyp:breakpad_win64', + '../courgette/courgette.gyp:courgette64', + '../crypto/crypto.gyp:crypto_nacl_win64', + '../ipc/ipc.gyp:ipc_win64', + '../sandbox/sandbox.gyp:sandbox_win64', + ], + }], + ['OS=="win" and target_arch=="ia32" and configuration_policy==1', { + 'dependencies': [ + # TODO(GYP): All of these targets need to be ported over. 
+            '../components/components.gyp:policy_win64',
+          ]
+        }],
+        ['OS=="android"', {
+          'dependencies': [
+            '../components/components.gyp:cronet_perf_test_apk',
+            '../url/url.gyp:url_unittests',
+          ],
+        }],
+        ['OS=="android" and chromecast==0', {
+          'dependencies': [
+            '../android_webview/android_webview.gyp:android_webview_apk',
+            '../android_webview/android_webview.gyp:android_webview_test_apk',
+            '../android_webview/android_webview.gyp:android_webview_unittests',
+            '../android_webview/android_webview.gyp:android_webview_unittests_apk',
+          ],
+        }],
+        ['OS=="android" and chromecast==0 and use_webview_internal_framework==0', {
+          'dependencies': [
+            '../android_webview/android_webview.gyp:system_webview_apk',
+          ],
+        }],
+        ['OS=="android" and target_arch != "x64"', {
+          'dependencies': [
+            '../third_party/android_platform/relocation_packer.gyp:android_relocation_packer_unittests#host'
+          ],
+        }],
+      ],
+    },
+  ]
+}
diff --git a/build/gn_run_binary.py b/build/gn_run_binary.py
new file mode 100644
index 00000000000..7d83f6136fd
--- /dev/null
+++ b/build/gn_run_binary.py
@@ -0,0 +1,22 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
+
+Run with:
+  python gn_run_binary.py <binary_name> [args ...]
+"""
+
+import sys
+import subprocess
+
+# This script is designed to run binaries produced by the current build. We
+# always prefix it with "./" to avoid picking up system versions that might
+# also be on the path.
+path = './' + sys.argv[1]
+
+# The rest of the arguments are passed directly to the executable.
+args = [path] + sys.argv[2:]
+
+sys.exit(subprocess.call(args))
diff --git a/build/grit_action.gypi b/build/grit_action.gypi
new file mode 100644
index 00000000000..b24f0f8d715
--- /dev/null
+++ b/build/grit_action.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit in a
+# consistent manner. To use this, the following variables need to be
+# defined:
+#   grit_grd_file: string: grd file path
+#   grit_out_dir: string: the output directory path
+
+# It would be really nice to do this with a rule instead of actions, but it
+# would need to determine inputs and outputs via grit_info on a per-file
+# basis. GYP rules don't currently support that. They could be extended to
+# do this, but then every generator would need to be updated to handle this.
+
+{
+  'variables': {
+    'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
+    'grit_resource_ids%': '<(DEPTH)/tools/gritsettings/resource_ids',
+    # This makes it possible to add more defines in specific targets,
+    # instead of build/common.gypi .
+    'grit_additional_defines%': [],
+    'grit_rc_header_format%': [],
+    'grit_whitelist%': '',
+
+    'conditions': [
+      # These scripts can skip writing generated files if they are identical
+      # to the already existing files, which avoids further build steps, like
+      # recompilation. However, a dependency (earlier build step) having a
+      # newer timestamp than an output (later build step) confuses some build
+      # systems, so only use this on ninja, which explicitly supports this use
+      # case (gyp turns all actions into ninja restat rules).
+ ['"<(GENERATOR)"=="ninja"', { + 'write_only_new': '1', + }, { + 'write_only_new': '0', + }], + ], + }, + 'conditions': [ + ['"<(grit_whitelist)"==""', { + 'variables': { + 'grit_whitelist_flag': [], + } + }, { + 'variables': { + 'grit_whitelist_flag': ['-w', '<(grit_whitelist)'], + } + }] + ], + 'inputs': [ + ' 2: + specified_includes.add(os.path.realpath(arg[2:])) + + result = [] + def AddInclude(path): + if os.path.realpath(path) not in specified_includes: + result.append(path) + + if os.environ.get('GYP_INCLUDE_FIRST') != None: + AddInclude(os.path.join(chrome_src, os.environ.get('GYP_INCLUDE_FIRST'))) + + # Always include common.gypi. + AddInclude(os.path.join(script_dir, 'common.gypi')) + + # Optionally add supplemental .gypi files if present. + for supplement in supplemental_files: + AddInclude(supplement) + + if os.environ.get('GYP_INCLUDE_LAST') != None: + AddInclude(os.path.join(chrome_src, os.environ.get('GYP_INCLUDE_LAST'))) + + return result + + +def main(): + # Disabling garbage collection saves about 1 second out of 16 on a Linux + # z620 workstation. Since this is a short-lived process it's not a problem to + # leak a few cyclyc references in order to spare the CPU cycles for + # scanning the heap. + gc.disable() + + args = sys.argv[1:] + + use_analyzer = len(args) and args[0] == '--analyzer' + if use_analyzer: + args.pop(0) + os.environ['GYP_GENERATORS'] = 'analyzer' + args.append('-Gconfig_path=' + args.pop(0)) + args.append('-Ganalyzer_output_path=' + args.pop(0)) + + gyp_chromium_no_action = os.environ.get('GYP_CHROMIUM_NO_ACTION') + if gyp_chromium_no_action == '1': + print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.' + sys.exit(0) + + # Use the Psyco JIT if available. + if psyco: + psyco.profile() + print "Enabled Psyco JIT." + + # Fall back on hermetic python if we happen to get run under cygwin. + # TODO(bradnelson): take this out once this issue is fixed: + # http://code.google.com/p/gyp/issues/detail?id=177 + if sys.platform == 'cygwin': + import find_depot_tools + depot_tools_path = find_depot_tools.add_depot_tools_to_path() + python_dir = sorted(glob.glob(os.path.join(depot_tools_path, + 'python2*_bin')))[-1] + env = os.environ.copy() + env['PATH'] = python_dir + os.pathsep + env.get('PATH', '') + cmd = [os.path.join(python_dir, 'python.exe')] + sys.argv + sys.exit(subprocess.call(cmd, env=env)) + + # This could give false positives since it doesn't actually do real option + # parsing. Oh well. + gyp_file_specified = any(arg.endswith('.gyp') for arg in args) + + gyp_environment.SetEnvironment() + + # If we didn't get a file, check an env var, and then fall back to + # assuming 'all.gyp' from the same directory as the script. + if not gyp_file_specified: + gyp_file = os.environ.get('CHROMIUM_GYP_FILE') + if gyp_file: + # Note that CHROMIUM_GYP_FILE values can't have backslashes as + # path separators even on Windows due to the use of shlex.split(). + args.extend(shlex.split(gyp_file)) + else: + args.append(os.path.join(script_dir, 'all.gyp')) + + supplemental_includes = GetSupplementalFiles() + gyp_vars_dict = GetGypVars(supplemental_includes) + # There shouldn't be a circular dependency relationship between .gyp files, + # but in Chromium's .gyp files, on non-Mac platforms, circular relationships + # currently exist. The check for circular dependencies is currently + # bypassed on other platforms, but is left enabled on iOS, where a violation + # of the rule causes Xcode to misbehave badly. 
+  # TODO(mark): Find and kill remaining circular dependencies, and remove this
+  # option. http://crbug.com/35878.
+  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
+  # list.
+  if gyp_vars_dict.get('OS') != 'ios':
+    args.append('--no-circular-check')
+
+  # libtool on Mac warns about duplicate basenames in static libraries, so
+  # they're disallowed in general by gyp. We are lax on this point, so disable
+  # this check other than on Mac. GN does not use static libraries as heavily,
+  # so over time this restriction will mostly go away anyway, even on Mac.
+  # https://code.google.com/p/gyp/issues/detail?id=384
+  if sys.platform != 'darwin':
+    args.append('--no-duplicate-basename-check')
+
+  # We explicitly don't support the make gyp generator (crbug.com/348686). Be
+  # nice and fail here, rather than choking in gyp.
+  if re.search(r'(^|,|\s)make($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
+    print 'Error: make gyp generator not supported (check GYP_GENERATORS).'
+    sys.exit(1)
+
+  # We explicitly don't support the native msvs gyp generator. Be nice and
+  # fail here, rather than generating broken projects.
+  if re.search(r'(^|,|\s)msvs($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
+    print 'Error: msvs gyp generator not supported (check GYP_GENERATORS).'
+    print 'Did you mean to use the `msvs-ninja` generator?'
+    sys.exit(1)
+
+  # We explicitly don't support the native xcode gyp generator. Be nice and
+  # fail here, rather than generating broken projects.
+  if re.search(r'(^|,|\s)xcode($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
+    print 'Error: xcode gyp generator not supported (check GYP_GENERATORS).'
+    print 'Did you mean to use the `xcode-ninja` generator?'
+    sys.exit(1)
+
+  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
+  # to enforce syntax checking.
+  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
+  if syntax_check and int(syntax_check):
+    args.append('--check')
+
+  # TODO(dmikurube): Remove these checks and messages after a while.
+  if ('linux_use_tcmalloc' in gyp_vars_dict or
+      'android_use_tcmalloc' in gyp_vars_dict):
+    print '*****************************************************************'
+    print '"linux_use_tcmalloc" and "android_use_tcmalloc" are deprecated!'
+    print '-----------------------------------------------------------------'
+    print 'You specified "linux_use_tcmalloc" or "android_use_tcmalloc" in'
+    print 'your GYP_DEFINES. Please switch to "use_allocator" now.'
+    print 'See http://crbug.com/345554 for the details.'
+    print '*****************************************************************'
+
+  # Automatically turn on crosscompile support for platforms that need it.
+  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
+  # this mode.)
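+  # For example, GYP_GENERATORS=ninja together with OS=android (and
+  # GYP_CROSSCOMPILE not already set in the environment) results in
+  # GYP_CROSSCOMPILE=1 below.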
+ if all(('ninja' in os.environ.get('GYP_GENERATORS', ''), + gyp_vars_dict.get('OS') in ['android', 'ios'], + 'GYP_CROSSCOMPILE' not in os.environ)): + os.environ['GYP_CROSSCOMPILE'] = '1' + if gyp_vars_dict.get('OS') == 'android': + args.append('--check') + + args.extend( + ['-I' + i for i in additional_include_files(supplemental_includes, args)]) + + args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()]) + + mac_toolchain_dir = mac_toolchain.GetToolchainDirectory() + if mac_toolchain_dir: + args.append('-Gmac_toolchain_dir=' + mac_toolchain_dir) + mac_toolchain.SetToolchainEnvironment() + + running_as_hook = '--running-as-hook' + if running_as_hook in args and gyp_chromium_no_action != '0': + print 'GYP is now disabled by default in runhooks.\n' + print 'If you really want to run this, either run ' + print '`python build/gyp_chromium.py` explicitly by hand' + print 'or set the environment variable GYP_CHROMIUM_NO_ACTION=0.' + sys.exit(0) + + if running_as_hook in args: + args.remove(running_as_hook) + + if not use_analyzer: + print 'Updating projects from gyp files...' + sys.stdout.flush() + + # Off we go... + gyp_rc = gyp.main(args) + + if gyp_rc == 0 and not use_analyzer: + vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs() + if vs2013_runtime_dll_dirs: + x64_runtime, x86_runtime = vs2013_runtime_dll_dirs + vs_toolchain.CopyVsRuntimeDlls( + os.path.join(chrome_src, GetOutputDirectory()), + (x86_runtime, x64_runtime)) + + sys.exit(gyp_rc) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/gyp_chromium_test.py b/build/gyp_chromium_test.py new file mode 100644 index 00000000000..0ae74faf31d --- /dev/null +++ b/build/gyp_chromium_test.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
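+
+"""Unit tests for gyp_chromium.py, covering GetOutputDirectory() and
+GetGypVars() flag and environment handling."""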
+ +import os +import sys +import unittest + +SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__)) +SRC_DIR = os.path.dirname(SCRIPT_DIR) + +sys.path.append(os.path.join(SRC_DIR, 'third_party', 'pymock')) + +import mock + +import gyp_chromium + + +class TestGetOutputDirectory(unittest.TestCase): + @mock.patch('os.environ', {}) + @mock.patch('sys.argv', [__file__]) + def testDefaultValue(self): + self.assertEqual(gyp_chromium.GetOutputDirectory(), 'out') + + @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'}) + @mock.patch('sys.argv', [__file__]) + def testEnvironment(self): + self.assertEqual(gyp_chromium.GetOutputDirectory(), 'envfoo') + + @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'}) + @mock.patch('sys.argv', [__file__, '-Goutput_dir=cmdfoo']) + def testGFlagOverridesEnv(self): + self.assertEqual(gyp_chromium.GetOutputDirectory(), 'cmdfoo') + + @mock.patch('os.environ', {}) + @mock.patch('sys.argv', [__file__, '-G', 'output_dir=foo']) + def testGFlagWithSpace(self): + self.assertEqual(gyp_chromium.GetOutputDirectory(), 'foo') + + +class TestGetGypVars(unittest.TestCase): + @mock.patch('os.environ', {}) + def testDefault(self): + self.assertEqual(gyp_chromium.GetGypVars([]), {}) + + @mock.patch('os.environ', {}) + @mock.patch('sys.argv', [__file__, '-D', 'foo=bar']) + def testDFlags(self): + self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar'}) + + @mock.patch('os.environ', {}) + @mock.patch('sys.argv', [__file__, '-D', 'foo']) + def testDFlagsNoValue(self): + self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': '1'}) + + @mock.patch('os.environ', {}) + @mock.patch('sys.argv', [__file__, '-D', 'foo=bar', '-Dbaz']) + def testDFlagMulti(self): + self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar', 'baz': '1'}) + + +if __name__ == '__main__': + unittest.main() diff --git a/build/gyp_environment.py b/build/gyp_environment.py new file mode 100644 index 00000000000..87e3ea8680f --- /dev/null +++ b/build/gyp_environment.py @@ -0,0 +1,32 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Sets up various automatic gyp environment variables. These are used by +gyp_chromium and landmines.py which run at different stages of runhooks. To +make sure settings are consistent between them, all setup should happen here. +""" + +import gyp_helper +import mac_toolchain +import os +import sys +import vs_toolchain + +def SetEnvironment(): + """Sets defaults for GYP_* variables.""" + gyp_helper.apply_chromium_gyp_env() + + # Default to ninja on linux and windows, but only if no generator has + # explicitly been set. + # Also default to ninja on mac, but only when not building chrome/ios. + # . -f / --format has precedence over the env var, no need to check for it + # . set the env var only if it hasn't been set yet + # . chromium.gyp_env has been applied to os.environ at this point already + if sys.platform.startswith(('linux', 'win', 'freebsd', 'darwin')) and \ + not os.environ.get('GYP_GENERATORS'): + os.environ['GYP_GENERATORS'] = 'ninja' + + vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs() + mac_toolchain.SetToolchainEnvironment() diff --git a/build/gyp_helper.py b/build/gyp_helper.py new file mode 100644 index 00000000000..c840f2d6dc2 --- /dev/null +++ b/build/gyp_helper.py @@ -0,0 +1,68 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file helps gyp_chromium and landmines correctly set up the gyp
+# environment from chromium.gyp_env on disk.
+
+import os
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.dirname(SCRIPT_DIR)
+
+
+def apply_gyp_environment_from_file(file_path):
+  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
+  if not os.path.exists(file_path):
+    return
+  with open(file_path, 'rU') as f:
+    file_contents = f.read()
+  try:
+    file_data = eval(file_contents, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = os.path.abspath(file_path)
+    raise
+  supported_vars = (
+      'CC',
+      'CC_wrapper',
+      'CC.host_wrapper',
+      'CHROMIUM_GYP_FILE',
+      'CHROMIUM_GYP_SYNTAX_CHECK',
+      'CXX',
+      'CXX_wrapper',
+      'CXX.host_wrapper',
+      'GYP_DEFINES',
+      'GYP_GENERATOR_FLAGS',
+      'GYP_CROSSCOMPILE',
+      'GYP_GENERATOR_OUTPUT',
+      'GYP_GENERATORS',
+      'GYP_INCLUDE_FIRST',
+      'GYP_INCLUDE_LAST',
+      'GYP_MSVS_VERSION',
+  )
+  for var in supported_vars:
+    file_val = file_data.get(var)
+    if file_val:
+      if var in os.environ:
+        behavior = 'replaces'
+        if var == 'GYP_DEFINES':
+          result = file_val + ' ' + os.environ[var]
+          behavior = 'merges with, and individual components override,'
+        else:
+          result = os.environ[var]
+        print 'INFO: Environment value for "%s" %s value in %s' % (
+            var, behavior, os.path.abspath(file_path)
+        )
+        string_padding = max(len(var), len(file_path), len('result'))
+        print '  %s: %s' % (var.rjust(string_padding), os.environ[var])
+        print '  %s: %s' % (file_path.rjust(string_padding), file_val)
+        os.environ[var] = result
+      else:
+        os.environ[var] = file_val
+
+
+def apply_chromium_gyp_env():
+  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
+    # Update the environment based on chromium.gyp_env
+    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
+    apply_gyp_environment_from_file(path)
diff --git a/build/gypi_to_gn.py b/build/gypi_to_gn.py
new file mode 100644
index 00000000000..08007088a89
--- /dev/null
+++ b/build/gypi_to_gn.py
@@ -0,0 +1,191 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Converts a given gypi file to a python scope and writes the result to stdout.
+
+USING THIS SCRIPT IN CHROMIUM
+
+Forking Python to run this script in the middle of GN is slow, especially on
+Windows, and it makes both the GYP and GN files harder to follow. You can't
+use "git grep" to find files in the GN build any more, and tracking everything
+in GYP down requires a level of indirection. Any calls will have to be removed
+and cleaned up once the GYP-to-GN transition is complete.
+
+As a result, we only use this script when the list of files is large and
+frequently-changing. In these cases, having one canonical list outweighs the
+downsides.
+
+As of this writing, the GN build is basically complete. It's likely that all
+large and frequently changing targets where this is appropriate use this
+mechanism already. And since we hope to turn down the GYP build soon, the time
+horizon is also relatively short. As a result, it is likely that no additional
+uses of this script should ever be added to the build. During this later part
+of the transition period, we should be focusing more and more on the absolute
+readability of the GN build.
+
+
+HOW TO USE
+
+It is assumed that the file contains a toplevel dictionary, and this script
+will return that dictionary as a GN "scope" (see example below). This script
+does not know anything about GYP and it will not expand variables or execute
+conditions.
+
+It will strip conditions blocks.
+
+A variables block at the top level will be flattened so that the variables
+appear in the root dictionary. This way they can be returned to the GN code.
+
+Say your_file.gypi looked like this:
+  {
+    'sources': [ 'a.cc', 'b.cc' ],
+    'defines': [ 'ENABLE_DOOM_MELON' ],
+  }
+
+You would call it like this:
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi") ],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+Notes:
+ - The rebase_path call converts the gypi file from being relative to the
+   current build file to being system absolute for calling the script, which
+   will have a different current directory than this file.
+
+ - The "scope" parameter tells GN to interpret the result as a series of GN
+   variable assignments.
+
+ - The last file argument to exec_script tells GN that the given file is a
+   dependency of the build so Ninja can automatically re-run GN if the file
+   changes.
+
+Read the values into a target like this:
+  component("mycomponent") {
+    sources = gypi_values.sources
+    defines = gypi_values.defines
+  }
+
+Sometimes your .gypi file will include paths relative to a different
+directory than the current .gn file. In this case, you can rebase them to
+be relative to the current directory.
+  sources = rebase_path(gypi_values.sources, ".",
+                        "//path/gypi/input/values/are/relative/to")
+
+This script will tolerate a toplevel dictionary with or without a 'variables'
+block. If the toplevel dictionary just contains one item called 'variables',
+it will be collapsed away and the result will be the contents of that
+dictionary. Some .gypi files are written with or without this, depending on
+how they expect to be embedded into a .gyp file.
+
+This script also has the ability to replace certain substrings in the input.
+Generally this is used to emulate GYP variable expansion. If you passed the
+argument "--replace=<(foo)=bar" then all instances of "<(foo)" in strings in
+the input will be replaced with "bar":
+
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi"),
+                              "--replace=<(foo)=bar"],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+"""
+
+import gn_helpers
+from optparse import OptionParser
+import sys
+
+def LoadPythonDictionary(path):
+  file_string = open(path).read()
+  try:
+    file_data = eval(file_string, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = path
+    raise
+  except Exception, e:
+    raise Exception("Unexpected error while reading %s: %s" % (path, str(e)))
+
+  assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
+
+  # Flatten any variables to the top level.
+  if 'variables' in file_data:
+    file_data.update(file_data['variables'])
+    del file_data['variables']
+
+  # Strip all elements that this script can't process.
+  elements_to_strip = [
+    'conditions',
+    'target_conditions',
+    'targets',
+    'includes',
+    'actions',
+  ]
+  for element in elements_to_strip:
+    if element in file_data:
+      del file_data[element]
+
+  return file_data
+
+
+def ReplaceSubstrings(values, search_for, replace_with):
+  """Recursively replaces substrings in a value.
+
+  Replaces all substrings of the "search_for" with "replace_with" for all
+  strings occurring in "values". This is done by recursively iterating into
+  lists as well as the keys and values of dictionaries."""
+  if isinstance(values, str):
+    return values.replace(search_for, replace_with)
+
+  if isinstance(values, list):
+    return [ReplaceSubstrings(v, search_for, replace_with) for v in values]
+
+  if isinstance(values, dict):
+    # For dictionaries, do the search for both the keys and the values.
+    result = {}
+    for key, value in values.items():
+      new_key = ReplaceSubstrings(key, search_for, replace_with)
+      new_value = ReplaceSubstrings(value, search_for, replace_with)
+      result[new_key] = new_value
+    return result
+
+  # Assume everything else is unchanged.
+  return values
+
+def main():
+  parser = OptionParser()
+  parser.add_option("-r", "--replace", action="append",
+    help="Replaces substrings. If passed a=b, replaces all substrs a with b.")
+  (options, args) = parser.parse_args()
+
+  if len(args) != 1:
+    raise Exception("Need one argument which is the .gypi file to read.")
+
+  data = LoadPythonDictionary(args[0])
+  if options.replace:
+    # Do replacements for all specified patterns.
+    for replace in options.replace:
+      split = replace.split('=')
+      # Allow "foo=" to replace with nothing.
+      if len(split) == 1:
+        split.append('')
+      assert len(split) == 2, "Replacement must be of the form 'key=value'."
+      data = ReplaceSubstrings(data, split[0], split[1])
+
+  # Sometimes .gypi files use the GYP syntax with percents at the end of the
+  # variable name (to indicate not to overwrite a previously-defined value):
+  #   'foo%': 'bar',
+  # Convert these to regular variables. Iterate over a copy of the keys since
+  # the dictionary is mutated inside the loop.
+  for key in data.keys():
+    if len(key) > 1 and key[len(key) - 1] == '%':
+      data[key[:-1]] = data[key]
+      del data[key]
+
+  print gn_helpers.ToGNString(data)
+
+if __name__ == '__main__':
+  try:
+    main()
+  except Exception, e:
+    print str(e)
+    sys.exit(1)
diff --git a/build/host_jar.gypi b/build/host_jar.gypi
new file mode 100644
index 00000000000..a7e6ab5b3ba
--- /dev/null
+++ b/build/host_jar.gypi
@@ -0,0 +1,149 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule to build
+# a JAR file for use on a host in a consistent manner. If a main class is
+# specified, this file will also generate an executable to run the jar in the
+# output folder's /bin/ directory.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_jar',
+#   'type': 'none',
+#   'variables': {
+#     'src_paths': [
+#       'path/to/directory',
+#       'path/to/other/directory',
+#       'path/to/individual_file.java',
+#       ...
+#     ],
+#   },
+#   'includes': [ 'path/to/this/gypi/file' ],
+# }
+#
+# Required variables:
+#  src_paths - A list of all paths containing java files that should be
+#    included in the jar. Paths can be either directories or files.
+# Optional/automatic variables:
+#  excluded_src_paths - A list of all paths that should be excluded from
+#    the jar.
+#  generated_src_dirs - Directories containing additional .java files
+#    generated at build time.
+#  input_jars_paths - A list of paths to the jars that should be included
+#    in the classpath.
+#  main_class - The class containing the main() function that should be called
+#    when running the jar file.
+#  jar_excluded_classes - A list of .class files that should be excluded
+#    from the jar.
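+#  wrapper_script_name - Name of the wrapper script created in the output
+#    folder's /bin/ directory. Defaults to the target name.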
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs',
+  ],
+  'variables': {
+    'classes_dir': '<(intermediate_dir)/classes',
+    'excluded_src_paths': [],
+    'generated_src_dirs': [],
+    'input_jars_paths': [],
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'jar_dir': '<(PRODUCT_DIR)/lib.java',
+    'jar_excluded_classes': [],
+    'jar_name': '<(_target_name).jar',
+    'jar_path': '<(jar_dir)/<(jar_name)',
+    'main_class%': '',
+    'stamp': '<(intermediate_dir)/jar.stamp',
+    'conditions': [
+      ['chromium_code == 0', {
+        'enable_errorprone': 0,
+      }],
+    ],
+    'enable_errorprone%': 0,
+    'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone',
+    'wrapper_script_name%': '<(_target_name)',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(jar_path)']
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling <(_target_name) java sources',
+      'variables': {
+        'extra_args': [],
+        'extra_inputs': [],
+        'java_sources': [ '>!@(find >(src_paths) -name "*.java")' ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '>@(java_sources)',
+        '>@(input_jars_paths)',
+        '<@(extra_inputs)',
+      ],
+      'outputs': [
+        '<(jar_path)',
+        '<(stamp)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--classpath=>(input_jars_paths)',
+        '--src-gendirs=>(generated_src_dirs)',
+        '--chromium-code=<(chromium_code)',
+        '--stamp=<(stamp)',
+        '--jar-path=<(jar_path)',
+        '<@(extra_args)',
+        '>@(java_sources)',
+      ],
+    },
+  ],
+  'conditions': [
+    ['main_class != ""', {
+      'actions': [
+        {
+          'action_name': 'create_java_binary_script_<(_target_name)',
+          'message': 'Creating java binary script <(_target_name)',
+          'variables': {
+            'output': '<(PRODUCT_DIR)/bin/<(wrapper_script_name)',
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/create_java_binary_script.py',
+            '<(jar_path)',
+          ],
+          'outputs': [
+            '<(output)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/create_java_binary_script.py',
+            '--classpath=>(input_jars_paths)',
+            '--jar-path=<(jar_path)',
+            '--output=<(output)',
+            '--main-class=>(main_class)',
+          ]
+        }
+      ]
+    }],
+    ['enable_errorprone == 1', {
+      'dependencies': [
+        '<(DEPTH)/third_party/errorprone/errorprone.gyp:require_errorprone',
+      ],
+    }],
+  ]
+}
+
diff --git a/build/host_prebuilt_jar.gypi b/build/host_prebuilt_jar.gypi
new file mode 100644
index 00000000000..feed5caebb5
--- /dev/null
+++ b/build/host_prebuilt_jar.gypi
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule to
+# copy a prebuilt JAR for use on a host to the output directory.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_prebuilt_jar',
+#   'type': 'none',
+#   'variables': {
+#     'jar_path': 'path/to/prebuilt.jar',
+#   },
+#   'includes': [ 'path/to/this/gypi/file' ],
+# }
+#
+# Required variables:
+#  jar_path - The path to the prebuilt jar.
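+#
+# The jar is copied to <(PRODUCT_DIR)/lib.java/<target_name>.jar; the
+# 'my_prebuilt_jar' example above thus produces lib.java/my_prebuilt_jar.jar.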
+
+{
+  'dependencies': [
+  ],
+  'variables': {
+    'dest_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).jar',
+    'src_path': '<(jar_path)',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': [
+        '<(dest_path)',
+      ]
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'copy_prebuilt_jar',
+      'message': 'Copy <(src_path) to <(dest_path)',
+      'inputs': [
+        '<(src_path)',
+      ],
+      'outputs': [
+        '<(dest_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/cp.py', '<(src_path)', '<(dest_path)',
+      ],
+    }
+  ]
+}
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
new file mode 100644
index 00000000000..172279b2ecc
--- /dev/null
+++ b/build/install-build-deps-android.sh
@@ -0,0 +1,100 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium on android, including
+# items requiring sudo privileges.
+# See https://www.chromium.org/developers/how-tos/android-build-instructions
+
+# This script installs the OpenJDK packages (jre and jdk). The exact version
+# depends on the Ubuntu release: OpenJDK 8 on xenial, OpenJDK 7 otherwise
+# (see the case statement below).
+
+args=("$@")
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+  echo "Only x86 architectures are currently supported" >&2
+  exit
+fi
+
+lsb_release=$(lsb_release --codename --short)
+
+case $lsb_release in
+  xenial)
+    java_alternative="java-1.8.0-openjdk-amd64"
+    java_pkgs="openjdk-8-jre openjdk-8-jdk"
+    ;;
+  *)
+    java_alternative="java-1.7.0-openjdk-amd64"
+    java_pkgs="openjdk-7-jre openjdk-7-jdk"
+    ;;
+esac
+
+# Install first the default Linux build deps.
+"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
+  --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt \
+  "${args[@]}"
+
+# The temporary directory used to store output of update-java-alternatives
+TEMPDIR=$(mktemp -d)
+cleanup() {
+  local status=${?}
+  trap - EXIT
+  rm -rf "${TEMPDIR}"
+  exit ${status}
+}
+trap cleanup EXIT
+
+# Fix deps
+sudo apt-get -f install
+
+# Install deps
+# This step differs depending on what Ubuntu release we are running
+# on since the package names are different.
+
+# common
+sudo apt-get -y install lighttpd python-pexpect xvfb x11-utils
+
+# Some binaries in the Android SDK require 32-bit libraries on the host.
+# See https://developer.android.com/sdk/installing/index.html?pkg=tools
+if [[ $lsb_release == "precise" ]]; then
+  sudo apt-get -y install ia32-libs
+else
+  sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
+fi
+
+sudo apt-get -y install ant
+
+# Install the OpenJDK JRE and JDK packages selected above.
+sudo apt-get -y install $java_pkgs
+
+# Switch the default Java to the OpenJDK version selected above.
+# Some Java plugins (e.g. for firefox, mozilla) are not required to build, and
+# thus are treated only as warnings. Any errors in updating java alternatives
+# which are not '*-javaplugin.so' will cause errors and stop the script from
+# completing successfully.
+if ! sudo update-java-alternatives -s $java_alternative \
+    >& "${TEMPDIR}"/update-java-alternatives.out
+then
+  # Check that there are the expected javaplugin.so errors for the update
+  if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \
+    /dev/null
+  then
+    # Print as warnings all the javaplugin.so errors
+    echo 'WARNING: java-6-sun has no alternatives for the following plugins:'
+    grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+  fi
+  # Check if there are any errors that are not javaplugin.so
+  if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \
+    >& /dev/null
+  then
+    # If there are non-javaplugin.so errors, treat as errors and exit
+    echo 'ERRORS: Failed to update alternatives for java-6-sun:'
+    grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+    exit 1
+  fi
+fi
+
+echo "install-build-deps-android.sh complete."
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
new file mode 100644
index 00000000000..832d116beee
--- /dev/null
+++ b/build/install-build-deps.sh
@@ -0,0 +1,507 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See https://chromium.googlesource.com/chromium/src/+/master/docs/linux_build_instructions.md
+
+usage() {
+  echo "Usage: $0 [--options]"
+  echo "Options:"
+  echo "--[no-]syms: enable or disable installation of debugging symbols"
+  echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
+  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+  echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
+       "fonts"
+  echo "--[no-]nacl: enable or disable installation of prerequisites for"\
+       "building standalone NaCl and all its toolchains"
+  echo "--no-prompt: silently select standard options/defaults"
+  echo "--quick-check: quickly try to determine if dependencies are installed"
+  echo "               (this avoids interactive prompts and sudo commands,"
+  echo "               so might not be 100% accurate)"
+  echo "--unsupported: attempt installation even on unsupported systems"
+  echo "Script will prompt interactively if options not given."
+  exit 1
+}
+
+# Checks whether a particular package is available in the repos.
+# USAGE: $ package_exists <package name>
+package_exists() {
+  apt-cache pkgnames | grep -x "$1" > /dev/null 2>&1
+}
+
+# These default to on because (some) bots need them and it keeps things
+# simple for the bot setup if all bots just run the script in its default
+# mode. Developers who don't want stuff they don't need installed on their
+# own workstations can pass --no-arm --no-nacl when running the script.
+do_inst_arm=1
+do_inst_nacl=1
+
+while test "$1" != ""
+do
+  case "$1" in
+  --syms) do_inst_syms=1;;
+  --no-syms) do_inst_syms=0;;
+  --lib32) do_inst_lib32=1;;
+  --arm) do_inst_arm=1;;
+  --no-arm) do_inst_arm=0;;
+  --chromeos-fonts) do_inst_chromeos_fonts=1;;
+  --no-chromeos-fonts) do_inst_chromeos_fonts=0;;
+  --nacl) do_inst_nacl=1;;
+  --no-nacl) do_inst_nacl=0;;
+  --no-prompt) do_default=1
+    do_quietly="-qq --assume-yes"
+    ;;
+  --quick-check) do_quick_check=1;;
+  --unsupported) do_unsupported=1;;
+  *) usage;;
+  esac
+  shift
+done
+
+if test "$do_inst_arm" = "1"; then
+  do_inst_lib32=1
+fi
+
+# Check for lsb_release command in $PATH
+if !
which lsb_release > /dev/null; then + echo "ERROR: lsb_release not found in \$PATH" >&2 + exit 1; +fi + +lsb_release=$(lsb_release --codename --short) +ubuntu_codenames="(precise|trusty|utopic|vivid|wily|xenial)" +if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then + if [[ ! $lsb_release =~ $ubuntu_codenames ]]; then + echo "ERROR: Only Ubuntu 12.04 (precise), 14.04 (trusty), " \ + "14.10 (utopic), 15.04 (vivid), 15.10 (wily) and 16.04 (xenial) " \ + "are currently supported" >&2 + exit 1 + fi + + if ! uname -m | egrep -q "i686|x86_64"; then + echo "Only x86 architectures are currently supported" >&2 + exit + fi +fi + +if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then + echo "Running as non-root user." + echo "You might have to enter your password one or more times for 'sudo'." + echo +fi + +# Packages needed for chromeos only +chromeos_dev_list="libbluetooth-dev libxkbcommon-dev realpath" + +# Packages needed for development +dev_list="bison cdbs curl dpkg-dev elfutils devscripts fakeroot + flex fonts-thai-tlwg g++ git-core git-svn gperf language-pack-da + language-pack-fr language-pack-he language-pack-zh-hant + libasound2-dev libbrlapi-dev libav-tools + libbz2-dev libcairo2-dev libcap-dev libcups2-dev libcurl4-gnutls-dev + libdrm-dev libelf-dev libffi-dev libgconf2-dev libglib2.0-dev + libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev libkrb5-dev + libnspr4-dev libnss3-dev libpam0g-dev libpci-dev libpulse-dev + libsctp-dev libspeechd-dev libsqlite3-dev libssl-dev libudev-dev + libwww-perl libxslt1-dev libxss-dev libxt-dev libxtst-dev openbox + patch perl pkg-config python python-cherrypy3 python-crypto + python-dev python-numpy python-opencv python-openssl python-psutil + python-yaml rpm ruby subversion ttf-dejavu-core wdiff xcompmgr zip + $chromeos_dev_list" + +# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built +# NaCl binaries. +if file -L /sbin/init | grep -q 'ELF 64-bit'; then + dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6" +fi + +# Run-time libraries required by chromeos only +chromeos_lib_list="libpulse0 libbz2-1.0" + +# Full list of required run-time libraries +lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcap2 libcups2 libexpat1 + libffi6 libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0 + libgtk2.0-0 libpam0g libpango1.0-0 libpci3 libpcre3 libpixman-1-0 + libpng12-0 libspeechd2 libstdc++6 libsqlite3-0 libx11-6 libx11-xcb1 + libxau6 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxdmcp6 + libxext6 libxfixes3 libxi6 libxinerama1 libxrandr2 libxrender1 + libxtst6 zlib1g $chromeos_lib_list" + +# Debugging symbols for all of the run-time libraries +dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libffi6-dbg libfontconfig1-dbg + libglib2.0-0-dbg libgtk2.0-0-dbg libpango1.0-0-dbg libpcre3-dbg + libpixman-1-0-dbg libsqlite3-0-dbg libx11-6-dbg libx11-xcb1-dbg + libxau6-dbg libxcb1-dbg libxcomposite1-dbg libxcursor1-dbg + libxdamage1-dbg libxdmcp6-dbg libxext6-dbg libxfixes3-dbg libxi6-dbg + libxinerama1-dbg libxrandr2-dbg libxrender1-dbg libxtst6-dbg + zlib1g-dbg" + +# Find the proper version of libstdc++6-4.x-dbg. +if [ "x$lsb_release" = "xprecise" ]; then + dbg_list="${dbg_list} libstdc++6-4.6-dbg" +elif [ "x$lsb_release" = "xtrusty" ]; then + dbg_list="${dbg_list} libstdc++6-4.8-dbg" +else + dbg_list="${dbg_list} libstdc++6-4.9-dbg" +fi + +# 32-bit libraries needed e.g. 
to compile V8 snapshot for Android or armhf
+lib32_list="linux-libc-dev:i386"
+
+# arm cross toolchain packages needed to build chrome on armhf
+arm_list="libc6-dev-armhf-cross
+          linux-libc-dev-armhf-cross
+          g++-arm-linux-gnueabihf"
+
+# Work around a dependency issue on Ubuntu/Trusty: http://crbug.com/435056
+case $lsb_release in
+  trusty)
+    arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
+                gcc-4.8-multilib-arm-linux-gnueabihf"
+    ;;
+  wily)
+    arm_list+=" g++-5-multilib-arm-linux-gnueabihf
+                gcc-5-multilib-arm-linux-gnueabihf
+                gcc-arm-linux-gnueabihf"
+    ;;
+  xenial)
+    arm_list+=" g++-5-multilib-arm-linux-gnueabihf
+                gcc-5-multilib-arm-linux-gnueabihf
+                gcc-arm-linux-gnueabihf"
+    ;;
+esac
+
+# Packages to build NaCl, its toolchains, and its ports.
+naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
+nacl_list="g++-mingw-w64-i686 lib32z1-dev
+           libasound2:i386 libcap2:i386 libelf-dev:i386 libfontconfig1:i386
+           libgconf-2-4:i386 libglib2.0-0:i386 libgpm2:i386 libgtk2.0-0:i386
+           libncurses5:i386 lib32ncurses5-dev libnss3:i386 libpango1.0-0:i386
+           libssl1.0.0:i386 libtinfo-dev libtinfo-dev:i386 libtool
+           libxcomposite1:i386 libxcursor1:i386 libxdamage1:i386 libxi6:i386
+           libxrandr2:i386 libxss1:i386 libxtst6:i386 texinfo xvfb
+           ${naclports_list}"
+
+# Find the proper version of packages that depend on mesa. Only one -lts variant
+# of mesa can be installed and everything that depends on it must match.
+
+# Query for the name and status of all mesa LTS variants, filter for only
+# installed packages, extract just the name, and eliminate duplicates (there can
+# be more than one with the same name in the case of multiarch). Expand into an
+# array.
+mesa_packages=($(dpkg-query -Wf'${package} ${status}\n' \
+                            libgl1-mesa-glx-lts-\* 2>/dev/null | \
+                 grep " ok installed" | cut -d " " -f 1 | sort -u))
+if [ "${#mesa_packages[@]}" -eq 0 ]; then
+  mesa_variant=""
+elif [ "${#mesa_packages[@]}" -eq 1 ]; then
+  # Strip the base package name and leave just "-lts-whatever"
+  mesa_variant="${mesa_packages[0]#libgl1-mesa-glx}"
+else
+  echo "ERROR: unable to determine which libgl1-mesa-glx variant is installed."
+  exit 1
+fi
+dev_list="${dev_list} libgbm-dev${mesa_variant}
+          libgles2-mesa-dev${mesa_variant} libgl1-mesa-dev${mesa_variant}
+          mesa-common-dev${mesa_variant}"
+nacl_list="${nacl_list} libgl1-mesa-glx${mesa_variant}:i386"
+
+# Some package names have changed over time
+if package_exists ttf-mscorefonts-installer; then
+  dev_list="${dev_list} ttf-mscorefonts-installer"
+else
+  dev_list="${dev_list} msttcorefonts"
+fi
+if package_exists libnspr4-dbg; then
+  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
+  lib_list="${lib_list} libnspr4 libnss3"
+else
+  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if package_exists libjpeg-dev; then
+  dev_list="${dev_list} libjpeg-dev"
+else
+  dev_list="${dev_list} libjpeg62-dev"
+fi
+if package_exists libudev1; then
+  dev_list="${dev_list} libudev1"
+  nacl_list="${nacl_list} libudev1:i386"
+else
+  dev_list="${dev_list} libudev0"
+  nacl_list="${nacl_list} libudev0:i386"
+fi
+if package_exists libbrlapi0.6; then
+  dev_list="${dev_list} libbrlapi0.6"
+else
+  dev_list="${dev_list} libbrlapi0.5"
+fi
+if package_exists apache2-bin; then
+  dev_list="${dev_list} apache2-bin"
+else
+  dev_list="${dev_list} apache2.2-bin"
+fi
+if package_exists xfonts-mathml; then
+  dev_list="${dev_list} xfonts-mathml"
+fi
+if package_exists fonts-indic; then
+  dev_list="${dev_list} fonts-indic"
+else
+  dev_list="${dev_list} ttf-indic-fonts"
+fi
+if package_exists php7.0-cgi; then
+  dev_list="${dev_list} php7.0-cgi libapache2-mod-php7.0"
+else
+  dev_list="${dev_list} php5-cgi libapache2-mod-php5"
+fi
+# Ubuntu 16.04 has this package deleted.
+if package_exists ttf-kochi-gothic; then
+  dev_list="${dev_list} ttf-kochi-gothic"
+fi
+# Ubuntu 16.04 has this package deleted.
+if package_exists ttf-kochi-mincho; then
+  dev_list="${dev_list} ttf-kochi-mincho"
+fi
+
+# Some packages are only needed if the distribution actually supports
+# installing them.
+if package_exists appmenu-gtk; then
+  lib_list="$lib_list appmenu-gtk"
+fi
+
+# When cross building for arm/Android on 64-bit systems the host binaries
+# that are part of v8 need to be compiled with -m32 which means
+# that basic multilib support is needed.
+if file -L /sbin/init | grep -q 'ELF 64-bit'; then
+  # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but
+  # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the
+  # appropriate value of X and Y by seeing what version the current
+  # distribution's g++-multilib package depends on.
+  multilib_package=$(apt-cache depends g++-multilib --important | \
+      grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b')
+  lib32_list="$lib32_list $multilib_package"
+fi
+
+# Waits for the user to press 'Y' or 'N'. Either uppercase or lowercase is
+# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
+# been provided to yes_no(), the function also accepts RETURN as a user input.
+# The parameter specifies the exit code that should be returned in that case.
+# The function will echo the user's selection followed by a newline character.
+# Users can abort the function by pressing CTRL-C. This will call "exit 1".
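+#
+# Example (as used below): "if yes_no 1; then do_inst_syms=1; fi" prompts the
+# user and, because of the optional parameter, treats a bare RETURN as "N"
+# (exit status 1), so installing symbols defaults to off.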
+# Waits for the user to press 'Y' or 'N'. Either uppercase or lowercase is
+# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
+# been provided to yes_no(), the function also accepts RETURN as a user input.
+# The parameter specifies the exit code that should be returned in that case.
+# The function will echo the user's selection followed by a newline character.
+# Users can abort the function by pressing CTRL-C. This will call "exit 1".
+yes_no() {
+  if [ 0 -ne "${do_default-0}" ] ; then
+    [ $1 -eq 0 ] && echo "Y" || echo "N"
+    return $1
+  fi
+  local c
+  while :; do
+    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
+         stty -echo iuclc -icanon 2>/dev/null
+         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
+    case "$c" in
+      " 0a") if [ -n "$1" ]; then
+               [ $1 -eq 0 ] && echo "Y" || echo "N"
+               return $1
+             fi
+             ;;
+      " 79") echo "Y"
+             return 0
+             ;;
+      " 6e") echo "N"
+             return 1
+             ;;
+      "")    echo "Aborted" >&2
+             exit 1
+             ;;
+      *)     # The user pressed an unrecognized key. As we are not echoing
+             # any incorrect user input, alert the user by ringing the bell.
+             (tput bel) 2>/dev/null
+             ;;
+    esac
+  done
+}
+
+if test "$do_inst_syms" = "" && test 0 -eq ${do_quick_check-0}
+then
+  echo "This script installs all tools and libraries needed to build Chromium."
+  echo ""
+  echo "For most of the libraries, it can also install debugging symbols, which"
+  echo "will allow you to debug code in the system libraries. Most developers"
+  echo "won't need these symbols."
+  echo -n "Do you want me to install them for you (y/N) "
+  if yes_no 1; then
+    do_inst_syms=1
+  fi
+fi
+if test "$do_inst_syms" = "1"; then
+  echo "Including debugging symbols."
+else
+  echo "Skipping debugging symbols."
+  dbg_list=
+fi
+
+if test "$do_inst_lib32" = "1" ; then
+  echo "Including 32-bit libraries for ARM/Android."
+else
+  echo "Skipping 32-bit libraries for ARM/Android."
+  lib32_list=
+fi
+
+if test "$do_inst_arm" = "1" ; then
+  echo "Including ARM cross toolchain."
+else
+  echo "Skipping ARM cross toolchain."
+  arm_list=
+fi
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
+else
+  echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
+  nacl_list=
+fi
+
+# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
+# confusing dpkg-query (crbug.com/446172).
+packages="$(
+  echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}"\
+       "${nacl_list}" | tr " " "\n" | sort -u | sort -r -s -t: -k2 | tr "\n" " "
+)"
+
+if [ 1 -eq "${do_quick_check-0}" ] ; then
+  failed_check="$(dpkg-query -W -f '${PackageSpec}:${Status}\n' \
+    ${packages} 2>&1 | grep -v "ok installed" || :)"
+  if [ -n "${failed_check}" ]; then
+    echo
+    nomatch="$(echo "${failed_check}" | \
+      sed -e "s/^No packages found matching \(.*\).$/\1/;t;d")"
+    missing="$(echo "${failed_check}" | \
+      sed -e "/^No packages found matching/d;s/^\(.*\):.*$/\1/")"
+    if [ "$nomatch" ]; then
+      # Distinguish between packages that actually aren't available to the
+      # system (i.e. not in any repo) and packages that just aren't known to
+      # dpkg (i.e. managed by apt).
+      unknown=""
+      for p in ${nomatch}; do
+        if apt-cache show ${p} > /dev/null 2>&1; then
+          missing="${p}\n${missing}"
+        else
+          unknown="${p}\n${unknown}"
+        fi
+      done
+      if [ -n "${unknown}" ]; then
+        echo "WARNING: The following packages are unknown to your system"
+        echo "(maybe missing a repo or need to 'sudo apt-get update'):"
+        echo -e "${unknown}" | sed -e "s/^/ /"
+      fi
+    fi
+    if [ -n "${missing}" ]; then
+      echo "WARNING: The following packages are not installed:"
+      echo -e "${missing}" | sed -e "s/^/ /"
+    fi
+    exit 1
+  fi
+  exit 0
+fi
+
+if test "$do_inst_lib32" = "1" || test "$do_inst_nacl" = "1"; then
+  if [[ ! $lsb_release =~ (precise) ]]; then
+    sudo dpkg --add-architecture i386
+  fi
+fi
+sudo apt-get update
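+# Illustration only (not part of the patch): the stable reverse sort on the
+# text after ":" moves ":i386" entries to the front while leaving the rest
+# alphabetical, e.g.
+#   $ printf 'libfoo\nlibbar:i386\nlibbaz\n' | sort -u | sort -r -s -t: -k2
+#   libbar:i386
+#   libbaz
+#   libfoo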
+# We initially run "apt-get" with the --reinstall option and parse its output.
+# This way, we can find all the packages that need to be newly installed
+# without accidentally promoting any packages from "auto" to "manual".
+# We then re-run "apt-get" with just the list of missing packages.
+echo "Finding missing packages..."
+# Intentionally leaving $packages unquoted so it's more readable.
+echo "Packages required: " $packages
+echo
+new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
+if new_list="$(yes n | LANGUAGE=en LANG=C $new_list_cmd)"; then
+  # We probably never hit the following line.
+  echo "No missing packages, and the packages are up to date."
+elif [ $? -eq 1 ]; then
+  # We expect apt-get to have exit status of 1.
+  # This indicates that we cancelled the install with "yes n|".
+  new_list=$(echo "$new_list" |
+    sed -e '1,/The following NEW packages will be installed:/d;s/^ //;t;d')
+  new_list=$(echo "$new_list" | sed 's/ *$//')
+  if [ -z "$new_list" ] ; then
+    echo "No missing packages, and the packages are up to date."
+  else
+    echo "Installing missing packages: $new_list."
+    sudo apt-get install ${do_quietly-} ${new_list}
+  fi
+  echo
+else
+  # An apt-get exit status of 100 indicates that a real error has occurred.
+
+  # I am intentionally leaving out the '"'s around new_list_cmd,
+  # as this makes it easier to cut and paste the output.
+  echo "The following command failed: " ${new_list_cmd}
+  echo
+  echo "It produces the following output:"
+  yes n | $new_list_cmd || true
+  echo
+  echo "You will have to install the above packages yourself."
+  echo
+  exit 100
+fi
+
+# Install the Chrome OS default fonts. This must go after running
+# apt-get, since install-chromeos-fonts depends on curl.
+if test "$do_inst_chromeos_fonts" != "0"; then
+  echo
+  echo "Installing Chrome OS fonts."
+  dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
+  if ! sudo $dir/linux/install-chromeos-fonts.py; then
+    echo "ERROR: The installation of the Chrome OS default fonts failed."
+    if [ `stat -f -c %T $dir` == "nfs" ]; then
+      echo "The reason is that your repo is installed on a remote file system."
+    else
+      echo "This is expected if your repo is installed on a remote file system."
+    fi
+    echo "It is recommended to install your repo on a local file system."
+    echo "You can skip the installation of the Chrome OS default fonts with"
+    echo "the command line option: --no-chromeos-fonts."
+    exit 1
+  fi
+else
+  echo "Skipping installation of Chrome OS fonts."
+fi
+
+# $1 - target name
+# $2 - link name
+create_library_symlink() {
+  target=$1
+  linkname=$2
+  if [ -L $linkname ]; then
+    if [ "$(basename $(readlink $linkname))" != "$(basename $target)" ]; then
+      sudo rm $linkname
+    fi
+  fi
+  if [ ! -r $linkname ]; then
+    echo "Creating link: $linkname"
+    sudo ln -fs $target $linkname
+  fi
+}
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Installing symbolic links for NaCl."
+  # naclports needs to cross-build python for i386, but libssl1.0.0:i386
+  # only contains libcrypto.so.1.0.0 and not the symlink needed for
+  # linking (libcrypto.so).
+  create_library_symlink /lib/i386-linux-gnu/libcrypto.so.1.0.0 \
+      /usr/lib/i386-linux-gnu/libcrypto.so
+
+  create_library_symlink /lib/i386-linux-gnu/libssl.so.1.0.0 \
+      /usr/lib/i386-linux-gnu/libssl.so
+else
+  echo "Skipping symbolic links for NaCl."
+fi
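+# Illustration only (not part of the patch): a typical invocation on a
+# checkout that lives on a network file system skips the font step with the
+# flag mentioned above, e.g.
+#   $ ./build/install-build-deps.sh --no-chromeos-fonts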
diff --git a/build/install-chroot.sh b/build/install-chroot.sh
new file mode 100644
index 00000000000..99451ed7ea4
--- /dev/null
+++ b/build/install-chroot.sh
@@ -0,0 +1,888 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can, for example, be used to have an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than
+# a Debian-derived system.
+
+# Older Debian based systems had both "admin" and "adm" groups, with "admin"
+# apparently being used in more places. Newer distributions have standardized
+# on just the "adm" group. Check /etc/group for the preferred name of the
+# administrator group.
+admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
+
+usage() {
+  echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+  echo "-b dir       additional directories that should be bind mounted,"
+  echo '             or "NONE".'
+  echo "             Default: if local filesystems present, ask user for help"
+  echo "-g group,... groups that can use the chroot unauthenticated"
+  echo "             Default: '${admin}' and current user's group ('$(id -gn)')"
+  echo "-l           List all installed chroot environments"
+  echo "-m mirror    an alternate repository mirror for package downloads"
+  echo "-s           configure default deb-srcs"
+  echo "-c           always copy 64bit helper binaries to 32bit chroot"
+  echo "-h           this help message"
+}
+
+process_opts() {
+  local OPTNAME OPTIND OPTERR OPTARG
+  while getopts ":b:g:lm:sch" OPTNAME; do
+    case "$OPTNAME" in
+      b)
+        if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
+          bind_mounts="${OPTARG}"
+        else
+          if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
+               ! -d "${OPTARG}" ]; then
+            echo "Invalid -b option(s)"
+            usage
+            exit 1
+          fi
+          bind_mounts="${bind_mounts}
+${OPTARG} ${OPTARG} none rw,bind 0 0"
+        fi
+        ;;
+      g)
+        [ -n "${OPTARG}" ] &&
+          chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+        ;;
+      l)
+        list_all_chroots
+        exit
+        ;;
+      m)
+        if [ -n "${mirror}" ]; then
+          echo "You can only specify exactly one mirror location"
+          usage
+          exit 1
+        fi
+        mirror="$OPTARG"
+        ;;
+      s)
+        add_srcs="y"
+        ;;
+      c)
+        copy_64="y"
+        ;;
+      h)
+        usage
+        exit 0
+        ;;
+      \:)
+        echo "'-$OPTARG' needs an argument."
+        usage
+        exit 1
+        ;;
+      *)
+        echo "invalid command-line option: $OPTARG"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  if [ $# -ge ${OPTIND} ]; then
+    eval echo "Unexpected command line argument: \${${OPTIND}}"
+    usage
+    exit 1
+  fi
+}
+
+list_all_chroots() {
+  for i in /var/lib/chroot/*; do
+    i="${i##*/}"
+    [ "${i}" = "*" ] && continue
+    [ -x "/usr/local/bin/${i%bit}" ] || continue
+    grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
+    [ -r "/etc/schroot/script-${i}" -a \
+      -r "/etc/schroot/mount-${i}" ] || continue
+    echo "${i%bit}"
+  done
+}
+
+getkey() {
+  (
+    trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
+    stty -echo iuclc -icanon 2>/dev/null
+    dd count=1 bs=1 2>/dev/null
+  )
+}
+
+chr() {
+  printf "\\$(printf '%03o' "$1")"
+}
+
+ord() {
+  printf '%d' $(printf '%c' "$1" | od -tu1 -An)
+}
+
+is_network_drive() {
+  stat -c %T -f "$1/" 2>/dev/null |
+    egrep -qs '^nfs|cifs|smbfs'
+}
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+  echo "Run this script as a regular user and provide your \"sudo\"" \
+       "password if requested" >&2
+  exit 1
+}
+
+process_opts "$@"
+
+echo "This script will help you through the process of installing a"
+echo "Debian or Ubuntu distribution in a chroot environment.
You will" +echo "have to provide your \"sudo\" password when requested." +echo + +# Error handler +trap 'exit 1' INT TERM QUIT HUP +trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT + +# Install any missing applications that this script relies on. If these packages +# are already installed, don't force another "apt-get install". That would +# prevent them from being auto-removed, if they ever become eligible for that. +# And as this script only needs the packages once, there is no good reason to +# introduce a hard dependency on things such as dchroot and debootstrap. +dep= +for i in dchroot debootstrap libwww-perl; do + [ -d /usr/share/doc/"$i" ] || dep="$dep $i" +done +[ -n "$dep" ] && sudo apt-get -y install $dep +sudo apt-get -y install schroot + +# Create directory for chroot +sudo mkdir -p /var/lib/chroot + +# Find chroot environments that can be installed with debootstrap +targets="$(cd /usr/share/debootstrap/scripts + ls | grep '^[a-z]*$')" + +# Ask user to pick one of the available targets +echo "The following targets are available to be installed in a chroot:" +j=1; for i in $targets; do + printf '%4d: %s\n' "$j" "$i" + j=$(($j+1)) +done +while :; do + printf "Which target would you like to install: " + read n + [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break +done +j=1; for i in $targets; do + [ "$j" -eq "$n" ] && { distname="$i"; break; } + j=$(($j+1)) +done +echo + +# On x86-64, ask whether the user wants to install x86-32 or x86-64 +archflag= +arch= +if [ "$(uname -m)" = x86_64 ]; then + while :; do + echo "You are running a 64bit kernel. This allows you to install either a" + printf "32bit or a 64bit chroot environment. %s" \ + "Which one do you want (32, 64) " + read arch + [ "${arch}" == 32 -o "${arch}" == 64 ] && break + done + [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64" + arch="${arch}bit" + echo +fi +target="${distname}${arch}" + +# Don't accidentally overwrite an existing installation +[ -d /var/lib/chroot/"${target}" ] && { + while :; do + echo "This chroot already exists on your machine." + if schroot -l --all-sessions 2>&1 | + sed 's/^session://' | + grep -qs "^${target%bit}-"; then + echo "And it appears to be in active use. Terminate all programs that" + echo "are currently using the chroot environment and then re-run this" + echo "script." + echo "If you still get an error message, you might have stale mounts" + echo "that you forgot to delete. You can always clean up mounts by" + echo "executing \"${target%bit} -c\"." + exit 1 + fi + echo "I can abort installation, I can overwrite the existing chroot," + echo "or I can delete the old one and then exit. What would you like to" + printf "do (a/o/d)? " + read choice + case "${choice}" in + a|A) exit 1;; + o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;; + d|D) sudo rm -rf "/var/lib/chroot/${target}" \ + "/usr/local/bin/${target%bit}" \ + "/etc/schroot/mount-${target}" \ + "/etc/schroot/script-${target}" \ + "/etc/schroot/${target}" + sudo sed -ni '/^[[]'"${target%bit}"']$/,${ + :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \ + "/etc/schroot/schroot.conf" + trap '' INT TERM QUIT HUP + trap '' EXIT + echo "Deleted!" + exit 0;; + esac + done + echo +} +sudo mkdir -p /var/lib/chroot/"${target}" + +# Offer to include additional standard repositories for Ubuntu-based chroots. 
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+  while :; do
+    echo "Would you like to add ${distname}-updates and ${distname}-security "
+    printf "to the chroot's sources.list (y/n)? "
+    read alt_repos
+    case "${alt_repos}" in
+      y|Y)
+        alt_repos="y"
+        break
+        ;;
+      n|N)
+        break
+        ;;
+    esac
+  done
+  echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment.
+# We limit ourselves to the first 26 mount points that match some basic
+# heuristics, because a) that allows us to enumerate choices with a single
+# character, and b) if we find more than 26 mount points, then these are
+# probably false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+  mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+                 $2 !~ "^/media" && $2 !~ "^/run" &&
+                 ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+                 $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+                 $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+                 $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+                   print $2
+                 }' /proc/mounts |
+            head -n26)"
+  if [ -n "${mounts}" ]; then
+    echo "You appear to have non-standard mount points that you"
+    echo "might want to import into the chroot environment:"
+    echo
+    sel=
+    while :; do
+      # Print a menu, listing all non-default mounts of local or network
+      # file systems.
+      j=1; for m in ${mounts}; do
+        c="$(printf $(printf '\\%03o' $((64+$j))))"
+        echo "$sel" | grep -qs $c &&
+          state="mounted in chroot" || state="$(tput el)"
+        printf "   $c) %-40s${state}\n" "$m"
+        j=$(($j+1))
+      done
+      # Allow user to interactively (de-)select any of the entries
+      echo
+      printf "Select mount points that you want to be included or press %s" \
+             "SPACE to continue"
+      c="$(getkey | tr a-z A-Z)"
+      [ "$c" == " " ] && { echo; echo; break; }
+      if [ -z "$c" ] ||
+         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
+          # Invalid input, ring the console bell
+          tput bel
+      else
+        # Toggle the selection for the given entry
+        if echo "$sel" | grep -qs $c; then
+          sel="$(printf "$sel" | sed "s/$c//")"
+        else
+          sel="$sel$c"
+        fi
+      fi
+      # Reposition cursor to the top of the list of entries
+      tput cuu $(($j + 1))
+      echo
+    done
+  fi
+  j=1; for m in ${mounts}; do
+    c="$(chr $(($j + 64)))"
+    if echo "$sel" | grep -qs $c; then
+      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+    fi
+    j=$(($j+1))
+  done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means, we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+    /etc/schroot/schroot.conf
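+# Illustration only (not part of the patch): the sed program above prints
+# every section except the one for this target, e.g.
+#   $ printf '[old]\nx=1\n[keep]\ny=2\n' > /tmp/demo.conf
+#   $ sed -ni '/^[[]old]$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p' /tmp/demo.conf
+#   $ cat /tmp/demo.conf        # prints "[keep]" and "y=2"; [old] is gone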
+# Download the base system. This takes some time.
+if [ -z "${mirror}" ]; then
+  grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+    mirror="http://archive.ubuntu.com/ubuntu" ||
+    mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+    "${distname}" "/var/lib/chroot/${target}" "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+  brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+  chroot_groups="${admin},$(id -gn)"
+fi
+
+if [ -d '/etc/schroot/default' ]; then
+  new_version=1
+  fstab="/etc/schroot/${target}/fstab"
+else
+  new_version=0
+  fstab="/etc/schroot/mount-${target}"
+fi
+
+if [ "$new_version" = "1" ]; then
+  sudo cp -ar /etc/schroot/default /etc/schroot/${target}
+
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+profile=${target}
+
+EOF
+  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+      /etc/schroot/default/fstab |
+    sudo sh -c "cat >>${fstab}"
+else
+  # Older versions of schroot wanted a "priority=" line, whereas recent
+  # versions deprecate "priority=" and warn if they see it. We don't have
+  # a good feature test, but scanning for the string "priority=" in the
+  # existing "schroot.conf" file is a good indication of what to do.
+  priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+             echo 'priority=3' || :)
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+
+  # Set up a list of mount points that is specific to this
+  # chroot environment.
+  sed '/^FSTAB=/s,"[^"]*","'"${fstab}"'",' \
+      /etc/schroot/script-defaults |
+    sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+      /etc/schroot/mount-defaults |
+    sudo sh -c "cat > ${fstab}"
+fi
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+  printf "${bind_mounts}" |
+    sudo sh -c 'cat >>'"${fstab}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+   ! grep -qs '^/media' "${fstab}"; then
+  echo '/media /media none rw,rbind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Share /dev/shm, /run and /run/shm.
+grep -qs '^/dev/shm' "${fstab}" ||
+  echo '/dev/shm /dev/shm none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+if [ ! -d "/var/lib/chroot/${target}/run" ] &&
+   ! grep -qs '^/run' "${fstab}"; then
+  echo '/run /run none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+if ! grep -qs '^/run/shm' "${fstab}"; then
+  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+    echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+  sudo sh -c 'cat >>'"${fstab}"
+mkdir -p "${s}"
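+# Illustration only (not part of the patch; "trusty32bit" is a hypothetical
+# target name): the bind mount just added makes the same path show different
+# contents inside and outside of the chroot:
+#   $ ls ~/chroot              # outside: .trusty32bit/ and other dot-dirs
+#   $ trusty32 ls ~/chroot     # inside: only what lives in .trusty32bit/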
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+  # insert the same number of spaces as the number of characters in the
+  # parameter(s) passed to this function.
+  # If the "fold" program cannot be found, or if the actual width of the
+  # terminal cannot be determined, this function doesn't attempt to do any
+  # wrapping.
+  local f="$(type -P fold)"
+  [ -z "${f}" ] && { cat; return; }
+  local c="$(stty -a </dev/tty 2>/dev/null |
+             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+  [ -z "${c}" ] && { cat; return; }
+  local i="$(echo "$*"|sed 's/./ /g')"
+  local j="$(printf %s "${i}"|wc -c)"
+  if [ "${c}" -gt "${j}" ]; then
+    dd bs=1 count="${j}" 2>/dev/null
+    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+  else
+    "${f}" -sw "${c}"
+  fi
+}
+
+help() {
+  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+  echo "  help:      print this message"                                                | wrap "  "
+  echo "  list:      list all known chroot environments"                                | wrap "  "
+  echo "  clean:     remove all old chroot sessions for \"${chroot}\""                  | wrap "  "
+  echo "  clean-all: remove all old chroot sessions for all environments"               | wrap "  "
+  exit 0
+}
+
+clean() {
+  local s t rc
+  rc=0
+  for s in $(schroot -l --all-sessions); do
+    if [ -n "$1" ]; then
+      t="${s#session:}"
+      [ "${t#${chroot}-}" == "${t}" ] && continue
+    fi
+    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+       fgrep -qs "/var/lib/schroot/mount/${t}"; then
+      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+      rc=1
+      continue
+    fi
+    sudo schroot -c "${s}" -e || rc=1
+  done
+  exit ${rc}
+}
+
+list() {
+  for e in $(schroot -l); do
+    e="${e#chroot:}"
+    [ -x "/usr/local/bin/${e}" ] || continue
+    if schroot -l --all-sessions 2>/dev/null |
+       sed 's/^session://' |
+       grep -qs "^${e}-"; then
+      echo "${e} is currently active"
+    else
+      echo "${e}"
+    fi
+  done
+  exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+  case "$1" in
+    --)             shift; break;;
+    -h|--help)      shift; help;;
+    -l|--list)      shift; list;;
+    -c|--clean)     shift; clean "${chroot}";;
+    -C|--clean-all) shift; clean;;
+    *)              break;;
+  esac
+done
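+# Illustration only (not part of the patch; "trusty32" is a hypothetical
+# helper name): typical invocations of this generated script are
+#   $ trusty32                  # interactive shell inside the chroot
+#   $ trusty32 make chrome      # run one command inside the chroot
+#   $ trusty32 --clean          # tear down stale sessions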
+
+# Start a new chroot session and keep track of the session id. We inject this
+# id into all processes that run inside the chroot. Unless they go out of their
+# way to clear their environment, we can then later identify our child and
+# grand-child processes by scanning their environment.
+session="$(schroot -c "${chroot}" -b)"
+export CHROOT_SESSION_ID="${session}"
+
+# Set GOMA_TMP_DIR for better handling of goma inside chroot.
+export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
+mkdir -p "$GOMA_TMP_DIR"
+
+if [ $# -eq 0 ]; then
+  # Run an interactive shell session
+  schroot -c "${session}" -r -p
+else
+  # Run a command inside of the chroot environment
+  p="$1"; shift
+  schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+# Compute the inode of the root directory inside of the chroot environment.
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
+     awk '{ print $1 }') 2>/dev/null
+other_pids=
+while [ -n "$i" ]; do
+  # Identify processes by the inode number of their root directory. Then
+  # remove all processes that we know belong to other sessions. We use
+  # "sort | uniq -u" to do what amounts to a "set subtraction" operation.
+  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
+           sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+                   t
+                   d';
+           echo "${other_pids}";
+           echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
+  # Kill all processes that are still left running in the session. This is
+  # typically an assortment of daemon processes that were started
+  # automatically. They result in us being unable to tear down the session
+  # cleanly.
+  [ -z "${pids}" ] && break
+  for j in $pids; do
+    # Unfortunately, the way that schroot sets up sessions has the
+    # side-effect of being unable to tell one session apart from another.
+    # This can result in us attempting to kill processes in other sessions.
+    # We make a best-effort to avoid doing so.
+    k="$( ( xargs -0 -n1 </proc/$j/environ; echo ) 2>/dev/null |
+         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
+    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
+      other_pids="${other_pids}
+${j}"
+      continue
+    fi
+    kill -9 $j
+  done
+done
+# End the chroot session. This should clean up all temporary files. But if we
+# earlier failed to terminate all (daemon) processes inside of the session,
+# deleting the session could fail. When that happens, the user has to manually
+# clean up the stale files by invoking us with "--clean" after having killed
+# all running processes.
+schroot -c "${session}" -e
+# Since no goma processes are running, we can remove goma directory.
+rm -rf "$GOMA_TMP_DIR"
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
+[ "${alt_repos}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+             s/^\(deb .* [^ -]\+\) main/\1-security main/
+             p
+             t1
+             d
+             :1;s/-security main/-updates main/
+             t
+             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/' \
+    "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add the Ubuntu "partner" repository, if available
+if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+   HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
+       >&/dev/null; then
+  sudo sh -c '
+    echo "deb http://archive.canonical.com/ubuntu" \
+         "'"${distname}"' partner" \
+      >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
+fi
+
+# Add source repositories, if the user requested we do so
+[ "${add_srcs}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb[^-]/p
+             s/^deb\([^-]\)/deb-src\1/' \
+    "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Set apt proxy if host has set http_proxy
+if [ -n "${http_proxy}" ]; then
+  sudo sh -c '
+    echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
+      >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
+fi
+
+# Update packages
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+  [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+    sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  l='"${LANG:-en_US}"'; l="${l%%.*}"
+  [ -r /etc/locale.gen ] &&
+    sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+  locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Enable multi-arch support, if available
+sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
+  [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
+  sudo sed -i 's/ / [arch=amd64,i386] /' \
+      "/var/lib/chroot/${target}/etc/apt/sources.list"
+  [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
+    sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \
+        $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null ||
+    echo foreign-architecture \
+        $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+      sudo sh -c \
+        "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+}
+
+# Configure the "sudo" package.
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
+  echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo "/usr/local/bin/${target%bit}" apt-get -y install \
+  autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool \
+  lsof strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same
+# as the host, otherwise there might be incompatibilities in build settings
+# or runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
+  cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+     "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+   file /bin/bash 2>/dev/null | grep -q x86-64; then
+  readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
+    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+  sudo "/usr/local/bin/${target%bit}" apt-get -y install \
+    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6
+  dep=
+  for i in binutils gdb; do
+    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+  done
+  [ -n "$dep" ] && sudo apt-get -y install $dep
+  sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
+  for i in libbfd libpython; do
+    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+           grep -s "$i" | awk '{ print $3 }')"
+    if [ -n "$lib" -a -r "$lib" ]; then
+      sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
+    fi
+  done
+  for lib in libssl libcrypt; do
+    for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
+      sudo cp $path/$lib* \
+          "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
+    done
+  done
+  for i in gdb ld; do
+    sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
+    sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
+#!/bin/sh
+exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 /usr/local/lib/amd64/$i "\$@"
+EOF
+    sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
+  done
+fi
+
+# If the install-build-deps.sh script can be found, offer to run it now.
+script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
+if [ -x "${script}" ]; then
+  while :; do
+    echo
+    echo "If you plan on building Chrome inside of the new chroot environment,"
+    echo "you now have to install the build dependencies. Do you want me to"
+    printf "start the script that installs them (y/n)? "
+    read install_deps
+    case "${install_deps}" in
+      y|Y)
+        echo
+        # We prefer running the script in-place, but this might not be
+        # possible, if it lives on a network filesystem that denies
+        # access to root.
+        tmp_script=
+        if ! sudo /usr/local/bin/"${target%bit}" \
+            sh -c "[ -x '${script}' ]" >&/dev/null; then
+          tmp_script="/tmp/${script##*/}"
+          cp "${script}" "${tmp_script}"
+        fi
+        # Some distributions automatically start an instance of the system-
+        # wide dbus daemon, cron daemon or of the logging daemon, when
+        # installing the Chrome build dependencies. This prevents the chroot
+        # session from being closed. So, we always try to shut down any running
+        # instance of dbus and rsyslog.
+        sudo /usr/local/bin/"${target%bit}" sh -c \
+          "${tmp_script:-${script}};
+           rc=\$?;
+           /etc/init.d/cron stop >/dev/null 2>&1 || :;
+           /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
+           /etc/init.d/dbus stop >/dev/null 2>&1 || :;
+           exit \$rc"
+        rc=$?
+        [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
+        [ $rc -ne 0 ] && exit $rc
+        break
+        ;;
+      n|N)
+        break
+        ;;
+    esac
+  done
+  echo
+fi
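+# Illustration only (not part of the patch; "trusty64" is a hypothetical
+# helper name): the same dependencies can also be installed later, by hand,
+# from inside the chroot:
+#   $ trusty64 ./build/install-build-deps.sh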
+
+# Check whether ~/chroot is on a (slow) network file system and offer to
+# relocate it. Also offer relocation, if the user appears to have multiple
+# spindles (as indicated by "${bind_mounts}" being non-empty).
+# We only offer this option, if it doesn't look as if a chroot environment
+# is currently active. Otherwise, relocation is unlikely to work and it
+# can be difficult for the user to recover from the failed attempt to relocate
+# the ~/chroot directory.
+# We don't aim to solve this problem for every configuration,
+# but try to help with the common cases. For more advanced configuration
+# options, the user can always manually adjust things.
+mkdir -p "${HOME}/chroot/"
+if [ ! -h "${HOME}/chroot" ] &&
+   ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
+   { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
+     is_network_drive "${HOME}/chroot"; } &&
+   ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
+  echo "${HOME}/chroot is currently located on the same device as your"
+  echo "home directory."
+  echo "This might not be what you want. Do you want me to move it somewhere"
+  echo "else?"
+  # If the computer has multiple spindles, many users configure all or part of
+  # the secondary hard disk to be writable by the primary user of this machine.
+  # Make some reasonable effort to detect this type of configuration and
+  # then offer a good location for where to put the ~/chroot directory.
+  suggest=
+  for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
+    if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
+       ! is_network_drive "$i"; then
+      suggest="$i"
+    else
+      for j in "$i/"*; do
+        if [ -d "$j" -a -w "$j" -a \
+             \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
+           ! is_network_drive "$j"; then
+          suggest="$j"
+        else
+          for k in "$j/"*; do
+            if [ -d "$k" -a -w "$k" -a \
+                 \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
+               ! is_network_drive "$k"; then
+              suggest="$k"
+              break
+            fi
+          done
+        fi
+        [ -n "${suggest}" ] && break
+      done
+    fi
+    [ -n "${suggest}" ] && break
+  done
+  def_suggest="${HOME}"
+  if [ -n "${suggest}" ]; then
+    # For home directories that reside on network drives, make our suggestion
+    # the default option. For home directories that reside on a local drive,
+    # require that the user manually enter the new location.
+    if is_network_drive "${HOME}"; then
+      def_suggest="${suggest}"
+    else
+      echo "A good location would probably be in \"${suggest}\""
+    fi
+  fi
+  while :; do
+    printf "Physical location [${def_suggest}]: "
+    read dir
+    [ -z "${dir}" ] && dir="${def_suggest}"
+    [ "${dir%%/}" == "${HOME%%/}" ] && break
+    if ! [ -d "${dir}" -a -w "${dir}" ] ||
+       [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
+      echo "Cannot write to ${dir}/chroot. Please try again"
+    else
+      mv "${HOME}/chroot" "${dir}/chroot"
+      ln -s "${dir}/chroot" "${HOME}/chroot"
+      for i in $(list_all_chroots); do
+        sudo "$i" mkdir -p "${dir}/chroot"
+      done
+      sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
+      break
+    fi
+  done
+fi
+
+# Clean up package files
+sudo schroot -c "${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+trap '' INT TERM QUIT HUP
+trap '' EXIT
+
+# Let the user know what we did
+cat <<EOF
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the
+"/usr/local/bin/${target%bit}" command.
+
+This command can be used with arguments, in order to just run a single
+program inside of the chroot environment (e.g. "${target%bit} make chrome"),
+or without any arguments, in order to run an interactive shell session
+inside of the chroot environment.
+
+EOF
diff --git a/build/ios/clean_env.py b/build/ios/clean_env.py
new file mode 100644
--- /dev/null
+++ b/build/ios/clean_env.py
+#!/usr/bin/python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(argv):
+  """Runs the given command in a cleaned-up environment.
+
+  This works like 'env -i', but it keeps a whitelist of environment
+  variables and strips Xcode-added entries out of PATH.
+  """
+  env_key_whitelist = (
+    'HOME',
+    'LOGNAME',
+    # 'PATH' added below (but filtered).
+    'PWD',
+    'SHELL',
+    'TEMP',
+    'TMPDIR',
+    'USER'
+  )
+
+  # Need something to run.
+  assert(len(argv) > 0)
+
+  add_to_path = [];
+  first_entry = argv[0];
+  if first_entry.startswith('ADD_TO_PATH='):
+    argv = argv[1:];
+    add_to_path = first_entry.replace('ADD_TO_PATH=', '', 1).split(':')
+
+  # Still need something to run.
+  assert(len(argv) > 0)
+
+  clean_env = {}
+
+  # Pull over the whitelisted keys.
+  for key in env_key_whitelist:
+    val = os.environ.get(key, None)
+    if not val is None:
+      clean_env[key] = val
+
+  # Collect the developer dir as set via Xcode, defaulting it.
+  dev_prefix = os.environ.get('DEVELOPER_DIR', '/Developer/')
+  if dev_prefix[-1:] != '/':
+    dev_prefix += '/'
+
+  # Now pull in PATH, but remove anything Xcode might have added.
+  initial_path = os.environ.get('PATH', '')
+  filtered_chunks = \
+      [x for x in initial_path.split(':') if not x.startswith(dev_prefix)]
+  if filtered_chunks:
+    clean_env['PATH'] = ':'.join(add_to_path + filtered_chunks)
+
+  # Add any KEY=VALUE args before the command to the cleaned environment.
+  args = argv[:]
+  while '=' in args[0]:
+    (key, val) = args[0].split('=', 1)
+    clean_env[key] = val
+    args = args[1:]
+
+  # Still need something to run.
+  assert(len(args) > 0)
+
+  # Off it goes...
+  os.execvpe(args[0], args, clean_env)
+  # Should never get here, so return a distinctive, non-zero status code.
+  return 66
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/ios/coverage.gypi b/build/ios/coverage.gypi
new file mode 100644
index 00000000000..e82208902cc
--- /dev/null
+++ b/build/ios/coverage.gypi
@@ -0,0 +1,32 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'enable_coverage%': 0,
+  },
+  'conditions': [
+    ['enable_coverage', {
+        'target_defaults': {
+          'defines': [
+            'ENABLE_TEST_CODE_COVERAGE=1'
+          ],
+          'link_settings': {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-fprofile-arcs',
+              ],
+            },
+          },
+          'xcode_settings': {
+            'OTHER_CFLAGS': [
+              '-fprofile-arcs',
+              '-ftest-coverage',
+            ],
+          },
+        },
+    }],
+  ],
+}
+
diff --git a/build/isolate.gypi b/build/isolate.gypi
new file mode 100644
index 00000000000..197e45591b1
--- /dev/null
+++ b/build/isolate.gypi
@@ -0,0 +1,129 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to "build" .isolate files into a .isolated file.
+#
+# To use this, create a gyp target with the following form:
+# 'conditions': [
+#   ['test_isolation_mode != "noop"', {
+#     'targets': [
+#       {
+#         'target_name': 'foo_test_run',
+#         'type': 'none',
+#         'dependencies': [
+#           'foo_test',
+#         ],
+#         'includes': [
+#           '../build/isolate.gypi',
+#         ],
+#         'sources': [
+#           'foo_test.isolate',
+#         ],
+#       },
+#     ],
+#   }],
+# ],
+#
+# Note: foo_test.isolate is both included and listed as a source file; this
+# is an inherent property of the .isolate format. It permits defining GYP
+# variables, but is a stricter format than GYP, so isolate.py can read it.
+#
+# The generated .isolated file will be:
+#   <(PRODUCT_DIR)/foo_test.isolated
+#
+# See http://dev.chromium.org/developers/testing/isolated-testing/for-swes
+# for more information.
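+# Illustration only (not part of the patch; paths and values are
+# hypothetical): for foo_test.isolate with test_isolation_mode=='prepare',
+# the rule below expands to roughly
+#   python tools/isolate_driver.py prepare \
+#       --isolated out/Release/foo_test.isolated \
+#       --isolate foo_test.isolate \
+#       --path-variable DEPTH ../.. \
+#       --config-variable OS=linux ... \
+#       --config-variable target_arch=x64 ...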
+
+{
+  'rules': [
+    {
+      'rule_name': 'isolate',
+      'extension': 'isolate',
+      'inputs': [
+        # Files that are known to be involved in this step.
+        '<(DEPTH)/tools/isolate_driver.py',
+        '<(DEPTH)/tools/swarming_client/isolate.py',
+        '<(DEPTH)/tools/swarming_client/run_isolated.py',
+      ],
+      'outputs': [],
+      'action': [
+        'python',
+        '<(DEPTH)/tools/isolate_driver.py',
+        '<(test_isolation_mode)',
+        '--isolated', '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+        '--isolate', '<(RULE_INPUT_PATH)',
+
+        # Variables should use the -V FOO=<(FOO) form so frequent values,
+        # like '0' or '1', aren't stripped out by GYP. Run 'isolate.py help'
+        # for more details.
+
+        # Path variables are used to replace file paths when loading a
+        # .isolate file.
+        '--path-variable', 'DEPTH', '<(DEPTH)',
+        '--path-variable', 'PRODUCT_DIR', '<(PRODUCT_DIR) ',
+
+        # Note: This list must match DefaultConfigVariables()
+        # in build/android/pylib/utils/isolator.py
+        '--config-variable', 'CONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+        '--config-variable', 'OS=<(OS)',
+        '--config-variable', 'asan=<(asan)',
+        '--config-variable', 'branding=<(branding)',
+        '--config-variable', 'chromeos=<(chromeos)',
+        '--config-variable', 'component=<(component)',
+        '--config-variable', 'disable_nacl=<(disable_nacl)',
+        '--config-variable', 'enable_pepper_cdms=<(enable_pepper_cdms)',
+        '--config-variable', 'enable_plugins=<(enable_plugins)',
+        '--config-variable', 'fastbuild=<(fastbuild)',
+        '--config-variable', 'icu_use_data_file_flag=<(icu_use_data_file_flag)',
+        # TODO(kbr): move this to chrome_tests.gypi:gles2_conform_tests_run
+        # once support for user-defined config variables is added.
+        '--config-variable',
+          'internal_gles2_conform_tests=<(internal_gles2_conform_tests)',
+        '--config-variable', 'kasko=<(kasko)',
+        '--config-variable', 'lsan=<(lsan)',
+        '--config-variable', 'msan=<(msan)',
+        '--config-variable', 'target_arch=<(target_arch)',
+        '--config-variable', 'tsan=<(tsan)',
+        '--config-variable', 'use_custom_libcxx=<(use_custom_libcxx)',
+        '--config-variable', 'use_instrumented_libraries=<(use_instrumented_libraries)',
+        '--config-variable',
+          'use_prebuilt_instrumented_libraries=<(use_prebuilt_instrumented_libraries)',
+        '--config-variable', 'use_ozone=<(use_ozone)',
+        '--config-variable', 'use_x11=<(use_x11)',
+        '--config-variable', 'v8_use_external_startup_data=<(v8_use_external_startup_data)',
+      ],
+      'conditions': [
+        # Note: When gyp merges lists, it appends them to the old value.
+        # Extra variables are replaced on the 'command' entry and on paths in
+        # the .isolate file but are not considered relative paths.
+        ['OS=="mac"', {
+          'action': [
+            '--extra-variable', 'mac_product_name=<(mac_product_name)',
+          ],
+        }],
+        ["test_isolation_mode == 'prepare'", {
+          'outputs': [
+            '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated.gen.json',
+          ],
+        }, {
+          'outputs': [
+            '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+          ],
+        }],
+        ['OS=="win"', {
+          'includes': ['../build/util/version.gypi'],
+          'action': [
+            '--extra-variable', 'version_full=<(version_full)',
+            '--config-variable', 'msvs_version=<(MSVS_VERSION)',
+          ],
+        }, {
+          'action': [
+            '--config-variable', 'msvs_version=0',
+          ],
+        }],
+      ],
+    },
+  ],
}
diff --git a/build/jar_file_jni_generator.gypi b/build/jar_file_jni_generator.gypi
new file mode 100644
index 00000000000..71ab006f23c
--- /dev/null
+++ b/build/jar_file_jni_generator.gypi
@@ -0,0 +1,72 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for system Java files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'android_jar_jni_headers',
+#    'type': 'none',
+#    'variables': {
+#      'jni_gen_package': 'chrome',
+#      'input_java_class': 'java/io/InputStream.class',
+#    },
+#    'includes': [ '../build/jar_file_jni_generator.gypi' ],
+#  },
+#
+# Optional variables:
+#  input_jar_file - The input jar file. If omitted, android_sdk_jar will be
+#    used.
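+#
+# For example (illustrative only), to generate bindings from a class in a
+# custom jar instead of the SDK jar:
+#  {
+#    'target_name': 'foo_jar_jni_headers',
+#    'type': 'none',
+#    'variables': {
+#      'jni_gen_package': 'foo',
+#      'input_java_class': 'java/io/InputStream.class',
+#      'input_jar_file': '<(DEPTH)/third_party/foo/foo.jar',
+#    },
+#    'includes': [ '../build/jar_file_jni_generator.gypi' ],
+#  },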
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+    # A comma separated string of include files.
+    'jni_generator_includes%': (
+        'base/android/jni_generator/jni_generator_helper.h'
+    ),
+    'native_exports%': '--native_exports_optional',
+  },
+  'actions': [
+    {
+      'action_name': 'generate_jni_headers_from_jar_file',
+      'inputs': [
+        '<(jni_generator)',
+        '<(input_jar_file)',
+        '<(android_sdk_jar)',
+      ],
+      'variables': {
+        'java_class_name': '<!(basename <(input_java_class)|sed "s/\.class//")'
+      },
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni/<(java_class_name)_jni.h',
+      ],
+      'action': [
+        'python',
+        '<(jni_generator)',
+        '-j',
+        '<(input_jar_file)',
+        '--input_file',
+        '<(input_java_class)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni',
+        '--includes',
+        '<(jni_generator_includes)',
+        '<(native_exports)',
+      ],
+      'message': 'Generating JNI bindings from <(input_jar_file)/<(input_java_class)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  # This target exports a hard dependency because it generates header files.
+  'hard_dependency': 1,
}
diff --git a/build/java.gypi b/build/java.gypi
new file mode 100644
--- /dev/null
+++ b/build/java.gypi
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'java_in_dir': 'path/to/package/root',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+# java_in_dir - The top-level java directory. The src should be in
+#   <(java_in_dir)/src.
+# Optional/automatic variables:
+# add_to_dependents_classpaths - Set to 0 if the resulting jar file should not
+#   be added to its dependents' classpaths.
+# additional_input_paths - These paths will be included in the 'inputs' list to
+#   ensure that this target is rebuilt when one of these paths changes.
+# additional_src_dirs - Additional directories with .java files to be compiled
+#   and included in the output of this target.
+# generated_src_dirs - Same as additional_src_dirs except used for .java files
+#   that are generated at build time. This should be set automatically by a
+#   target's dependencies. The .java files in these directories are not
+#   included in the 'inputs' list (unlike additional_src_dirs).
+# input_jars_paths - The path to jars to be included in the classpath. This
+#   should be filled automatically by depending on the appropriate targets.
+# javac_includes - A list of specific files to include. This is by default
+#   empty, which leads to inclusion of all files specified. May include
+#   wildcard, and supports '**/' for recursive path wildcards, e.g.:
+#   '**/MyFileRegardlessOfDirectory.java', '**/IncludedPrefix*.java'.
+# has_java_resources - Set to 1 if the java target contains an
+#   Android-compatible resources folder named res. If 1, R_package and
+#   R_package_relpath must also be set.
+# R_package - The java package in which the R class (which maps resources to
+#   integer IDs) should be generated, e.g. org.chromium.content.
+# R_package_relpath - Same as R_package, but replace each '.' with '/'.
+# res_extra_dirs - A list of extra directories containing Android resources.
+#   These directories may be generated at build time.
+# res_extra_files - A list of the files in res_extra_dirs.
+# never_lint - Set to 1 to not run lint on this target.
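+#
+# For example (illustrative only), a library with Android resources:
+# {
+#   'target_name': 'foo_java',
+#   'type': 'none',
+#   'variables': {
+#     'java_in_dir': 'path/to/foo',
+#     'has_java_resources': 1,
+#     'R_package': 'org.chromium.foo',
+#     'R_package_relpath': 'org/chromium/foo',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }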
+ +{ + 'dependencies': [ + '<(DEPTH)/build/android/android_lint_cache.gyp:android_lint_cache', + '<(DEPTH)/build/android/setup.gyp:build_output_dirs', + ], + 'variables': { + 'add_to_dependents_classpaths%': 1, + 'android_jar': '<(android_sdk)/android.jar', + 'input_jars_paths': [ '<(android_jar)' ], + 'additional_src_dirs': [], + 'javac_includes': [], + 'jar_name': '<(_target_name).jar', + 'jar_dir': '<(PRODUCT_DIR)/lib.java', + 'jar_path': '<(intermediate_dir)/<(jar_name)', + 'jar_final_path': '<(jar_dir)/<(jar_name)', + 'jar_excluded_classes': [ '*/R.class', '*/R##*.class' ], + 'emma_instr_stamp': '<(intermediate_dir)/emma_instr.stamp', + 'additional_input_paths': [], + 'additional_locale_input_paths': [], + 'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar', + 'generated_src_dirs': ['>@(generated_R_dirs)'], + 'generated_R_dirs': [], + 'has_java_resources%': 0, + 'res_extra_dirs': [], + 'res_extra_files': [], + 'res_v14_skip%': 0, + 'resource_input_paths': ['>@(res_extra_files)'], + 'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)', + 'compile_stamp': '<(intermediate_dir)/compile.stamp', + 'lint_stamp': '<(intermediate_dir)/lint.stamp', + 'lint_result': '<(intermediate_dir)/lint_result.xml', + 'lint_config': '<(intermediate_dir)/lint_config.xml', + 'never_lint%': 0, + 'findbugs_stamp': '<(intermediate_dir)/findbugs.stamp', + 'run_findbugs%': 0, + 'java_in_dir_suffix%': '/src', + 'proguard_config%': '', + 'proguard_preprocess%': '0', + 'variables': { + 'variables': { + 'proguard_preprocess%': 0, + 'emma_never_instrument%': 0, + }, + 'conditions': [ + ['proguard_preprocess == 1', { + 'javac_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar' + }, { + 'javac_jar_path': '<(jar_path)' + }], + ['chromium_code != 0 and emma_coverage != 0 and emma_never_instrument == 0', { + 'emma_instrument': 1, + }, { + 'emma_instrument': 0, + }], + ], + }, + 'emma_instrument': '<(emma_instrument)', + 'javac_jar_path': '<(javac_jar_path)', + 'conditions': [ + ['chromium_code == 0', { + 'enable_errorprone': 0, + }], + ], + 'enable_errorprone%': 0, + 'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone', + }, + 'conditions': [ + ['add_to_dependents_classpaths == 1', { + # This all_dependent_settings is used for java targets only. This will add the + # jar path to the classpath of dependent java targets. 
+      'all_dependent_settings': {
+        'variables': {
+          'input_jars_paths': ['<(jar_final_path)'],
+          'library_dexed_jars_paths': ['<(dex_path)'],
+        },
+      },
+    }],
+    ['has_java_resources == 1', {
+      'variables': {
+        'resource_dir': '<(java_in_dir)/res',
+        'res_input_dirs': ['<(resource_dir)', '<@(res_extra_dirs)'],
+        'resource_input_paths': ['<!@(find <(resource_dir) -type f)'],
+        'R_dir': '<(intermediate_dir)/java_R',
+        'generated_src_dirs': ['<(R_dir)'],
+        'resource_zip_path': '<(intermediate_dir)/<(_target_name).resources.zip',
+        'additional_input_paths': ['<(resource_zip_path)'],
+      },
+      'actions': [
+        # Generate R.java and crunch image resources.
+        {
+          'action_name': 'process_resources',
+          'message': 'processing resources for <(_target_name)',
+          'variables': {
+            'android_manifest': '<(DEPTH)/build/android/AndroidManifest.xml',
+            # Write the inputs list to a file, so that its mtime is updated
+            # when the list of inputs changes.
+            'inputs_list_file': '>|(java_resources.<(_target_name).gypcmd >@(resource_input_paths))',
+            'process_resources_options': [],
+            'local_dependencies_res_zip_paths': [
+              '>@(dependencies_res_zip_paths)',
+              '>@(dependencies_locale_zip_paths)'
+            ],
+            'conditions': [
+              ['res_v14_skip == 1', {
+                'process_resources_options': ['--v14-skip']
+              }],
+            ],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/process_resources.py',
+            '<(DEPTH)/build/android/gyp/generate_v14_compatible_resources.py',
+            '>@(resource_input_paths)',
+            '>@(local_dependencies_res_zip_paths)',
+            '>(inputs_list_file)',
+          ],
+          'outputs': [
+            '<(resource_zip_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/process_resources.py',
+            '--android-sdk-jar', '<(android_sdk_jar)',
+            '--aapt-path', '<(android_aapt_path)',
+            # Generate onResourcesLoaded() in R.java, so that it can be used
+            # in a java lib.
+            '--shared-resources',
+
+            '--android-manifest', '<(android_manifest)',
+            '--custom-package', '<(R_package)',
+
+            '--dependencies-res-zips', '>(local_dependencies_res_zip_paths)',
+            '--resource-dirs', '<(res_input_dirs)',
+
+            '--R-dir', '<(R_dir)',
+            '--resource-zip-out', '<(resource_zip_path)',
+
+            '<@(process_resources_options)',
+          ],
+        },
+      ],
+    }],
+    ['proguard_preprocess == 1', {
+      'actions': [
+        {
+          'action_name': 'proguard_<(_target_name)',
+          'message': 'Proguard preprocessing <(_target_name) jar',
+          'inputs': [
+            '<(DEPTH)/third_party/proguard/lib/proguard.jar',
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/proguard.py',
+            '<(javac_jar_path)',
+            '<(proguard_config)',
+          ],
+          'outputs': [
+            '<(jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/proguard.py',
+            '--proguard-path=<(DEPTH)/third_party/proguard/lib/proguard.jar',
+            '--input-path=<(javac_jar_path)',
+            '--output-path=<(jar_path)',
+            '--proguard-config=<(proguard_config)',
+            '--classpath=<(android_sdk_jar) >(input_jars_paths)',
+          ]
+        },
+      ],
+    }],
+    ['run_findbugs == 1', {
+      'actions': [
+        {
+          'action_name': 'findbugs_<(_target_name)',
+          'message': 'Running findbugs on <(_target_name)',
+          'variables': {
+            'additional_findbugs_args': [],
+            'findbugs_verbose%': 0,
+          },
+          'conditions': [
+            ['findbugs_verbose == 1', {
+              'variables': {
+                'additional_findbugs_args+': ['-vv'],
+              },
+            }],
+          ],
+          'inputs': [
+            '<(DEPTH)/build/android/findbugs_diff.py',
+            '<(DEPTH)/build/android/findbugs_filter/findbugs_exclude.xml',
+            '<(DEPTH)/build/android/pylib/utils/findbugs.py',
+            '>@(input_jars_paths)',
+            '<(jar_final_path)',
+            '<(compile_stamp)',
+          ],
+          'outputs': [
+            '<(findbugs_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/findbugs_diff.py',
+            '--auxclasspath-gyp', '>(input_jars_paths)',
+            '--stamp', '<(findbugs_stamp)',
+            '<@(additional_findbugs_args)',
+            '<(jar_final_path)',
+          ],
+        },
+      ],
+    }],
+    ['enable_errorprone == 1', {
+      'dependencies': [
+        '<(DEPTH)/third_party/errorprone/errorprone.gyp:require_errorprone',
+      ],
+    }],
+  ],
+  'actions': [
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling <(_target_name) java sources',
+      'variables': {
+        'local_additional_input_paths': [
+          '>@(additional_input_paths)',
+          '>@(additional_locale_input_paths)',
+        ],
+        'extra_args': [],
+        'extra_inputs': [],
+        'java_sources': ['>!@(find >(java_in_dir)>(java_in_dir_suffix) >(additional_src_dirs) -name "*.java")'],
+        'conditions': [
+          ['enable_errorprone == 1', {
+            'extra_inputs': [
+              '<(errorprone_exe_path)',
+            ],
+            'extra_args': [ '--use-errorprone-path=<(errorprone_exe_path)' ],
+          }],
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '>@(java_sources)',
+        '>@(input_jars_paths)',
+        '>@(local_additional_input_paths)',
+        '<@(extra_inputs)',
+      ],
+      'outputs': [
+        '<(compile_stamp)',
+        '<(javac_jar_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--bootclasspath=<(android_sdk_jar)',
+        '--classpath=>(input_jars_paths)',
+        '--src-gendirs=>(generated_src_dirs)',
+        '--javac-includes=<(javac_includes)',
+        '--chromium-code=<(chromium_code)',
+        '--jar-path=<(javac_jar_path)',
+        '--jar-excluded-classes=<(jar_excluded_classes)',
+        '--stamp=<(compile_stamp)',
+        '>@(java_sources)',
+        '<@(extra_args)',
+      ]
+    },
+    {
+      'action_name': 'emma_instr_jar_<(_target_name)',
+      'message': 'Instrumenting <(_target_name) jar',
+      'variables': {
+        'input_path': '<(jar_path)',
+        'output_path': '<(jar_final_path)',
+        'coverage_file': '<(jar_dir)/<(_target_name).em',
+        'sources_list_file': '<(jar_dir)/<(_target_name)_sources.txt',
+        'stamp_path': '<(emma_instr_stamp)',
+      },
+      'outputs': [
+        '<(jar_final_path)',
+      ],
+      'inputs': [
+        '<(jar_path)',
+      ],
+      'includes': [ 'android/emma_instr_action.gypi' ],
+    },
+    {
+      'variables': {
+        'src_dirs': [
+          '<(java_in_dir)<(java_in_dir_suffix)',
+          '>@(additional_src_dirs)',
+        ],
+        'stamp_path': '<(lint_stamp)',
+        'result_path': '<(lint_result)',
+        'config_path': '<(lint_config)',
+        'lint_jar_path': '<(jar_final_path)',
+      },
+      'inputs': [
+        '<(jar_final_path)',
+        '<(compile_stamp)',
+      ],
+      'outputs': [
+        '<(lint_stamp)',
+      ],
+      'includes': [ 'android/lint_action.gypi' ],
+    },
+    {
+      'action_name': 'jar_toc_<(_target_name)',
+      'message': 'Creating <(_target_name) jar.TOC',
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/util/md5_check.py',
+        '<(DEPTH)/build/android/gyp/jar_toc.py',
+        '<(jar_final_path)',
+      ],
+      'outputs': [
+        '<(jar_final_path).TOC',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/jar_toc.py',
+        '--jar-path=<(jar_final_path)',
+        '--toc-path=<(jar_final_path).TOC',
+      ]
+    },
+    {
+      'action_name': 'dex_<(_target_name)',
+      'variables': {
+        'conditions': [
+          ['emma_instrument != 0', {
+            'dex_no_locals': 1,
+          }],
+        ],
+        'dex_input_paths': [ '<(jar_final_path)' ],
+        'output_path': '<(dex_path)',
+      },
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+  ],
}
diff --git a/build/java_aidl.gypi b/build/java_aidl.gypi
new file mode 100644
index 00000000000..dda28942e2b
--- /dev/null
+++ b/build/java_aidl.gypi
@@ -0,0 +1,79 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java aidl files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'aidl_aidl-file-name',
+#   'type': 'none',
+#   'variables': {
+#     'aidl_interface_file': '<interface-path>/<interface-file>.aidl',
+#     'aidl_import_include': '<(DEPTH)/<path-to-src-dir>',
+#   },
+#   'sources': [
+#     '<input-path1>/<input-file1>.aidl',
+#     '<input-path2>/<input-file2>.aidl',
+#     ...
+#   ],
+#   'includes': ['<path-to-this-file>/java_aidl.gypi'],
+# }
+#
+#
+# The generated java files will be:
+#   <(PRODUCT_DIR)/lib.java/<input-file1>.java
+#   <(PRODUCT_DIR)/lib.java/<input-file2>.java
+#   ...
+#
+# Optional variables:
+# aidl_import_include - This should be an absolute path to your java src folder
+#   that contains the classes that are imported by your aidl files.
+#
+# TODO(cjhopman): dependents need to rebuild when this target's inputs have changed.
+
+{
+  'variables': {
+    'aidl_path%': '<(android_sdk_tools)/aidl',
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)/aidl',
+    'aidl_import_include%': '',
+    'additional_aidl_arguments': [],
+    'additional_aidl_input_paths': [],
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      'generated_src_dirs': ['<(intermediate_dir)/'],
+    },
+  },
+  'conditions': [
+    ['aidl_import_include != ""', {
+      'variables': {
+        'additional_aidl_arguments': [ '-I<(aidl_import_include)' ],
+        'additional_aidl_input_paths': [ '<!@(find <(aidl_import_include) -name "*.java" | sort)' ],
+      }
+    }],
+  ],
+  'rules': [
+    {
+      'rule_name': 'compile_aidl',
+      'extension': 'aidl',
+      'inputs': [
+        '<(android_sdk)/framework.aidl',
+        '<(aidl_interface_file)',
+        '>@(additional_aidl_input_paths)',
+      ],
+      'outputs': [
+        '<(intermediate_dir)/<(RULE_INPUT_ROOT).java',
+      ],
+      'action': [
+        '<(aidl_path)',
+        '-p<(android_sdk)/framework.aidl',
+        '-p<(aidl_interface_file)',
+        '<@(additional_aidl_arguments)',
+        '<(RULE_INPUT_PATH)',
+        '<(intermediate_dir)/<(RULE_INPUT_ROOT).java',
+      ],
+    },
+  ],
}
diff --git a/build/java_apk.gypi b/build/java_apk.gypi
new file mode 100644
--- /dev/null
+++ b/build/java_apk.gypi
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Android APKs in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_package_apk',
+#   'type': 'none',
+#   'variables': {
+#     'apk_name': 'MyPackage',
+#     'java_in_dir': 'path/to/package/root',
+#     'resource_dir': 'path/to/package/root/res',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+# apk_name - The final apk will be named <apk_name>.apk
+# java_in_dir - The top-level java directory. The src should be in
+#   <(java_in_dir)/src.
+# Optional/automatic variables:
+# additional_input_paths - These paths will be included in the 'inputs' list to
+#   ensure that this target is rebuilt when one of these paths changes.
+# additional_res_packages - Package names of R.java files generated in addition
+#   to the default package name defined in AndroidManifest.xml.
+# additional_src_dirs - Additional directories with .java files to be compiled
+#   and included in the output of this target.
+# additional_bundled_libs - Additional libraries that will be stripped and
+#   bundled in the apk.
+# asset_location - The directory where assets are located.
+# create_abi_split - Whether to create abi-based splits. Splits
+#   are supported only for minSdkVersion >= 21.
+# create_density_splits - Whether to create density-based apk splits.
+# language_splits - List of languages to create apk splits for.
+# generated_src_dirs - Same as additional_src_dirs except used for .java files
+#   that are generated at build time. This should be set automatically by a
+#   target's dependencies. The .java files in these directories are not
+#   included in the 'inputs' list (unlike additional_src_dirs).
+# library_jars_paths - The path to library jars to be included in the classpath.
+#   These will not be included into the final apk.
+# input_jars_paths - The path to jars to be included in the classpath. This
+#   should be filled automatically by depending on the appropriate targets.
+# is_test_apk - Set to 1 if building a test apk. This prevents resources from
+#   dependencies from being re-included.
+# native_lib_target - The target_name of the target which generates the final
+#   shared library to be included in this apk. A stripped copy of the
+#   library will be included in the apk.
+# resource_dir - The directory for resources.
+# shared_resources - Make a resource package that can be loaded by a different
+#   application at runtime to access the package's resources.
+# app_as_shared_library - Make a resource package that can be loaded as a
+#   shared library.
+# R_package - A custom Java package to generate the resource file R.java in.
+#   By default, the package given in AndroidManifest.xml will be used.
+# include_all_resources - Set to 1 to include all resource IDs in all generated
+#   R.java files.
+# use_chromium_linker - Enable the content dynamic linker that allows sharing the
+#   RELRO section of the native libraries between the different processes.
+# load_library_from_zip - When using the dynamic linker, load the library
+#   directly out of the zip file.
+# use_relocation_packer - Enable relocation packing. Relies on the chromium
+#   linker, so use_chromium_linker must also be enabled.
+# enable_chromium_linker_tests - Enable the content dynamic linker test support
+#   code. This allows a test APK to inject a Linker.TestRunner instance at
+#   runtime. Should only be used by the chromium_linker_test_apk target!!
+# never_lint - Set to 1 to not run lint on this target.
+# java_in_dir_suffix - To override the /src suffix on java_in_dir.
+# app_manifest_version_name - set the app's 'human readable' version number.
+# app_manifest_version_code - set the app's version number.
+# dependencies_locale_zip_alternative_paths - a list of paths used to replace
+#   dependencies_locale_zip_paths in all_dependent_settings.
+{
+  'variables': {
+    'tested_apk_obfuscated_jar_path%': '/',
+    'tested_apk_dex_path%': '/',
+    'tested_apk_is_multidex%': 0,
+    'tested_apk_generated_multidex_config%': 0,
+    'additional_input_paths': [],
+    'additional_locale_input_paths': [],
+    'create_density_splits%': 0,
+    'language_splits': [],
+    'library_jars_paths': [],
+    'input_jars_paths': [],
+    'library_dexed_jars_paths': [],
+    'additional_src_dirs': [],
+    'generated_src_dirs': [],
+    'app_manifest_version_name%': '<(android_app_version_name)',
+    'app_manifest_version_code%': '<(android_app_version_code)',
+    # aapt generates this proguard.txt.
+    'generated_proguard_file': '<(intermediate_dir)/proguard.txt',
+    'proguard_enabled%': 'false',
+    'debug_build_proguard_enabled%': 'false',
+    'proguard_flags_paths': ['<(generated_proguard_file)'],
+    'jar_name': 'chromium_apk_<(_target_name).jar',
+    'resource_dir%':'<(DEPTH)/build/android/ant/empty/res',
+    'R_package%':'',
+    'include_all_resources%': 0,
+    'additional_R_text_files': [],
+    'dependencies_locale_zip_alternative_paths%': [],
+    'dependencies_locale_zip_paths': [],
+    'dependencies_res_zip_paths': [],
+    'additional_res_packages': [],
+    'additional_bundled_libs%': [],
+    'is_test_apk%': 0,
+    # Allow icu data, v8 snapshots, and pak files to be loaded directly from
+    # the .apk.
+    # Note: These are actually suffix matches, not necessarily extensions.
+ 'extensions_to_not_compress%': '.dat,.bin,.pak', + 'resource_input_paths': [], + 'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)', + 'asset_location%': '<(intermediate_dir)/assets', + 'codegen_stamp': '<(intermediate_dir)/codegen.stamp', + 'package_input_paths': [], + 'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json', + 'additional_ordered_libraries_file': '<(intermediate_dir)/additional_native_libraries.json', + 'native_libraries_template': '<(DEPTH)/base/android/java/templates/NativeLibraries.template', + 'native_libraries_java_dir': '<(intermediate_dir)/native_libraries_java/', + 'native_libraries_java_file': '<(native_libraries_java_dir)/NativeLibraries.java', + 'native_libraries_java_stamp': '<(intermediate_dir)/native_libraries_java.stamp', + 'native_libraries_template_data_dir': '<(intermediate_dir)/native_libraries/', + 'native_libraries_template_data_file': '<(native_libraries_template_data_dir)/native_libraries_array.h', + 'native_libraries_template_version_file': '<(native_libraries_template_data_dir)/native_libraries_version.h', + 'generate_build_config%': 0, + 'build_config_template': '<(DEPTH)/base/android/java/templates/BuildConfig.template', + 'build_config_java_dir': '<(intermediate_dir)/build_config/', + 'build_config_java_file': '<(build_config_java_dir)/BuildConfig.java', + 'build_config_java_stamp': '<(intermediate_dir)/build_config_java.stamp', + 'compile_stamp': '<(intermediate_dir)/compile.stamp', + 'lint_stamp': '<(intermediate_dir)/lint.stamp', + 'lint_result': '<(intermediate_dir)/lint_result.xml', + 'lint_config': '<(intermediate_dir)/lint_config.xml', + 'never_lint%': 0, + 'findbugs_stamp': '<(intermediate_dir)/findbugs.stamp', + 'run_findbugs%': 0, + 'java_in_dir_suffix%': '/src', + 'emma_instr_stamp': '<(intermediate_dir)/emma_instr.stamp', + 'jar_stamp': '<(intermediate_dir)/jar.stamp', + 'obfuscate_stamp': '<(intermediate_dir)/obfuscate.stamp', + 'pack_relocations_stamp': '<(intermediate_dir)/pack_relocations.stamp', + 'strip_stamp': '<(intermediate_dir)/strip.stamp', + 'stripped_libraries_dir': '<(intermediate_dir)/stripped_libraries', + 'strip_additional_stamp': '<(intermediate_dir)/strip_additional.stamp', + 'version_stamp': '<(intermediate_dir)/version.stamp', + 'javac_includes': [], + 'jar_excluded_classes': [], + 'javac_jar_path': '<(intermediate_dir)/<(_target_name).javac.jar', + 'jar_path': '<(PRODUCT_DIR)/lib.java/<(jar_name)', + 'obfuscated_jar_path': '<(intermediate_dir)/obfuscated.jar', + 'test_jar_path': '<(PRODUCT_DIR)/test.lib.java/<(apk_name).jar', + 'enable_multidex%': 0, + 'enable_multidex_configurations%': [], + 'multidex_configuration_path': '<(intermediate_dir)/multidex_config.json', + 'main_dex_list_path': '<(intermediate_dir)/main_dex_list.txt', + 'emma_device_jar': '<(android_sdk_root)/tools/lib/emma_device.jar', + 'android_manifest_path%': '<(java_in_dir)/AndroidManifest.xml', + 'split_android_manifest_path': '<(intermediate_dir)/split-manifests/<(android_app_abi)/AndroidManifest.xml', + 'push_stamp': '<(intermediate_dir)/push.stamp', + 'link_stamp': '<(intermediate_dir)/link.stamp', + 'resource_zip_path': '<(intermediate_dir)/<(_target_name).resources.zip', + 'shared_resources%': 0, + 'app_as_shared_library%': 0, + 'final_apk_path_no_extension%': '<(PRODUCT_DIR)/apks/<(apk_name)', + 'final_abi_split_apk_path%': '<(PRODUCT_DIR)/apks/<(apk_name)-abi-<(android_app_abi).apk', + 'incomplete_apk_path': '<(intermediate_dir)/<(apk_name)-incomplete.apk', + 'apk_install_record': 
'<(intermediate_dir)/apk_install.record.stamp', + 'device_intermediate_dir': '/data/data/org.chromium.gyp_managed_install/<(_target_name)/<(CONFIGURATION_NAME)', + 'symlink_script_host_path': '<(intermediate_dir)/create_symlinks.sh', + 'symlink_script_device_path': '<(device_intermediate_dir)/create_symlinks.sh', + 'create_standalone_apk%': 1, + 'res_v14_skip%': 0, + 'variables': { + 'variables': { + 'native_lib_target%': '', + 'native_lib_version_name%': '', + 'use_chromium_linker%' : 0, + 'use_relocation_packer%' : 0, + 'enable_chromium_linker_tests%': 0, + 'is_test_apk%': 0, + 'unsigned_apk_path': '<(intermediate_dir)/<(apk_name)-unsigned.apk', + 'unsigned_abi_split_apk_path': '<(intermediate_dir)/<(apk_name)-abi-<(android_app_abi)-unsigned.apk', + 'create_abi_split%': 0, + 'enable_multidex%': 0, + }, + 'unsigned_apk_path': '<(unsigned_apk_path)', + 'unsigned_abi_split_apk_path': '<(unsigned_abi_split_apk_path)', + 'create_abi_split%': '<(create_abi_split)', + 'final_apk_path%': '<(PRODUCT_DIR)/apks/<(apk_name).apk', + 'conditions': [ + ['gyp_managed_install == 1 and native_lib_target != ""', { + 'conditions': [ + ['create_abi_split == 0', { + 'unsigned_standalone_apk_path': '<(intermediate_dir)/<(apk_name)-standalone-unsigned.apk', + }, { + 'unsigned_standalone_apk_path': '<(intermediate_dir)/<(apk_name)-abi-<(android_app_abi)-standalone-unsigned.apk', + }], + ], + }, { + 'unsigned_standalone_apk_path': '<(unsigned_apk_path)', + }], + ['gyp_managed_install == 1', { + 'apk_package_native_libs_dir': '<(intermediate_dir)/libs.managed', + }, { + 'apk_package_native_libs_dir': '<(intermediate_dir)/libs', + }], + ['is_test_apk == 0 and emma_coverage != 0', { + 'emma_instrument%': 1, + },{ + 'emma_instrument%': 0, + }], + # When using abi splits, the abi split is modified by + # gyp_managed_install rather than the main .apk + ['create_abi_split == 1', { + 'managed_input_apk_path': '<(unsigned_abi_split_apk_path)', + }, { + 'managed_input_apk_path': '<(unsigned_apk_path)', + }], + ['enable_multidex == 1', { + 'dex_path': '<(intermediate_dir)/classes.dex.zip', + }, { + 'dex_path': '<(intermediate_dir)/classes.dex', + }], + ], + }, + 'native_lib_target%': '', + 'native_lib_version_name%': '', + 'use_chromium_linker%' : 0, + 'load_library_from_zip%' : 0, + 'use_relocation_packer%' : 0, + 'enable_chromium_linker_tests%': 0, + 'emma_instrument%': '<(emma_instrument)', + 'apk_package_native_libs_dir': '<(apk_package_native_libs_dir)', + 'unsigned_standalone_apk_path': '<(unsigned_standalone_apk_path)', + 'unsigned_apk_path': '<(unsigned_apk_path)', + 'unsigned_abi_split_apk_path': '<(unsigned_abi_split_apk_path)', + 'create_abi_split%': '<(create_abi_split)', + 'managed_input_apk_path': '<(managed_input_apk_path)', + 'libchromium_android_linker': 'libchromium_android_linker.>(android_product_extension)', + 'extra_native_libs': [], + 'native_lib_placeholder_stamp': '<(apk_package_native_libs_dir)/<(android_app_abi)/native_lib_placeholder.stamp', + 'native_lib_placeholders': [], + 'main_apk_name': '<(apk_name)', + 'dex_path': '<(dex_path)', + 'conditions': [ + ['chromium_code == 0', { + 'enable_errorprone': 0, + }], + ], + 'enable_errorprone%': 0, + 'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone', + 'final_apk_path%': '<(final_apk_path)', + }, + # Pass the jar path to the apk's "fake" jar target. This would be better as + # direct_dependent_settings, but a variable set by a direct_dependent_settings + # cannot be lifted in a dependent to all_dependent_settings. 
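+  # (In gyp, direct_dependent_settings reaches only targets that depend on
+  # this one directly; all_dependent_settings also reaches transitive
+  # dependents, so the variable survives intervening targets.)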
+  'all_dependent_settings': {
+    'conditions': [
+      ['proguard_enabled == "true"', {
+        'variables': {
+          'proguard_enabled': 'true',
+        }
+      }],
+      ['debug_build_proguard_enabled == "true"', {
+        'variables': {
+          'debug_build_proguard_enabled': 'true',
+        }
+      }],
+      ['is_test_apk == 0', {
+        'variables': {
+          'tested_apk_path': '<(final_apk_path)',
+          'tested_apk_obfuscated_jar_path': '<(obfuscated_jar_path)',
+          'tested_apk_dex_path': '<(dex_path)',
+          'tested_apk_is_multidex': '<(enable_multidex)',
+          'tested_apk_generated_multidex_config': '>(generate_build_config)',
+        }
+      }]
+    ],
+    'variables': {
+      'apk_output_jar_path': '<(jar_path)',
+    },
+  },
+  'conditions': [
+    ['resource_dir!=""', {
+      'variables': {
+        'resource_input_paths': [ '<!@(find <(resource_dir) -not -name "*.d")' ],
+      },
+    }],
+    ['native_lib_target != ""', {
+      'variables': {
+        'conditions': [
+          ['use_chromium_linker == 1', {
+            'variables': {
+              'chromium_linker_path': [
+                '<(SHARED_LIB_DIR)/<(libchromium_android_linker)',
+              ],
+            }
+          }, {
+            'variables': {
+              'chromium_linker_path': [],
+            },
+          }],
+        ],
+        'native_libs_paths': [
+          '<(SHARED_LIB_DIR)/<(native_lib_target).>(android_product_extension)',
+          '<@(chromium_linker_path)'
+        ],
+        'package_input_paths': [
+          '<(apk_package_native_libs_dir)/<(android_app_abi)/gdbserver',
+        ],
+      },
+      'copies': [
+        {
+          # gdbserver is always copied into the APK's native libs dir. The ant
+          # build scripts (apkbuilder task) will only include it in a debug
+          # build.
+          'destination': '<(apk_package_native_libs_dir)/<(android_app_abi)',
+          'files': [
+            '<(android_gdbserver)',
+          ],
+        },
+      ],
+      'actions': [
+        {
+          'variables': {
+            'input_libraries': [
+              '<@(native_libs_paths)',
+              '<@(extra_native_libs)',
+            ],
+          },
+          'includes': ['../build/android/write_ordered_libraries.gypi'],
+        },
+        {
+          'action_name': 'native_libraries_<(_target_name)',
+          'variables': {
+            'conditions': [
+              ['use_chromium_linker == 1', {
+                'variables': {
+                  'linker_gcc_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_gcc_preprocess_defines': [],
+                },
+              }],
+              ['load_library_from_zip == 1', {
+                'variables': {
+                  'linker_load_from_zip_file_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_load_from_zip_file_preprocess_defines': [],
+                },
+              }],
+              ['enable_chromium_linker_tests == 1', {
+                'variables': {
+                  'linker_tests_gcc_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER_TESTS',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_tests_gcc_preprocess_defines': [],
+                },
+              }],
+            ],
+            'gcc_preprocess_defines': [
+              '<@(linker_load_from_zip_file_preprocess_defines)',
+              '<@(linker_gcc_preprocess_defines)',
+              '<@(linker_tests_gcc_preprocess_defines)',
+            ],
+          },
+          'message': 'Creating NativeLibraries.java for <(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+            '<(ordered_libraries_file)',
+            '<(native_libraries_template)',
+          ],
+          'outputs': [
+            '<(native_libraries_java_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+            '--include-path=',
+            '--output=<(native_libraries_java_file)',
+            '--template=<(native_libraries_template)',
+            '--stamp=<(native_libraries_java_stamp)',
+            '--defines', 'NATIVE_LIBRARIES_LIST=@FileArg(<(ordered_libraries_file):java_libraries_list)',
+            '--defines', 'NATIVE_LIBRARIES_VERSION_NUMBER="<(native_lib_version_name)"',
+            '<@(gcc_preprocess_defines)',
+          ],
+        },
+        {
+          'action_name': 'strip_native_libraries',
+          'variables': {
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'input_paths': [
+              '<@(native_libs_paths)',
+              '<@(extra_native_libs)',
+            ],
+            'stamp': '<(strip_stamp)'
+          },
+          'includes': ['../build/android/strip_native_libraries.gypi'],
+        },
+        {
+          'action_name': 'insert_chromium_version',
+          'variables': {
'ordered_libraries_file%': '<(ordered_libraries_file)', + 'stripped_libraries_dir%': '<(stripped_libraries_dir)', + 'version_string': '<(native_lib_version_name)', + 'input_paths': [ + '<(strip_stamp)', + ], + 'stamp': '<(version_stamp)' + }, + 'includes': ['../build/android/insert_chromium_version.gypi'], + }, + { + 'action_name': 'pack_relocations', + 'variables': { + 'conditions': [ + ['use_chromium_linker == 1 and use_relocation_packer == 1 and profiling != 1', { + 'enable_packing': 1, + }, { + 'enable_packing': 0, + }], + ], + 'exclude_packing_list': [ + '<(libchromium_android_linker)', + ], + 'ordered_libraries_file%': '<(ordered_libraries_file)', + 'stripped_libraries_dir%': '<(stripped_libraries_dir)', + 'packed_libraries_dir': '<(libraries_source_dir)', + 'input_paths': [ + '<(version_stamp)' + ], + 'stamp': '<(pack_relocations_stamp)', + }, + 'includes': ['../build/android/pack_relocations.gypi'], + }, + { + 'variables': { + 'input_libraries': [ + '<@(additional_bundled_libs)', + ], + 'ordered_libraries_file': '<(additional_ordered_libraries_file)', + 'subtarget': '_additional_libraries', + }, + 'includes': ['../build/android/write_ordered_libraries.gypi'], + }, + { + 'action_name': 'strip_additional_libraries', + 'variables': { + 'ordered_libraries_file': '<(additional_ordered_libraries_file)', + 'stripped_libraries_dir': '<(libraries_source_dir)', + 'input_paths': [ + '<@(additional_bundled_libs)', + '<(strip_stamp)', + ], + 'stamp': '<(strip_additional_stamp)' + }, + 'includes': ['../build/android/strip_native_libraries.gypi'], + }, + { + 'action_name': 'Create native lib placeholder files for previous releases', + 'variables': { + 'placeholders': ['<@(native_lib_placeholders)'], + 'conditions': [ + ['gyp_managed_install == 1', { + # This "library" just needs to be put in the .apk. It is not loaded + # at runtime. 
+ 'placeholders': ['libfix.crbug.384638.so'], + }] + ], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/create_placeholder_files.py', + ], + 'outputs': [ + '<(native_lib_placeholder_stamp)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/create_placeholder_files.py', + '--dest-lib-dir=<(apk_package_native_libs_dir)/<(android_app_abi)/', + '--stamp=<(native_lib_placeholder_stamp)', + '<@(placeholders)', + ], + }, + ], + 'conditions': [ + ['gyp_managed_install == 1', { + 'variables': { + 'libraries_top_dir': '<(intermediate_dir)/lib.stripped', + 'libraries_source_dir': '<(libraries_top_dir)/lib/<(android_app_abi)', + 'device_library_dir': '<(device_intermediate_dir)/lib.stripped', + }, + 'dependencies': [ + '<(DEPTH)/build/android/setup.gyp:get_build_device_configurations', + '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands', + ], + 'actions': [ + { + 'includes': ['../build/android/push_libraries.gypi'], + }, + { + 'action_name': 'create device library symlinks', + 'message': 'Creating links on device for <(_target_name)', + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/create_device_library_links.py', + '<(apk_install_record)', + '<(build_device_config_path)', + '<(ordered_libraries_file)', + ], + 'outputs': [ + '<(link_stamp)' + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/create_device_library_links.py', + '--build-device-configuration=<(build_device_config_path)', + '--libraries=@FileArg(<(ordered_libraries_file):libraries)', + '--script-host-path=<(symlink_script_host_path)', + '--script-device-path=<(symlink_script_device_path)', + '--target-dir=<(device_library_dir)', + '--apk=<(incomplete_apk_path)', + '--stamp=<(link_stamp)', + '--configuration-name=<(CONFIGURATION_NAME)', + '--output-directory=<(PRODUCT_DIR)', + ], + }, + ], + 'conditions': [ + ['create_standalone_apk == 1', { + 'actions': [ + { + 'action_name': 'create standalone APK', + 'variables': { + 'inputs': [ + '<(ordered_libraries_file)', + '<(strip_additional_stamp)', + '<(pack_relocations_stamp)', + ], + 'output_apk_path': '<(unsigned_standalone_apk_path)', + 'libraries_top_dir%': '<(libraries_top_dir)', + 'input_apk_path': '<(managed_input_apk_path)', + }, + 'includes': [ 'android/create_standalone_apk_action.gypi' ], + }, + ], + }], + ], + }, { + # gyp_managed_install != 1 + 'variables': { + 'libraries_source_dir': '<(apk_package_native_libs_dir)/<(android_app_abi)', + 'package_input_paths': [ + '<(strip_additional_stamp)', + '<(pack_relocations_stamp)', + ], + }, + }], + ], + }], # native_lib_target != '' + ['gyp_managed_install == 0 or create_standalone_apk == 1 or create_abi_split == 1', { + 'dependencies': [ + '<(DEPTH)/build/android/rezip.gyp:rezip_apk_jar', + ], + }], + ['create_abi_split == 1 or gyp_managed_install == 0 or create_standalone_apk == 1', { + 'actions': [ + { + 'action_name': 'finalize_base', + 'variables': { + 'output_apk_path': '<(final_apk_path)', + 'conditions': [ + ['create_abi_split == 0', { + 'input_apk_path': '<(unsigned_standalone_apk_path)', + }, { + 'input_apk_path': '<(unsigned_apk_path)', + 'load_library_from_zip': 0, + }] + ], + }, + 'includes': [ 'android/finalize_apk_action.gypi'] + }, + ], + }], + ['create_abi_split == 1', { + 'actions': [ + { + 'action_name': 'generate_split_manifest_<(_target_name)', + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/generate_split_manifest.py', + '<(android_manifest_path)', + ], + 'outputs': [ + 
'<(split_android_manifest_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/generate_split_manifest.py',
+            '--main-manifest', '<(android_manifest_path)',
+            '--out-manifest', '<(split_android_manifest_path)',
+            '--split', 'abi_<(android_app_abi)',
+          ],
+        },
+        {
+          'variables': {
+            'apk_name': '<(main_apk_name)-abi-<(android_app_abi)',
+            'asset_location': '',
+            'android_manifest_path': '<(split_android_manifest_path)',
+            'create_density_splits': 0,
+            'language_splits=': [],
+          },
+          'includes': [ 'android/package_resources_action.gypi' ],
+        },
+        {
+          'variables': {
+            'apk_name': '<(main_apk_name)-abi-<(android_app_abi)',
+            'apk_path': '<(unsigned_abi_split_apk_path)',
+            'has_code': 0,
+            'native_libs_dir': '<(apk_package_native_libs_dir)',
+            'extra_inputs': ['<(native_lib_placeholder_stamp)'],
+          },
+          'includes': ['android/apkbuilder_action.gypi'],
+        },
+      ],
+    }],
+    ['create_abi_split == 1 and (gyp_managed_install == 0 or create_standalone_apk == 1)', {
+      'actions': [
+        {
+          'action_name': 'finalize_split',
+          'variables': {
+            'output_apk_path': '<(final_abi_split_apk_path)',
+            'conditions': [
+              ['gyp_managed_install == 1', {
+                'input_apk_path': '<(unsigned_standalone_apk_path)',
+              }, {
+                'input_apk_path': '<(unsigned_abi_split_apk_path)',
+              }],
+            ],
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+      ],
+    }],
+    ['gyp_managed_install == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize incomplete apk',
+          'variables': {
+            'load_library_from_zip': 0,
+            'input_apk_path': '<(managed_input_apk_path)',
+            'output_apk_path': '<(incomplete_apk_path)',
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+        {
+          'action_name': 'apk_install_<(_target_name)',
+          'message': 'Installing <(apk_name).apk',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/apk_install.py',
+            '<(build_device_config_path)',
+            '<(incomplete_apk_path)',
+          ],
+          'outputs': [
+            '<(apk_install_record)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/apk_install.py',
+            '--build-device-configuration=<(build_device_config_path)',
+            '--install-record=<(apk_install_record)',
+            '--configuration-name=<(CONFIGURATION_NAME)',
+            '--android-sdk-tools', '<(android_sdk_tools)',
+            '--output-directory', '<(PRODUCT_DIR)',
+          ],
+          'conditions': [
+            ['create_abi_split == 1', {
+              'inputs': [
+                '<(final_apk_path)',
+              ],
+              'action': [
+                '--apk-path=<(final_apk_path)',
+                '--split-apk-path=<(incomplete_apk_path)',
+              ],
+            }, {
+              'action': [
+                '--apk-path=<(incomplete_apk_path)',
+              ],
+            }],
+            ['create_density_splits == 1', {
+              'inputs': [
+                '<(final_apk_path_no_extension)-density-hdpi.apk',
+                '<(final_apk_path_no_extension)-density-xhdpi.apk',
+                '<(final_apk_path_no_extension)-density-xxhdpi.apk',
+                '<(final_apk_path_no_extension)-density-xxxhdpi.apk',
+                '<(final_apk_path_no_extension)-density-tvdpi.apk',
+              ],
+              'action': [
+                '--split-apk-path=<(final_apk_path_no_extension)-density-hdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xxhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xxxhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-tvdpi.apk',
+              ],
+            }],
+            ['language_splits != []', {
+              'inputs': [
+                "<!@(python <(DEPTH)/build/apply_locales.py '<(final_apk_path_no_extension)-lang-ZZLOCALE.apk' <(language_splits))",
+              ],
+              'action': [
+                "<!@(python <(DEPTH)/build/apply_locales.py -- '--split-apk-path=<(final_apk_path_no_extension)-lang-ZZLOCALE.apk' <(language_splits))",
+              ],
+            }],
+          ],
+        },
+      ],
+    }],
+    ['run_findbugs == 1', {
+      'actions': [
+        {
+          'action_name': 'findbugs_<(_target_name)',
+          'message': 'Running findbugs on <(_target_name)',
+          'variables': {
+            'additional_findbugs_args': [],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/findbugs_diff.py',
+            '>@(input_jars_paths)',
+            '<(jar_path)',
+            '<(compile_stamp)',
+          ],
+          'outputs': [
+            '<(findbugs_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/findbugs_diff.py',
+            '--auxclasspath-gyp', '>(input_jars_paths)',
+            '--stamp',
'<(findbugs_stamp)', + '<@(additional_findbugs_args)', + '<(jar_path)', + ], + }, + ], + }], + ], + 'target_conditions': [ + ['generate_build_config == 1 and tested_apk_generated_multidex_config == 0', { + 'variables': { + 'generated_src_dirs': ['<(build_config_java_dir)'], + }, + 'actions': [ + { + 'action_name': 'configure_multidex_for_<(_target_name)', + 'inputs': [ + '<(DEPTH)/build/android/gyp/configure_multidex.py', + '<(build_config_template)', + ], + 'outputs': [ + '<(multidex_configuration_path)', + '<(build_config_java_stamp)', + ], + 'variables': { + 'additional_multidex_config_options': [], + 'enabled_configurations': '>(enable_multidex_configurations)', + 'conditions': [ + ['enable_multidex == 1', { + 'additional_multidex_config_options': ['--enable-multidex'], + }], + ], + }, + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/configure_multidex.py', + '--configuration-name', '<(CONFIGURATION_NAME)', + '--enabled-configurations', '<(enabled_configurations)', + '--multidex-configuration-path', '<(multidex_configuration_path)', + '--multidex-config-java-template', '<(build_config_template)', + '--multidex-config-java-file', '<(build_config_java_file)', + '--multidex-config-java-stamp', '<(build_config_java_stamp)', + '>@(additional_multidex_config_options)', + ], + }, + ], + 'conditions': [ + ['enable_multidex == 1', { + 'actions': [ + { + 'action_name': 'main_dex_list_for_<(_target_name)', + 'variables': { + 'jar_paths': ['>@(input_jars_paths)', '<(javac_jar_path)'], + 'output_path': '<(main_dex_list_path)', + }, + 'includes': [ 'android/main_dex_action.gypi' ], + }, + ] + }] + ], + }], + ], + 'dependencies': [ + '<(DEPTH)/build/android/android_lint_cache.gyp:android_lint_cache', + '<(DEPTH)/tools/android/md5sum/md5sum.gyp:md5sum', + ], + 'actions': [ + { + 'action_name': 'process_resources', + 'message': 'processing resources for <(_target_name)', + 'variables': { + 'local_additional_input_paths': [ + '>@(additional_input_paths)', + ], + 'local_dependencies_res_zip_paths': [ + '>@(dependencies_res_zip_paths)' + ], + # Write the inputs list to a file, so that its mtime is updated when + # the list of inputs changes. 
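+        # (gyp only re-runs an action when an input's mtime changes; when a
+        # file is merely removed from the list no remaining input changes, so
+        # the list file itself has to act as an input.)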
+ 'inputs_list_file': '>|(apk_codegen.<(_target_name).gypcmd >@(local_additional_input_paths) >@(resource_input_paths))', + + 'process_resources_options': [], + 'conditions': [ + ['dependencies_locale_zip_alternative_paths == []', { + 'local_dependencies_res_zip_paths': ['>@(dependencies_locale_zip_paths)'], + 'local_additional_input_paths': ['>@(additional_locale_input_paths)'] + }, { + 'local_dependencies_res_zip_paths': ['<@(dependencies_locale_zip_alternative_paths)'], + 'local_additional_input_paths': ['>@(dependencies_locale_zip_alternative_paths)'], + }], + ['is_test_apk == 1', { + 'dependencies_locale_zip_paths=': [], + 'dependencies_res_zip_paths=': [], + 'additional_res_packages=': [], + }], + ['res_v14_skip == 1', { + 'process_resources_options+': ['--v14-skip'] + }], + ['shared_resources == 1', { + 'process_resources_options+': ['--shared-resources'] + }], + ['app_as_shared_library == 1', { + 'process_resources_options+': ['--app-as-shared-lib'] + }], + ['R_package != ""', { + 'process_resources_options+': ['--custom-package', '<(R_package)'] + }], + ['include_all_resources == 1', { + 'process_resources_options+': ['--include-all-resources'] + }] + ], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/process_resources.py', + '<(android_manifest_path)', + '>@(local_additional_input_paths)', + '>@(resource_input_paths)', + '>@(local_dependencies_res_zip_paths)', + '>(inputs_list_file)', + ], + 'outputs': [ + '<(resource_zip_path)', + '<(generated_proguard_file)', + '<(codegen_stamp)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/process_resources.py', + '--android-sdk-jar', '<(android_sdk_jar)', + '--aapt-path', '<(android_aapt_path)', + + '--android-manifest', '<(android_manifest_path)', + '--dependencies-res-zips', '>(local_dependencies_res_zip_paths)', + + '--extra-res-packages', '>(additional_res_packages)', + '--extra-r-text-files', '>(additional_R_text_files)', + + '--proguard-file', '<(generated_proguard_file)', + + '--resource-dirs', '<(resource_dir)', + '--resource-zip-out', '<(resource_zip_path)', + + '--R-dir', '<(intermediate_dir)/gen', + + '--stamp', '<(codegen_stamp)', + + '<@(process_resources_options)', + ], + }, + { + 'action_name': 'javac_<(_target_name)', + 'message': 'Compiling java for <(_target_name)', + 'variables': { + 'extra_args': [], + 'extra_inputs': [], + 'gen_src_dirs': [ + '<(intermediate_dir)/gen', + '>@(generated_src_dirs)', + ], + # If there is a separate find for additional_src_dirs, it will find the + # wrong .java files when additional_src_dirs is empty. + # TODO(thakis): Gyp caches >! evaluation by command. Both java.gypi and + # java_apk.gypi evaluate the same command, and at the moment two targets + # set java_in_dir to "java". Add a dummy comment here to make sure + # that the two targets (one uses java.gypi, the other java_apk.gypi) + # get distinct source lists. Medium-term, make targets list all their + # Java files instead of using find. (As is, this will be broken if two + # targets use the same java_in_dir and both use java_apk.gypi or + # both use java.gypi.) 
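+        # (For instance, two targets that both evaluate
+        # '>!@(find java/src -name "*.java")' would share one cached result;
+        # the trailing "# apk" in the command below keeps this file's command
+        # string unique. 'java/src' here is just an illustrative path.)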
+ 'java_sources': ['>!@(find >(java_in_dir)>(java_in_dir_suffix) >(additional_src_dirs) -name "*.java" # apk)'], + 'conditions': [ + ['enable_errorprone == 1', { + 'extra_inputs': [ + '<(errorprone_exe_path)', + ], + 'extra_args': [ '--use-errorprone-path=<(errorprone_exe_path)' ], + }], + ], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/javac.py', + '>@(java_sources)', + '>@(input_jars_paths)', + '<(codegen_stamp)', + '<@(extra_inputs)', + ], + 'conditions': [ + ['native_lib_target != ""', { + 'inputs': [ '<(native_libraries_java_stamp)' ], + }], + ], + 'target_conditions': [ + ['generate_build_config == 1 and tested_apk_generated_multidex_config == 0', { + 'inputs': [ '<(build_config_java_stamp)' ], + }], + ], + 'outputs': [ + '<(compile_stamp)', + '<(javac_jar_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/javac.py', + '--bootclasspath=<(android_sdk_jar)', + '--classpath=>(input_jars_paths) <(android_sdk_jar) >(library_jars_paths)', + '--src-gendirs=>(gen_src_dirs)', + '--javac-includes=<(javac_includes)', + '--chromium-code=<(chromium_code)', + '--jar-path=<(javac_jar_path)', + '--jar-excluded-classes=<(jar_excluded_classes)', + '--stamp=<(compile_stamp)', + '<@(extra_args)', + '>@(java_sources)', + ], + }, + { + 'action_name': 'emma_instr_jar_<(_target_name)', + 'message': 'Instrumenting <(_target_name) jar', + 'variables': { + 'input_path': '<(javac_jar_path)', + 'output_path': '<(jar_path)', + 'coverage_file': '<(PRODUCT_DIR)/lib.java/<(_target_name).em', + 'sources_list_file': '<(PRODUCT_DIR)/lib.java/<(_target_name)_sources.txt', + 'stamp_path': '<(emma_instr_stamp)', + }, + 'outputs': [ + '<(emma_instr_stamp)', + '<(jar_path)', + ], + 'inputs': [ + '<(javac_jar_path)', + ], + 'includes': [ 'android/emma_instr_action.gypi' ], + }, + { + 'variables': { + 'src_dirs': [ + '<(java_in_dir)<(java_in_dir_suffix)', + '>@(additional_src_dirs)', + ], + 'lint_jar_path': '<(jar_path)', + 'stamp_path': '<(lint_stamp)', + 'result_path': '<(lint_result)', + 'config_path': '<(lint_config)', + }, + 'outputs': [ + '<(lint_stamp)', + ], + 'includes': [ 'android/lint_action.gypi' ], + }, + { + 'action_name': 'obfuscate_<(_target_name)', + 'message': 'Obfuscating <(_target_name)', + 'variables': { + 'additional_obfuscate_options': [], + 'additional_obfuscate_input_paths': [], + 'proguard_out_dir': '<(intermediate_dir)/proguard', + 'proguard_input_jar_paths': [ + '>@(input_jars_paths)', + '<(jar_path)', + ], + 'target_conditions': [ + ['is_test_apk == 1', { + 'additional_obfuscate_options': [ + '--testapp', + ], + }], + ['is_test_apk == 1 and tested_apk_obfuscated_jar_path != "/"', { + 'additional_obfuscate_options': [ + '--tested-apk-obfuscated-jar-path', '>(tested_apk_obfuscated_jar_path)', + ], + 'additional_obfuscate_input_paths': [ + '>(tested_apk_obfuscated_jar_path).info', + ], + }], + ['proguard_enabled == "true"', { + 'additional_obfuscate_options': [ + '--proguard-enabled', + ], + }], + ['debug_build_proguard_enabled == "true"', { + 'additional_obfuscate_options': [ + '--debug-build-proguard-enabled', + ], + }], + ], + 'obfuscate_input_jars_paths': [ + '>@(input_jars_paths)', + '<(jar_path)', + ], + }, + 'conditions': [ + ['is_test_apk == 1', { + 'outputs': [ + '<(test_jar_path)', + ], + }], + ['enable_multidex == 1', { + 'inputs': [ + '<(main_dex_list_path)', + '<(multidex_configuration_path)', + ], + 'variables': { + 'additional_obfuscate_options': [ + '--main-dex-list-path', '<(main_dex_list_path)', + 
'--multidex-configuration-path', '<(multidex_configuration_path)', + ], + }, + }], + ], + 'inputs': [ + '<(DEPTH)/build/android/gyp/apk_obfuscate.py', + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '>@(proguard_flags_paths)', + '>@(obfuscate_input_jars_paths)', + '>@(additional_obfuscate_input_paths)', + '<(emma_instr_stamp)', + ], + 'outputs': [ + '<(obfuscate_stamp)', + + # In non-Release builds, these paths will all be empty files. + '<(obfuscated_jar_path)', + '<(obfuscated_jar_path).info', + '<(obfuscated_jar_path).dump', + '<(obfuscated_jar_path).seeds', + '<(obfuscated_jar_path).mapping', + '<(obfuscated_jar_path).usage', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/apk_obfuscate.py', + + '--configuration-name', '<(CONFIGURATION_NAME)', + + '--android-sdk', '<(android_sdk)', + '--android-sdk-tools', '<(android_sdk_tools)', + '--android-sdk-jar', '<(android_sdk_jar)', + + '--input-jars-paths=>(proguard_input_jar_paths)', + '--proguard-configs=>(proguard_flags_paths)', + + '--test-jar-path', '<(test_jar_path)', + '--obfuscated-jar-path', '<(obfuscated_jar_path)', + + '--proguard-jar-path', '<(DEPTH)/third_party/proguard/lib/proguard.jar', + + '--stamp', '<(obfuscate_stamp)', + + '>@(additional_obfuscate_options)', + ], + }, + { + 'action_name': 'dex_<(_target_name)', + 'variables': { + 'dex_additional_options': [], + 'dex_input_paths': [ + '<(jar_path)', + ], + 'output_path': '<(dex_path)', + 'proguard_enabled_input_path': '<(obfuscated_jar_path)', + }, + 'conditions': [ + ['enable_multidex == 1', { + 'inputs': [ + '<(main_dex_list_path)', + '<(multidex_configuration_path)', + ], + 'variables': { + 'dex_additional_options': [ + '--main-dex-list-path', '<(main_dex_list_path)', + '--multidex-configuration-path', '<(multidex_configuration_path)', + ], + }, + }], + ], + 'target_conditions': [ + ['enable_multidex == 1 or tested_apk_is_multidex == 1', { + 'variables': { + 'dex_input_paths': [ + '>@(input_jars_paths)', + ], + }, + }, { + 'variables': { + 'dex_input_paths': [ + '>@(library_dexed_jars_paths)', + ], + }, + }], + ['emma_instrument != 0', { + 'variables': { + 'dex_no_locals': 1, + 'dex_input_paths': [ + '<(emma_device_jar)' + ], + }, + }], + ['is_test_apk == 1 and tested_apk_dex_path != "/"', { + 'variables': { + 'dex_additional_options': [ + '--excluded-paths', '@FileArg(>(tested_apk_dex_path).inputs)' + ], + }, + 'inputs': [ + '>(tested_apk_dex_path).inputs', + ], + }], + ['proguard_enabled == "true" or debug_build_proguard_enabled == "true"', { + 'inputs': [ '<(obfuscate_stamp)' ] + }, { + 'inputs': [ '<(emma_instr_stamp)' ] + }], + ], + 'includes': [ 'android/dex_action.gypi' ], + }, + { + 'variables': { + 'local_dependencies_res_zip_paths': ['>@(dependencies_res_zip_paths)'], + 'extra_inputs': ['<(codegen_stamp)'], + 'resource_zips': [ + '<(resource_zip_path)', + ], + 'conditions': [ + ['dependencies_locale_zip_alternative_paths == []', { + 'local_dependencies_res_zip_paths': ['>@(dependencies_locale_zip_paths)'], + }, { + 'local_dependencies_res_zip_paths': ['<@(dependencies_locale_zip_alternative_paths)'], + }], + ['is_test_apk == 0', { + 'resource_zips': [ + '>@(local_dependencies_res_zip_paths)', + ], + }], + ], + }, + 'includes': [ 'android/package_resources_action.gypi' ], + }, + { + 'variables': { + 'apk_path': '<(unsigned_apk_path)', + 'conditions': [ + ['native_lib_target != ""', { + 'extra_inputs': ['<(native_lib_placeholder_stamp)'], + }], + ['create_abi_split == 0', { + 'native_libs_dir': '<(apk_package_native_libs_dir)', + }, { + 
'native_libs_dir': '<(DEPTH)/build/android/ant/empty/res', + }], + ], + }, + 'includes': ['android/apkbuilder_action.gypi'], + }, + ], +} diff --git a/build/java_prebuilt.gypi b/build/java_prebuilt.gypi new file mode 100644 index 00000000000..cb654cbe82e --- /dev/null +++ b/build/java_prebuilt.gypi @@ -0,0 +1,101 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide a rule +# to package prebuilt Java JARs in a consistent manner. +# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'my-package_java', +# 'type': 'none', +# 'variables': { +# 'jar_path': 'path/to/your.jar', +# }, +# 'includes': ['path/to/this/gypi/file'], +# } +# +# Required variables: +# jar_path - The path to the prebuilt Java JAR file. + +{ + 'dependencies': [ + '<(DEPTH)/build/android/setup.gyp:build_output_dirs' + ], + 'variables': { + 'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar', + 'android_jar': '<(android_sdk)/android.jar', + 'input_jars_paths': [ '<(android_jar)' ], + 'neverlink%': 0, + 'proguard_config%': '', + 'proguard_preprocess%': '0', + 'variables': { + 'variables': { + 'proguard_preprocess%': 0, + }, + 'conditions': [ + ['proguard_preprocess == 1', { + 'dex_input_jar_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).pre.jar', + }, { + 'dex_input_jar_path': '<(jar_path)', + }], + ], + }, + 'dex_input_jar_path': '<(dex_input_jar_path)', + }, + 'all_dependent_settings': { + 'variables': { + 'input_jars_paths': ['<(dex_input_jar_path)'], + 'conditions': [ + ['neverlink == 1', { + 'library_dexed_jars_paths': [], + }, { + 'library_dexed_jars_paths': ['<(dex_path)'], + }], + ], + }, + }, + 'conditions' : [ + ['proguard_preprocess == 1', { + 'actions': [ + { + 'action_name': 'proguard_<(_target_name)', + 'message': 'Proguard preprocessing <(_target_name) jar', + 'inputs': [ + '<(DEPTH)/third_party/proguard/lib/proguard.jar', + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/proguard.py', + '<(jar_path)', + '<(proguard_config)', + ], + 'outputs': [ + '<(dex_input_jar_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/proguard.py', + '--proguard-path=<(DEPTH)/third_party/proguard/lib/proguard.jar', + '--input-path=<(jar_path)', + '--output-path=<(dex_input_jar_path)', + '--proguard-config=<(proguard_config)', + '--classpath=>(input_jars_paths)', + ] + }, + ], + }], + ['neverlink == 0', { + 'actions': [ + { + 'action_name': 'dex_<(_target_name)', + 'message': 'Dexing <(_target_name) jar', + 'variables': { + 'dex_input_paths': [ + '<(dex_input_jar_path)', + ], + 'output_path': '<(dex_path)', + }, + 'includes': [ 'android/dex_action.gypi' ], + }, + ], + }], + ], +} diff --git a/build/java_strings_grd.gypi b/build/java_strings_grd.gypi new file mode 100644 index 00000000000..7534be5beea --- /dev/null +++ b/build/java_strings_grd.gypi @@ -0,0 +1,62 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide a rule +# to generate localized strings.xml from a grd file. 
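+# (Roughly: each translatable <message> in the grd file becomes a <string>
+# entry in a generated values-<locale>/strings.xml, which is then packaged
+# into the apk's resources.)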
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'my-package_strings_grd',
+# 'type': 'none',
+# 'variables': {
+# 'grd_file': 'path/to/grd/file',
+# },
+# 'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+# grd_file - The path to the grd file to use.
+{
+  'variables': {
+    'res_grit_dir': '<(INTERMEDIATE_DIR)/<(_target_name)/res_grit',
+    'grit_grd_file': '<(grd_file)',
+    'resource_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+    'grit_additional_defines': ['-E', 'ANDROID_JAVA_TAGGED_ONLY=false'],
+    'grit_out_dir': '<(res_grit_dir)',
+    # resource_ids is unneeded since we don't generate .h headers.
+    'grit_resource_ids': '',
+    'grit_outputs': [
+      '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+          '--outputs "<(grit_out_dir)" <(grit_grd_file))',
+    ],
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'additional_locale_input_paths': ['<(resource_zip_path)'],
+      'dependencies_locale_zip_paths': ['<(resource_zip_path)'],
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'generate_localized_strings_xml',
+      'includes': ['../build/grit_action.gypi'],
+    },
+  ],
+}
diff --git a/build/landmine_utils.py b/build/landmine_utils.py
new file mode 100644
--- /dev/null
+++ b/build/landmine_utils.py
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import shlex
+import sys
+
+
+def memoize(default=None):
+  """This decorator caches the return value of a parameterless function."""
+  def memoizer(func):
+    val = []
+    @functools.wraps(func)
+    def inner():
+      if not val:
+        ret = func()
+        val.append(ret if ret is not None else default)
+        if logging.getLogger().isEnabledFor(logging.INFO):
+          print '%s -> %r' % (func.__name__, val[0])
+      return val[0]
+    return inner
+  return memoizer
+
+
+@memoize()
+def IsWindows():
+  return sys.platform in ['win32', 'cygwin']
+
+
+@memoize()
+def IsLinux():
+  return sys.platform.startswith(('linux', 'freebsd', 'netbsd', 'openbsd'))
+
+
+@memoize()
+def IsMac():
+  return sys.platform == 'darwin'
+
+
+@memoize()
+def gyp_defines():
+  """Parses and returns GYP_DEFINES env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+
+@memoize()
+def gyp_generator_flags():
+  """Parses and returns GYP_GENERATOR_FLAGS env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', '')))
+
+
+@memoize()
+def gyp_msvs_version():
+  return os.environ.get('GYP_MSVS_VERSION', '')
+
+
+@memoize()
+def distributor():
+  """
+  Returns a string which is the distributed build engine in use (if any).
+  Possible values: 'goma', None
+  """
+  if 'goma' in gyp_defines():
+    return 'goma'
+
+
+@memoize()
+def platform():
+  """
+  Returns a string representing the platform this build is targeted for.
+  Possible values: 'win', 'mac', 'linux', 'ios', 'android'
+  """
+  if 'OS' in gyp_defines():
+    if 'android' in gyp_defines()['OS']:
+      return 'android'
+    else:
+      return gyp_defines()['OS']
+  elif IsWindows():
+    return 'win'
+  elif IsLinux():
+    return 'linux'
+  else:
+    return 'mac'
diff --git a/build/landmines.py b/build/landmines.py
new file mode 100644
index 00000000000..1b2f1cdbb08
--- /dev/null
+++ b/build/landmines.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs every build as the first hook (See DEPS). If it detects that
+the build should be clobbered, it will delete the contents of the build
+directory.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+"""
+
+import difflib
+import errno
+import gyp_environment
+import logging
+import optparse
+import os
+import sys
+import subprocess
+import time
+
+import clobber
+import landmine_utils
+
+
+def get_build_dir(src_dir):
+  """
+  Returns output directory absolute path dependent on build and targets.
+  Examples:
+    r'c:\b\build\slave\win\build\src\out'
+    '/mnt/data/b/build/slave/linux/build/src/out'
+    '/b/build/slave/ios_rel_device/build/src/out'
+
+  Keep this function in sync with tools/build/scripts/slave/compile.py
+  """
+  if 'CHROMIUM_OUT_DIR' in os.environ:
+    output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
+    if not output_dir:
+      raise RuntimeError(
+          'CHROMIUM_OUT_DIR environment variable is set but blank!')
+  else:
+    output_dir = landmine_utils.gyp_generator_flags().get('output_dir', 'out')
+  return os.path.abspath(os.path.join(src_dir, output_dir))
+
+
+def clobber_if_necessary(new_landmines, src_dir):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_build_dir(src_dir)
+  landmines_path = os.path.normpath(os.path.join(src_dir, '.landmines'))
+  try:
+    os.makedirs(out_dir)
+  except OSError as e:
+    if e.errno == errno.EEXIST:
+      pass
+
+  if os.path.exists(landmines_path):
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+      sys.stdout.write('Clobbering due to:\n')
+      sys.stdout.writelines(diff)
+      sys.stdout.flush()
+
+      clobber.clobber(out_dir)
+
+  # Save current set of landmines for next time.
+  with open(landmines_path, 'w') as f:
+    f.writelines(new_landmines)
+
+
+def process_options():
+  """Returns an options object containing the configuration for this script."""
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '-s', '--landmine-scripts', action='append',
+      help='Path to the script which emits landmines to stdout. The target '
+           'is passed to this script via option -t. Note that an extra '
+           'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
+  parser.add_option('-d', '--src-dir',
+      help='Path of the source root dir. Overrides the default location of '
+           'the source root dir when calculating the build directory.')
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+            'is also enabled by the presence of a LANDMINES_VERBOSE '
+            'environment variable.'))
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  if options.src_dir:
+    if not os.path.isdir(options.src_dir):
+      parser.error('Cannot find source root dir at %s' % options.src_dir)
+    logging.debug('Overriding source root dir. Using: %s', options.src_dir)
+  else:
+    options.src_dir = \
+        os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+  if not options.landmine_scripts:
+    options.landmine_scripts = [os.path.join(options.src_dir, 'build',
+                                             'get_landmines.py')]
+
+  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+  if extra_script:
+    options.landmine_scripts += [extra_script]
+
+  return options
+
+
+def main():
+  options = process_options()
+
+  gyp_environment.SetEnvironment()
+
+  landmines = []
+  for s in options.landmine_scripts:
+    proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE)
+    output, _ = proc.communicate()
+    landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+  clobber_if_necessary(landmines, options.src_dir)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/linux/BUILD.gn b/build/linux/BUILD.gn
new file mode 100644
index 00000000000..c81172e427f
--- /dev/null
+++ b/build/linux/BUILD.gn
@@ -0,0 +1,74 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+
+# If brlapi isn't needed, don't require it to be installed.
+if (use_brlapi) {
+  deps = [
+    "//build/linux/libbrlapi",
+  ]
+}
+if (use_gio) {
+  pkg_config("gio_config") {
+    packages = [ "gio-2.0" ]
+
+    # glib >=2.40 deprecate g_settings_list_schemas in favor of
+    # g_settings_schema_source_list_schemas. This function is not available on
+    # earlier versions that we still need to support (specifically, 2.32), so
+    # disable the warning with the GLIB_DISABLE_DEPRECATION_WARNINGS define.
+    # TODO(mgiuca): Remove this suppression when we drop support for Ubuntu
+    # 13.10 (saucy) and earlier. Update the code to use
+    # g_settings_schema_source_list_schemas instead.
+    defines = [
+      "USE_GIO",
+      "GLIB_DISABLE_DEPRECATION_WARNINGS",
+    ]
+
+    # TODO(brettw) Theoretically I think ignore_libs should be set so that we
+    # don't link directly to GIO and use the loader generated below. But the
+    # gio target in GYP doesn't make any sense to me and appears to link
+    # directly to GIO in addition to making a loader. With this uncommented,
+    # the link in component build fails, so I think this is closer to the
+    # GYP build.
+    #ignore_libs = true  # Loader generated below.
+  }
+
+  deps = [
+    "//build/linux/libgio",
+  ]
+}
+
+# Looking for libspeechd? Use //third_party/speech-dispatcher
+
+group("fontconfig") {
+  if (is_chromecast) {
+    # Chromecast platform does not provide fontconfig
+    public_deps = [
+      "//third_party/fontconfig",
+    ]
+  } else {
+    public_configs = [ "//build/config/linux:fontconfig" ]
+  }
+}
+
+if (!is_chromecast) {
+  pkg_config("freetype2_config") {
+    visibility = [ ":freetype2" ]
+    packages = [ "freetype2" ]
+  }
+}
+
+group("freetype2") {
+  if (is_chromecast) {
+    # Chromecast platform doesn't provide freetype, so use Chromium's.
+    # The version in freetype-android is unmodified from freetype2 upstream.
+    public_deps = [
+      "//third_party/freetype-android:freetype",
+    ]
+  } else {
+    public_configs = [ ":freetype2_config" ]
+  }
+}
diff --git a/build/linux/OWNERS b/build/linux/OWNERS
new file mode 100644
index 00000000000..4a60b79d484
--- /dev/null
+++ b/build/linux/OWNERS
@@ -0,0 +1,3 @@
+mmoss@chromium.org
+phajdan.jr@chromium.org
+thestig@chromium.org
diff --git a/build/linux/bin/eu-strip.sha1 b/build/linux/bin/eu-strip.sha1
new file mode 100644
index 00000000000..43f290a701a
--- /dev/null
+++ b/build/linux/bin/eu-strip.sha1
@@ -0,0 +1 @@
+0a9b8f68615ce388b65201e6d22da7a9cf2e729c
\ No newline at end of file
diff --git a/build/linux/chrome_linux.croc b/build/linux/chrome_linux.croc
new file mode 100644
index 00000000000..f4003060f69
--- /dev/null
+++ b/build/linux/chrome_linux.croc
@@ -0,0 +1,29 @@
+# -*- python -*-
+# Crocodile config file for Chromium linux
+
+# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
+# for that platform.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include non-Linux platform dirs
+    {
+      'regexp' : '.*/(chromeos|views)/',
+      'include' : 0,
+    },
+    # Don't include chromeos, windows, or mac specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_linux\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/linux/dump_app_syms.py b/build/linux/dump_app_syms.py
new file mode 100644
index 00000000000..212c1bb6716
--- /dev/null
+++ b/build/linux/dump_app_syms.py
@@ -0,0 +1,32 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
+
+import os
+import subprocess
+import sys
+
+if len(sys.argv) != 5:
+  print "dump_app_syms.py <dump_syms_exe> <strip_binary>"
+  print "                 <binary_with_symbols> <symbols_output>"
+  sys.exit(1)
+
+dumpsyms = sys.argv[1]
+strip_binary = sys.argv[2]
+infile = sys.argv[3]
+outfile = sys.argv[4]
+
+# Dump only when the output file is out-of-date.
+if not os.path.isfile(outfile) or \
+   os.stat(outfile).st_mtime < os.stat(infile).st_mtime:
+  with open(outfile, 'w') as outfileobj:
+    subprocess.check_call([dumpsyms, '-r', infile], stdout=outfileobj)
+
+if strip_binary == '1':
+  strip_binary = 'strip'
+
+if strip_binary != '0':
+  subprocess.check_call([strip_binary, infile])
diff --git a/build/linux/install-chromeos-fonts.py b/build/linux/install-chromeos-fonts.py
new file mode 100644
index 00000000000..71712870b3b
--- /dev/null
+++ b/build/linux/install-chromeos-fonts.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install the Chrome OS fonts on Linux.
+# This script can be run manually (as root), but is also run as part of
+# install-build-deps.sh.
+
+import os
+import shutil
+import subprocess
+import sys
+
+URL_TEMPLATE = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
+                'distfiles/%(name)s-%(version)s.tar.bz2')
+
+# Taken from the media-fonts/<name> ebuilds in chromiumos-overlay.
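+# Each entry is expanded through URL_TEMPLATE above, e.g. the first one
+# fetches .../distfiles/notofonts-20160310.tar.bz2.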
+SOURCES = [
+  {
+    'name': 'notofonts',
+    'version': '20160310'
+  }, {
+    'name': 'robotofonts',
+    'version': '2.132'
+  }
+]
+
+URLS = sorted([URL_TEMPLATE % d for d in SOURCES])
+FONTS_DIR = '/usr/local/share/fonts'
+
+def main(args):
+  if not sys.platform.startswith('linux'):
+    print "Error: %s must be run on Linux." % __file__
+    return 1
+
+  if os.getuid() != 0:
+    print "Error: %s must be run as root." % __file__
+    return 1
+
+  if not os.path.isdir(FONTS_DIR):
+    print "Error: Destination directory does not exist: %s" % FONTS_DIR
+    return 1
+
+  dest_dir = os.path.join(FONTS_DIR, 'chromeos')
+
+  stamp = os.path.join(dest_dir, ".stamp02")
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == '\n'.join(URLS):
+        print "Chrome OS fonts already up to date in %s." % dest_dir
+        return 0
+
+  if os.path.isdir(dest_dir):
+    shutil.rmtree(dest_dir)
+  os.mkdir(dest_dir)
+  os.chmod(dest_dir, 0755)
+
+  print "Installing Chrome OS fonts to %s." % dest_dir
+  for url in URLS:
+    tarball = os.path.join(dest_dir, os.path.basename(url))
+    subprocess.check_call(['curl', '-L', url, '-o', tarball])
+    subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
+                           '-xf', tarball, '-C', dest_dir])
+    os.remove(tarball)
+
+  readme = os.path.join(dest_dir, "README")
+  with open(readme, 'w') as s:
+    s.write("This directory and its contents are auto-generated.\n")
+    s.write("It may be deleted and recreated. Do not modify.\n")
+    s.write("Script: %s\n" % __file__)
+
+  with open(stamp, 'w') as s:
+    s.write('\n'.join(URLS))
+
+  for base, dirs, files in os.walk(dest_dir):
+    for dir in dirs:
+      os.chmod(os.path.join(base, dir), 0755)
+    for file in files:
+      os.chmod(os.path.join(base, file), 0644)
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/linux/libbrlapi/BUILD.gn b/build/linux/libbrlapi/BUILD.gn
new file mode 100644
index 00000000000..fa99540e620
--- /dev/null
+++ b/build/linux/libbrlapi/BUILD.gn
@@ -0,0 +1,32 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+config("brlapi_config") {
+  defines = [ "USE_BRLAPI" ]
+}
+
+# TODO(GYP) linux_link_brlapi support. Is this needed?
+generate_library_loader("libbrlapi") {
+  name = "LibBrlapiLoader"
+  output_h = "libbrlapi.h"
+  output_cc = "libbrlapi_loader.cc"
+  header = "<brlapi.h>"
+  config = ":brlapi_config"
+
+  functions = [
+    "brlapi_getHandleSize",
+    "brlapi_error_location",
+    "brlapi_strerror",
+    "brlapi__acceptKeys",
+    "brlapi__openConnection",
+    "brlapi__closeConnection",
+    "brlapi__getDisplaySize",
+    "brlapi__enterTtyModeWithPath",
+    "brlapi__leaveTtyMode",
+    "brlapi__writeDots",
+    "brlapi__readKey",
+  ]
+}
diff --git a/build/linux/libgio/BUILD.gn b/build/linux/libgio/BUILD.gn
new file mode 100644
index 00000000000..3deb63dd6ca
--- /dev/null
+++ b/build/linux/libgio/BUILD.gn
@@ -0,0 +1,26 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+# This generates a target named "libgio".
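+# (generate_library_loader produces a small class, here LibGioLoader in the
+# generated libgio.h/libgio_loader.cc, that dlopen()s the library at runtime
+# and resolves each function listed below, rather than linking gio directly.)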
+generate_library_loader("libgio") {
+  name = "LibGioLoader"
+  output_h = "libgio.h"
+  output_cc = "libgio_loader.cc"
+  header = "<gio/gio.h>"
+  config = "//build/linux:gio_config"
+
+  functions = [
+    "glib_check_version",
+    "g_type_init",
+    "g_settings_new",
+    "g_settings_get_child",
+    "g_settings_get_string",
+    "g_settings_get_boolean",
+    "g_settings_get_int",
+    "g_settings_get_strv",
+    "g_settings_list_schemas",
+  ]
+}
diff --git a/build/linux/libpci/BUILD.gn b/build/linux/libpci/BUILD.gn
new file mode 100644
index 00000000000..2d1e267e126
--- /dev/null
+++ b/build/linux/libpci/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+# This generates a target named "libpci".
+generate_library_loader("libpci") {
+  name = "LibPciLoader"
+  output_h = "libpci.h"
+  output_cc = "libpci_loader.cc"
+  header = "<pci/pci.h>"
+
+  functions = [
+    "pci_alloc",
+    "pci_init",
+    "pci_cleanup",
+    "pci_scan_bus",
+    "pci_fill_info",
+    "pci_lookup_name",
+  ]
+}
diff --git a/build/linux/libudev/BUILD.gn b/build/linux/libudev/BUILD.gn
new file mode 100644
index 00000000000..9486a03292d
--- /dev/null
+++ b/build/linux/libudev/BUILD.gn
@@ -0,0 +1,65 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+libudev_functions = [
+  "udev_device_get_action",
+  "udev_device_get_devnode",
+  "udev_device_get_parent",
+  "udev_device_get_parent_with_subsystem_devtype",
+  "udev_device_get_property_value",
+  "udev_device_get_subsystem",
+  "udev_device_get_sysattr_value",
+  "udev_device_get_sysname",
+  "udev_device_get_syspath",
+  "udev_device_new_from_devnum",
+  "udev_device_new_from_subsystem_sysname",
+  "udev_device_new_from_syspath",
+  "udev_device_unref",
+  "udev_enumerate_add_match_subsystem",
+  "udev_enumerate_get_list_entry",
+  "udev_enumerate_new",
+  "udev_enumerate_scan_devices",
+  "udev_enumerate_unref",
+  "udev_list_entry_get_next",
+  "udev_list_entry_get_name",
+  "udev_monitor_enable_receiving",
+  "udev_monitor_filter_add_match_subsystem_devtype",
+  "udev_monitor_get_fd",
+  "udev_monitor_new_from_netlink",
+  "udev_monitor_receive_device",
+  "udev_monitor_unref",
+  "udev_new",
+  "udev_set_log_fn",
+  "udev_set_log_priority",
+  "udev_unref",
+]
+
+# This generates a target named "udev0_loader".
+generate_library_loader("udev0_loader") {
+  name = "LibUdev0Loader"
+  output_h = "libudev0.h"
+  output_cc = "libudev0_loader.cc"
+  header = "\"third_party/libudev/libudev0.h\""
+
+  functions = libudev_functions
+}
+
+# This generates a target named "udev1_loader".
+generate_library_loader("udev1_loader") {
+  name = "LibUdev1Loader"
+  output_h = "libudev1.h"
+  output_cc = "libudev1_loader.cc"
+  header = "\"third_party/libudev/libudev1.h\""
+
+  functions = libudev_functions
+}
+
+group("libudev") {
+  public_deps = [
+    ":udev0_loader",
+    ":udev1_loader",
+  ]
+}
diff --git a/build/linux/pkg-config-wrapper b/build/linux/pkg-config-wrapper
new file mode 100644
index 00000000000..c4935d7b597
--- /dev/null
+++ b/build/linux/pkg-config-wrapper
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This program wraps around pkg-config to generate the correct include and
+# library paths when cross-compiling using a sysroot.
+# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
+# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
+# relative to some parent path of the sysroot.
+# This assumption is valid for a range of sysroots, in particular: a
+# LSB-compliant root filesystem mounted at the sysroot, and a board build
+# directory of a Chromium OS chroot.
+
+set -o nounset
+set -o errexit
+
+root="$1"
+shift
+target_arch="$1"
+shift
+libpath="$1"
+shift
+
+if [ -z "$root" -o -z "$target_arch" ]
+then
+  echo "usage: $0 /path/to/sysroot target_arch libdir [pkg-config-arguments] package" >&2
+  exit 1
+fi
+
+rewrite=`dirname $0`/rewrite_dirs.py
+package=${!#}
+
+libdir=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
+
+set -e
+# Some sysroots, like the Chromium OS ones, may generate paths that are not
+# relative to the sysroot. For example,
+# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
+# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
+# relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+# To support this correctly, it's necessary to extract the prefix to strip from
+# pkg-config's |prefix| variable.
+prefix=`PKG_CONFIG_LIBDIR=$libdir pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
+result=`PKG_CONFIG_LIBDIR=$libdir pkg-config "$@"`
+echo "$result" | $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/build/linux/rewrite_dirs.py b/build/linux/rewrite_dirs.py
new file mode 100644
index 00000000000..30f22f0cd61
--- /dev/null
+++ b/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other options to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+                  '-idirafter',
+                  '-imacros',
+                  '-imultilib',
+                  '-include',
+                  '-iprefix',
+                  '-iquote',
+                  '-isystem',
+                  '-L']
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
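+      # For example, with --sysroot=/sysroot, "-I/usr/include" becomes
+      # "-I/sysroot/usr/include" via RewritePath() above.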
diff --git a/build/linux/rewrite_dirs.py b/build/linux/rewrite_dirs.py
new file mode 100644
index 00000000000..30f22f0cd61
--- /dev/null
+++ b/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other options to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+                  '-idirafter',
+                  '-imacros',
+                  '-imultilib',
+                  '-include',
+                  '-iprefix',
+                  '-iquote',
+                  '-isystem',
+                  '-L']
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    print line
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
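A quick sketch of what the two helpers above compute, assuming build/linux is on sys.path so rewrite_dirs imports as a module; the sysroot and strip-prefix values are invented for illustration.

    import optparse
    import rewrite_dirs  # assumes build/linux is on sys.path

    opts = optparse.Values({'sysroot': '/sysroot',
                            'strip_prefix': '/build/x86-generic'})
    # An absolute path under the strip prefix is re-rooted into the sysroot:
    print rewrite_dirs.RewritePath('/build/x86-generic/usr/include', opts)
    # -> /sysroot/usr/include
    # RewriteLine handles whole argument lines, both "-I dir" and "-Idir":
    print rewrite_dirs.RewriteLine('-I/usr/include/glib-2.0 -pthread', opts)
    # -> -I/sysroot/usr/include/glib-2.0 -pthread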
diff --git a/build/linux/sysroot_ld_path.sh b/build/linux/sysroot_ld_path.sh
new file mode 100644
index 00000000000..4b8bf7305e2
--- /dev/null
+++ b/build/linux/sysroot_ld_path.sh
@@ -0,0 +1,100 @@
+#!/bin/sh
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the
+# appropriate linker flags.
+#
+#  sysroot_ld_path.sh /abspath/to/sysroot
+#
+
+log_error_and_exit() {
+  echo $0: $@
+  exit 1
+}
+
+process_entry() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_entry()"
+  fi
+  local root="$1"
+  local localpath="$2"
+
+  echo $localpath | grep -qs '^/'
+  if [ $? -ne 0 ]; then
+    log_error_and_exit $localpath does not start with /
+  fi
+  local entry="$root$localpath"
+  echo -L$entry
+  echo -Wl,-rpath-link=$entry
+}
+
+process_ld_so_conf() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_ld_so_conf()"
+  fi
+  local root="$1"
+  local ld_so_conf="$2"
+
+  # ld.so.conf may include relative include paths. pushd is a bashism.
+  local saved_pwd=$(pwd)
+  cd $(dirname "$ld_so_conf")
+
+  cat "$ld_so_conf" | \
+    while read ENTRY; do
+      echo "$ENTRY" | grep -qs ^include
+      if [ $? -eq 0 ]; then
+        local included_files=$(echo "$ENTRY" | sed 's/^include //')
+        echo "$included_files" | grep -qs ^/
+        if [ $? -eq 0 ]; then
+          if ls $root$included_files >/dev/null 2>&1 ; then
+            for inc_file in $root$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        else
+          if ls $(pwd)/$included_files >/dev/null 2>&1 ; then
+            for inc_file in $(pwd)/$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        fi
+        continue
+      fi
+
+      echo "$ENTRY" | grep -qs ^/
+      if [ $? -eq 0 ]; then
+        process_entry "$root" "$ENTRY"
+      fi
+    done
+
+  # popd is a bashism
+  cd "$saved_pwd"
+}
+
+# Main
+
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 /abspath/to/sysroot"
+  exit 1
+fi
+
+echo $1 | grep -qs ' '
+if [ $? -eq 0 ]; then
+  log_error_and_exit $1 contains whitespace.
+fi
+
+LD_SO_CONF="$1/etc/ld.so.conf"
+LD_SO_CONF_D="$1/etc/ld.so.conf.d"
+
+if [ -e "$LD_SO_CONF" ]; then
+  process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo
+elif [ -e "$LD_SO_CONF_D" ]; then
+  find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null
+  if [ $? -eq 0 ]; then
+    for entry in $LD_SO_CONF_D/*.conf; do
+      process_ld_so_conf "$1" "$entry"
+    done | xargs echo
+  fi
+fi
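A short sketch of consuming this script's output from Python; the sysroot path is an invented example, and the exact flags depend on that sysroot's ld.so.conf.

    import subprocess

    # Hypothetical sysroot location; the script emits one line of flags.
    flags = subprocess.check_output(
        ['build/linux/sysroot_ld_path.sh', '/opt/sysroots/wheezy_amd64'])
    # process_entry() above emits entries in -L/-rpath-link pairs, e.g.:
    #   -L/opt/sysroots/wheezy_amd64/lib/x86_64-linux-gnu
    #   -Wl,-rpath-link=/opt/sysroots/wheezy_amd64/lib/x86_64-linux-gnu
    print flags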
diff --git a/build/linux/sysroot_scripts/debian-archive-jessie-stable.gpg b/build/linux/sysroot_scripts/debian-archive-jessie-stable.gpg
new file mode 100644
index 00000000000..6a3696efc28
Binary files /dev/null and b/build/linux/sysroot_scripts/debian-archive-jessie-stable.gpg differ
diff --git a/build/linux/sysroot_scripts/debian-archive-wheezy-stable.gpg b/build/linux/sysroot_scripts/debian-archive-wheezy-stable.gpg
new file mode 100644
index 00000000000..d587901e440
Binary files /dev/null and b/build/linux/sysroot_scripts/debian-archive-wheezy-stable.gpg differ
diff --git a/build/linux/sysroot_scripts/install-sysroot.py b/build/linux/sysroot_scripts/install-sysroot.py
new file mode 100644
index 00000000000..9622176bf9b
--- /dev/null
+++ b/build/linux/sysroot_scripts/install-sysroot.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install Debian sysroots for building Chromium.
+"""
+
+# The sysroot is needed to ensure that binaries will run on Debian Wheezy,
+# the oldest supported Linux distribution. For ARM64 Linux we use a Debian
+# Jessie sysroot, as Jessie is the first version with ARM64 support. This
+# script can be run manually but is more often run as part of gclient hooks.
+# When run from hooks this script is a no-op on non-linux platforms.
+
+# The sysroot image could be constructed from scratch based on the current
+# state of Debian Wheezy/Jessie, but for consistency we currently use a
+# pre-built root image. The image will normally need to be rebuilt every time
+# chrome's build dependencies are changed.
+
+import hashlib
+import platform
+import optparse
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(os.path.dirname(os.path.dirname(SCRIPT_DIR)))
+import detect_host_arch
+import gyp_chromium
+import gyp_environment
+
+
+URL_PREFIX = 'https://commondatastorage.googleapis.com'
+URL_PATH = 'chrome-linux-sysroot/toolchain'
+REVISION_AMD64 = '24f935a3d8cdfcdfbabd23928a42304b1ffc52ba'
+REVISION_ARM = '24f935a3d8cdfcdfbabd23928a42304b1ffc52ba'
+REVISION_ARM64 = '24f935a3d8cdfcdfbabd23928a42304b1ffc52ba'
+REVISION_I386 = '24f935a3d8cdfcdfbabd23928a42304b1ffc52ba'
+REVISION_MIPS = '24f935a3d8cdfcdfbabd23928a42304b1ffc52ba'
+TARBALL_AMD64 = 'debian_wheezy_amd64_sysroot.tgz'
+TARBALL_ARM = 'debian_wheezy_arm_sysroot.tgz'
+TARBALL_ARM64 = 'debian_jessie_arm64_sysroot.tgz'
+TARBALL_I386 = 'debian_wheezy_i386_sysroot.tgz'
+TARBALL_MIPS = 'debian_wheezy_mips_sysroot.tgz'
+TARBALL_AMD64_SHA1SUM = 'a7f3df28b02799fbd7675c2ab24f1924c104c0ee'
+TARBALL_ARM_SHA1SUM = '2df01b8173a363977daf04e176b8c7dba5b0b933'
+TARBALL_ARM64_SHA1SUM = 'df9270e00c258e6cd80f8172b1bfa39aafc4756f'
+TARBALL_I386_SHA1SUM = 'e2c7131fa5f711de28c37fd9442e77d32abfb3ff'
+TARBALL_MIPS_SHA1SUM = '22fe7b45b144691aeb515083025f0fceb131d724'
+SYSROOT_DIR_AMD64 = 'debian_wheezy_amd64-sysroot'
+SYSROOT_DIR_ARM = 'debian_wheezy_arm-sysroot'
+SYSROOT_DIR_ARM64 = 'debian_jessie_arm64-sysroot'
+SYSROOT_DIR_I386 = 'debian_wheezy_i386-sysroot'
+SYSROOT_DIR_MIPS = 'debian_wheezy_mips-sysroot'
+
+valid_archs = ('arm', 'arm64', 'i386', 'amd64', 'mips')
+
+
+class Error(Exception):
+  pass
+
+
+def GetSha1(filename):
+  sha1 = hashlib.sha1()
+  with open(filename, 'rb') as f:
+    while True:
+      # Read in 1 MB chunks so the whole file isn't loaded into memory.
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      sha1.update(chunk)
+  return sha1.hexdigest()
+
+
+def DetectHostArch():
+  # Figure out the host arch using build/detect_host_arch.py and map it to
+  # the corresponding sysroot arch name.
+  detected_host_arch = detect_host_arch.HostArch()
+  if detected_host_arch == 'x64':
+    return 'amd64'
+  elif detected_host_arch == 'ia32':
+    return 'i386'
+  elif detected_host_arch == 'arm':
+    return 'arm'
+  elif detected_host_arch == 'mips':
+    return 'mips'
+
+  raise Error('Unrecognized host arch: %s' % detected_host_arch)
+
+
+def DetectTargetArch():
+  """Attempts to determine the target architecture.
+
+  This works by looking for target_arch in GYP_DEFINES.
+  """
+  # TODO(agrieve): Make this script not depend on GYP_DEFINES so that it works
+  # with GN as well.
+  gyp_environment.SetEnvironment()
+  supplemental_includes = gyp_chromium.GetSupplementalFiles()
+  gyp_defines = gyp_chromium.GetGypVars(supplemental_includes)
+  target_arch = gyp_defines.get('target_arch')
+  if target_arch == 'x64':
+    return 'amd64'
+  elif target_arch == 'ia32':
+    return 'i386'
+  elif target_arch == 'arm':
+    return 'arm'
+  elif target_arch == 'arm64':
+    return 'arm64'
+  elif target_arch == 'mipsel':
+    return 'mips'
+
+  return None
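+
+# Editorial note (sketch): with the constants above, InstallSysroot() below
+# assembles the amd64 download URL as
+#   '%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, REVISION_AMD64, TARBALL_AMD64)
+# i.e. https://commondatastorage.googleapis.com/chrome-linux-sysroot/toolchain/
+#      24f935a3d8cdfcdfbabd23928a42304b1ffc52ba/debian_wheezy_amd64_sysroot.tgz
+# and compares GetSha1(tarball) against TARBALL_AMD64_SHA1SUM before unpacking.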
+
+
+def InstallDefaultSysroots():
+  """Install the default set of sysroot images.
+
+  This includes at least the sysroot for the host architecture, and the 32-bit
+  sysroot for building the v8 snapshot image. It can also include the cross
+  compile sysroot for ARM/MIPS if a cross-compiling environment can be
+  detected.
+  """
+  host_arch = DetectHostArch()
+  InstallSysroot(host_arch)
+
+  if host_arch == 'amd64':
+    InstallSysroot('i386')
+
+  # Finally, if we can detect a non-standard target_arch such as ARM or
+  # MIPS, then install that sysroot too.
+  # Don't attempt to install arm64 since this is currently an Android-only
+  # architecture.
+  target_arch = DetectTargetArch()
+  if target_arch and target_arch not in (host_arch, 'i386'):
+    InstallSysroot(target_arch)
+
+
+def main(args):
+  parser = optparse.OptionParser('usage: %prog [OPTIONS]', description=__doc__)
+  parser.add_option('--running-as-hook', action='store_true',
+                    default=False, help='Used when running from gclient hooks.'
+                                        ' Installs default sysroot images.')
+  parser.add_option('--arch', type='choice', choices=valid_archs,
+                    help='Sysroot architecture: %s' % ', '.join(valid_archs))
+  options, _ = parser.parse_args(args)
+  if options.running_as_hook and not sys.platform.startswith('linux'):
+    return 0
+
+  if options.running_as_hook:
+    InstallDefaultSysroots()
+  else:
+    if not options.arch:
+      print 'You must specify either --arch or --running-as-hook'
+      return 1
+    InstallSysroot(options.arch)
+
+  return 0
+
+
+def InstallSysroot(target_arch):
+  # The sysroot directory should match the one specified in build/common.gypi.
+  # TODO(thestig) Consider putting this elsewhere to avoid having to recreate
+  # it on every build.
+  linux_dir = os.path.dirname(SCRIPT_DIR)
+  debian_release = 'Wheezy'
+  if target_arch == 'amd64':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_AMD64)
+    tarball_filename = TARBALL_AMD64
+    tarball_sha1sum = TARBALL_AMD64_SHA1SUM
+    revision = REVISION_AMD64
+  elif target_arch == 'arm':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_ARM)
+    tarball_filename = TARBALL_ARM
+    tarball_sha1sum = TARBALL_ARM_SHA1SUM
+    revision = REVISION_ARM
+  elif target_arch == 'arm64':
+    debian_release = 'Jessie'
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_ARM64)
+    tarball_filename = TARBALL_ARM64
+    tarball_sha1sum = TARBALL_ARM64_SHA1SUM
+    revision = REVISION_ARM64
+  elif target_arch == 'i386':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_I386)
+    tarball_filename = TARBALL_I386
+    tarball_sha1sum = TARBALL_I386_SHA1SUM
+    revision = REVISION_I386
+  elif target_arch == 'mips':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_MIPS)
+    tarball_filename = TARBALL_MIPS
+    tarball_sha1sum = TARBALL_MIPS_SHA1SUM
+    revision = REVISION_MIPS
+  else:
+    raise Error('Unknown architecture: %s' % target_arch)
+
+  url = '%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, revision, tarball_filename)
+
+  stamp = os.path.join(sysroot, '.stamp')
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == url:
+        print 'Debian %s %s root image already up to date: %s' % \
+            (debian_release, target_arch, sysroot)
+        return
+
+  print 'Installing Debian %s %s root image: %s' % \
+      (debian_release, target_arch, sysroot)
+  if os.path.isdir(sysroot):
+    shutil.rmtree(sysroot)
+  os.mkdir(sysroot)
+  tarball = os.path.join(sysroot, tarball_filename)
+  print 'Downloading %s' % url
+  sys.stdout.flush()
+  sys.stderr.flush()
+  subprocess.check_call(
+      ['curl', '--fail', '--retry', '3', '-L', url, '-o', tarball])
+  sha1sum = GetSha1(tarball)
+  if sha1sum != tarball_sha1sum:
+    raise Error('Tarball sha1sum is wrong. '
+                'Expected %s, actual: %s' % (tarball_sha1sum, sha1sum))
+  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
+  os.remove(tarball)
+
+  with open(stamp, 'w') as s:
+    s.write(url)
+
+
+if __name__ == '__main__':
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except Error as e:
+    sys.stderr.write(str(e) + '\n')
+    sys.exit(1)
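For context, this script is normally driven by a gclient hook passing --running-as-hook. The entry below is an illustrative sketch of such a hook; the name and path are invented, not copied from Chromium's DEPS file.

    # Hypothetical gclient DEPS hook entry:
    {
      'name': 'sysroot',
      'pattern': '.',
      'action': ['python', 'src/build/linux/sysroot_scripts/install-sysroot.py',
                 '--running-as-hook'],
    }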
diff --git a/build/linux/sysroot_scripts/merge-package-lists.py b/build/linux/sysroot_scripts/merge-package-lists.py
new file mode 100644
index 00000000000..caad144ab8b
--- /dev/null
+++ b/build/linux/sysroot_scripts/merge-package-lists.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merge package entries from different package lists.
+"""
+
+# This is used for replacing packages in e.g. trusty with those in
+# trusty-updates. The updated packages are ABI compatible, but
+# include security patches, so we should use those instead in our
+# sysroots.
+
+import sys
+
+if len(sys.argv) != 2:
+  exit(1)
+
+packages = {}
+
+def AddPackagesFromFile(file):
+  global packages
+  lines = file.readlines()
+  if len(lines) % 3 != 0:
+    exit(1)
+  for i in xrange(0, len(lines), 3):
+    packages[lines[i]] = (lines[i + 1], lines[i + 2])
+
+AddPackagesFromFile(open(sys.argv[1], 'r'))
+AddPackagesFromFile(sys.stdin)
+
+output_file = open(sys.argv[1], 'w')
+
+for (package, (filename, sha256)) in packages.iteritems():
+  output_file.write(package + filename + sha256)
diff --git a/build/linux/sysroot_scripts/packagelist.jessie.amd64 b/build/linux/sysroot_scripts/packagelist.jessie.amd64
new file mode 100644
index 00000000000..f54fc2ee7bc
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.jessie.amd64
@@ -0,0 +1,185 @@
+main/a/alsa-lib/libasound2_1.0.28-1_amd64.deb
+main/a/alsa-lib/libasound2-dev_1.0.28-1_amd64.deb
+main/a/atk1.0/libatk1.0-0_2.14.0-1_amd64.deb
+main/a/atk1.0/libatk1.0-dev_2.14.0-1_amd64.deb
+main/a/attr/libattr1_2.4.47-2_amd64.deb
+main/a/avahi/libavahi-client3_0.6.31-5_amd64.deb
+main/a/avahi/libavahi-common3_0.6.31-5_amd64.deb
+main/b/bluez/libbluetooth3_5.23-2+b1_amd64.deb
+main/b/bluez/libbluetooth-dev_5.23-2+b1_amd64.deb
+main/b/brltty/libbrlapi0.6_5.2~20141018-5_amd64.deb
+main/b/brltty/libbrlapi-dev_5.2~20141018-5_amd64.deb
+main/c/cairo/libcairo2_1.14.0-2.1+deb8u1_amd64.deb
+main/c/cairo/libcairo2-dev_1.14.0-2.1+deb8u1_amd64.deb
+main/c/cairo/libcairo-gobject2_1.14.0-2.1+deb8u1_amd64.deb
+main/c/cairo/libcairo-script-interpreter2_1.14.0-2.1+deb8u1_amd64.deb
+main/c/cups/libcups2_1.7.5-11+deb8u1_amd64.deb
+main/c/cups/libcups2-dev_1.7.5-11+deb8u1_amd64.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.102-1_amd64.deb
+main/d/dbus/libdbus-1-3_1.8.20-0+deb8u1_amd64.deb
+main/d/dbus/libdbus-1-dev_1.8.20-0+deb8u1_amd64.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.12-1.1_amd64.deb
+main/e/e2fsprogs/libcomerr2_1.42.12-1.1_amd64.deb
+main/e/elfutils/libelf1_0.159-4.2_amd64.deb
+main/e/elfutils/libelf-dev_0.159-4.2_amd64.deb
+main/e/expat/libexpat1_2.1.0-6+deb8u2_amd64.deb
+main/e/expat/libexpat1-dev_2.1.0-6+deb8u2_amd64.deb
+main/f/fontconfig/libfontconfig1_2.11.0-6.3_amd64.deb
+main/f/fontconfig/libfontconfig1-dev_2.11.0-6.3_amd64.deb
+main/f/freetype/libfreetype6_2.5.2-3+deb8u1_amd64.deb
+main/f/freetype/libfreetype6-dev_2.5.2-3+deb8u1_amd64.deb
+main/g/gcc-4.8/libasan0_4.8.4-1_amd64.deb
+main/g/gcc-4.8/libgcc-4.8-dev_4.8.4-1_amd64.deb
+main/g/gcc-4.8/libstdc++-4.8-dev_4.8.4-1_amd64.deb
+main/g/gcc-4.9/libatomic1_4.9.2-10_amd64.deb +main/g/gcc-4.9/libcilkrts5_4.9.2-10_amd64.deb +main/g/gcc-4.9/libgcc1_4.9.2-10_amd64.deb +main/g/gcc-4.9/libgomp1_4.9.2-10_amd64.deb +main/g/gcc-4.9/libitm1_4.9.2-10_amd64.deb +main/g/gcc-4.9/liblsan0_4.9.2-10_amd64.deb +main/g/gcc-4.9/libquadmath0_4.9.2-10_amd64.deb +main/g/gcc-4.9/libstdc++6_4.9.2-10_amd64.deb +main/g/gcc-4.9/libtsan0_4.9.2-10_amd64.deb +main/g/gcc-4.9/libubsan0_4.9.2-10_amd64.deb +main/g/gconf/libgconf2-4_3.2.6-3_amd64.deb +main/g/gconf/libgconf-2-4_3.2.6-3_amd64.deb +main/g/gconf/libgconf2-dev_3.2.6-3_amd64.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.31.1-2+deb8u4_amd64.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.31.1-2+deb8u4_amd64.deb +main/g/glib2.0/libglib2.0-0_2.42.1-1+b1_amd64.deb +main/g/glib2.0/libglib2.0-dev_2.42.1-1+b1_amd64.deb +main/g/glibc/libc6_2.19-18+deb8u4_amd64.deb +main/g/glibc/libc6-dev_2.19-18+deb8u4_amd64.deb +main/g/gnutls28/libgnutls28-dev_3.3.8-6+deb8u3_amd64.deb +main/g/gnutls28/libgnutls-deb0-28_3.3.8-6+deb8u3_amd64.deb +main/g/gnutls28/libgnutls-openssl27_3.3.8-6+deb8u3_amd64.deb +main/g/gnutls28/libgnutlsxx28_3.3.8-6+deb8u3_amd64.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.25-3+deb8u1_amd64.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.25-3+deb8u1_amd64.deb +main/h/harfbuzz/libharfbuzz0b_0.9.35-2_amd64.deb +main/h/harfbuzz/libharfbuzz-dev_0.9.35-2_amd64.deb +main/h/harfbuzz/libharfbuzz-gobject0_0.9.35-2_amd64.deb +main/h/harfbuzz/libharfbuzz-icu0_0.9.35-2_amd64.deb +main/k/keyutils/libkeyutils1_1.5.9-5+b1_amd64.deb +main/k/krb5/krb5-multidev_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libgssapi-krb5-2_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libgssrpc4_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libk5crypto3_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libkadm5clnt-mit9_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libkadm5srv-mit9_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libkdb5-7_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libkrb5-3_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libkrb5-dev_1.12.1+dfsg-19+deb8u2_amd64.deb +main/k/krb5/libkrb5support0_1.12.1+dfsg-19+deb8u2_amd64.deb +main/libc/libcap2/libcap2_2.24-8_amd64.deb +main/libc/libcap2/libcap-dev_2.24-8_amd64.deb +main/libd/libdrm/libdrm2_2.4.58-2_amd64.deb +main/libd/libdrm/libdrm-dev_2.4.58-2_amd64.deb +main/libd/libdrm/libdrm-intel1_2.4.58-2_amd64.deb +main/libd/libdrm/libdrm-nouveau2_2.4.58-2_amd64.deb +main/libd/libdrm/libdrm-radeon1_2.4.58-2_amd64.deb +main/libf/libffi/libffi6_3.1-2+b2_amd64.deb +main/libf/libffi/libffi-dev_3.1-2+b2_amd64.deb +main/libg/libgcrypt20/libgcrypt20_1.6.3-2+deb8u1_amd64.deb +main/libg/libgcrypt20/libgcrypt20-dev_1.6.3-2+deb8u1_amd64.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b1_amd64.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b1_amd64.deb +main/libg/libgpg-error/libgpg-error0_1.17-3_amd64.deb +main/libg/libgpg-error/libgpg-error-dev_1.17-3_amd64.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-5+b3_amd64.deb +main/libp/libp11/libp11-2_0.2.8-5_amd64.deb +main/libp/libpng/libpng12-0_1.2.50-2+deb8u2_amd64.deb +main/libp/libpng/libpng12-dev_1.2.50-2+deb8u2_amd64.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_amd64.deb +main/libs/libselinux/libselinux1_2.3-2_amd64.deb +main/libt/libtasn1-6/libtasn1-6_4.2-3+deb8u2_amd64.deb +main/libx/libx11/libx11-6_1.6.2-3_amd64.deb +main/libx/libx11/libx11-dev_1.6.2-3_amd64.deb +main/libx/libx11/libx11-xcb1_1.6.2-3_amd64.deb +main/libx/libx11/libx11-xcb-dev_1.6.2-3_amd64.deb +main/libx/libxau/libxau6_1.0.8-1_amd64.deb 
+main/libx/libxau/libxau-dev_1.0.8-1_amd64.deb +main/libx/libxcb/libxcb1_1.10-3+b1_amd64.deb +main/libx/libxcb/libxcb1-dev_1.10-3+b1_amd64.deb +main/libx/libxcb/libxcb-glx0_1.10-3+b1_amd64.deb +main/libx/libxcb/libxcb-render0_1.10-3+b1_amd64.deb +main/libx/libxcb/libxcb-render0-dev_1.10-3+b1_amd64.deb +main/libx/libxcb/libxcb-shm0_1.10-3+b1_amd64.deb +main/libx/libxcb/libxcb-shm0-dev_1.10-3+b1_amd64.deb +main/libx/libxcomposite/libxcomposite1_0.4.4-1_amd64.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.4-1_amd64.deb +main/libx/libxcursor/libxcursor1_1.1.14-1+b1_amd64.deb +main/libx/libxcursor/libxcursor-dev_1.1.14-1+b1_amd64.deb +main/libx/libxdamage/libxdamage1_1.1.4-2+b1_amd64.deb +main/libx/libxdamage/libxdamage-dev_1.1.4-2+b1_amd64.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1+b1_amd64.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1+b1_amd64.deb +main/libx/libxext/libxext6_1.3.3-1_amd64.deb +main/libx/libxext/libxext-dev_1.3.3-1_amd64.deb +main/libx/libxfixes/libxfixes3_5.0.1-2+b2_amd64.deb +main/libx/libxfixes/libxfixes-dev_5.0.1-2+b2_amd64.deb +main/libx/libxi/libxi6_1.7.4-1+b2_amd64.deb +main/libx/libxi/libxi-dev_1.7.4-1+b2_amd64.deb +main/libx/libxinerama/libxinerama1_1.1.3-1+b1_amd64.deb +main/libx/libxinerama/libxinerama-dev_1.1.3-1+b1_amd64.deb +main/libx/libxrandr/libxrandr2_1.4.2-1+b1_amd64.deb +main/libx/libxrandr/libxrandr-dev_1.4.2-1+b1_amd64.deb +main/libx/libxrender/libxrender1_0.9.8-1+b1_amd64.deb +main/libx/libxrender/libxrender-dev_0.9.8-1+b1_amd64.deb +main/libx/libxss/libxss1_1.2.2-1_amd64.deb +main/libx/libxss/libxss-dev_1.2.2-1_amd64.deb +main/libx/libxt/libxt6_1.1.4-1+b1_amd64.deb +main/libx/libxt/libxt-dev_1.1.4-1+b1_amd64.deb +main/libx/libxtst/libxtst6_1.2.2-1+b1_amd64.deb +main/libx/libxtst/libxtst-dev_1.2.2-1+b1_amd64.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.3-1+b1_amd64.deb +main/l/linux/linux-libc-dev_3.16.7-ckt25-2_amd64.deb +main/m/mesa/libgl1-mesa-dev_10.3.2-1+deb8u1_amd64.deb +main/m/mesa/libgl1-mesa-glx_10.3.2-1+deb8u1_amd64.deb +main/m/mesa/libglapi-mesa_10.3.2-1+deb8u1_amd64.deb +main/m/mesa/mesa-common-dev_10.3.2-1+deb8u1_amd64.deb +main/n/nspr/libnspr4_4.10.7-1+deb8u1_amd64.deb +main/n/nspr/libnspr4-dev_4.10.7-1+deb8u1_amd64.deb +main/n/nss/libnss3_3.17.2-1.1+deb8u2_amd64.deb +main/n/nss/libnss3-dev_3.17.2-1.1+deb8u2_amd64.deb +main/o/openssl/libssl1.0.0_1.0.1t-1+deb8u2_amd64.deb +main/o/openssl/libssl-dev_1.0.1t-1+deb8u2_amd64.deb +main/o/orbit2/liborbit2_2.14.19-0.3_amd64.deb +main/p/p11-kit/libp11-kit0_0.20.7-1_amd64.deb +main/p/pam/libpam0g_1.1.8-3.1+deb8u1+b1_amd64.deb +main/p/pam/libpam0g-dev_1.1.8-3.1+deb8u1+b1_amd64.deb +main/p/pango1.0/libpango-1.0-0_1.36.8-3_amd64.deb +main/p/pango1.0/libpango1.0-dev_1.36.8-3_amd64.deb +main/p/pango1.0/libpangocairo-1.0-0_1.36.8-3_amd64.deb +main/p/pango1.0/libpangoft2-1.0-0_1.36.8-3_amd64.deb +main/p/pango1.0/libpangoxft-1.0-0_1.36.8-3_amd64.deb +main/p/pangox-compat/libpangox-1.0-0_0.0.2-5_amd64.deb +main/p/pciutils/libpci3_3.2.1-3_amd64.deb +main/p/pciutils/libpci-dev_3.2.1-3_amd64.deb +main/p/pcre3/libpcre3_8.35-3.3+deb8u4_amd64.deb +main/p/pcre3/libpcre3-dev_8.35-3.3+deb8u4_amd64.deb +main/p/pcre3/libpcrecpp0_8.35-3.3+deb8u4_amd64.deb +main/p/pixman/libpixman-1-0_0.32.6-3_amd64.deb +main/p/pixman/libpixman-1-dev_0.32.6-3_amd64.deb +main/p/pulseaudio/libpulse0_5.0-13_amd64.deb +main/p/pulseaudio/libpulse-dev_5.0-13_amd64.deb +main/p/pulseaudio/libpulse-mainloop-glib0_5.0-13_amd64.deb +main/s/speech-dispatcher/libspeechd2_0.8-7_amd64.deb +main/s/speech-dispatcher/libspeechd-dev_0.8-7_amd64.deb 
+main/s/speech-dispatcher/speech-dispatcher_0.8-7_amd64.deb +main/s/systemd/libudev1_215-17+deb8u4_amd64.deb +main/s/systemd/libudev-dev_215-17+deb8u4_amd64.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.26-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.3.1-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.4.0-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.8.dfsg-2+b1_amd64.deb +main/z/zlib/zlib1g-dev_1.2.8.dfsg-2+b1_amd64.deb diff --git a/build/linux/sysroot_scripts/packagelist.jessie.arm b/build/linux/sysroot_scripts/packagelist.jessie.arm new file mode 100644 index 00000000000..cc88fe4e74f --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.jessie.arm @@ -0,0 +1,182 @@ +main/a/alsa-lib/libasound2_1.0.28-1_armhf.deb +main/a/alsa-lib/libasound2-dev_1.0.28-1_armhf.deb +main/a/atk1.0/libatk1.0-0_2.14.0-1_armhf.deb +main/a/atk1.0/libatk1.0-dev_2.14.0-1_armhf.deb +main/a/attr/libattr1_2.4.47-2_armhf.deb +main/a/avahi/libavahi-client3_0.6.31-5_armhf.deb +main/a/avahi/libavahi-common3_0.6.31-5_armhf.deb +main/b/bluez/libbluetooth3_5.23-2+b1_armhf.deb +main/b/bluez/libbluetooth-dev_5.23-2+b1_armhf.deb +main/b/brltty/libbrlapi0.6_5.2~20141018-5_armhf.deb +main/b/brltty/libbrlapi-dev_5.2~20141018-5_armhf.deb +main/c/cairo/libcairo2_1.14.0-2.1+deb8u1_armhf.deb +main/c/cairo/libcairo2-dev_1.14.0-2.1+deb8u1_armhf.deb +main/c/cairo/libcairo-gobject2_1.14.0-2.1+deb8u1_armhf.deb +main/c/cairo/libcairo-script-interpreter2_1.14.0-2.1+deb8u1_armhf.deb +main/c/cups/libcups2_1.7.5-11+deb8u1_armhf.deb +main/c/cups/libcups2-dev_1.7.5-11+deb8u1_armhf.deb +main/d/dbus-glib/libdbus-glib-1-2_0.102-1_armhf.deb +main/d/dbus/libdbus-1-3_1.8.20-0+deb8u1_armhf.deb +main/d/dbus/libdbus-1-dev_1.8.20-0+deb8u1_armhf.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.12-1.1_armhf.deb +main/e/e2fsprogs/libcomerr2_1.42.12-1.1_armhf.deb +main/e/elfutils/libelf1_0.159-4.2_armhf.deb +main/e/elfutils/libelf-dev_0.159-4.2_armhf.deb +main/e/expat/libexpat1_2.1.0-6+deb8u2_armhf.deb +main/e/expat/libexpat1-dev_2.1.0-6+deb8u2_armhf.deb +main/f/fontconfig/libfontconfig1_2.11.0-6.3_armhf.deb +main/f/fontconfig/libfontconfig1-dev_2.11.0-6.3_armhf.deb +main/f/freetype/libfreetype6_2.5.2-3+deb8u1_armhf.deb +main/f/freetype/libfreetype6-dev_2.5.2-3+deb8u1_armhf.deb +main/g/gcc-4.8/libasan0_4.8.4-1_armhf.deb +main/g/gcc-4.8/libgcc-4.8-dev_4.8.4-1_armhf.deb +main/g/gcc-4.8/libstdc++-4.8-dev_4.8.4-1_armhf.deb +main/g/gcc-4.9/libatomic1_4.9.2-10_armhf.deb +main/g/gcc-4.9/libgcc1_4.9.2-10_armhf.deb +main/g/gcc-4.9/libgomp1_4.9.2-10_armhf.deb +main/g/gcc-4.9/libstdc++6_4.9.2-10_armhf.deb +main/g/gcc-4.9/libubsan0_4.9.2-10_armhf.deb +main/g/gconf/libgconf2-4_3.2.6-3_armhf.deb +main/g/gconf/libgconf-2-4_3.2.6-3_armhf.deb +main/g/gconf/libgconf2-dev_3.2.6-3_armhf.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.31.1-2+deb8u4_armhf.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.31.1-2+deb8u4_armhf.deb +main/g/glib2.0/libglib2.0-0_2.42.1-1+b1_armhf.deb +main/g/glib2.0/libglib2.0-dev_2.42.1-1+b1_armhf.deb 
+main/g/glibc/libc6_2.19-18+deb8u4_armhf.deb +main/g/glibc/libc6-dev_2.19-18+deb8u4_armhf.deb +main/g/gnutls28/libgnutls28-dev_3.3.8-6+deb8u3_armhf.deb +main/g/gnutls28/libgnutls-deb0-28_3.3.8-6+deb8u3_armhf.deb +main/g/gnutls28/libgnutls-openssl27_3.3.8-6+deb8u3_armhf.deb +main/g/gnutls28/libgnutlsxx28_3.3.8-6+deb8u3_armhf.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.25-3+deb8u1_armhf.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.25-3+deb8u1_armhf.deb +main/h/harfbuzz/libharfbuzz0b_0.9.35-2_armhf.deb +main/h/harfbuzz/libharfbuzz-dev_0.9.35-2_armhf.deb +main/h/harfbuzz/libharfbuzz-gobject0_0.9.35-2_armhf.deb +main/h/harfbuzz/libharfbuzz-icu0_0.9.35-2_armhf.deb +main/k/keyutils/libkeyutils1_1.5.9-5+b1_armhf.deb +main/k/krb5/krb5-multidev_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libgssapi-krb5-2_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libgssrpc4_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libk5crypto3_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libkadm5clnt-mit9_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libkadm5srv-mit9_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libkdb5-7_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libkrb5-3_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libkrb5-dev_1.12.1+dfsg-19+deb8u2_armhf.deb +main/k/krb5/libkrb5support0_1.12.1+dfsg-19+deb8u2_armhf.deb +main/libc/libcap2/libcap2_2.24-8_armhf.deb +main/libc/libcap2/libcap-dev_2.24-8_armhf.deb +main/libd/libdrm/libdrm2_2.4.58-2_armhf.deb +main/libd/libdrm/libdrm-dev_2.4.58-2_armhf.deb +main/libd/libdrm/libdrm-exynos1_2.4.58-2_armhf.deb +main/libd/libdrm/libdrm-freedreno1_2.4.58-2_armhf.deb +main/libd/libdrm/libdrm-nouveau2_2.4.58-2_armhf.deb +main/libd/libdrm/libdrm-omap1_2.4.58-2_armhf.deb +main/libd/libdrm/libdrm-radeon1_2.4.58-2_armhf.deb +main/libf/libffi/libffi6_3.1-2+b2_armhf.deb +main/libf/libffi/libffi-dev_3.1-2+b2_armhf.deb +main/libg/libgcrypt20/libgcrypt20_1.6.3-2+deb8u1_armhf.deb +main/libg/libgcrypt20/libgcrypt20-dev_1.6.3-2+deb8u1_armhf.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b1_armhf.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b1_armhf.deb +main/libg/libgpg-error/libgpg-error0_1.17-3_armhf.deb +main/libg/libgpg-error/libgpg-error-dev_1.17-3_armhf.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-5+b3_armhf.deb +main/libp/libp11/libp11-2_0.2.8-5_armhf.deb +main/libp/libpng/libpng12-0_1.2.50-2+deb8u2_armhf.deb +main/libp/libpng/libpng12-dev_1.2.50-2+deb8u2_armhf.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_armhf.deb +main/libs/libselinux/libselinux1_2.3-2_armhf.deb +main/libt/libtasn1-6/libtasn1-6_4.2-3+deb8u2_armhf.deb +main/libx/libx11/libx11-6_1.6.2-3_armhf.deb +main/libx/libx11/libx11-dev_1.6.2-3_armhf.deb +main/libx/libx11/libx11-xcb1_1.6.2-3_armhf.deb +main/libx/libx11/libx11-xcb-dev_1.6.2-3_armhf.deb +main/libx/libxau/libxau6_1.0.8-1_armhf.deb +main/libx/libxau/libxau-dev_1.0.8-1_armhf.deb +main/libx/libxcb/libxcb1_1.10-3+b1_armhf.deb +main/libx/libxcb/libxcb1-dev_1.10-3+b1_armhf.deb +main/libx/libxcb/libxcb-glx0_1.10-3+b1_armhf.deb +main/libx/libxcb/libxcb-render0_1.10-3+b1_armhf.deb +main/libx/libxcb/libxcb-render0-dev_1.10-3+b1_armhf.deb +main/libx/libxcb/libxcb-shm0_1.10-3+b1_armhf.deb +main/libx/libxcb/libxcb-shm0-dev_1.10-3+b1_armhf.deb +main/libx/libxcomposite/libxcomposite1_0.4.4-1_armhf.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.4-1_armhf.deb +main/libx/libxcursor/libxcursor1_1.1.14-1+b1_armhf.deb +main/libx/libxcursor/libxcursor-dev_1.1.14-1+b1_armhf.deb +main/libx/libxdamage/libxdamage1_1.1.4-2+b1_armhf.deb 
+main/libx/libxdamage/libxdamage-dev_1.1.4-2+b1_armhf.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1+b1_armhf.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1+b1_armhf.deb +main/libx/libxext/libxext6_1.3.3-1_armhf.deb +main/libx/libxext/libxext-dev_1.3.3-1_armhf.deb +main/libx/libxfixes/libxfixes3_5.0.1-2+b2_armhf.deb +main/libx/libxfixes/libxfixes-dev_5.0.1-2+b2_armhf.deb +main/libx/libxi/libxi6_1.7.4-1+b2_armhf.deb +main/libx/libxi/libxi-dev_1.7.4-1+b2_armhf.deb +main/libx/libxinerama/libxinerama1_1.1.3-1+b1_armhf.deb +main/libx/libxinerama/libxinerama-dev_1.1.3-1+b1_armhf.deb +main/libx/libxrandr/libxrandr2_1.4.2-1+b1_armhf.deb +main/libx/libxrandr/libxrandr-dev_1.4.2-1+b1_armhf.deb +main/libx/libxrender/libxrender1_0.9.8-1+b1_armhf.deb +main/libx/libxrender/libxrender-dev_0.9.8-1+b1_armhf.deb +main/libx/libxss/libxss1_1.2.2-1_armhf.deb +main/libx/libxss/libxss-dev_1.2.2-1_armhf.deb +main/libx/libxt/libxt6_1.1.4-1+b1_armhf.deb +main/libx/libxt/libxt-dev_1.1.4-1+b1_armhf.deb +main/libx/libxtst/libxtst6_1.2.2-1+b1_armhf.deb +main/libx/libxtst/libxtst-dev_1.2.2-1+b1_armhf.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.3-1+b1_armhf.deb +main/l/linux/linux-libc-dev_3.16.7-ckt25-2_armhf.deb +main/m/mesa/libgl1-mesa-dev_10.3.2-1+deb8u1_armhf.deb +main/m/mesa/libgl1-mesa-glx_10.3.2-1+deb8u1_armhf.deb +main/m/mesa/libglapi-mesa_10.3.2-1+deb8u1_armhf.deb +main/m/mesa/mesa-common-dev_10.3.2-1+deb8u1_armhf.deb +main/n/nspr/libnspr4_4.10.7-1+deb8u1_armhf.deb +main/n/nspr/libnspr4-dev_4.10.7-1+deb8u1_armhf.deb +main/n/nss/libnss3_3.17.2-1.1+deb8u2_armhf.deb +main/n/nss/libnss3-dev_3.17.2-1.1+deb8u2_armhf.deb +main/o/openssl/libssl1.0.0_1.0.1t-1+deb8u2_armhf.deb +main/o/openssl/libssl-dev_1.0.1t-1+deb8u2_armhf.deb +main/o/orbit2/liborbit2_2.14.19-0.3_armhf.deb +main/p/p11-kit/libp11-kit0_0.20.7-1_armhf.deb +main/p/pam/libpam0g_1.1.8-3.1+deb8u1+b1_armhf.deb +main/p/pam/libpam0g-dev_1.1.8-3.1+deb8u1+b1_armhf.deb +main/p/pango1.0/libpango-1.0-0_1.36.8-3_armhf.deb +main/p/pango1.0/libpango1.0-dev_1.36.8-3_armhf.deb +main/p/pango1.0/libpangocairo-1.0-0_1.36.8-3_armhf.deb +main/p/pango1.0/libpangoft2-1.0-0_1.36.8-3_armhf.deb +main/p/pango1.0/libpangoxft-1.0-0_1.36.8-3_armhf.deb +main/p/pangox-compat/libpangox-1.0-0_0.0.2-5_armhf.deb +main/p/pciutils/libpci3_3.2.1-3_armhf.deb +main/p/pciutils/libpci-dev_3.2.1-3_armhf.deb +main/p/pcre3/libpcre3_8.35-3.3+deb8u4_armhf.deb +main/p/pcre3/libpcre3-dev_8.35-3.3+deb8u4_armhf.deb +main/p/pcre3/libpcrecpp0_8.35-3.3+deb8u4_armhf.deb +main/p/pixman/libpixman-1-0_0.32.6-3_armhf.deb +main/p/pixman/libpixman-1-dev_0.32.6-3_armhf.deb +main/p/pulseaudio/libpulse0_5.0-13_armhf.deb +main/p/pulseaudio/libpulse-dev_5.0-13_armhf.deb +main/p/pulseaudio/libpulse-mainloop-glib0_5.0-13_armhf.deb +main/s/speech-dispatcher/libspeechd2_0.8-7_armhf.deb +main/s/speech-dispatcher/libspeechd-dev_0.8-7_armhf.deb +main/s/speech-dispatcher/speech-dispatcher_0.8-7_armhf.deb +main/s/systemd/libudev1_215-17+deb8u4_armhf.deb +main/s/systemd/libudev-dev_215-17+deb8u4_armhf.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.26-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.3.1-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.4.0-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb 
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.8.dfsg-2+b1_armhf.deb +main/z/zlib/zlib1g-dev_1.2.8.dfsg-2+b1_armhf.deb diff --git a/build/linux/sysroot_scripts/packagelist.jessie.arm64 b/build/linux/sysroot_scripts/packagelist.jessie.arm64 new file mode 100644 index 00000000000..f13eb474580 --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.jessie.arm64 @@ -0,0 +1,184 @@ +main/a/alsa-lib/libasound2_1.0.28-1_arm64.deb +main/a/alsa-lib/libasound2-dev_1.0.28-1_arm64.deb +main/a/atk1.0/libatk1.0-0_2.14.0-1_arm64.deb +main/a/atk1.0/libatk1.0-dev_2.14.0-1_arm64.deb +main/a/attr/libattr1_2.4.47-2_arm64.deb +main/a/avahi/libavahi-client3_0.6.31-5_arm64.deb +main/a/avahi/libavahi-common3_0.6.31-5_arm64.deb +main/b/bluez/libbluetooth3_5.23-2+b1_arm64.deb +main/b/bluez/libbluetooth-dev_5.23-2+b1_arm64.deb +main/b/brltty/libbrlapi0.6_5.2~20141018-5_arm64.deb +main/b/brltty/libbrlapi-dev_5.2~20141018-5_arm64.deb +main/c/cairo/libcairo2_1.14.0-2.1+deb8u1_arm64.deb +main/c/cairo/libcairo2-dev_1.14.0-2.1+deb8u1_arm64.deb +main/c/cairo/libcairo-gobject2_1.14.0-2.1+deb8u1_arm64.deb +main/c/cairo/libcairo-script-interpreter2_1.14.0-2.1+deb8u1_arm64.deb +main/c/cups/libcups2_1.7.5-11+deb8u1_arm64.deb +main/c/cups/libcups2-dev_1.7.5-11+deb8u1_arm64.deb +main/d/dbus-glib/libdbus-glib-1-2_0.102-1_arm64.deb +main/d/dbus/libdbus-1-3_1.8.20-0+deb8u1_arm64.deb +main/d/dbus/libdbus-1-dev_1.8.20-0+deb8u1_arm64.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.12-1.1_arm64.deb +main/e/e2fsprogs/libcomerr2_1.42.12-1.1_arm64.deb +main/e/elfutils/libelf1_0.159-4.2_arm64.deb +main/e/elfutils/libelf-dev_0.159-4.2_arm64.deb +main/e/expat/libexpat1_2.1.0-6+deb8u2_arm64.deb +main/e/expat/libexpat1-dev_2.1.0-6+deb8u2_arm64.deb +main/f/fontconfig/libfontconfig1_2.11.0-6.3_arm64.deb +main/f/fontconfig/libfontconfig1-dev_2.11.0-6.3_arm64.deb +main/f/freetype/libfreetype6_2.5.2-3+deb8u1_arm64.deb +main/f/freetype/libfreetype6-dev_2.5.2-3+deb8u1_arm64.deb +main/g/gcc-4.8/libgcc-4.8-dev_4.8.4-1_arm64.deb +main/g/gcc-4.8/libstdc++-4.8-dev_4.8.4-1_arm64.deb +main/g/gcc-4.9/libatomic1_4.9.2-10_arm64.deb +main/g/gcc-4.9/libgcc1_4.9.2-10_arm64.deb +main/g/gcc-4.9/libgomp1_4.9.2-10_arm64.deb +main/g/gcc-4.9/libitm1_4.9.2-10_arm64.deb +main/g/gcc-4.9/libstdc++6_4.9.2-10_arm64.deb +main/g/gconf/libgconf2-4_3.2.6-3_arm64.deb +main/g/gconf/libgconf-2-4_3.2.6-3_arm64.deb +main/g/gconf/libgconf2-dev_3.2.6-3_arm64.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.31.1-2+deb8u4_arm64.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.31.1-2+deb8u4_arm64.deb +main/g/glib2.0/libglib2.0-0_2.42.1-1+b1_arm64.deb +main/g/glib2.0/libglib2.0-dev_2.42.1-1+b1_arm64.deb +main/g/glibc/libc6_2.19-18+deb8u4_arm64.deb +main/g/glibc/libc6-dev_2.19-18+deb8u4_arm64.deb +main/g/gmp/libgmp10_6.0.0+dfsg-6_arm64.deb +main/g/gnutls28/libgnutls28-dev_3.3.8-6+deb8u3_arm64.deb +main/g/gnutls28/libgnutls-deb0-28_3.3.8-6+deb8u3_arm64.deb +main/g/gnutls28/libgnutls-openssl27_3.3.8-6+deb8u3_arm64.deb +main/g/gnutls28/libgnutlsxx28_3.3.8-6+deb8u3_arm64.deb +main/g/graphite2/libgraphite2-3_1.3.6-1~deb8u1_arm64.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.25-3+deb8u1_arm64.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.25-3+deb8u1_arm64.deb +main/h/harfbuzz/libharfbuzz0b_0.9.35-2_arm64.deb +main/h/harfbuzz/libharfbuzz-dev_0.9.35-2_arm64.deb +main/h/harfbuzz/libharfbuzz-gobject0_0.9.35-2_arm64.deb 
+main/h/harfbuzz/libharfbuzz-icu0_0.9.35-2_arm64.deb +main/k/keyutils/libkeyutils1_1.5.9-5+b1_arm64.deb +main/k/krb5/krb5-multidev_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libgssapi-krb5-2_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libgssrpc4_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libk5crypto3_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libkadm5clnt-mit9_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libkadm5srv-mit9_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libkdb5-7_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libkrb5-3_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libkrb5-dev_1.12.1+dfsg-19+deb8u2_arm64.deb +main/k/krb5/libkrb5support0_1.12.1+dfsg-19+deb8u2_arm64.deb +main/libc/libcap2/libcap2_2.24-8_arm64.deb +main/libc/libcap2/libcap-dev_2.24-8_arm64.deb +main/libd/libdatrie/libdatrie1_0.2.8-1_arm64.deb +main/libd/libdrm/libdrm2_2.4.58-2_arm64.deb +main/libd/libdrm/libdrm-dev_2.4.58-2_arm64.deb +main/libd/libdrm/libdrm-nouveau2_2.4.58-2_arm64.deb +main/libd/libdrm/libdrm-radeon1_2.4.58-2_arm64.deb +main/libf/libffi/libffi6_3.1-2+b2_arm64.deb +main/libf/libffi/libffi-dev_3.1-2+b2_arm64.deb +main/libg/libgcrypt20/libgcrypt20_1.6.3-2+deb8u1_arm64.deb +main/libg/libgcrypt20/libgcrypt20-dev_1.6.3-2+deb8u1_arm64.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b1_arm64.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b1_arm64.deb +main/libg/libgpg-error/libgpg-error0_1.17-3_arm64.deb +main/libg/libgpg-error/libgpg-error-dev_1.17-3_arm64.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-5_arm64.deb +main/libp/libp11/libp11-2_0.2.8-5_arm64.deb +main/libp/libpng/libpng12-0_1.2.50-2+deb8u2_arm64.deb +main/libp/libpng/libpng12-dev_1.2.50-2+deb8u2_arm64.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_arm64.deb +main/libs/libselinux/libselinux1_2.3-2_arm64.deb +main/libt/libtasn1-6/libtasn1-6_4.2-3+deb8u2_arm64.deb +main/libt/libthai/libthai0_0.1.21-1_arm64.deb +main/libx/libx11/libx11-6_1.6.2-3_arm64.deb +main/libx/libx11/libx11-dev_1.6.2-3_arm64.deb +main/libx/libx11/libx11-xcb1_1.6.2-3_arm64.deb +main/libx/libx11/libx11-xcb-dev_1.6.2-3_arm64.deb +main/libx/libxau/libxau6_1.0.8-1_arm64.deb +main/libx/libxau/libxau-dev_1.0.8-1_arm64.deb +main/libx/libxcb/libxcb1_1.10-3+b1_arm64.deb +main/libx/libxcb/libxcb1-dev_1.10-3+b1_arm64.deb +main/libx/libxcb/libxcb-glx0_1.10-3+b1_arm64.deb +main/libx/libxcb/libxcb-render0_1.10-3+b1_arm64.deb +main/libx/libxcb/libxcb-render0-dev_1.10-3+b1_arm64.deb +main/libx/libxcb/libxcb-shm0_1.10-3+b1_arm64.deb +main/libx/libxcb/libxcb-shm0-dev_1.10-3+b1_arm64.deb +main/libx/libxcomposite/libxcomposite1_0.4.4-1_arm64.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.4-1_arm64.deb +main/libx/libxcursor/libxcursor1_1.1.14-1+b1_arm64.deb +main/libx/libxcursor/libxcursor-dev_1.1.14-1+b1_arm64.deb +main/libx/libxdamage/libxdamage1_1.1.4-2+b1_arm64.deb +main/libx/libxdamage/libxdamage-dev_1.1.4-2+b1_arm64.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1+b1_arm64.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1+b1_arm64.deb +main/libx/libxext/libxext6_1.3.3-1_arm64.deb +main/libx/libxext/libxext-dev_1.3.3-1_arm64.deb +main/libx/libxfixes/libxfixes3_5.0.1-2+b2_arm64.deb +main/libx/libxfixes/libxfixes-dev_5.0.1-2+b2_arm64.deb +main/libx/libxi/libxi6_1.7.4-1+b2_arm64.deb +main/libx/libxi/libxi-dev_1.7.4-1+b2_arm64.deb +main/libx/libxinerama/libxinerama1_1.1.3-1+b1_arm64.deb +main/libx/libxinerama/libxinerama-dev_1.1.3-1+b1_arm64.deb +main/libx/libxrandr/libxrandr2_1.4.2-1+b1_arm64.deb +main/libx/libxrandr/libxrandr-dev_1.4.2-1+b1_arm64.deb 
+main/libx/libxrender/libxrender1_0.9.8-1+b1_arm64.deb +main/libx/libxrender/libxrender-dev_0.9.8-1+b1_arm64.deb +main/libx/libxss/libxss1_1.2.2-1_arm64.deb +main/libx/libxss/libxss-dev_1.2.2-1_arm64.deb +main/libx/libxt/libxt6_1.1.4-1+b1_arm64.deb +main/libx/libxt/libxt-dev_1.1.4-1+b1_arm64.deb +main/libx/libxtst/libxtst6_1.2.2-1+b1_arm64.deb +main/libx/libxtst/libxtst-dev_1.2.2-1+b1_arm64.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.3-1+b1_arm64.deb +main/l/linux/linux-libc-dev_3.16.7-ckt25-2_arm64.deb +main/m/mesa/libgl1-mesa-dev_10.3.2-1+deb8u1_arm64.deb +main/m/mesa/libgl1-mesa-glx_10.3.2-1+deb8u1_arm64.deb +main/m/mesa/libglapi-mesa_10.3.2-1+deb8u1_arm64.deb +main/m/mesa/mesa-common-dev_10.3.2-1+deb8u1_arm64.deb +main/n/nettle/libhogweed2_2.7.1-5+deb8u1_arm64.deb +main/n/nettle/libnettle4_2.7.1-5+deb8u1_arm64.deb +main/n/nspr/libnspr4_4.10.7-1+deb8u1_arm64.deb +main/n/nspr/libnspr4-dev_4.10.7-1+deb8u1_arm64.deb +main/n/nss/libnss3_3.17.2-1.1+deb8u2_arm64.deb +main/n/nss/libnss3-dev_3.17.2-1.1+deb8u2_arm64.deb +main/o/openssl/libssl1.0.0_1.0.1t-1+deb8u2_arm64.deb +main/o/openssl/libssl-dev_1.0.1t-1+deb8u2_arm64.deb +main/o/orbit2/liborbit2_2.14.19-0.3_arm64.deb +main/p/p11-kit/libp11-kit0_0.20.7-1_arm64.deb +main/p/pam/libpam0g_1.1.8-3.1+deb8u1+b1_arm64.deb +main/p/pam/libpam0g-dev_1.1.8-3.1+deb8u1+b1_arm64.deb +main/p/pango1.0/libpango-1.0-0_1.36.8-3_arm64.deb +main/p/pango1.0/libpango1.0-dev_1.36.8-3_arm64.deb +main/p/pango1.0/libpangocairo-1.0-0_1.36.8-3_arm64.deb +main/p/pango1.0/libpangoft2-1.0-0_1.36.8-3_arm64.deb +main/p/pango1.0/libpangoxft-1.0-0_1.36.8-3_arm64.deb +main/p/pangox-compat/libpangox-1.0-0_0.0.2-5_arm64.deb +main/p/pciutils/libpci3_3.2.1-3_arm64.deb +main/p/pciutils/libpci-dev_3.2.1-3_arm64.deb +main/p/pcre3/libpcre3_8.35-3.3+deb8u4_arm64.deb +main/p/pcre3/libpcre3-dev_8.35-3.3+deb8u4_arm64.deb +main/p/pcre3/libpcrecpp0_8.35-3.3+deb8u4_arm64.deb +main/p/pixman/libpixman-1-0_0.32.6-3_arm64.deb +main/p/pixman/libpixman-1-dev_0.32.6-3_arm64.deb +main/p/pulseaudio/libpulse0_5.0-13_arm64.deb +main/p/pulseaudio/libpulse-dev_5.0-13_arm64.deb +main/p/pulseaudio/libpulse-mainloop-glib0_5.0-13_arm64.deb +main/s/speech-dispatcher/libspeechd2_0.8-7_arm64.deb +main/s/speech-dispatcher/libspeechd-dev_0.8-7_arm64.deb +main/s/speech-dispatcher/speech-dispatcher_0.8-7_arm64.deb +main/s/systemd/libudev1_215-17+deb8u4_arm64.deb +main/s/systemd/libudev-dev_215-17+deb8u4_arm64.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.26-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.3.1-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.4.0-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.8.dfsg-2+b1_arm64.deb +main/z/zlib/zlib1g-dev_1.2.8.dfsg-2+b1_arm64.deb diff --git a/build/linux/sysroot_scripts/packagelist.jessie.i386 b/build/linux/sysroot_scripts/packagelist.jessie.i386 new file mode 100644 index 00000000000..a293f113df2 --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.jessie.i386 @@ -0,0 +1,183 @@ +main/a/alsa-lib/libasound2_1.0.28-1_i386.deb 
+main/a/alsa-lib/libasound2-dev_1.0.28-1_i386.deb +main/a/atk1.0/libatk1.0-0_2.14.0-1_i386.deb +main/a/atk1.0/libatk1.0-dev_2.14.0-1_i386.deb +main/a/attr/libattr1_2.4.47-2_i386.deb +main/a/avahi/libavahi-client3_0.6.31-5_i386.deb +main/a/avahi/libavahi-common3_0.6.31-5_i386.deb +main/b/bluez/libbluetooth3_5.23-2+b1_i386.deb +main/b/bluez/libbluetooth-dev_5.23-2+b1_i386.deb +main/b/brltty/libbrlapi0.6_5.2~20141018-5_i386.deb +main/b/brltty/libbrlapi-dev_5.2~20141018-5_i386.deb +main/c/cairo/libcairo2_1.14.0-2.1+deb8u1_i386.deb +main/c/cairo/libcairo2-dev_1.14.0-2.1+deb8u1_i386.deb +main/c/cairo/libcairo-gobject2_1.14.0-2.1+deb8u1_i386.deb +main/c/cairo/libcairo-script-interpreter2_1.14.0-2.1+deb8u1_i386.deb +main/c/cups/libcups2_1.7.5-11+deb8u1_i386.deb +main/c/cups/libcups2-dev_1.7.5-11+deb8u1_i386.deb +main/d/dbus-glib/libdbus-glib-1-2_0.102-1_i386.deb +main/d/dbus/libdbus-1-3_1.8.20-0+deb8u1_i386.deb +main/d/dbus/libdbus-1-dev_1.8.20-0+deb8u1_i386.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.12-1.1_i386.deb +main/e/e2fsprogs/libcomerr2_1.42.12-1.1_i386.deb +main/e/elfutils/libelf1_0.159-4.2_i386.deb +main/e/elfutils/libelf-dev_0.159-4.2_i386.deb +main/e/expat/libexpat1_2.1.0-6+deb8u2_i386.deb +main/e/expat/libexpat1-dev_2.1.0-6+deb8u2_i386.deb +main/f/fontconfig/libfontconfig1_2.11.0-6.3_i386.deb +main/f/fontconfig/libfontconfig1-dev_2.11.0-6.3_i386.deb +main/f/freetype/libfreetype6_2.5.2-3+deb8u1_i386.deb +main/f/freetype/libfreetype6-dev_2.5.2-3+deb8u1_i386.deb +main/g/gcc-4.8/libasan0_4.8.4-1_i386.deb +main/g/gcc-4.8/libgcc-4.8-dev_4.8.4-1_i386.deb +main/g/gcc-4.8/libstdc++-4.8-dev_4.8.4-1_i386.deb +main/g/gcc-4.9/libatomic1_4.9.2-10_i386.deb +main/g/gcc-4.9/libcilkrts5_4.9.2-10_i386.deb +main/g/gcc-4.9/libgcc1_4.9.2-10_i386.deb +main/g/gcc-4.9/libgomp1_4.9.2-10_i386.deb +main/g/gcc-4.9/libitm1_4.9.2-10_i386.deb +main/g/gcc-4.9/libquadmath0_4.9.2-10_i386.deb +main/g/gcc-4.9/libstdc++6_4.9.2-10_i386.deb +main/g/gcc-4.9/libubsan0_4.9.2-10_i386.deb +main/g/gconf/libgconf-2-4_3.2.6-3_i386.deb +main/g/gconf/libgconf2-4_3.2.6-3_i386.deb +main/g/gconf/libgconf2-dev_3.2.6-3_i386.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.31.1-2+deb8u4_i386.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.31.1-2+deb8u4_i386.deb +main/g/glib2.0/libglib2.0-0_2.42.1-1+b1_i386.deb +main/g/glib2.0/libglib2.0-dev_2.42.1-1+b1_i386.deb +main/g/glibc/libc6_2.19-18+deb8u4_i386.deb +main/g/glibc/libc6-dev_2.19-18+deb8u4_i386.deb +main/g/gnutls28/libgnutls28-dev_3.3.8-6+deb8u3_i386.deb +main/g/gnutls28/libgnutls-deb0-28_3.3.8-6+deb8u3_i386.deb +main/g/gnutls28/libgnutls-openssl27_3.3.8-6+deb8u3_i386.deb +main/g/gnutls28/libgnutlsxx28_3.3.8-6+deb8u3_i386.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.25-3+deb8u1_i386.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.25-3+deb8u1_i386.deb +main/h/harfbuzz/libharfbuzz0b_0.9.35-2_i386.deb +main/h/harfbuzz/libharfbuzz-dev_0.9.35-2_i386.deb +main/h/harfbuzz/libharfbuzz-gobject0_0.9.35-2_i386.deb +main/h/harfbuzz/libharfbuzz-icu0_0.9.35-2_i386.deb +main/k/keyutils/libkeyutils1_1.5.9-5+b1_i386.deb +main/k/krb5/krb5-multidev_1.12.1+dfsg-19+deb8u2_i386.deb +main/k/krb5/libgssapi-krb5-2_1.12.1+dfsg-19+deb8u2_i386.deb +main/k/krb5/libgssrpc4_1.12.1+dfsg-19+deb8u2_i386.deb +main/k/krb5/libk5crypto3_1.12.1+dfsg-19+deb8u2_i386.deb +main/k/krb5/libkadm5clnt-mit9_1.12.1+dfsg-19+deb8u2_i386.deb +main/k/krb5/libkadm5srv-mit9_1.12.1+dfsg-19+deb8u2_i386.deb +main/k/krb5/libkdb5-7_1.12.1+dfsg-19+deb8u2_i386.deb +main/k/krb5/libkrb5-3_1.12.1+dfsg-19+deb8u2_i386.deb +main/k/krb5/libkrb5-dev_1.12.1+dfsg-19+deb8u2_i386.deb 
+main/k/krb5/libkrb5support0_1.12.1+dfsg-19+deb8u2_i386.deb +main/libc/libcap2/libcap2_2.24-8_i386.deb +main/libc/libcap2/libcap-dev_2.24-8_i386.deb +main/libd/libdrm/libdrm2_2.4.58-2_i386.deb +main/libd/libdrm/libdrm-dev_2.4.58-2_i386.deb +main/libd/libdrm/libdrm-intel1_2.4.58-2_i386.deb +main/libd/libdrm/libdrm-nouveau2_2.4.58-2_i386.deb +main/libd/libdrm/libdrm-radeon1_2.4.58-2_i386.deb +main/libf/libffi/libffi6_3.1-2+b2_i386.deb +main/libf/libffi/libffi-dev_3.1-2+b2_i386.deb +main/libg/libgcrypt20/libgcrypt20_1.6.3-2+deb8u1_i386.deb +main/libg/libgcrypt20/libgcrypt20-dev_1.6.3-2+deb8u1_i386.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b1_i386.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b1_i386.deb +main/libg/libgpg-error/libgpg-error0_1.17-3_i386.deb +main/libg/libgpg-error/libgpg-error-dev_1.17-3_i386.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-5+b3_i386.deb +main/libp/libp11/libp11-2_0.2.8-5_i386.deb +main/libp/libpng/libpng12-0_1.2.50-2+deb8u2_i386.deb +main/libp/libpng/libpng12-dev_1.2.50-2+deb8u2_i386.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_i386.deb +main/libs/libselinux/libselinux1_2.3-2_i386.deb +main/libt/libtasn1-6/libtasn1-6_4.2-3+deb8u2_i386.deb +main/libx/libx11/libx11-6_1.6.2-3_i386.deb +main/libx/libx11/libx11-dev_1.6.2-3_i386.deb +main/libx/libx11/libx11-xcb1_1.6.2-3_i386.deb +main/libx/libx11/libx11-xcb-dev_1.6.2-3_i386.deb +main/libx/libxau/libxau6_1.0.8-1_i386.deb +main/libx/libxau/libxau-dev_1.0.8-1_i386.deb +main/libx/libxcb/libxcb1_1.10-3+b1_i386.deb +main/libx/libxcb/libxcb1-dev_1.10-3+b1_i386.deb +main/libx/libxcb/libxcb-glx0_1.10-3+b1_i386.deb +main/libx/libxcb/libxcb-render0_1.10-3+b1_i386.deb +main/libx/libxcb/libxcb-render0-dev_1.10-3+b1_i386.deb +main/libx/libxcb/libxcb-shm0_1.10-3+b1_i386.deb +main/libx/libxcb/libxcb-shm0-dev_1.10-3+b1_i386.deb +main/libx/libxcomposite/libxcomposite1_0.4.4-1_i386.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.4-1_i386.deb +main/libx/libxcursor/libxcursor1_1.1.14-1+b1_i386.deb +main/libx/libxcursor/libxcursor-dev_1.1.14-1+b1_i386.deb +main/libx/libxdamage/libxdamage1_1.1.4-2+b1_i386.deb +main/libx/libxdamage/libxdamage-dev_1.1.4-2+b1_i386.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1+b1_i386.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1+b1_i386.deb +main/libx/libxext/libxext6_1.3.3-1_i386.deb +main/libx/libxext/libxext-dev_1.3.3-1_i386.deb +main/libx/libxfixes/libxfixes3_5.0.1-2+b2_i386.deb +main/libx/libxfixes/libxfixes-dev_5.0.1-2+b2_i386.deb +main/libx/libxi/libxi6_1.7.4-1+b2_i386.deb +main/libx/libxi/libxi-dev_1.7.4-1+b2_i386.deb +main/libx/libxinerama/libxinerama1_1.1.3-1+b1_i386.deb +main/libx/libxinerama/libxinerama-dev_1.1.3-1+b1_i386.deb +main/libx/libxrandr/libxrandr2_1.4.2-1+b1_i386.deb +main/libx/libxrandr/libxrandr-dev_1.4.2-1+b1_i386.deb +main/libx/libxrender/libxrender1_0.9.8-1+b1_i386.deb +main/libx/libxrender/libxrender-dev_0.9.8-1+b1_i386.deb +main/libx/libxss/libxss1_1.2.2-1_i386.deb +main/libx/libxss/libxss-dev_1.2.2-1_i386.deb +main/libx/libxt/libxt6_1.1.4-1+b1_i386.deb +main/libx/libxt/libxt-dev_1.1.4-1+b1_i386.deb +main/libx/libxtst/libxtst6_1.2.2-1+b1_i386.deb +main/libx/libxtst/libxtst-dev_1.2.2-1+b1_i386.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.3-1+b1_i386.deb +main/l/linux/linux-libc-dev_3.16.7-ckt25-2_i386.deb +main/m/mesa/libgl1-mesa-dev_10.3.2-1+deb8u1_i386.deb +main/m/mesa/libgl1-mesa-glx_10.3.2-1+deb8u1_i386.deb +main/m/mesa/libglapi-mesa_10.3.2-1+deb8u1_i386.deb +main/m/mesa/mesa-common-dev_10.3.2-1+deb8u1_i386.deb 
+main/n/nspr/libnspr4_4.10.7-1+deb8u1_i386.deb +main/n/nspr/libnspr4-dev_4.10.7-1+deb8u1_i386.deb +main/n/nss/libnss3_3.17.2-1.1+deb8u2_i386.deb +main/n/nss/libnss3-dev_3.17.2-1.1+deb8u2_i386.deb +main/o/openssl/libssl1.0.0_1.0.1t-1+deb8u2_i386.deb +main/o/openssl/libssl-dev_1.0.1t-1+deb8u2_i386.deb +main/o/orbit2/liborbit2_2.14.19-0.3_i386.deb +main/p/p11-kit/libp11-kit0_0.20.7-1_i386.deb +main/p/pam/libpam0g_1.1.8-3.1+deb8u1+b1_i386.deb +main/p/pam/libpam0g-dev_1.1.8-3.1+deb8u1+b1_i386.deb +main/p/pango1.0/libpango-1.0-0_1.36.8-3_i386.deb +main/p/pango1.0/libpango1.0-dev_1.36.8-3_i386.deb +main/p/pango1.0/libpangocairo-1.0-0_1.36.8-3_i386.deb +main/p/pango1.0/libpangoft2-1.0-0_1.36.8-3_i386.deb +main/p/pango1.0/libpangoxft-1.0-0_1.36.8-3_i386.deb +main/p/pangox-compat/libpangox-1.0-0_0.0.2-5_i386.deb +main/p/pciutils/libpci3_3.2.1-3_i386.deb +main/p/pciutils/libpci-dev_3.2.1-3_i386.deb +main/p/pcre3/libpcre3_8.35-3.3+deb8u4_i386.deb +main/p/pcre3/libpcre3-dev_8.35-3.3+deb8u4_i386.deb +main/p/pcre3/libpcrecpp0_8.35-3.3+deb8u4_i386.deb +main/p/pixman/libpixman-1-0_0.32.6-3_i386.deb +main/p/pixman/libpixman-1-dev_0.32.6-3_i386.deb +main/p/pulseaudio/libpulse0_5.0-13_i386.deb +main/p/pulseaudio/libpulse-dev_5.0-13_i386.deb +main/p/pulseaudio/libpulse-mainloop-glib0_5.0-13_i386.deb +main/s/speech-dispatcher/libspeechd2_0.8-7_i386.deb +main/s/speech-dispatcher/libspeechd-dev_0.8-7_i386.deb +main/s/speech-dispatcher/speech-dispatcher_0.8-7_i386.deb +main/s/systemd/libudev1_215-17+deb8u4_i386.deb +main/s/systemd/libudev-dev_215-17+deb8u4_i386.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.26-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.3.1-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.4.0-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.8.dfsg-2+b1_i386.deb +main/z/zlib/zlib1g-dev_1.2.8.dfsg-2+b1_i386.deb diff --git a/build/linux/sysroot_scripts/packagelist.jessie.mipsel b/build/linux/sysroot_scripts/packagelist.jessie.mipsel new file mode 100644 index 00000000000..95032dee89d --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.jessie.mipsel @@ -0,0 +1,177 @@ +main/a/alsa-lib/libasound2_1.0.28-1_mipsel.deb +main/a/alsa-lib/libasound2-dev_1.0.28-1_mipsel.deb +main/a/atk1.0/libatk1.0-0_2.14.0-1_mipsel.deb +main/a/atk1.0/libatk1.0-dev_2.14.0-1_mipsel.deb +main/a/attr/libattr1_2.4.47-2_mipsel.deb +main/a/avahi/libavahi-client3_0.6.31-5_mipsel.deb +main/a/avahi/libavahi-common3_0.6.31-5_mipsel.deb +main/b/bluez/libbluetooth3_5.23-2+b1_mipsel.deb +main/b/bluez/libbluetooth-dev_5.23-2+b1_mipsel.deb +main/b/brltty/libbrlapi0.6_5.2~20141018-5_mipsel.deb +main/b/brltty/libbrlapi-dev_5.2~20141018-5_mipsel.deb +main/c/cairo/libcairo2_1.14.0-2.1+deb8u1_mipsel.deb +main/c/cairo/libcairo2-dev_1.14.0-2.1+deb8u1_mipsel.deb +main/c/cairo/libcairo-gobject2_1.14.0-2.1+deb8u1_mipsel.deb +main/c/cairo/libcairo-script-interpreter2_1.14.0-2.1+deb8u1_mipsel.deb +main/c/cups/libcups2_1.7.5-11+deb8u1_mipsel.deb +main/c/cups/libcups2-dev_1.7.5-11+deb8u1_mipsel.deb 
+main/d/dbus-glib/libdbus-glib-1-2_0.102-1_mipsel.deb +main/d/dbus/libdbus-1-3_1.8.20-0+deb8u1_mipsel.deb +main/d/dbus/libdbus-1-dev_1.8.20-0+deb8u1_mipsel.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.12-1.1_mipsel.deb +main/e/e2fsprogs/libcomerr2_1.42.12-1.1_mipsel.deb +main/e/elfutils/libelf1_0.159-4.2_mipsel.deb +main/e/elfutils/libelf-dev_0.159-4.2_mipsel.deb +main/e/expat/libexpat1_2.1.0-6+deb8u2_mipsel.deb +main/e/expat/libexpat1-dev_2.1.0-6+deb8u2_mipsel.deb +main/f/fontconfig/libfontconfig1_2.11.0-6.3_mipsel.deb +main/f/fontconfig/libfontconfig1-dev_2.11.0-6.3_mipsel.deb +main/f/freetype/libfreetype6_2.5.2-3+deb8u1_mipsel.deb +main/f/freetype/libfreetype6-dev_2.5.2-3+deb8u1_mipsel.deb +main/g/gcc-4.8/libgcc-4.8-dev_4.8.4-1_mipsel.deb +main/g/gcc-4.8/libstdc++-4.8-dev_4.8.4-1_mipsel.deb +main/g/gcc-4.9/libatomic1_4.9.2-10_mipsel.deb +main/g/gcc-4.9/libgcc1_4.9.2-10_mipsel.deb +main/g/gcc-4.9/libgomp1_4.9.2-10_mipsel.deb +main/g/gcc-4.9/libstdc++6_4.9.2-10_mipsel.deb +main/g/gconf/libgconf2-4_3.2.6-3_mipsel.deb +main/g/gconf/libgconf-2-4_3.2.6-3_mipsel.deb +main/g/gconf/libgconf2-dev_3.2.6-3_mipsel.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.31.1-2+deb8u4_mipsel.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.31.1-2+deb8u4_mipsel.deb +main/g/glib2.0/libglib2.0-0_2.42.1-1+b1_mipsel.deb +main/g/glib2.0/libglib2.0-dev_2.42.1-1+b1_mipsel.deb +main/g/glibc/libc6_2.19-18+deb8u4_mipsel.deb +main/g/glibc/libc6-dev_2.19-18+deb8u4_mipsel.deb +main/g/gnutls28/libgnutls28-dev_3.3.8-6+deb8u3_mipsel.deb +main/g/gnutls28/libgnutls-deb0-28_3.3.8-6+deb8u3_mipsel.deb +main/g/gnutls28/libgnutls-openssl27_3.3.8-6+deb8u3_mipsel.deb +main/g/gnutls28/libgnutlsxx28_3.3.8-6+deb8u3_mipsel.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.25-3+deb8u1_mipsel.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.25-3+deb8u1_mipsel.deb +main/h/harfbuzz/libharfbuzz0b_0.9.35-2_mipsel.deb +main/h/harfbuzz/libharfbuzz-dev_0.9.35-2_mipsel.deb +main/h/harfbuzz/libharfbuzz-gobject0_0.9.35-2_mipsel.deb +main/h/harfbuzz/libharfbuzz-icu0_0.9.35-2_mipsel.deb +main/k/keyutils/libkeyutils1_1.5.9-5+b1_mipsel.deb +main/k/krb5/krb5-multidev_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libgssapi-krb5-2_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libgssrpc4_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libk5crypto3_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libkadm5clnt-mit9_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libkadm5srv-mit9_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libkdb5-7_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libkrb5-3_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libkrb5-dev_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/k/krb5/libkrb5support0_1.12.1+dfsg-19+deb8u2_mipsel.deb +main/libc/libcap2/libcap2_2.24-8_mipsel.deb +main/libc/libcap2/libcap-dev_2.24-8_mipsel.deb +main/libd/libdrm/libdrm2_2.4.58-2_mipsel.deb +main/libd/libdrm/libdrm-dev_2.4.58-2_mipsel.deb +main/libd/libdrm/libdrm-nouveau2_2.4.58-2_mipsel.deb +main/libd/libdrm/libdrm-radeon1_2.4.58-2_mipsel.deb +main/libf/libffi/libffi6_3.1-2+b2_mipsel.deb +main/libf/libffi/libffi-dev_3.1-2+b2_mipsel.deb +main/libg/libgcrypt20/libgcrypt20_1.6.3-2+deb8u1_mipsel.deb +main/libg/libgcrypt20/libgcrypt20-dev_1.6.3-2+deb8u1_mipsel.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b1_mipsel.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b1_mipsel.deb +main/libg/libgpg-error/libgpg-error0_1.17-3_mipsel.deb +main/libg/libgpg-error/libgpg-error-dev_1.17-3_mipsel.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-5+b3_mipsel.deb +main/libp/libp11/libp11-2_0.2.8-5_mipsel.deb 
+main/libp/libpng/libpng12-0_1.2.50-2+deb8u2_mipsel.deb +main/libp/libpng/libpng12-dev_1.2.50-2+deb8u2_mipsel.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_mipsel.deb +main/libs/libselinux/libselinux1_2.3-2_mipsel.deb +main/libt/libtasn1-6/libtasn1-6_4.2-3+deb8u2_mipsel.deb +main/libx/libx11/libx11-6_1.6.2-3_mipsel.deb +main/libx/libx11/libx11-dev_1.6.2-3_mipsel.deb +main/libx/libx11/libx11-xcb1_1.6.2-3_mipsel.deb +main/libx/libx11/libx11-xcb-dev_1.6.2-3_mipsel.deb +main/libx/libxau/libxau6_1.0.8-1_mipsel.deb +main/libx/libxau/libxau-dev_1.0.8-1_mipsel.deb +main/libx/libxcb/libxcb1_1.10-3+b1_mipsel.deb +main/libx/libxcb/libxcb1-dev_1.10-3+b1_mipsel.deb +main/libx/libxcb/libxcb-glx0_1.10-3+b1_mipsel.deb +main/libx/libxcb/libxcb-render0_1.10-3+b1_mipsel.deb +main/libx/libxcb/libxcb-render0-dev_1.10-3+b1_mipsel.deb +main/libx/libxcb/libxcb-shm0_1.10-3+b1_mipsel.deb +main/libx/libxcb/libxcb-shm0-dev_1.10-3+b1_mipsel.deb +main/libx/libxcomposite/libxcomposite1_0.4.4-1_mipsel.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.4-1_mipsel.deb +main/libx/libxcursor/libxcursor1_1.1.14-1+b1_mipsel.deb +main/libx/libxcursor/libxcursor-dev_1.1.14-1+b1_mipsel.deb +main/libx/libxdamage/libxdamage1_1.1.4-2+b1_mipsel.deb +main/libx/libxdamage/libxdamage-dev_1.1.4-2+b1_mipsel.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1+b1_mipsel.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1+b1_mipsel.deb +main/libx/libxext/libxext6_1.3.3-1_mipsel.deb +main/libx/libxext/libxext-dev_1.3.3-1_mipsel.deb +main/libx/libxfixes/libxfixes3_5.0.1-2+b2_mipsel.deb +main/libx/libxfixes/libxfixes-dev_5.0.1-2+b2_mipsel.deb +main/libx/libxi/libxi6_1.7.4-1+b2_mipsel.deb +main/libx/libxi/libxi-dev_1.7.4-1+b2_mipsel.deb +main/libx/libxinerama/libxinerama1_1.1.3-1+b1_mipsel.deb +main/libx/libxinerama/libxinerama-dev_1.1.3-1+b1_mipsel.deb +main/libx/libxrandr/libxrandr2_1.4.2-1+b1_mipsel.deb +main/libx/libxrandr/libxrandr-dev_1.4.2-1+b1_mipsel.deb +main/libx/libxrender/libxrender1_0.9.8-1+b1_mipsel.deb +main/libx/libxrender/libxrender-dev_0.9.8-1+b1_mipsel.deb +main/libx/libxss/libxss1_1.2.2-1_mipsel.deb +main/libx/libxss/libxss-dev_1.2.2-1_mipsel.deb +main/libx/libxt/libxt6_1.1.4-1+b1_mipsel.deb +main/libx/libxt/libxt-dev_1.1.4-1+b1_mipsel.deb +main/libx/libxtst/libxtst6_1.2.2-1+b1_mipsel.deb +main/libx/libxtst/libxtst-dev_1.2.2-1+b1_mipsel.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.3-1+b1_mipsel.deb +main/l/linux/linux-libc-dev_3.16.7-ckt25-2_mipsel.deb +main/m/mesa/libgl1-mesa-dev_10.3.2-1+deb8u1_mipsel.deb +main/m/mesa/libgl1-mesa-glx_10.3.2-1+deb8u1_mipsel.deb +main/m/mesa/libglapi-mesa_10.3.2-1+deb8u1_mipsel.deb +main/m/mesa/mesa-common-dev_10.3.2-1+deb8u1_mipsel.deb +main/n/nspr/libnspr4_4.10.7-1+deb8u1_mipsel.deb +main/n/nspr/libnspr4-dev_4.10.7-1+deb8u1_mipsel.deb +main/n/nss/libnss3_3.17.2-1.1+deb8u2_mipsel.deb +main/n/nss/libnss3-dev_3.17.2-1.1+deb8u2_mipsel.deb +main/o/openssl/libssl1.0.0_1.0.1t-1+deb8u2_mipsel.deb +main/o/openssl/libssl-dev_1.0.1t-1+deb8u2_mipsel.deb +main/o/orbit2/liborbit2_2.14.19-0.3_mipsel.deb +main/p/p11-kit/libp11-kit0_0.20.7-1_mipsel.deb +main/p/pam/libpam0g_1.1.8-3.1+deb8u1+b1_mipsel.deb +main/p/pam/libpam0g-dev_1.1.8-3.1+deb8u1+b1_mipsel.deb +main/p/pango1.0/libpango-1.0-0_1.36.8-3_mipsel.deb +main/p/pango1.0/libpango1.0-dev_1.36.8-3_mipsel.deb +main/p/pango1.0/libpangocairo-1.0-0_1.36.8-3_mipsel.deb +main/p/pango1.0/libpangoft2-1.0-0_1.36.8-3_mipsel.deb +main/p/pango1.0/libpangoxft-1.0-0_1.36.8-3_mipsel.deb +main/p/pangox-compat/libpangox-1.0-0_0.0.2-5_mipsel.deb 
+main/p/pciutils/libpci3_3.2.1-3_mipsel.deb +main/p/pciutils/libpci-dev_3.2.1-3_mipsel.deb +main/p/pcre3/libpcre3_8.35-3.3+deb8u4_mipsel.deb +main/p/pcre3/libpcre3-dev_8.35-3.3+deb8u4_mipsel.deb +main/p/pcre3/libpcrecpp0_8.35-3.3+deb8u4_mipsel.deb +main/p/pixman/libpixman-1-0_0.32.6-3_mipsel.deb +main/p/pixman/libpixman-1-dev_0.32.6-3_mipsel.deb +main/p/pulseaudio/libpulse0_5.0-13_mipsel.deb +main/p/pulseaudio/libpulse-dev_5.0-13_mipsel.deb +main/p/pulseaudio/libpulse-mainloop-glib0_5.0-13_mipsel.deb +main/s/speech-dispatcher/libspeechd2_0.8-7_mipsel.deb +main/s/speech-dispatcher/libspeechd-dev_0.8-7_mipsel.deb +main/s/speech-dispatcher/speech-dispatcher_0.8-7_mipsel.deb +main/s/systemd/libudev1_215-17+deb8u4_mipsel.deb +main/s/systemd/libudev-dev_215-17+deb8u4_mipsel.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.26-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.3.1-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.4.0-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.8.dfsg-2+b1_mipsel.deb +main/z/zlib/zlib1g-dev_1.2.8.dfsg-2+b1_mipsel.deb diff --git a/build/linux/sysroot_scripts/packagelist.precise.amd64 b/build/linux/sysroot_scripts/packagelist.precise.amd64 new file mode 100644 index 00000000000..59670882f6d --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.precise.amd64 @@ -0,0 +1,168 @@ +main/a/alsa-lib/libasound2_1.0.25-1ubuntu10.2_amd64.deb +main/a/alsa-lib/libasound2-dev_1.0.25-1ubuntu10.2_amd64.deb +main/a/atk1.0/libatk1.0-0_2.4.0-0ubuntu1_amd64.deb +main/a/atk1.0/libatk1.0-dev_2.4.0-0ubuntu1_amd64.deb +main/a/avahi/libavahi-client3_0.6.30-5ubuntu2.2_amd64.deb +main/a/avahi/libavahi-common3_0.6.30-5ubuntu2.2_amd64.deb +main/b/bluez/libbluetooth3_4.98-2ubuntu7.2_amd64.deb +main/b/bluez/libbluetooth-dev_4.98-2ubuntu7.2_amd64.deb +main/b/brltty/libbrlapi0.5_4.3-1ubuntu5_amd64.deb +main/b/brltty/libbrlapi-dev_4.3-1ubuntu5_amd64.deb +main/c/cairo/libcairo2_1.10.2-6.1ubuntu3_amd64.deb +main/c/cairo/libcairo2-dev_1.10.2-6.1ubuntu3_amd64.deb +main/c/cairo/libcairo-gobject2_1.10.2-6.1ubuntu3_amd64.deb +main/c/cairo/libcairo-script-interpreter2_1.10.2-6.1ubuntu3_amd64.deb +main/c/cups/libcups2_1.5.3-0ubuntu8.7_amd64.deb +main/c/cups/libcups2-dev_1.5.3-0ubuntu8.7_amd64.deb +main/d/dbus-glib/libdbus-glib-1-2_0.98-1ubuntu1.1_amd64.deb +main/d/dbus/libdbus-1-3_1.4.18-1ubuntu1.7_amd64.deb +main/d/dbus/libdbus-1-dev_1.4.18-1ubuntu1.7_amd64.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42-1ubuntu2.3_amd64.deb +main/e/e2fsprogs/libcomerr2_1.42-1ubuntu2.3_amd64.deb +main/e/eglibc/libc6_2.15-0ubuntu10.15_amd64.deb +main/e/eglibc/libc6-dev_2.15-0ubuntu10.15_amd64.deb +main/e/elfutils/libelf1_0.152-1ubuntu3.1_amd64.deb +main/e/elfutils/libelf-dev_0.152-1ubuntu3.1_amd64.deb +main/e/expat/libexpat1_2.0.1-7.2ubuntu1.4_amd64.deb +main/e/expat/libexpat1-dev_2.0.1-7.2ubuntu1.4_amd64.deb +main/f/fontconfig/libfontconfig1_2.8.0-3ubuntu9.2_amd64.deb +main/f/fontconfig/libfontconfig1-dev_2.8.0-3ubuntu9.2_amd64.deb +main/f/freetype/libfreetype6_2.4.8-1ubuntu2.3_amd64.deb 
+main/f/freetype/libfreetype6-dev_2.4.8-1ubuntu2.3_amd64.deb +main/g/gcc-4.6/gcc-4.6_4.6.3-1ubuntu5_amd64.deb +main/g/gcc-4.6/libgcc1_4.6.3-1ubuntu5_amd64.deb +main/g/gcc-4.6/libgomp1_4.6.3-1ubuntu5_amd64.deb +main/g/gcc-4.6/libquadmath0_4.6.3-1ubuntu5_amd64.deb +main/g/gcc-4.6/libstdc++6_4.6.3-1ubuntu5_amd64.deb +main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-1ubuntu5_amd64.deb +main/g/gconf/libgconf-2-4_3.2.5-0ubuntu2_amd64.deb +main/g/gconf/libgconf2-4_3.2.5-0ubuntu2_amd64.deb +main/g/gconf/libgconf2-dev_3.2.5-0ubuntu2_amd64.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1ubuntu1.3_amd64.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1ubuntu1.3_amd64.deb +main/g/glib2.0/libglib2.0-0_2.32.4-0ubuntu1_amd64.deb +main/g/glib2.0/libglib2.0-dev_2.32.4-0ubuntu1_amd64.deb +main/g/gnutls26/libgnutls26_2.12.14-5ubuntu3.12_amd64.deb +main/g/gnutls26/libgnutls-dev_2.12.14-5ubuntu3.12_amd64.deb +main/g/gnutls26/libgnutls-openssl27_2.12.14-5ubuntu3.12_amd64.deb +main/g/gnutls26/libgnutlsxx27_2.12.14-5ubuntu3.12_amd64.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.10-0ubuntu6.3_amd64.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.10-0ubuntu6.3_amd64.deb +main/k/keyutils/libkeyutils1_1.5.2-2_amd64.deb +main/k/krb5/krb5-multidev_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libgssapi-krb5-2_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libgssrpc4_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libk5crypto3_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libkadm5clnt-mit8_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libkadm5srv-mit8_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libkdb5-6_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libkrb5-3_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libkrb5-dev_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/k/krb5/libkrb5support0_1.10+dfsg~beta1-2ubuntu0.7_amd64.deb +main/libc/libcap2/libcap2_2.22-1ubuntu3_amd64.deb +main/libc/libcap2/libcap-dev_2.22-1ubuntu3_amd64.deb +main/libd/libdrm/libdrm2_2.4.52-1~precise2_amd64.deb +main/libd/libdrm/libdrm-dev_2.4.52-1~precise2_amd64.deb +main/libd/libdrm/libdrm-intel1_2.4.52-1~precise2_amd64.deb +main/libd/libdrm/libdrm-nouveau1a_2.4.52-1~precise2_amd64.deb +main/libd/libdrm/libdrm-nouveau2_2.4.52-1~precise2_amd64.deb +main/libd/libdrm/libdrm-radeon1_2.4.52-1~precise2_amd64.deb +main/libd/libdrm/libkms1_2.4.46-1ubuntu0.0.0.1_amd64.deb +main/libf/libffi/libffi6_3.0.11~rc1-5_amd64.deb +main/libf/libffi/libffi-dev_3.0.11~rc1-5_amd64.deb +main/libg/libgcrypt11/libgcrypt11_1.5.0-3ubuntu0.6_amd64.deb +main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-3ubuntu0.6_amd64.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.2.2-2_amd64.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.2.2-2_amd64.deb +main/libg/libgpg-error/libgpg-error0_1.10-2ubuntu1_amd64.deb +main/libg/libgpg-error/libgpg-error-dev_1.10-2ubuntu1_amd64.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-3.2ubuntu3_amd64.deb +main/libp/libp11/libp11-2_0.2.8-2_amd64.deb +main/libp/libpng/libpng12-0_1.2.46-3ubuntu4.2_amd64.deb +main/libp/libpng/libpng12-dev_1.2.46-3ubuntu4.2_amd64.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3_amd64.deb +main/libs/libselinux/libselinux1_2.1.0-4.1ubuntu1_amd64.deb +main/libt/libtasn1-3/libtasn1-3_2.10-1ubuntu1.5_amd64.deb +main/libx/libx11/libx11-6_1.4.99.1-0ubuntu2.3_amd64.deb +main/libx/libx11/libx11-dev_1.4.99.1-0ubuntu2.3_amd64.deb +main/libx/libx11/libx11-xcb1_1.4.99.1-0ubuntu2.3_amd64.deb +main/libx/libx11/libx11-xcb-dev_1.4.99.1-0ubuntu2.3_amd64.deb +main/libx/libxau/libxau6_1.0.6-4_amd64.deb 
+main/libx/libxau/libxau-dev_1.0.6-4_amd64.deb +main/libx/libxcb/libxcb1_1.8.1-1ubuntu0.2_amd64.deb +main/libx/libxcb/libxcb1-dev_1.8.1-1ubuntu0.2_amd64.deb +main/libx/libxcb/libxcb-glx0_1.8.1-1ubuntu0.2_amd64.deb +main/libx/libxcb/libxcb-render0_1.8.1-1ubuntu0.2_amd64.deb +main/libx/libxcb/libxcb-render0-dev_1.8.1-1ubuntu0.2_amd64.deb +main/libx/libxcb/libxcb-shm0_1.8.1-1ubuntu0.2_amd64.deb +main/libx/libxcb/libxcb-shm0-dev_1.8.1-1ubuntu0.2_amd64.deb +main/libx/libxcomposite/libxcomposite1_0.4.3-2build1_amd64.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.3-2build1_amd64.deb +main/libx/libxcursor/libxcursor1_1.1.12-1ubuntu0.1_amd64.deb +main/libx/libxcursor/libxcursor-dev_1.1.12-1ubuntu0.1_amd64.deb +main/libx/libxdamage/libxdamage1_1.1.3-2build1_amd64.deb +main/libx/libxdamage/libxdamage-dev_1.1.3-2build1_amd64.deb +main/libx/libxdmcp/libxdmcp6_1.1.0-4_amd64.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.0-4_amd64.deb +main/libx/libxext/libxext6_1.3.0-3ubuntu0.2_amd64.deb +main/libx/libxext/libxext-dev_1.3.0-3ubuntu0.2_amd64.deb +main/libx/libxfixes/libxfixes3_5.0-4ubuntu4.4_amd64.deb +main/libx/libxfixes/libxfixes-dev_5.0-4ubuntu4.4_amd64.deb +main/libx/libxi/libxi6_1.7.1.901-1ubuntu1~precise3_amd64.deb +main/libx/libxi/libxi-dev_1.7.1.901-1ubuntu1~precise3_amd64.deb +main/libx/libxinerama/libxinerama1_1.1.1-3ubuntu0.1_amd64.deb +main/libx/libxinerama/libxinerama-dev_1.1.1-3ubuntu0.1_amd64.deb +main/libx/libxrandr/libxrandr2_1.3.2-2ubuntu0.3_amd64.deb +main/libx/libxrandr/libxrandr-dev_1.3.2-2ubuntu0.3_amd64.deb +main/libx/libxrender/libxrender1_0.9.6-2ubuntu0.2_amd64.deb +main/libx/libxrender/libxrender-dev_0.9.6-2ubuntu0.2_amd64.deb +main/libx/libxss/libxss1_1.2.1-2_amd64.deb +main/libx/libxss/libxss-dev_1.2.1-2_amd64.deb +main/libx/libxt/libxt6_1.1.1-2ubuntu0.1_amd64.deb +main/libx/libxt/libxt-dev_1.1.1-2ubuntu0.1_amd64.deb +main/libx/libxtst/libxtst6_1.2.0-4ubuntu0.1_amd64.deb +main/libx/libxtst/libxtst-dev_1.2.0-4ubuntu0.1_amd64.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.1-2ubuntu0.1_amd64.deb +main/l/linux/linux-libc-dev_3.2.0-107.148_amd64.deb +main/m/mesa/libgl1-mesa-dev_8.0.4-0ubuntu0.7_amd64.deb +main/m/mesa/libgl1-mesa-glx_8.0.4-0ubuntu0.7_amd64.deb +main/m/mesa/libglapi-mesa_8.0.4-0ubuntu0.7_amd64.deb +main/m/mesa/mesa-common-dev_8.0.4-0ubuntu0.7_amd64.deb +main/n/nspr/libnspr4_4.12-0ubuntu0.12.04.1_amd64.deb +main/n/nspr/libnspr4-dev_4.12-0ubuntu0.12.04.1_amd64.deb +main/n/nss/libnss3_3.23-0ubuntu0.12.04.1_amd64.deb +main/n/nss/libnss3-dev_3.23-0ubuntu0.12.04.1_amd64.deb +main/o/openssl/libssl1.0.0_1.0.1-4ubuntu5.36_amd64.deb +main/o/openssl/libssl-dev_1.0.1-4ubuntu5.36_amd64.deb +main/o/orbit2/liborbit2_2.14.19-0.1ubuntu1_amd64.deb +main/p/p11-kit/libp11-kit0_0.12-2ubuntu1_amd64.deb +main/p/pam/libpam0g_1.1.3-7ubuntu2.3_amd64.deb +main/p/pam/libpam0g-dev_1.1.3-7ubuntu2.3_amd64.deb +main/p/pango1.0/libpango1.0-0_1.30.0-0ubuntu3.1_amd64.deb +main/p/pango1.0/libpango1.0-dev_1.30.0-0ubuntu3.1_amd64.deb +main/p/pciutils/libpci3_3.1.8-2ubuntu6_amd64.deb +main/p/pciutils/libpci-dev_3.1.8-2ubuntu6_amd64.deb +main/p/pcre3/libpcre3_8.12-4ubuntu0.2_amd64.deb +main/p/pcre3/libpcre3-dev_8.12-4ubuntu0.2_amd64.deb +main/p/pcre3/libpcrecpp0_8.12-4ubuntu0.2_amd64.deb +main/p/pixman/libpixman-1-0_0.30.2-1ubuntu0.0.0.0.3_amd64.deb +main/p/pixman/libpixman-1-dev_0.30.2-1ubuntu0.0.0.0.3_amd64.deb +main/p/pulseaudio/libpulse0_1.1-0ubuntu15.4_amd64.deb +main/p/pulseaudio/libpulse-dev_1.1-0ubuntu15.4_amd64.deb +main/p/pulseaudio/libpulse-mainloop-glib0_1.1-0ubuntu15.4_amd64.deb 
+main/s/speech-dispatcher/libspeechd2_0.7.1-6ubuntu3_amd64.deb +main/s/speech-dispatcher/libspeechd-dev_0.7.1-6ubuntu3_amd64.deb +main/s/speech-dispatcher/speech-dispatcher_0.7.1-6ubuntu3_amd64.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.22-1ubuntu0.2_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2ubuntu1_all.deb +main/x/x11proto-input/x11proto-input-dev_2.3-1~precise2_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.5-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.4.0+git20120101.is.really.1.4.0-0ubuntu1~precise2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.1-2_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.1-2_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1~precise2_all.deb +main/z/zlib/zlib1g_1.2.3.4.dfsg-3ubuntu4_amd64.deb +main/z/zlib/zlib1g-dev_1.2.3.4.dfsg-3ubuntu4_amd64.deb diff --git a/build/linux/sysroot_scripts/packagelist.trusty.arm b/build/linux/sysroot_scripts/packagelist.trusty.arm new file mode 100644 index 00000000000..0fcaa379946 --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.trusty.arm @@ -0,0 +1,159 @@ +main/a/alsa-lib/libasound2_1.0.27.2-3ubuntu7_armhf.deb +main/a/alsa-lib/libasound2-dev_1.0.27.2-3ubuntu7_armhf.deb +main/a/atk1.0/libatk1.0-0_2.10.0-2ubuntu2_armhf.deb +main/a/atk1.0/libatk1.0-dev_2.10.0-2ubuntu2_armhf.deb +main/a/avahi/libavahi-client3_0.6.31-4ubuntu1.1_armhf.deb +main/a/avahi/libavahi-common3_0.6.31-4ubuntu1.1_armhf.deb +main/b/bluez/libbluetooth3_4.101-0ubuntu13.1_armhf.deb +main/b/bluez/libbluetooth-dev_4.101-0ubuntu13.1_armhf.deb +main/c/cairo/libcairo2_1.13.0~20140204-0ubuntu1.1_armhf.deb +main/c/cairo/libcairo2-dev_1.13.0~20140204-0ubuntu1.1_armhf.deb +main/c/cairo/libcairo-gobject2_1.13.0~20140204-0ubuntu1.1_armhf.deb +main/c/cairo/libcairo-script-interpreter2_1.13.0~20140204-0ubuntu1.1_armhf.deb +main/c/cups/libcups2_1.7.2-0ubuntu1.7_armhf.deb +main/c/cups/libcups2-dev_1.7.2-0ubuntu1.7_armhf.deb +main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_armhf.deb +main/d/dbus/libdbus-1-3_1.6.18-0ubuntu4.3_armhf.deb +main/d/dbus/libdbus-1-dev_1.6.18-0ubuntu4.3_armhf.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.9-3ubuntu1.3_armhf.deb +main/e/e2fsprogs/libcomerr2_1.42.9-3ubuntu1.3_armhf.deb +main/e/eglibc/libc6_2.19-0ubuntu6.9_armhf.deb +main/e/eglibc/libc6-dev_2.19-0ubuntu6.9_armhf.deb +main/e/elfutils/libelf1_0.158-0ubuntu5.2_armhf.deb +main/e/elfutils/libelf-dev_0.158-0ubuntu5.2_armhf.deb +main/e/expat/libexpat1_2.1.0-4ubuntu1.3_armhf.deb +main/e/expat/libexpat1-dev_2.1.0-4ubuntu1.3_armhf.deb +main/f/fontconfig/libfontconfig1_2.11.0-0ubuntu4.2_armhf.deb +main/f/fontconfig/libfontconfig1-dev_2.11.0-0ubuntu4.2_armhf.deb +main/f/freetype/libfreetype6_2.5.2-1ubuntu2.5_armhf.deb +main/f/freetype/libfreetype6-dev_2.5.2-1ubuntu2.5_armhf.deb +main/g/gcc-4.8/gcc-4.8_4.8.4-2ubuntu1~14.04.3_armhf.deb +main/g/gcc-4.8/libgomp1_4.8.4-2ubuntu1~14.04.3_armhf.deb +main/g/gcc-4.8/libstdc++-4.8-dev_4.8.4-2ubuntu1~14.04.3_armhf.deb +main/g/gcc-4.8/libstdc++6_4.8.4-2ubuntu1~14.04.3_armhf.deb +main/g/gccgo-4.9/libgcc1_4.9.3-0ubuntu4_armhf.deb +main/g/gconf/libgconf2-4_3.2.6-0ubuntu2_armhf.deb +main/g/gconf/libgconf-2-4_3.2.6-0ubuntu2_armhf.deb +main/g/gconf/libgconf2-dev_3.2.6-0ubuntu2_armhf.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.30.7-0ubuntu1.2_armhf.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.30.7-0ubuntu1.2_armhf.deb 
+main/g/glib2.0/libglib2.0-0_2.40.2-0ubuntu1_armhf.deb +main/g/glib2.0/libglib2.0-dev_2.40.2-0ubuntu1_armhf.deb +main/g/gnutls26/libgnutls26_2.12.23-12ubuntu2.5_armhf.deb +main/g/gnutls26/libgnutls-dev_2.12.23-12ubuntu2.5_armhf.deb +main/g/gnutls26/libgnutls-openssl27_2.12.23-12ubuntu2.5_armhf.deb +main/g/gnutls26/libgnutlsxx27_2.12.23-12ubuntu2.5_armhf.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.23-0ubuntu1.4_armhf.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.23-0ubuntu1.4_armhf.deb +main/k/keyutils/libkeyutils1_1.5.6-1_armhf.deb +main/k/krb5/krb5-multidev_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libgssapi-krb5-2_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libgssrpc4_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libk5crypto3_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libkadm5clnt-mit9_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libkadm5srv-mit9_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libkdb5-7_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libkrb5-3_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libkrb5-dev_1.12+dfsg-2ubuntu5.2_armhf.deb +main/k/krb5/libkrb5support0_1.12+dfsg-2ubuntu5.2_armhf.deb +main/libc/libcap2/libcap2_2.24-0ubuntu2_armhf.deb +main/libc/libcap2/libcap-dev_2.24-0ubuntu2_armhf.deb +main/libd/libdrm/libdrm2_2.4.67-1ubuntu0.14.04.1_armhf.deb +main/libf/libffi/libffi6_3.1~rc1+r3.0.13-12ubuntu0.1_armhf.deb +main/libg/libgcrypt11/libgcrypt11_1.5.3-2ubuntu4.4_armhf.deb +main/libg/libgcrypt11/libgcrypt11-dev_1.5.3-2ubuntu4.4_armhf.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.8.0-2_armhf.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.8.0-2_armhf.deb +main/libg/libgpg-error/libgpg-error0_1.12-0.2ubuntu1_armhf.deb +main/libg/libgpg-error/libgpg-error-dev_1.12-0.2ubuntu1_armhf.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-5build3_armhf.deb +main/libp/libp11/libp11-2_0.2.8-3ubuntu1_armhf.deb +main/libp/libpng/libpng12-0_1.2.50-1ubuntu2.14.04.2_armhf.deb +main/libp/libpng/libpng12-dev_1.2.50-1ubuntu2.14.04.2_armhf.deb +main/libs/libselinux/libselinux1_2.2.2-1ubuntu0.1_armhf.deb +main/libt/libtasn1-6/libtasn1-6_3.4-3ubuntu0.4_armhf.deb +main/libx/libx11/libx11-6_1.6.2-1ubuntu2_armhf.deb +main/libx/libx11/libx11-dev_1.6.2-1ubuntu2_armhf.deb +main/libx/libx11/libx11-xcb1_1.6.2-1ubuntu2_armhf.deb +main/libx/libx11/libx11-xcb-dev_1.6.2-1ubuntu2_armhf.deb +main/libx/libxau/libxau6_1.0.8-1_armhf.deb +main/libx/libxau/libxau-dev_1.0.8-1_armhf.deb +main/libx/libxcb/libxcb1_1.10-2ubuntu1_armhf.deb +main/libx/libxcb/libxcb1-dev_1.10-2ubuntu1_armhf.deb +main/libx/libxcb/libxcb-glx0_1.10-2ubuntu1_armhf.deb +main/libx/libxcb/libxcb-render0_1.10-2ubuntu1_armhf.deb +main/libx/libxcb/libxcb-render0-dev_1.10-2ubuntu1_armhf.deb +main/libx/libxcb/libxcb-shm0_1.10-2ubuntu1_armhf.deb +main/libx/libxcb/libxcb-shm0-dev_1.10-2ubuntu1_armhf.deb +main/libx/libxcomposite/libxcomposite1_0.4.4-1_armhf.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.4-1_armhf.deb +main/libx/libxcursor/libxcursor1_1.1.14-1_armhf.deb +main/libx/libxcursor/libxcursor-dev_1.1.14-1_armhf.deb +main/libx/libxdamage/libxdamage1_1.1.4-1ubuntu1_armhf.deb +main/libx/libxdamage/libxdamage-dev_1.1.4-1ubuntu1_armhf.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1_armhf.deb +main/libx/libxext/libxext6_1.3.2-1ubuntu0.0.14.04.1_armhf.deb +main/libx/libxext/libxext-dev_1.3.2-1ubuntu0.0.14.04.1_armhf.deb +main/libx/libxfixes/libxfixes3_5.0.1-1ubuntu1.1_armhf.deb +main/libx/libxfixes/libxfixes-dev_5.0.1-1ubuntu1.1_armhf.deb +main/libx/libxi/libxi6_1.7.1.901-1ubuntu1.1_armhf.deb +main/libx/libxi/libxi-dev_1.7.1.901-1ubuntu1.1_armhf.deb 
+main/libx/libxinerama/libxinerama1_1.1.3-1_armhf.deb +main/libx/libxinerama/libxinerama-dev_1.1.3-1_armhf.deb +main/libx/libxrandr/libxrandr2_1.5.0-1~trusty1_armhf.deb +main/libx/libxrandr/libxrandr-dev_1.5.0-1~trusty1_armhf.deb +main/libx/libxrender/libxrender1_0.9.8-1build0.14.04.1_armhf.deb +main/libx/libxrender/libxrender-dev_0.9.8-1build0.14.04.1_armhf.deb +main/libx/libxss/libxss1_1.2.2-1_armhf.deb +main/libx/libxss/libxss-dev_1.2.2-1_armhf.deb +main/libx/libxt/libxt6_1.1.4-1_armhf.deb +main/libx/libxt/libxt-dev_1.1.4-1_armhf.deb +main/libx/libxtst/libxtst6_1.2.2-1_armhf.deb +main/libx/libxtst/libxtst-dev_1.2.2-1_armhf.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.3-1_armhf.deb +main/l/linux/linux-libc-dev_3.13.0-93.140_armhf.deb +main/m/mesa/libgl1-mesa-dev_10.1.3-0ubuntu0.6_armhf.deb +main/m/mesa/libgl1-mesa-glx_10.1.3-0ubuntu0.6_armhf.deb +main/m/mesa/libglapi-mesa_10.1.3-0ubuntu0.6_armhf.deb +main/m/mesa/mesa-common-dev_10.1.3-0ubuntu0.6_armhf.deb +main/n/nspr/libnspr4_4.12-0ubuntu0.14.04.1_armhf.deb +main/n/nspr/libnspr4-dev_4.12-0ubuntu0.14.04.1_armhf.deb +main/n/nss/libnss3_3.23-0ubuntu0.14.04.1_armhf.deb +main/n/nss/libnss3-dev_3.23-0ubuntu0.14.04.1_armhf.deb +main/o/openssl/libssl1.0.0_1.0.1f-1ubuntu2.19_armhf.deb +main/o/openssl/libssl-dev_1.0.1f-1ubuntu2.19_armhf.deb +main/o/orbit2/liborbit2_2.14.19-0.3_armhf.deb +main/p/p11-kit/libp11-kit0_0.20.2-2ubuntu2_armhf.deb +main/p/pam/libpam0g_1.1.8-1ubuntu2.2_armhf.deb +main/p/pam/libpam0g-dev_1.1.8-1ubuntu2.2_armhf.deb +main/p/pango1.0/libpango-1.0-0_1.36.3-1ubuntu1.1_armhf.deb +main/p/pango1.0/libpango1.0-dev_1.36.3-1ubuntu1.1_armhf.deb +main/p/pango1.0/libpangocairo-1.0-0_1.36.3-1ubuntu1.1_armhf.deb +main/p/pango1.0/libpangoft2-1.0-0_1.36.3-1ubuntu1.1_armhf.deb +main/p/pango1.0/libpangoxft-1.0-0_1.36.3-1ubuntu1.1_armhf.deb +main/p/pciutils/libpci3_3.2.1-1ubuntu5.1_armhf.deb +main/p/pciutils/libpci-dev_3.2.1-1ubuntu5.1_armhf.deb +main/p/pcre3/libpcre3_8.31-2ubuntu2.3_armhf.deb +main/p/pcre3/libpcre3-dev_8.31-2ubuntu2.3_armhf.deb +main/p/pcre3/libpcrecpp0_8.31-2ubuntu2.3_armhf.deb +main/p/pixman/libpixman-1-0_0.30.2-2ubuntu1.1_armhf.deb +main/p/pixman/libpixman-1-dev_0.30.2-2ubuntu1.1_armhf.deb +main/p/pulseaudio/libpulse0_4.0-0ubuntu11.1_armhf.deb +main/p/pulseaudio/libpulse-dev_4.0-0ubuntu11.1_armhf.deb +main/p/pulseaudio/libpulse-mainloop-glib0_4.0-0ubuntu11.1_armhf.deb +main/s/speech-dispatcher/libspeechd2_0.8-5ubuntu1_armhf.deb +main/s/speech-dispatcher/libspeechd-dev_0.8-5ubuntu1_armhf.deb +main/s/speech-dispatcher/speech-dispatcher_0.8-5ubuntu1_armhf.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.26-1~ubuntu2_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2ubuntu2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.3-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.5.0-1~trusty1_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1_all.deb +main/z/zlib/zlib1g_1.2.8.dfsg-1ubuntu1_armhf.deb +main/z/zlib/zlib1g-dev_1.2.8.dfsg-1ubuntu1_armhf.deb diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.amd64 b/build/linux/sysroot_scripts/packagelist.wheezy.amd64 new file mode 100644 index 00000000000..e46c7fe28b6 --- /dev/null +++ 
b/build/linux/sysroot_scripts/packagelist.wheezy.amd64 @@ -0,0 +1,171 @@ +main/a/alsa-lib/libasound2_1.0.25-4_amd64.deb +main/a/alsa-lib/libasound2-dev_1.0.25-4_amd64.deb +main/a/atk1.0/libatk1.0-0_2.4.0-2_amd64.deb +main/a/atk1.0/libatk1.0-dev_2.4.0-2_amd64.deb +main/a/attr/libattr1_2.4.46-8_amd64.deb +main/a/avahi/libavahi-client3_0.6.31-2_amd64.deb +main/a/avahi/libavahi-common3_0.6.31-2_amd64.deb +main/b/bluez/libbluetooth3_4.99-2_amd64.deb +main/b/bluez/libbluetooth-dev_4.99-2_amd64.deb +main/b/brltty/libbrlapi0.5_4.4-10+deb7u1_amd64.deb +main/b/brltty/libbrlapi-dev_4.4-10+deb7u1_amd64.deb +main/c/cairo/libcairo2_1.12.2-3_amd64.deb +main/c/cairo/libcairo2-dev_1.12.2-3_amd64.deb +main/c/cairo/libcairo-gobject2_1.12.2-3_amd64.deb +main/c/cairo/libcairo-script-interpreter2_1.12.2-3_amd64.deb +main/c/cups/libcups2_1.5.3-5+deb7u6_amd64.deb +main/c/cups/libcups2-dev_1.5.3-5+deb7u6_amd64.deb +main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_amd64.deb +main/d/dbus/libdbus-1-3_1.6.8-1+deb7u6_amd64.deb +main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u6_amd64.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1+deb7u1_amd64.deb +main/e/e2fsprogs/libcomerr2_1.42.5-1.1+deb7u1_amd64.deb +main/e/eglibc/libc6_2.13-38+deb7u10_amd64.deb +main/e/eglibc/libc6-dev_2.13-38+deb7u10_amd64.deb +main/e/elfutils/libelf1_0.152-1+wheezy1_amd64.deb +main/e/elfutils/libelf-dev_0.152-1+wheezy1_amd64.deb +main/e/expat/libexpat1_2.1.0-1+deb7u2_amd64.deb +main/e/expat/libexpat1-dev_2.1.0-1+deb7u2_amd64.deb +main/f/fontconfig/libfontconfig1_2.9.0-7.1_amd64.deb +main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_amd64.deb +main/f/freetype/libfreetype6_2.4.9-1.1+deb7u3_amd64.deb +main/f/freetype/libfreetype6-dev_2.4.9-1.1+deb7u3_amd64.deb +main/g/gcc-4.6/gcc-4.6_4.6.3-14_amd64.deb +main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_amd64.deb +main/g/gcc-4.7/libgcc1_4.7.2-5_amd64.deb +main/g/gcc-4.7/libgomp1_4.7.2-5_amd64.deb +main/g/gcc-4.7/libquadmath0_4.7.2-5_amd64.deb +main/g/gcc-4.7/libstdc++6_4.7.2-5_amd64.deb +main/g/gconf/libgconf-2-4_3.2.5-1+build1_amd64.deb +main/g/gconf/libgconf2-4_3.2.5-1+build1_amd64.deb +main/g/gconf/libgconf2-dev_3.2.5-1+build1_amd64.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1+deb7u3_amd64.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1+deb7u3_amd64.deb +main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_amd64.deb +main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_amd64.deb +main/g/gnutls26/libgnutls26_2.12.20-8+deb7u5_amd64.deb +main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u5_amd64.deb +main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u5_amd64.deb +main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u5_amd64.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_amd64.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_amd64.deb +main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_amd64.deb +main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u7_amd64.deb +main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u7_amd64.deb +main/libc/libcap2/libcap2_2.22-1.2_amd64.deb +main/libc/libcap2/libcap-dev_2.22-1.2_amd64.deb +main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_amd64.deb 
+main/libd/libdrm/libdrm-dev_2.4.40-1~deb7u2_amd64.deb +main/libd/libdrm/libdrm-intel1_2.4.40-1~deb7u2_amd64.deb +main/libd/libdrm/libdrm-nouveau1a_2.4.40-1~deb7u2_amd64.deb +main/libd/libdrm/libdrm-radeon1_2.4.40-1~deb7u2_amd64.deb +main/libd/libdrm/libkms1_2.4.40-1~deb7u2_amd64.deb +main/libf/libffi/libffi5_3.0.10-3_amd64.deb +main/libf/libffi/libffi-dev_3.0.10-3_amd64.deb +main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u4_amd64.deb +main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u4_amd64.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_amd64.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_amd64.deb +main/libg/libgpg-error/libgpg-error0_1.10-3.1_amd64.deb +main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_amd64.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-4_amd64.deb +main/libp/libp11/libp11-2_0.2.8-2_amd64.deb +main/libp/libpng/libpng12-0_1.2.49-1+deb7u2_amd64.deb +main/libp/libpng/libpng12-dev_1.2.49-1+deb7u2_amd64.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3_amd64.deb +main/libs/libselinux/libselinux1_2.1.9-5_amd64.deb +main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u2_amd64.deb +main/libx/libx11/libx11-6_1.5.0-1+deb7u2_amd64.deb +main/libx/libx11/libx11-dev_1.5.0-1+deb7u2_amd64.deb +main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u2_amd64.deb +main/libx/libx11/libx11-xcb-dev_1.5.0-1+deb7u2_amd64.deb +main/libx/libxau/libxau6_1.0.7-1_amd64.deb +main/libx/libxau/libxau-dev_1.0.7-1_amd64.deb +main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_amd64.deb +main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_amd64.deb +main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_amd64.deb +main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_amd64.deb +main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_amd64.deb +main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_amd64.deb +main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_amd64.deb +main/libx/libxcomposite/libxcomposite1_0.4.3-2_amd64.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_amd64.deb +main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_amd64.deb +main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_amd64.deb +main/libx/libxdamage/libxdamage1_1.1.3-2_amd64.deb +main/libx/libxdamage/libxdamage-dev_1.1.3-2_amd64.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1_amd64.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1_amd64.deb +main/libx/libxext/libxext6_1.3.1-2+deb7u1_amd64.deb +main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_amd64.deb +main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_amd64.deb +main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_amd64.deb +main/libx/libxi/libxi6_1.6.1-1+deb7u1_amd64.deb +main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_amd64.deb +main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_amd64.deb +main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_amd64.deb +main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_amd64.deb +main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_amd64.deb +main/libx/libxrender/libxrender1_0.9.7-1+deb7u2_amd64.deb +main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u2_amd64.deb +main/libx/libxss/libxss1_1.2.2-1_amd64.deb +main/libx/libxss/libxss-dev_1.2.2-1_amd64.deb +main/libx/libxt/libxt6_1.1.3-1+deb7u1_amd64.deb +main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_amd64.deb +main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_amd64.deb +main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_amd64.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_amd64.deb +main/l/linux/linux-libc-dev_3.2.78-1_amd64.deb +main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_amd64.deb +main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_amd64.deb +main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_amd64.deb 
+main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_amd64.deb +main/n/nspr/libnspr4_4.9.2-1+deb7u3_amd64.deb +main/n/nspr/libnspr4-dev_4.9.2-1+deb7u3_amd64.deb +main/n/nss/libnss3_3.14.5-1+deb7u5_amd64.deb +main/n/nss/libnss3-dev_3.14.5-1+deb7u5_amd64.deb +main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u20_amd64.deb +main/o/openssl/libssl-dev_1.0.1e-2+deb7u20_amd64.deb +main/o/orbit2/liborbit2_2.14.19-0.1_amd64.deb +main/p/p11-kit/libp11-kit0_0.12-3_amd64.deb +main/p/pam/libpam0g_1.1.3-7.1_amd64.deb +main/p/pam/libpam0g-dev_1.1.3-7.1_amd64.deb +main/p/pango1.0/libpango1.0-0_1.30.0-1_amd64.deb +main/p/pango1.0/libpango1.0-dev_1.30.0-1_amd64.deb +main/p/pciutils/libpci3_3.1.9-6_amd64.deb +main/p/pciutils/libpci-dev_3.1.9-6_amd64.deb +main/p/pcre3/libpcre3_8.30-5_amd64.deb +main/p/pcre3/libpcre3-dev_8.30-5_amd64.deb +main/p/pcre3/libpcrecpp0_8.30-5_amd64.deb +main/p/pixman/libpixman-1-0_0.26.0-4+deb7u2_amd64.deb +main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u2_amd64.deb +main/p/pulseaudio/libpulse0_2.0-6.1_amd64.deb +main/p/pulseaudio/libpulse-dev_2.0-6.1_amd64.deb +main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_amd64.deb +main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_amd64.deb +main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_amd64.deb +main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_amd64.deb +main/u/udev/libudev0_175-7.2_amd64.deb +main/u/udev/libudev-dev_175-7.2_amd64.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.7.dfsg-13_amd64.deb +main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_amd64.deb diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.arm b/build/linux/sysroot_scripts/packagelist.wheezy.arm new file mode 100644 index 00000000000..ae91267dbfd --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.wheezy.arm @@ -0,0 +1,170 @@ +main/a/alsa-lib/libasound2_1.0.25-4_armhf.deb +main/a/alsa-lib/libasound2-dev_1.0.25-4_armhf.deb +main/a/atk1.0/libatk1.0-0_2.4.0-2_armhf.deb +main/a/atk1.0/libatk1.0-dev_2.4.0-2_armhf.deb +main/a/attr/libattr1_2.4.46-8_armhf.deb +main/a/avahi/libavahi-client3_0.6.31-2_armhf.deb +main/a/avahi/libavahi-common3_0.6.31-2_armhf.deb +main/b/bluez/libbluetooth3_4.99-2_armhf.deb +main/b/bluez/libbluetooth-dev_4.99-2_armhf.deb +main/b/brltty/libbrlapi0.5_4.4-10+deb7u1_armhf.deb +main/b/brltty/libbrlapi-dev_4.4-10+deb7u1_armhf.deb +main/c/cairo/libcairo2_1.12.2-3_armhf.deb +main/c/cairo/libcairo2-dev_1.12.2-3_armhf.deb +main/c/cairo/libcairo-gobject2_1.12.2-3_armhf.deb +main/c/cairo/libcairo-script-interpreter2_1.12.2-3_armhf.deb +main/c/cups/libcups2_1.5.3-5+deb7u6_armhf.deb +main/c/cups/libcups2-dev_1.5.3-5+deb7u6_armhf.deb +main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_armhf.deb +main/d/dbus/libdbus-1-3_1.6.8-1+deb7u6_armhf.deb +main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u6_armhf.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1+deb7u1_armhf.deb 
+main/e/e2fsprogs/libcomerr2_1.42.5-1.1+deb7u1_armhf.deb +main/e/eglibc/libc6_2.13-38+deb7u10_armhf.deb +main/e/eglibc/libc6-dev_2.13-38+deb7u10_armhf.deb +main/e/elfutils/libelf1_0.152-1+wheezy1_armhf.deb +main/e/elfutils/libelf-dev_0.152-1+wheezy1_armhf.deb +main/e/expat/libexpat1_2.1.0-1+deb7u2_armhf.deb +main/e/expat/libexpat1-dev_2.1.0-1+deb7u2_armhf.deb +main/f/fontconfig/libfontconfig1_2.9.0-7.1_armhf.deb +main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_armhf.deb +main/f/freetype/libfreetype6_2.4.9-1.1+deb7u3_armhf.deb +main/f/freetype/libfreetype6-dev_2.4.9-1.1+deb7u3_armhf.deb +main/g/gcc-4.6/gcc-4.6_4.6.3-14_armhf.deb +main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_armhf.deb +main/g/gcc-4.7/libgcc1_4.7.2-5_armhf.deb +main/g/gcc-4.7/libgomp1_4.7.2-5_armhf.deb +main/g/gcc-4.7/libstdc++6_4.7.2-5_armhf.deb +main/g/gconf/libgconf2-4_3.2.5-1+build1_armhf.deb +main/g/gconf/libgconf-2-4_3.2.5-1+build1_armhf.deb +main/g/gconf/libgconf2-dev_3.2.5-1+build1_armhf.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1+deb7u3_armhf.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1+deb7u3_armhf.deb +main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_armhf.deb +main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_armhf.deb +main/g/gnutls26/libgnutls26_2.12.20-8+deb7u5_armhf.deb +main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u5_armhf.deb +main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u5_armhf.deb +main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u5_armhf.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_armhf.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_armhf.deb +main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_armhf.deb +main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u7_armhf.deb +main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u7_armhf.deb +main/libc/libcap2/libcap2_2.22-1.2_armhf.deb +main/libc/libcap2/libcap-dev_2.22-1.2_armhf.deb +main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_armhf.deb +main/libd/libdrm/libdrm-dev_2.4.40-1~deb7u2_armhf.deb +main/libd/libdrm/libdrm-nouveau1a_2.4.40-1~deb7u2_armhf.deb +main/libd/libdrm/libdrm-omap1_2.4.40-1~deb7u2_armhf.deb +main/libd/libdrm/libdrm-radeon1_2.4.40-1~deb7u2_armhf.deb +main/libd/libdrm/libkms1_2.4.40-1~deb7u2_armhf.deb +main/libf/libffi/libffi5_3.0.10-3+b1_armhf.deb +main/libf/libffi/libffi-dev_3.0.10-3+b1_armhf.deb +main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u4_armhf.deb +main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u4_armhf.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_armhf.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_armhf.deb +main/libg/libgpg-error/libgpg-error0_1.10-3.1_armhf.deb +main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_armhf.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-4_armhf.deb +main/libp/libp11/libp11-2_0.2.8-2_armhf.deb +main/libp/libpng/libpng12-0_1.2.49-1+deb7u2_armhf.deb +main/libp/libpng/libpng12-dev_1.2.49-1+deb7u2_armhf.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3+b1_armhf.deb +main/libs/libselinux/libselinux1_2.1.9-5_armhf.deb +main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u2_armhf.deb +main/libx/libx11/libx11-6_1.5.0-1+deb7u2_armhf.deb 
+main/libx/libx11/libx11-dev_1.5.0-1+deb7u2_armhf.deb +main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u2_armhf.deb +main/libx/libx11/libx11-xcb-dev_1.5.0-1+deb7u2_armhf.deb +main/libx/libxau/libxau6_1.0.7-1_armhf.deb +main/libx/libxau/libxau-dev_1.0.7-1_armhf.deb +main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_armhf.deb +main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_armhf.deb +main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_armhf.deb +main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_armhf.deb +main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_armhf.deb +main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_armhf.deb +main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_armhf.deb +main/libx/libxcomposite/libxcomposite1_0.4.3-2+b1_armhf.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.3-2+b1_armhf.deb +main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_armhf.deb +main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_armhf.deb +main/libx/libxdamage/libxdamage1_1.1.3-2+b1_armhf.deb +main/libx/libxdamage/libxdamage-dev_1.1.3-2+b1_armhf.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1_armhf.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1_armhf.deb +main/libx/libxext/libxext6_1.3.1-2+deb7u1_armhf.deb +main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_armhf.deb +main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_armhf.deb +main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_armhf.deb +main/libx/libxi/libxi6_1.6.1-1+deb7u1_armhf.deb +main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_armhf.deb +main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_armhf.deb +main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_armhf.deb +main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_armhf.deb +main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_armhf.deb +main/libx/libxrender/libxrender1_0.9.7-1+deb7u2_armhf.deb +main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u2_armhf.deb +main/libx/libxss/libxss1_1.2.2-1_armhf.deb +main/libx/libxss/libxss-dev_1.2.2-1_armhf.deb +main/libx/libxt/libxt6_1.1.3-1+deb7u1_armhf.deb +main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_armhf.deb +main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_armhf.deb +main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_armhf.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_armhf.deb +main/l/linux/linux-libc-dev_3.2.78-1_armhf.deb +main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_armhf.deb +main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_armhf.deb +main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_armhf.deb +main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_armhf.deb +main/n/nspr/libnspr4_4.9.2-1+deb7u3_armhf.deb +main/n/nspr/libnspr4-dev_4.9.2-1+deb7u3_armhf.deb +main/n/nss/libnss3_3.14.5-1+deb7u5_armhf.deb +main/n/nss/libnss3-dev_3.14.5-1+deb7u5_armhf.deb +main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u20_armhf.deb +main/o/openssl/libssl-dev_1.0.1e-2+deb7u20_armhf.deb +main/o/orbit2/liborbit2_2.14.19-0.1_armhf.deb +main/p/p11-kit/libp11-kit0_0.12-3_armhf.deb +main/p/pam/libpam0g_1.1.3-7.1_armhf.deb +main/p/pam/libpam0g-dev_1.1.3-7.1_armhf.deb +main/p/pango1.0/libpango1.0-0_1.30.0-1_armhf.deb +main/p/pango1.0/libpango1.0-dev_1.30.0-1_armhf.deb +main/p/pciutils/libpci3_3.1.9-6_armhf.deb +main/p/pciutils/libpci-dev_3.1.9-6_armhf.deb +main/p/pcre3/libpcre3_8.30-5_armhf.deb +main/p/pcre3/libpcre3-dev_8.30-5_armhf.deb +main/p/pcre3/libpcrecpp0_8.30-5_armhf.deb +main/p/pixman/libpixman-1-0_0.26.0-4+deb7u2_armhf.deb +main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u2_armhf.deb +main/p/pulseaudio/libpulse0_2.0-6.1_armhf.deb +main/p/pulseaudio/libpulse-dev_2.0-6.1_armhf.deb +main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_armhf.deb +main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_armhf.deb 
+main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_armhf.deb +main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_armhf.deb +main/u/udev/libudev0_175-7.2_armhf.deb +main/u/udev/libudev-dev_175-7.2_armhf.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.7.dfsg-13_armhf.deb +main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_armhf.deb diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.i386 b/build/linux/sysroot_scripts/packagelist.wheezy.i386 new file mode 100644 index 00000000000..8e4b3080e76 --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.wheezy.i386 @@ -0,0 +1,171 @@ +main/a/alsa-lib/libasound2_1.0.25-4_i386.deb +main/a/alsa-lib/libasound2-dev_1.0.25-4_i386.deb +main/a/atk1.0/libatk1.0-0_2.4.0-2_i386.deb +main/a/atk1.0/libatk1.0-dev_2.4.0-2_i386.deb +main/a/attr/libattr1_2.4.46-8_i386.deb +main/a/avahi/libavahi-client3_0.6.31-2_i386.deb +main/a/avahi/libavahi-common3_0.6.31-2_i386.deb +main/b/bluez/libbluetooth3_4.99-2_i386.deb +main/b/bluez/libbluetooth-dev_4.99-2_i386.deb +main/b/brltty/libbrlapi0.5_4.4-10+deb7u1_i386.deb +main/b/brltty/libbrlapi-dev_4.4-10+deb7u1_i386.deb +main/c/cairo/libcairo2_1.12.2-3_i386.deb +main/c/cairo/libcairo2-dev_1.12.2-3_i386.deb +main/c/cairo/libcairo-gobject2_1.12.2-3_i386.deb +main/c/cairo/libcairo-script-interpreter2_1.12.2-3_i386.deb +main/c/cups/libcups2_1.5.3-5+deb7u6_i386.deb +main/c/cups/libcups2-dev_1.5.3-5+deb7u6_i386.deb +main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_i386.deb +main/d/dbus/libdbus-1-3_1.6.8-1+deb7u6_i386.deb +main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u6_i386.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1+deb7u1_i386.deb +main/e/e2fsprogs/libcomerr2_1.42.5-1.1+deb7u1_i386.deb +main/e/eglibc/libc6_2.13-38+deb7u10_i386.deb +main/e/eglibc/libc6-dev_2.13-38+deb7u10_i386.deb +main/e/elfutils/libelf1_0.152-1+wheezy1_i386.deb +main/e/elfutils/libelf-dev_0.152-1+wheezy1_i386.deb +main/e/expat/libexpat1_2.1.0-1+deb7u2_i386.deb +main/e/expat/libexpat1-dev_2.1.0-1+deb7u2_i386.deb +main/f/fontconfig/libfontconfig1_2.9.0-7.1_i386.deb +main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_i386.deb +main/f/freetype/libfreetype6_2.4.9-1.1+deb7u3_i386.deb +main/f/freetype/libfreetype6-dev_2.4.9-1.1+deb7u3_i386.deb +main/g/gcc-4.6/gcc-4.6_4.6.3-14_i386.deb +main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_i386.deb +main/g/gcc-4.7/libgcc1_4.7.2-5_i386.deb +main/g/gcc-4.7/libgomp1_4.7.2-5_i386.deb +main/g/gcc-4.7/libquadmath0_4.7.2-5_i386.deb +main/g/gcc-4.7/libstdc++6_4.7.2-5_i386.deb +main/g/gconf/libgconf-2-4_3.2.5-1+build1_i386.deb +main/g/gconf/libgconf2-4_3.2.5-1+build1_i386.deb +main/g/gconf/libgconf2-dev_3.2.5-1+build1_i386.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1+deb7u3_i386.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1+deb7u3_i386.deb +main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_i386.deb +main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_i386.deb 
+main/g/gnutls26/libgnutls26_2.12.20-8+deb7u5_i386.deb +main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u5_i386.deb +main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u5_i386.deb +main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u5_i386.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_i386.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_i386.deb +main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_i386.deb +main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u7_i386.deb +main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u7_i386.deb +main/libc/libcap2/libcap2_2.22-1.2_i386.deb +main/libc/libcap2/libcap-dev_2.22-1.2_i386.deb +main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_i386.deb +main/libd/libdrm/libdrm-dev_2.4.40-1~deb7u2_i386.deb +main/libd/libdrm/libdrm-intel1_2.4.40-1~deb7u2_i386.deb +main/libd/libdrm/libdrm-nouveau1a_2.4.40-1~deb7u2_i386.deb +main/libd/libdrm/libdrm-radeon1_2.4.40-1~deb7u2_i386.deb +main/libd/libdrm/libkms1_2.4.40-1~deb7u2_i386.deb +main/libf/libffi/libffi5_3.0.10-3_i386.deb +main/libf/libffi/libffi-dev_3.0.10-3_i386.deb +main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u4_i386.deb +main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u4_i386.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_i386.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_i386.deb +main/libg/libgpg-error/libgpg-error0_1.10-3.1_i386.deb +main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_i386.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-4_i386.deb +main/libp/libp11/libp11-2_0.2.8-2_i386.deb +main/libp/libpng/libpng12-0_1.2.49-1+deb7u2_i386.deb +main/libp/libpng/libpng12-dev_1.2.49-1+deb7u2_i386.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3_i386.deb +main/libs/libselinux/libselinux1_2.1.9-5_i386.deb +main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u2_i386.deb +main/libx/libx11/libx11-6_1.5.0-1+deb7u2_i386.deb +main/libx/libx11/libx11-dev_1.5.0-1+deb7u2_i386.deb +main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u2_i386.deb +main/libx/libx11/libx11-xcb-dev_1.5.0-1+deb7u2_i386.deb +main/libx/libxau/libxau6_1.0.7-1_i386.deb +main/libx/libxau/libxau-dev_1.0.7-1_i386.deb +main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_i386.deb +main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_i386.deb +main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_i386.deb +main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_i386.deb +main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_i386.deb +main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_i386.deb +main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_i386.deb +main/libx/libxcomposite/libxcomposite1_0.4.3-2_i386.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_i386.deb +main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_i386.deb +main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_i386.deb +main/libx/libxdamage/libxdamage1_1.1.3-2_i386.deb +main/libx/libxdamage/libxdamage-dev_1.1.3-2_i386.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1_i386.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1_i386.deb +main/libx/libxext/libxext6_1.3.1-2+deb7u1_i386.deb +main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_i386.deb +main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_i386.deb 
+main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_i386.deb +main/libx/libxi/libxi6_1.6.1-1+deb7u1_i386.deb +main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_i386.deb +main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_i386.deb +main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_i386.deb +main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_i386.deb +main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_i386.deb +main/libx/libxrender/libxrender1_0.9.7-1+deb7u2_i386.deb +main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u2_i386.deb +main/libx/libxss/libxss1_1.2.2-1_i386.deb +main/libx/libxss/libxss-dev_1.2.2-1_i386.deb +main/libx/libxt/libxt6_1.1.3-1+deb7u1_i386.deb +main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_i386.deb +main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_i386.deb +main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_i386.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_i386.deb +main/l/linux/linux-libc-dev_3.2.78-1_i386.deb +main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_i386.deb +main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_i386.deb +main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_i386.deb +main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_i386.deb +main/n/nspr/libnspr4_4.9.2-1+deb7u3_i386.deb +main/n/nspr/libnspr4-dev_4.9.2-1+deb7u3_i386.deb +main/n/nss/libnss3_3.14.5-1+deb7u5_i386.deb +main/n/nss/libnss3-dev_3.14.5-1+deb7u5_i386.deb +main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u20_i386.deb +main/o/openssl/libssl-dev_1.0.1e-2+deb7u20_i386.deb +main/o/orbit2/liborbit2_2.14.19-0.1_i386.deb +main/p/p11-kit/libp11-kit0_0.12-3_i386.deb +main/p/pam/libpam0g_1.1.3-7.1_i386.deb +main/p/pam/libpam0g-dev_1.1.3-7.1_i386.deb +main/p/pango1.0/libpango1.0-0_1.30.0-1_i386.deb +main/p/pango1.0/libpango1.0-dev_1.30.0-1_i386.deb +main/p/pciutils/libpci3_3.1.9-6_i386.deb +main/p/pciutils/libpci-dev_3.1.9-6_i386.deb +main/p/pcre3/libpcre3_8.30-5_i386.deb +main/p/pcre3/libpcre3-dev_8.30-5_i386.deb +main/p/pcre3/libpcrecpp0_8.30-5_i386.deb +main/p/pixman/libpixman-1-0_0.26.0-4+deb7u2_i386.deb +main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u2_i386.deb +main/p/pulseaudio/libpulse0_2.0-6.1_i386.deb +main/p/pulseaudio/libpulse-dev_2.0-6.1_i386.deb +main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_i386.deb +main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_i386.deb +main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_i386.deb +main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_i386.deb +main/u/udev/libudev0_175-7.2_i386.deb +main/u/udev/libudev-dev_175-7.2_i386.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.7.dfsg-13_i386.deb +main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_i386.deb diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.mipsel b/build/linux/sysroot_scripts/packagelist.wheezy.mipsel new file mode 100644 index 00000000000..2282ca5444a --- /dev/null +++ b/build/linux/sysroot_scripts/packagelist.wheezy.mipsel @@ -0,0 +1,169 @@ +main/a/alsa-lib/libasound2_1.0.25-4_mipsel.deb 
+main/a/alsa-lib/libasound2-dev_1.0.25-4_mipsel.deb +main/a/atk1.0/libatk1.0-0_2.4.0-2_mipsel.deb +main/a/atk1.0/libatk1.0-dev_2.4.0-2_mipsel.deb +main/a/attr/libattr1_2.4.46-8_mipsel.deb +main/a/avahi/libavahi-client3_0.6.31-2_mipsel.deb +main/a/avahi/libavahi-common3_0.6.31-2_mipsel.deb +main/b/bluez/libbluetooth3_4.99-2_mipsel.deb +main/b/bluez/libbluetooth-dev_4.99-2_mipsel.deb +main/b/brltty/libbrlapi0.5_4.4-10+deb7u1_mipsel.deb +main/b/brltty/libbrlapi-dev_4.4-10+deb7u1_mipsel.deb +main/c/cairo/libcairo2_1.12.2-3_mipsel.deb +main/c/cairo/libcairo2-dev_1.12.2-3_mipsel.deb +main/c/cairo/libcairo-gobject2_1.12.2-3_mipsel.deb +main/c/cairo/libcairo-script-interpreter2_1.12.2-3_mipsel.deb +main/c/cups/libcups2_1.5.3-5+deb7u6_mipsel.deb +main/c/cups/libcups2-dev_1.5.3-5+deb7u6_mipsel.deb +main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_mipsel.deb +main/d/dbus/libdbus-1-3_1.6.8-1+deb7u6_mipsel.deb +main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u6_mipsel.deb +main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1+deb7u1_mipsel.deb +main/e/e2fsprogs/libcomerr2_1.42.5-1.1+deb7u1_mipsel.deb +main/e/eglibc/libc6_2.13-38+deb7u10_mipsel.deb +main/e/eglibc/libc6-dev_2.13-38+deb7u10_mipsel.deb +main/e/elfutils/libelf1_0.152-1+wheezy1_mipsel.deb +main/e/elfutils/libelf-dev_0.152-1+wheezy1_mipsel.deb +main/e/expat/libexpat1_2.1.0-1+deb7u2_mipsel.deb +main/e/expat/libexpat1-dev_2.1.0-1+deb7u2_mipsel.deb +main/f/fontconfig/libfontconfig1_2.9.0-7.1_mipsel.deb +main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_mipsel.deb +main/f/freetype/libfreetype6_2.4.9-1.1+deb7u3_mipsel.deb +main/f/freetype/libfreetype6-dev_2.4.9-1.1+deb7u3_mipsel.deb +main/g/gcc-4.6/gcc-4.6_4.6.3-14_mipsel.deb +main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_mipsel.deb +main/g/gcc-4.7/libgcc1_4.7.2-5_mipsel.deb +main/g/gcc-4.7/libgomp1_4.7.2-5_mipsel.deb +main/g/gcc-4.7/libstdc++6_4.7.2-5_mipsel.deb +main/g/gconf/libgconf2-4_3.2.5-1+build1_mipsel.deb +main/g/gconf/libgconf-2-4_3.2.5-1+build1_mipsel.deb +main/g/gconf/libgconf2-dev_3.2.5-1+build1_mipsel.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1+deb7u3_mipsel.deb +main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1+deb7u3_mipsel.deb +main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_mipsel.deb +main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_mipsel.deb +main/g/gnutls26/libgnutls26_2.12.20-8+deb7u5_mipsel.deb +main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u5_mipsel.deb +main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u5_mipsel.deb +main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u5_mipsel.deb +main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_mipsel.deb +main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_mipsel.deb +main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_mipsel.deb +main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u7_mipsel.deb +main/libc/libcap2/libcap2_2.22-1.2_mipsel.deb +main/libc/libcap2/libcap-dev_2.22-1.2_mipsel.deb +main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_mipsel.deb +main/libd/libdrm/libdrm-dev_2.4.40-1~deb7u2_mipsel.deb +main/libd/libdrm/libdrm-nouveau1a_2.4.40-1~deb7u2_mipsel.deb 
+main/libd/libdrm/libdrm-radeon1_2.4.40-1~deb7u2_mipsel.deb +main/libd/libdrm/libkms1_2.4.40-1~deb7u2_mipsel.deb +main/libf/libffi/libffi5_3.0.10-3_mipsel.deb +main/libf/libffi/libffi-dev_3.0.10-3_mipsel.deb +main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u4_mipsel.deb +main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u4_mipsel.deb +main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_mipsel.deb +main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_mipsel.deb +main/libg/libgpg-error/libgpg-error0_1.10-3.1_mipsel.deb +main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_mipsel.deb +main/libn/libnss-db/libnss-db_2.2.3pre1-4_mipsel.deb +main/libp/libp11/libp11-2_0.2.8-2_mipsel.deb +main/libp/libpng/libpng12-0_1.2.49-1+deb7u2_mipsel.deb +main/libp/libpng/libpng12-dev_1.2.49-1+deb7u2_mipsel.deb +main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-3_mipsel.deb +main/libs/libselinux/libselinux1_2.1.9-5_mipsel.deb +main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u2_mipsel.deb +main/libx/libx11/libx11-6_1.5.0-1+deb7u2_mipsel.deb +main/libx/libx11/libx11-dev_1.5.0-1+deb7u2_mipsel.deb +main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u2_mipsel.deb +main/libx/libx11/libx11-xcb-dev_1.5.0-1+deb7u2_mipsel.deb +main/libx/libxau/libxau6_1.0.7-1_mipsel.deb +main/libx/libxau/libxau-dev_1.0.7-1_mipsel.deb +main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_mipsel.deb +main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_mipsel.deb +main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_mipsel.deb +main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_mipsel.deb +main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_mipsel.deb +main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_mipsel.deb +main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_mipsel.deb +main/libx/libxcomposite/libxcomposite1_0.4.3-2_mipsel.deb +main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_mipsel.deb +main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_mipsel.deb +main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_mipsel.deb +main/libx/libxdamage/libxdamage1_1.1.3-2_mipsel.deb +main/libx/libxdamage/libxdamage-dev_1.1.3-2_mipsel.deb +main/libx/libxdmcp/libxdmcp6_1.1.1-1_mipsel.deb +main/libx/libxdmcp/libxdmcp-dev_1.1.1-1_mipsel.deb +main/libx/libxext/libxext6_1.3.1-2+deb7u1_mipsel.deb +main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_mipsel.deb +main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_mipsel.deb +main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_mipsel.deb +main/libx/libxi/libxi6_1.6.1-1+deb7u1_mipsel.deb +main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_mipsel.deb +main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_mipsel.deb +main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_mipsel.deb +main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_mipsel.deb +main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_mipsel.deb +main/libx/libxrender/libxrender1_0.9.7-1+deb7u2_mipsel.deb +main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u2_mipsel.deb +main/libx/libxss/libxss1_1.2.2-1_mipsel.deb +main/libx/libxss/libxss-dev_1.2.2-1_mipsel.deb +main/libx/libxt/libxt6_1.1.3-1+deb7u1_mipsel.deb +main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_mipsel.deb +main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_mipsel.deb +main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_mipsel.deb +main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_mipsel.deb +main/l/linux/linux-libc-dev_3.2.78-1_mipsel.deb +main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_mipsel.deb +main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_mipsel.deb +main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_mipsel.deb +main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_mipsel.deb +main/n/nspr/libnspr4_4.9.2-1+deb7u3_mipsel.deb 
+main/n/nspr/libnspr4-dev_4.9.2-1+deb7u3_mipsel.deb +main/n/nss/libnss3_3.14.5-1+deb7u5_mipsel.deb +main/n/nss/libnss3-dev_3.14.5-1+deb7u5_mipsel.deb +main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u20_mipsel.deb +main/o/openssl/libssl-dev_1.0.1e-2+deb7u20_mipsel.deb +main/o/orbit2/liborbit2_2.14.19-0.1_mipsel.deb +main/p/p11-kit/libp11-kit0_0.12-3_mipsel.deb +main/p/pam/libpam0g_1.1.3-7.1_mipsel.deb +main/p/pam/libpam0g-dev_1.1.3-7.1_mipsel.deb +main/p/pango1.0/libpango1.0-0_1.30.0-1_mipsel.deb +main/p/pango1.0/libpango1.0-dev_1.30.0-1_mipsel.deb +main/p/pciutils/libpci3_3.1.9-6_mipsel.deb +main/p/pciutils/libpci-dev_3.1.9-6_mipsel.deb +main/p/pcre3/libpcre3_8.30-5_mipsel.deb +main/p/pcre3/libpcre3-dev_8.30-5_mipsel.deb +main/p/pcre3/libpcrecpp0_8.30-5_mipsel.deb +main/p/pixman/libpixman-1-0_0.26.0-4+deb7u2_mipsel.deb +main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u2_mipsel.deb +main/p/pulseaudio/libpulse0_2.0-6.1_mipsel.deb +main/p/pulseaudio/libpulse-dev_2.0-6.1_mipsel.deb +main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_mipsel.deb +main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_mipsel.deb +main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_mipsel.deb +main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_mipsel.deb +main/u/udev/libudev0_175-7.2_mipsel.deb +main/u/udev/libudev-dev_175-7.2_mipsel.deb +main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb +main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb +main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb +main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb +main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb +main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb +main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb +main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb +main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb +main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb +main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb +main/x/x11proto-xinerama/x11proto-xinerama-dev_1.2.1-2_all.deb +main/z/zlib/zlib1g_1.2.7.dfsg-13_mipsel.deb +main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_mipsel.deb diff --git a/build/linux/sysroot_scripts/sysroot-creator-jessie.sh b/build/linux/sysroot_scripts/sysroot-creator-jessie.sh new file mode 100644 index 00000000000..1e168cb5284 --- /dev/null +++ b/build/linux/sysroot_scripts/sysroot-creator-jessie.sh @@ -0,0 +1,238 @@ +#!/bin/bash +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +SCRIPT_DIR=$(cd $(dirname $0) && pwd) + +DISTRO=debian +DIST=jessie +PACKAGES_EXT=xz +APT_REPO=http://http.us.debian.org/debian +# gpg keyring file generated using: +# export KEYS="518E17E1 46925553 2B90D010" +# gpg --recv-keys $KEYS +# gpg --output ./debian-archive-jessie-stable.gpg --export $KEYS +KEYRING_FILE=${SCRIPT_DIR}/debian-archive-jessie-stable.gpg + +HAS_ARCH_AMD64=1 +HAS_ARCH_I386=1 +HAS_ARCH_ARM=1 +HAS_ARCH_ARM64=1 +HAS_ARCH_MIPS=1 + +# Sysroot packages: these are the packages needed to build chrome. +# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated +# by running this script in GeneratePackageList mode. 
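+#
+# For example, after modifying DEBIAN_PACKAGES below, one typical refresh
+# invocation (the UpdatePackageLists* modes are defined in sysroot-creator.sh,
+# and are invoked the same way by sysroot-creator-test.sh) is:
+#   ./sysroot-creator-jessie.sh UpdatePackageListsAmd64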
+DEBIAN_PACKAGES="\ + comerr-dev + krb5-multidev + libasound2 + libasound2-dev + libatk1.0-0 + libatk1.0-dev + libattr1 + libavahi-client3 + libavahi-common3 + libbluetooth3 + libbluetooth-dev + libbrlapi0.6 + libbrlapi-dev + libc6 + libc6-dev + libcairo2 + libcairo2-dev + libcairo-gobject2 + libcairo-script-interpreter2 + libcap-dev + libcap2 + libcomerr2 + libcups2 + libcups2-dev + libdbus-1-3 + libdbus-1-dev + libdbus-glib-1-2 + libdrm2 + libdrm-dev + libdrm-nouveau2 + libdrm-radeon1 + libelf1 + libelf-dev + libexpat1 + libexpat1-dev + libffi6 + libffi-dev + libfontconfig1 + libfontconfig1-dev + libfreetype6 + libfreetype6-dev + libgcc-4.8-dev + libgcc1 + libgconf-2-4 + libgconf2-4 + libgconf2-dev + libgcrypt20 + libgcrypt20-dev + libgdk-pixbuf2.0-0 + libgdk-pixbuf2.0-dev + libgl1-mesa-dev + libgl1-mesa-glx + libglapi-mesa + libglib2.0-0 + libglib2.0-dev + libgnome-keyring0 + libgnome-keyring-dev + libgnutls-openssl27 + libgnutls-deb0-28 + libgnutlsxx28 + libgnutls28-dev + libgomp1 + libgpg-error0 + libgpg-error-dev + libgssapi-krb5-2 + libgssrpc4 + libgtk2.0-0 + libgtk2.0-dev + libharfbuzz0b + libharfbuzz-dev + libharfbuzz-gobject0 + libharfbuzz-icu0 + libatomic1 + libk5crypto3 + libkadm5clnt-mit9 + libkadm5srv-mit9 + libkdb5-7 + libkeyutils1 + libkrb5-3 + libkrb5-dev + libkrb5support0 + libnspr4 + libnspr4-dev + libnss3 + libnss3-dev + libnss-db + liborbit2 + libp11-2 + libp11-kit0 + libpam0g + libpam0g-dev + libpango-1.0-0 + libpango1.0-dev + libpangoft2-1.0-0 + libpangocairo-1.0-0 + libpangox-1.0-0 + libpangoxft-1.0-0 + libpci3 + libpci-dev + libpcre3 + libpcre3-dev + libpcrecpp0 + libpixman-1-0 + libpixman-1-dev + libpng12-0 + libpng12-dev + libpthread-stubs0-dev + libpulse0 + libpulse-dev + libpulse-mainloop-glib0 + libselinux1 + libspeechd2 + libspeechd-dev + libssl1.0.0 + libssl-dev + libstdc++6 + libstdc++-4.8-dev + libtasn1-6 + libudev-dev + libudev1 + libx11-6 + libx11-dev + libx11-xcb1 + libx11-xcb-dev + libxau6 + libxau-dev + libxcb1 + libxcb1-dev + libxcb-glx0 + libxcb-render0 + libxcb-render0-dev + libxcb-shm0 + libxcb-shm0-dev + libxcomposite1 + libxcomposite-dev + libxcursor1 + libxcursor-dev + libxdamage1 + libxdamage-dev + libxdmcp6 + libxdmcp-dev + libxext6 + libxext-dev + libxfixes3 + libxfixes-dev + libxi6 + libxi-dev + libxinerama1 + libxinerama-dev + libxrandr2 + libxrandr-dev + libxrender1 + libxrender-dev + libxss1 + libxss-dev + libxt6 + libxt-dev + libxtst6 + libxtst-dev + libxxf86vm1 + linux-libc-dev + mesa-common-dev + speech-dispatcher + x11proto-composite-dev + x11proto-core-dev + x11proto-damage-dev + x11proto-fixes-dev + x11proto-input-dev + x11proto-kb-dev + x11proto-randr-dev + x11proto-record-dev + x11proto-render-dev + x11proto-scrnsaver-dev + x11proto-xext-dev + x11proto-xinerama-dev + zlib1g + zlib1g-dev +" + +DEBIAN_PACKAGES_AMD64=" + liblsan0 + libtsan0 +" + +DEBIAN_PACKAGES_X86=" + libasan0 + libcilkrts5 + libdrm-intel1 + libitm1 + libquadmath0 + libubsan0 +" + +DEBIAN_PACKAGES_ARM=" + libasan0 + libdrm-exynos1 + libdrm-freedreno1 + libdrm-omap1 + libubsan0 +" +DEBIAN_PACKAGES_ARM64=" + libdatrie1 + libgmp10 + libgraphite2-3 + libhogweed2 + libitm1 + libnettle4 + libthai0 +" + +. ${SCRIPT_DIR}/sysroot-creator.sh diff --git a/build/linux/sysroot_scripts/sysroot-creator-precise.sh b/build/linux/sysroot_scripts/sysroot-creator-precise.sh new file mode 100644 index 00000000000..7c92047cfde --- /dev/null +++ b/build/linux/sysroot_scripts/sysroot-creator-precise.sh @@ -0,0 +1,198 @@ +#!/bin/bash +# Copyright 2016 The Chromium Authors. 
All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +SCRIPT_DIR=$(dirname $0) + +DISTRO=ubuntu +DIST=precise +DIST_UPDATES=precise-updates + +# This is where we get all the debian packages from. +APT_REPO=http://archive.ubuntu.com/ubuntu +APT_REPO_ARM=http://ports.ubuntu.com +APT_REPO_ARM64=http://ports.ubuntu.com +KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg + +HAS_ARCH_AMD64=1 +HAS_ARCH_I386=1 +HAS_ARCH_ARM=1 + +# Sysroot packages: these are the packages needed to build chrome. +# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated +# by running this script in GeneratePackageList mode. +DEBIAN_PACKAGES="\ + comerr-dev + gcc-4.6 + krb5-multidev + libasound2 + libasound2-dev + libatk1.0-0 + libatk1.0-dev + libavahi-client3 + libavahi-common3 + libbluetooth3 + libbluetooth-dev + libbrlapi0.5 + libbrlapi-dev + libc6 + libc6-dev + libcairo2 + libcairo2-dev + libcairo-gobject2 + libcairo-script-interpreter2 + libcap-dev + libcap2 + libcomerr2 + libcups2 + libcups2-dev + libdbus-1-3 + libdbus-1-dev + libdbus-glib-1-2 + libdrm-dev + libdrm-intel1 + libdrm-nouveau1a + libdrm-nouveau2 + libdrm-radeon1 + libdrm2 + libelf1 + libelf-dev + libexpat1 + libexpat1-dev + libffi6 + libffi-dev + libfontconfig1 + libfontconfig1-dev + libfreetype6 + libfreetype6-dev + libgcc1 + libgconf-2-4 + libgconf2-4 + libgconf2-dev + libgcrypt11 + libgcrypt11-dev + libgdk-pixbuf2.0-0 + libgdk-pixbuf2.0-dev + libgl1-mesa-dev + libgl1-mesa-glx + libglapi-mesa + libglib2.0-0 + libglib2.0-dev + libgnome-keyring0 + libgnome-keyring-dev + libgnutls26 + libgnutls-dev + libgnutls-openssl27 + libgnutlsxx27 + libgomp1 + libgpg-error0 + libgpg-error-dev + libgssapi-krb5-2 + libgssrpc4 + libgtk2.0-0 + libgtk2.0-dev + libk5crypto3 + libkadm5clnt-mit8 + libkadm5srv-mit8 + libkdb5-6 + libkeyutils1 + libkms1 + libkrb5-3 + libkrb5-dev + libkrb5support0 + libnspr4 + libnspr4-dev + libnss3 + libnss3-dev + libnss-db + liborbit2 + libp11-2 + libp11-kit0 + libpam0g + libpam0g-dev + libpango1.0-0 + libpango1.0-dev + libpci3 + libpci-dev + libpcre3 + libpcre3-dev + libpcrecpp0 + libpixman-1-0 + libpixman-1-dev + libpng12-0 + libpng12-dev + libpthread-stubs0-dev + libpulse0 + libpulse-dev + libpulse-mainloop-glib0 + libselinux1 + libspeechd2 + libspeechd-dev + libssl1.0.0 + libssl-dev + libstdc++6 + libstdc++6-4.6-dev + libtasn1-3 + libx11-6 + libx11-dev + libx11-xcb1 + libx11-xcb-dev + libxau6 + libxau-dev + libxcb1 + libxcb1-dev + libxcb-glx0 + libxcb-render0 + libxcb-render0-dev + libxcb-shm0 + libxcb-shm0-dev + libxcomposite1 + libxcomposite-dev + libxcursor1 + libxcursor-dev + libxdamage1 + libxdamage-dev + libxdmcp6 + libxdmcp-dev + libxext6 + libxext-dev + libxfixes3 + libxfixes-dev + libxi6 + libxi-dev + libxinerama1 + libxinerama-dev + libxrandr2 + libxrandr-dev + libxrender1 + libxrender-dev + libxss1 + libxss-dev + libxt6 + libxt-dev + libxtst6 + libxtst-dev + libxxf86vm1 + linux-libc-dev + mesa-common-dev + speech-dispatcher + x11proto-composite-dev + x11proto-core-dev + x11proto-damage-dev + x11proto-fixes-dev + x11proto-input-dev + x11proto-kb-dev + x11proto-randr-dev + x11proto-record-dev + x11proto-render-dev + x11proto-scrnsaver-dev + x11proto-xext-dev + zlib1g + zlib1g-dev +" + +DEBIAN_PACKAGES_X86="libquadmath0" +DEBIAN_PACKAGES_ARM="libdrm-omap1" + +. 
${SCRIPT_DIR}/sysroot-creator.sh
diff --git a/build/linux/sysroot_scripts/sysroot-creator-test.sh b/build/linux/sysroot_scripts/sysroot-creator-test.sh
new file mode 100644
index 00000000000..9f5fe06cb7c
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator-test.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Rudimentary test suite for sysroot-creator.
+
+SCRIPT_DIR=$(dirname $0)
+
+set -o errexit
+
+TestUpdateAllLists() {
+  echo "[ RUN ] TestUpdateAllLists"
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsAmd64
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsI386
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsARM
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsAmd64
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsI386
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsARM
+  echo "[ OK ]"
+}
+
+TestUpdateAllLists
diff --git a/build/linux/sysroot_scripts/sysroot-creator-trusty.sh b/build/linux/sysroot_scripts/sysroot-creator-trusty.sh
new file mode 100644
index 00000000000..3975872f135
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator-trusty.sh
@@ -0,0 +1,190 @@
+#!/bin/bash
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+SCRIPT_DIR=$(dirname $0)
+
+DISTRO=ubuntu
+DIST=trusty
+DIST_UPDATES=trusty-updates
+
+# This is where we get all the debian packages from.
+APT_REPO=http://archive.ubuntu.com/ubuntu
+APT_REPO_ARM=http://ports.ubuntu.com
+APT_REPO_ARM64=http://ports.ubuntu.com
+KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg
+
+HAS_ARCH_AMD64=1
+HAS_ARCH_I386=1
+HAS_ARCH_ARM=1
+HAS_ARCH_ARM64=1
+
+# Sysroot packages: these are the packages needed to build chrome.
+# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
+# by running this script in GeneratePackageList mode.
+DEBIAN_PACKAGES="\ + comerr-dev + gcc-4.8 + krb5-multidev + libasound2 + libasound2-dev + libatk1.0-0 + libatk1.0-dev + libavahi-client3 + libavahi-common3 + libbluetooth3 + libbluetooth-dev + libc6 + libc6-dev + libcairo2 + libcairo2-dev + libcairo-gobject2 + libcairo-script-interpreter2 + libcap-dev + libcap2 + libcomerr2 + libcups2 + libcups2-dev + libdbus-1-3 + libdbus-1-dev + libdbus-glib-1-2 + libdrm2 + libelf1 + libelf-dev + libexpat1 + libexpat1-dev + libffi6 + libfontconfig1 + libfontconfig1-dev + libfreetype6 + libfreetype6-dev + libgcc1 + libgconf-2-4 + libgconf2-4 + libgconf2-dev + libgcrypt11 + libgcrypt11-dev + libgdk-pixbuf2.0-0 + libgdk-pixbuf2.0-dev + libgl1-mesa-dev + libgl1-mesa-glx + libglapi-mesa + libglib2.0-0 + libglib2.0-dev + libgnome-keyring0 + libgnome-keyring-dev + libgnutls26 + libgnutls-dev + libgnutls-openssl27 + libgnutlsxx27 + libgomp1 + libgpg-error0 + libgpg-error-dev + libgssapi-krb5-2 + libgssrpc4 + libgtk2.0-0 + libgtk2.0-dev + libk5crypto3 + libkadm5clnt-mit9 + libkadm5srv-mit9 + libkdb5-7 + libkeyutils1 + libkrb5-3 + libkrb5-dev + libkrb5support0 + libnspr4 + libnspr4-dev + libnss3 + libnss3-dev + libnss-db + liborbit2 + libp11-2 + libp11-kit0 + libpam0g + libpam0g-dev + libpango-1.0-0 + libpango1.0-dev + libpangocairo-1.0-0 + libpangoft2-1.0-0 + libpangoxft-1.0-0 + libpci3 + libpci-dev + libpcre3 + libpcre3-dev + libpcrecpp0 + libpixman-1-0 + libpixman-1-dev + libpng12-0 + libpng12-dev + libpulse0 + libpulse-dev + libpulse-mainloop-glib0 + libselinux1 + libspeechd2 + libspeechd-dev + libssl1.0.0 + libssl-dev + libstdc++6 + libstdc++-4.8-dev + libtasn1-6 + libx11-6 + libx11-dev + libx11-xcb1 + libx11-xcb-dev + libxau6 + libxau-dev + libxcb1 + libxcb1-dev + libxcb-glx0 + libxcb-render0 + libxcb-render0-dev + libxcb-shm0 + libxcb-shm0-dev + libxcomposite1 + libxcomposite-dev + libxcursor1 + libxcursor-dev + libxdamage1 + libxdamage-dev + libxdmcp6 + libxext6 + libxext-dev + libxfixes3 + libxfixes-dev + libxi6 + libxi-dev + libxinerama1 + libxinerama-dev + libxrandr2 + libxrandr-dev + libxrender1 + libxrender-dev + libxss1 + libxss-dev + libxt6 + libxt-dev + libxtst6 + libxtst-dev + libxxf86vm1 + linux-libc-dev + mesa-common-dev + speech-dispatcher + x11proto-composite-dev + x11proto-core-dev + x11proto-damage-dev + x11proto-fixes-dev + x11proto-input-dev + x11proto-kb-dev + x11proto-randr-dev + x11proto-record-dev + x11proto-render-dev + x11proto-scrnsaver-dev + x11proto-xext-dev + zlib1g + zlib1g-dev +" + +DEBIAN_PACKAGES_X86="libquadmath0" + +. ${SCRIPT_DIR}/sysroot-creator.sh diff --git a/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh b/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh new file mode 100644 index 00000000000..0ffdafe5c6c --- /dev/null +++ b/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh @@ -0,0 +1,197 @@ +#!/bin/bash +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +SCRIPT_DIR=$(cd $(dirname $0) && pwd) + +DISTRO=debian +DIST=wheezy +APT_REPO=http://http.us.debian.org/debian +KEYRING_FILE=${SCRIPT_DIR}/debian-archive-wheezy-stable.gpg + +HAS_ARCH_AMD64=1 +HAS_ARCH_I386=1 +HAS_ARCH_ARM=1 +HAS_ARCH_MIPS=1 + +# Sysroot packages: these are the packages needed to build chrome. +# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated +# by running this script in GeneratePackageList mode. 
+DEBIAN_PACKAGES="\ + comerr-dev + gcc-4.6 + krb5-multidev + libasound2 + libasound2-dev + libatk1.0-0 + libatk1.0-dev + libattr1 + libavahi-client3 + libavahi-common3 + libbluetooth3 + libbluetooth-dev + libbrlapi0.5 + libbrlapi-dev + libc6 + libc6-dev + libcairo2 + libcairo2-dev + libcairo-gobject2 + libcairo-script-interpreter2 + libcap-dev + libcap2 + libcomerr2 + libcups2 + libcups2-dev + libdbus-1-3 + libdbus-1-dev + libdbus-glib-1-2 + libdrm2 + libdrm-dev + libdrm-nouveau1a + libdrm-radeon1 + libelf1 + libelf-dev + libexpat1 + libexpat1-dev + libffi5 + libffi-dev + libfontconfig1 + libfontconfig1-dev + libfreetype6 + libfreetype6-dev + libgcc1 + libgconf-2-4 + libgconf2-4 + libgconf2-dev + libgcrypt11 + libgcrypt11-dev + libgdk-pixbuf2.0-0 + libgdk-pixbuf2.0-dev + libgl1-mesa-dev + libgl1-mesa-glx + libglapi-mesa + libglib2.0-0 + libglib2.0-dev + libgnome-keyring0 + libgnome-keyring-dev + libgnutls26 + libgnutls-dev + libgnutls-openssl27 + libgnutlsxx27 + libgomp1 + libgpg-error0 + libgpg-error-dev + libgssapi-krb5-2 + libgssrpc4 + libgtk2.0-0 + libgtk2.0-dev + libk5crypto3 + libkadm5clnt-mit8 + libkadm5srv-mit8 + libkdb5-6 + libkeyutils1 + libkms1 + libkrb5-3 + libkrb5-dev + libkrb5support0 + libnspr4 + libnspr4-dev + libnss3 + libnss3-dev + libnss-db + liborbit2 + libp11-2 + libp11-kit0 + libpam0g + libpam0g-dev + libpango1.0-0 + libpango1.0-dev + libpci3 + libpci-dev + libpcre3 + libpcre3-dev + libpcrecpp0 + libpixman-1-0 + libpixman-1-dev + libpng12-0 + libpng12-dev + libpthread-stubs0-dev + libpulse0 + libpulse-dev + libpulse-mainloop-glib0 + libselinux1 + libspeechd2 + libspeechd-dev + libssl1.0.0 + libssl-dev + libstdc++6 + libstdc++6-4.6-dev + libtasn1-3 + libudev-dev + libudev0 + libx11-6 + libx11-dev + libx11-xcb1 + libx11-xcb-dev + libxau6 + libxau-dev + libxcb1 + libxcb1-dev + libxcb-glx0 + libxcb-render0 + libxcb-render0-dev + libxcb-shm0 + libxcb-shm0-dev + libxcomposite1 + libxcomposite-dev + libxcursor1 + libxcursor-dev + libxdamage1 + libxdamage-dev + libxdmcp6 + libxdmcp-dev + libxext6 + libxext-dev + libxfixes3 + libxfixes-dev + libxi6 + libxi-dev + libxinerama1 + libxinerama-dev + libxrandr2 + libxrandr-dev + libxrender1 + libxrender-dev + libxss1 + libxss-dev + libxt6 + libxt-dev + libxtst6 + libxtst-dev + libxxf86vm1 + linux-libc-dev + mesa-common-dev + speech-dispatcher + x11proto-composite-dev + x11proto-core-dev + x11proto-damage-dev + x11proto-fixes-dev + x11proto-input-dev + x11proto-kb-dev + x11proto-randr-dev + x11proto-record-dev + x11proto-render-dev + x11proto-scrnsaver-dev + x11proto-xext-dev + x11proto-xinerama-dev + zlib1g + zlib1g-dev +" + +DEBIAN_PACKAGES_X86="libquadmath0 libdrm-intel1" +DEBIAN_PACKAGES_ARM="libdrm-omap1" +DEBIAN_PACKAGES_AMD64="" + +. ${SCRIPT_DIR}/sysroot-creator.sh diff --git a/build/linux/sysroot_scripts/sysroot-creator.sh b/build/linux/sysroot_scripts/sysroot-creator.sh new file mode 100644 index 00000000000..64d9ab44672 --- /dev/null +++ b/build/linux/sysroot_scripts/sysroot-creator.sh @@ -0,0 +1,833 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# This script should not be run directly but sourced by the other +# scripts (e.g. sysroot-creator-trusty.sh). Its up to the parent scripts +# to define certain environment variables: e.g. 
+#  DISTRO=ubuntu
+#  DIST=trusty
+#  APT_REPO=http://archive.ubuntu.com/ubuntu
+#  KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg
+#  DEBIAN_PACKAGES="gcc libz libssl"
+
+#@ This script builds Debian/Ubuntu sysroot images for building Google Chrome.
+#@
+#@ Generally this script is invoked as:
+#@   sysroot-creator-<flavour>.sh <mode> <args>*
+#@ Available modes are shown below.
+#@
+#@ List of modes:
+
+######################################################################
+# Config
+######################################################################
+
+set -o nounset
+set -o errexit
+
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+
+if [ -z "${DIST:-}" ]; then
+  echo "error: DIST not defined"
+  exit 1
+fi
+
+if [ -z "${APT_REPO:-}" ]; then
+  echo "error: APT_REPO not defined"
+  exit 1
+fi
+
+if [ -z "${KEYRING_FILE:-}" ]; then
+  echo "error: KEYRING_FILE not defined"
+  exit 1
+fi
+
+if [ -z "${DEBIAN_PACKAGES:-}" ]; then
+  echo "error: DEBIAN_PACKAGES not defined"
+  exit 1
+fi
+
+readonly HAS_ARCH_AMD64=${HAS_ARCH_AMD64:=0}
+readonly HAS_ARCH_I386=${HAS_ARCH_I386:=0}
+readonly HAS_ARCH_ARM=${HAS_ARCH_ARM:=0}
+readonly HAS_ARCH_ARM64=${HAS_ARCH_ARM64:=0}
+readonly HAS_ARCH_MIPS=${HAS_ARCH_MIPS:=0}
+
+readonly REQUIRED_TOOLS="wget"
+
+######################################################################
+# Package Config
+######################################################################
+
+PACKAGES_EXT=${PACKAGES_EXT:-bz2}
+readonly RELEASE_FILE="Release"
+readonly RELEASE_FILE_GPG="Release.gpg"
+
+readonly DEBIAN_DEP_LIST_AMD64="packagelist.${DIST}.amd64"
+readonly DEBIAN_DEP_LIST_I386="packagelist.${DIST}.i386"
+readonly DEBIAN_DEP_LIST_ARM="packagelist.${DIST}.arm"
+readonly DEBIAN_DEP_LIST_ARM64="packagelist.${DIST}.arm64"
+readonly DEBIAN_DEP_LIST_MIPS="packagelist.${DIST}.mipsel"
+
+######################################################################
+# Helper
+######################################################################
+
+Banner() {
+  echo "######################################################################"
+  echo $*
+  echo "######################################################################"
+}
+
+
+SubBanner() {
+  echo "----------------------------------------------------------------------"
+  echo $*
+  echo "----------------------------------------------------------------------"
+}
+
+
+Usage() {
+  egrep "^#@" "${BASH_SOURCE[0]}" | cut --bytes=3-
+}
+
+
+DownloadOrCopy() {
+  if [ -f "$2" ] ; then
+    echo "$2 already in place"
+    return
+  fi
+
+  HTTP=0
+  echo "$1" | grep -qs ^http:// && HTTP=1
+  if [ "$HTTP" = "1" ]; then
+    SubBanner "downloading from $1 -> $2"
+    wget "$1" -O "${2}.partial"
+    mv "${2}.partial" "$2"
+  else
+    SubBanner "copying from $1"
+    cp "$1" "$2"
+  fi
+}
+
+
+SetEnvironmentVariables() {
+  ARCH=""
+  echo $1 | grep -qs Amd64$ && ARCH=AMD64
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs I386$ && ARCH=I386
+  fi
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs Mips$ && ARCH=MIPS
+  fi
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs ARM$ && ARCH=ARM
+  fi
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs ARM64$ && ARCH=ARM64
+  fi
+  if [ -z "${ARCH}" ]; then
+    echo "ERROR: Unable to determine architecture based on: $1"
+    exit 1
+  fi
+  ARCH_LOWER=$(echo $ARCH | tr '[:upper:]' '[:lower:]')
+}
+
+
+# Some sanity checks to make sure this script is run from the right place
+# with the right tools.
+SanityCheck() {
+  Banner "Sanity Checks"
+
+  local chrome_dir=$(cd "${SCRIPT_DIR}/../../.." && pwd)
+  BUILD_DIR="${chrome_dir}/out/sysroot-build/${DIST}"
+  mkdir -p ${BUILD_DIR}
+  echo "Using build directory: ${BUILD_DIR}"
+
+  for tool in ${REQUIRED_TOOLS} ; do
+    if ! which ${tool} > /dev/null ; then
+      echo "Required binary $tool not found."
+      echo "Exiting."
+      exit 1
+    fi
+  done
+
+  # This is where the staging sysroot is.
+  INSTALL_ROOT="${BUILD_DIR}/${DIST}_${ARCH_LOWER}_staging"
+  TARBALL="${BUILD_DIR}/${DISTRO}_${DIST}_${ARCH_LOWER}_sysroot.tgz"
+
+  if ! mkdir -p "${INSTALL_ROOT}" ; then
+    echo "ERROR: ${INSTALL_ROOT} can't be created."
+    exit 1
+  fi
+}
+
+
+ChangeDirectory() {
+  # Change directory to where this script is.
+  cd ${SCRIPT_DIR}
+}
+
+
+ClearInstallDir() {
+  Banner "Clearing dirs in ${INSTALL_ROOT}"
+  rm -rf ${INSTALL_ROOT}/*
+}
+
+
+CreateTarBall() {
+  Banner "Creating tarball ${TARBALL}"
+  tar zcf ${TARBALL} -C ${INSTALL_ROOT} .
+}
+
+ExtractPackageBz2() {
+  if [ "${PACKAGES_EXT}" = "bz2" ]; then
+    bzcat "$1" | egrep '^(Package:|Filename:|SHA256:) ' > "$2"
+  else
+    xzcat "$1" | egrep '^(Package:|Filename:|SHA256:) ' > "$2"
+  fi
+}
+
+GeneratePackageListDist() {
+  local arch="$1"
+  local apt_repo="$2"
+  local dist="$3"
+
+  TMP_PACKAGE_LIST="${BUILD_DIR}/Packages.${dist}_${arch}"
+  local repo_basedir="${apt_repo}/dists/${dist}"
+  local package_list="${BUILD_DIR}/Packages.${dist}_${arch}.${PACKAGES_EXT}"
+  local package_file_arch="main/binary-${arch}/Packages.${PACKAGES_EXT}"
+  local package_list_arch="${repo_basedir}/${package_file_arch}"
+
+  DownloadOrCopy "${package_list_arch}" "${package_list}"
+  VerifyPackageListing "${package_file_arch}" "${package_list}" ${dist}
+  ExtractPackageBz2 "${package_list}" "${TMP_PACKAGE_LIST}"
+}
+
+GeneratePackageListCommon() {
+  GeneratePackageListDist "$2" "$3" ${DIST}
+  local output_file="$1"
+  local packages="$4"
+  local list_base="${TMP_PACKAGE_LIST}"
+  if [ ! -z ${DIST_UPDATES:-} ]; then
+    GeneratePackageListDist "$2" "$3" ${DIST_UPDATES}
+    cat "${TMP_PACKAGE_LIST}" | ./merge-package-lists.py "${list_base}"
+  fi
+  GeneratePackageList "${list_base}" "${output_file}" "${packages}"
+}
+
+GeneratePackageListAmd64() {
+  GeneratePackageListCommon "$1" amd64 ${APT_REPO} "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86:=} ${DEBIAN_PACKAGES_AMD64:=}"
+}
+
+GeneratePackageListI386() {
+  GeneratePackageListCommon "$1" i386 ${APT_REPO} "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86:=}"
+}
+
+GeneratePackageListARM() {
+  GeneratePackageListCommon "$1" armhf ${APT_REPO_ARM:-${APT_REPO}} \
+    "${DEBIAN_PACKAGES} ${DEBIAN_PACKAGES_ARM:=}"
+}
+
+GeneratePackageListARM64() {
+  GeneratePackageListCommon "$1" arm64 ${APT_REPO_ARM64:-${APT_REPO}} \
+    "${DEBIAN_PACKAGES} ${DEBIAN_PACKAGES_ARM64:=}"
+}
+
+GeneratePackageListMips() {
+  GeneratePackageListCommon "$1" mipsel ${APT_REPO_MIPS:-${APT_REPO}} \
+    "${DEBIAN_PACKAGES}"
+}
+
+StripChecksumsFromPackageList() {
+  local package_file="$1"
+  sed -i 's/ [a-f0-9]\{64\}$//' "$package_file"
+}
+
+VerifyPackageFilesMatch() {
+  local downloaded_package_file="$1"
+  local stored_package_file="$2"
+  # Run diff inside the condition so a non-zero status is handled here
+  # instead of tripping errexit before the message below is printed.
+  if ! diff -u "$downloaded_package_file" "$stored_package_file" ; then
+    echo "ERROR: downloaded package file does not match $2."
+    echo "You may need to run UpdatePackageLists."
+ exit 1 + fi +} + +###################################################################### +# +###################################################################### + +HacksAndPatchesAmd64() { + Banner "Misc Hacks & Patches" + # these are linker scripts with absolute pathnames in them + # which we rewrite here + lscripts="${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libpthread.so \ + ${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libc.so" + + # Rewrite linker scripts + sed -i -e 's|/usr/lib/x86_64-linux-gnu/||g' ${lscripts} + sed -i -e 's|/lib/x86_64-linux-gnu/||g' ${lscripts} + + # This is for chrome's ./build/linux/pkg-config-wrapper + # which overwrites PKG_CONFIG_LIBDIR internally + SubBanner "Move pkgconfig scripts" + mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig + mv ${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/pkgconfig/* \ + ${INSTALL_ROOT}/usr/lib/pkgconfig + + SubBanner "Adding an additional ld.conf include" + LD_SO_HACK_CONF="${INSTALL_ROOT}/etc/ld.so.conf.d/zz_hack.conf" + echo /usr/lib/gcc/x86_64-linux-gnu/4.6 > "$LD_SO_HACK_CONF" + echo /usr/lib >> "$LD_SO_HACK_CONF" +} + + +HacksAndPatchesI386() { + Banner "Misc Hacks & Patches" + # these are linker scripts with absolute pathnames in them + # which we rewrite here + lscripts="${INSTALL_ROOT}/usr/lib/i386-linux-gnu/libpthread.so \ + ${INSTALL_ROOT}/usr/lib/i386-linux-gnu/libc.so" + + # Rewrite linker scripts + sed -i -e 's|/usr/lib/i386-linux-gnu/||g' ${lscripts} + sed -i -e 's|/lib/i386-linux-gnu/||g' ${lscripts} + + # This is for chrome's ./build/linux/pkg-config-wrapper + # which overwrites PKG_CONFIG_LIBDIR internally + SubBanner "Move pkgconfig scripts" + mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig + mv ${INSTALL_ROOT}/usr/lib/i386-linux-gnu/pkgconfig/* \ + ${INSTALL_ROOT}/usr/lib/pkgconfig + + SubBanner "Adding an additional ld.conf include" + LD_SO_HACK_CONF="${INSTALL_ROOT}/etc/ld.so.conf.d/zz_hack.conf" + echo /usr/lib/gcc/i486-linux-gnu/4.6 > "$LD_SO_HACK_CONF" + echo /usr/lib >> "$LD_SO_HACK_CONF" +} + + +HacksAndPatchesARM() { + Banner "Misc Hacks & Patches" + # these are linker scripts with absolute pathnames in them + # which we rewrite here + lscripts="${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/libpthread.so \ + ${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/libc.so" + + # Rewrite linker scripts + sed -i -e 's|/usr/lib/arm-linux-gnueabihf/||g' ${lscripts} + sed -i -e 's|/lib/arm-linux-gnueabihf/||g' ${lscripts} + + # This is for chrome's ./build/linux/pkg-config-wrapper + # which overwrites PKG_CONFIG_LIBDIR internally + SubBanner "Move pkgconfig files" + mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig + mv ${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/pkgconfig/* \ + ${INSTALL_ROOT}/usr/lib/pkgconfig +} + +HacksAndPatchesARM64() { + Banner "Misc Hacks & Patches" + # these are linker scripts with absolute pathnames in them + # which we rewrite here + lscripts="${INSTALL_ROOT}/usr/lib/aarch64-linux-gnu/libpthread.so \ + ${INSTALL_ROOT}/usr/lib/aarch64-linux-gnu/libc.so" + + # Rewrite linker scripts + sed -i -e 's|/usr/lib/aarch64-linux-gnu/||g' ${lscripts} + sed -i -e 's|/lib/aarch64-linux-gnu/||g' ${lscripts} + + # This is for chrome's ./build/linux/pkg-config-wrapper + # which overwrites PKG_CONFIG_LIBDIR internally + SubBanner "Move pkgconfig files" + mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig + mv ${INSTALL_ROOT}/usr/lib/aarch64-linux-gnu/pkgconfig/* \ + ${INSTALL_ROOT}/usr/lib/pkgconfig + +} + +HacksAndPatchesMips() { + Banner "Misc Hacks & Patches" + # these are linker scripts with absolute pathnames in them + # which we rewrite 
here + lscripts="${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/libpthread.so \ + ${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/libc.so" + + # Rewrite linker scripts + sed -i -e 's|/usr/lib/mipsel-linux-gnu/||g' ${lscripts} + sed -i -e 's|/lib/mipsel-linux-gnu/||g' ${lscripts} + + # This is for chrome's ./build/linux/pkg-config-wrapper + # which overwrites PKG_CONFIG_LIBDIR internally + SubBanner "Move pkgconfig files" + mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig + mv ${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/pkgconfig/* \ + ${INSTALL_ROOT}/usr/lib/pkgconfig +} + + +InstallIntoSysroot() { + Banner "Install Libs And Headers Into Jail" + + mkdir -p ${BUILD_DIR}/debian-packages + mkdir -p ${INSTALL_ROOT} + while (( "$#" )); do + local file="$1" + local package="${BUILD_DIR}/debian-packages/${file##*/}" + shift + local sha256sum="$1" + shift + if [ "${#sha256sum}" -ne "64" ]; then + echo "Bad sha256sum from package list" + exit 1 + fi + + Banner "Installing ${file}" + DownloadOrCopy ${APT_REPO}/pool/${file} ${package} + if [ ! -s "${package}" ] ; then + echo + echo "ERROR: bad package ${package}" + exit 1 + fi + echo "${sha256sum} ${package}" | sha256sum --quiet -c + + SubBanner "Extracting to ${INSTALL_ROOT}" + dpkg --fsys-tarfile ${package}\ + | tar -xf - -C ${INSTALL_ROOT} + + done + + # Prune /usr/share, leaving only pkgconfig + for name in ${INSTALL_ROOT}/usr/share/*; do + if [ "${name}" != "${INSTALL_ROOT}/usr/share/pkgconfig" ]; then + rm -r ${name} + fi + done +} + + +CleanupJailSymlinks() { + Banner "Jail symlink cleanup" + + SAVEDPWD=$(pwd) + cd ${INSTALL_ROOT} + local libdirs="lib usr/lib" + if [ "${ARCH}" != "MIPS" ]; then + libdirs="${libdirs} lib64" + fi + find $libdirs -type l -printf '%p %l\n' | while read link target; do + # skip links with non-absolute paths + echo "${target}" | grep -qs ^/ || continue + echo "${link}: ${target}" + case "${link}" in + usr/lib/gcc/*-linux-gnu/4.*/* | usr/lib/gcc/arm-linux-gnueabihf/4.*/* |\ + usr/lib/gcc/aarch64-linux-gnu/4.*/*) + # Relativize the symlink. + ln -snfv "../../../../..${target}" "${link}" + ;; + usr/lib/*-linux-gnu/* | usr/lib/arm-linux-gnueabihf/*) + # Relativize the symlink. + ln -snfv "../../..${target}" "${link}" + ;; + usr/lib/*) + # Relativize the symlink. + ln -snfv "../..${target}" "${link}" + ;; + lib64/* | lib/*) + # Relativize the symlink. + ln -snfv "..${target}" "${link}" + ;; + esac + done + + find $libdirs -type l -printf '%p %l\n' | while read link target; do + # Make sure we catch new bad links. + if [ ! 
-r "${link}" ]; then + echo "ERROR: FOUND BAD LINK ${link}" + ls -l ${link} + exit 1 + fi + done + cd "$SAVEDPWD" +} + +#@ +#@ BuildSysrootAmd64 +#@ +#@ Build everything and package it +BuildSysrootAmd64() { + if [ "$HAS_ARCH_AMD64" = "0" ]; then + return + fi + ClearInstallDir + local package_file="$BUILD_DIR/package_with_sha256sum_amd64" + GeneratePackageListAmd64 "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_AMD64" + InstallIntoSysroot ${files_and_sha256sums} + CleanupJailSymlinks + HacksAndPatchesAmd64 + CreateTarBall +} + +#@ +#@ BuildSysrootI386 +#@ +#@ Build everything and package it +BuildSysrootI386() { + if [ "$HAS_ARCH_I386" = "0" ]; then + return + fi + ClearInstallDir + local package_file="$BUILD_DIR/package_with_sha256sum_i386" + GeneratePackageListI386 "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_I386" + InstallIntoSysroot ${files_and_sha256sums} + CleanupJailSymlinks + HacksAndPatchesI386 + CreateTarBall +} + +#@ +#@ BuildSysrootARM +#@ +#@ Build everything and package it +BuildSysrootARM() { + if [ "$HAS_ARCH_ARM" = "0" ]; then + return + fi + ClearInstallDir + local package_file="$BUILD_DIR/package_with_sha256sum_arm" + GeneratePackageListARM "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_ARM" + APT_REPO=${APT_REPO_ARM:=$APT_REPO} + InstallIntoSysroot ${files_and_sha256sums} + CleanupJailSymlinks + HacksAndPatchesARM + CreateTarBall +} + +#@ +#@ BuildSysrootARM64 +#@ +#@ Build everything and package it +BuildSysrootARM64() { + if [ "$HAS_ARCH_ARM64" = "0" ]; then + return + fi + ClearInstallDir + local package_file="$BUILD_DIR/package_with_sha256sum_arm64" + GeneratePackageListARM64 "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_ARM64" + APT_REPO=${APT_REPO_ARM64:=$APT_REPO} + InstallIntoSysroot ${files_and_sha256sums} + CleanupJailSymlinks + HacksAndPatchesARM64 + CreateTarBall +} + + +#@ +#@ BuildSysrootMips +#@ +#@ Build everything and package it +BuildSysrootMips() { + if [ "$HAS_ARCH_MIPS" = "0" ]; then + return + fi + ClearInstallDir + local package_file="$BUILD_DIR/package_with_sha256sum_arm" + GeneratePackageListMips "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_MIPS" + APT_REPO=${APT_REPO_MIPS:=$APT_REPO} + InstallIntoSysroot ${files_and_sha256sums} + CleanupJailSymlinks + HacksAndPatchesMips + CreateTarBall +} + +#@ +#@ BuildSysrootAll +#@ +#@ Build sysroot images for all architectures +BuildSysrootAll() { + RunCommand BuildSysrootAmd64 + RunCommand BuildSysrootI386 + RunCommand BuildSysrootARM + RunCommand BuildSysrootARM64 + RunCommand BuildSysrootMips +} + +UploadSysroot() { + local rev=$1 + if [ -z "${rev}" ]; then + echo "Please specify a revision to upload at." 
+    exit 1
+  fi
+  set -x
+  gsutil cp -a public-read "${TARBALL}" \
+    "gs://chrome-linux-sysroot/toolchain/$rev/"
+  set +x
+}
+
+#@
+#@ UploadSysrootAmd64
+#@
+UploadSysrootAmd64() {
+  if [ "$HAS_ARCH_AMD64" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootI386
+#@
+UploadSysrootI386() {
+  if [ "$HAS_ARCH_I386" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARM
+#@
+UploadSysrootARM() {
+  if [ "$HAS_ARCH_ARM" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARM64
+#@
+UploadSysrootARM64() {
+  if [ "$HAS_ARCH_ARM64" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootMips
+#@
+UploadSysrootMips() {
+  if [ "$HAS_ARCH_MIPS" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootAll
+#@
+#@ Upload sysroot image for all architectures
+UploadSysrootAll() {
+  RunCommand UploadSysrootAmd64 "$@"
+  RunCommand UploadSysrootI386 "$@"
+  RunCommand UploadSysrootARM "$@"
+  RunCommand UploadSysrootARM64 "$@"
+  RunCommand UploadSysrootMips "$@"
+}
+
+#
+# CheckForDebianGPGKeyring
+#
+# Make sure the Debian GPG keys exist. Otherwise print a helpful message.
+#
+CheckForDebianGPGKeyring() {
+  if [ ! -e "$KEYRING_FILE" ]; then
+    echo "KEYRING_FILE not found: ${KEYRING_FILE}"
+    echo "Debian GPG keys missing. Install the debian-archive-keyring package."
+    exit 1
+  fi
+}
+
+#
+# VerifyPackageListing
+#
+# Verifies that the downloaded Packages.bz2 file has the right checksums.
+#
+VerifyPackageListing() {
+  local file_path="$1"
+  local output_file="$2"
+  local dist="$3"
+
+  local repo_basedir="${APT_REPO}/dists/${dist}"
+  local release_list="${repo_basedir}/${RELEASE_FILE}"
+  local release_list_gpg="${repo_basedir}/${RELEASE_FILE_GPG}"
+
+  local release_file="${BUILD_DIR}/${dist}-${RELEASE_FILE}"
+  local release_file_gpg="${BUILD_DIR}/${dist}-${RELEASE_FILE_GPG}"
+
+  CheckForDebianGPGKeyring
+
+  DownloadOrCopy ${release_list} ${release_file}
+  DownloadOrCopy ${release_list_gpg} ${release_file_gpg}
+  echo "Verifying: ${release_file} with ${release_file_gpg}"
+  set -x
+  gpgv --keyring "${KEYRING_FILE}" "${release_file_gpg}" "${release_file}"
+  set +x
+
+  echo "Verifying: ${output_file}"
+  local checksums=$(grep ${file_path} ${release_file} | cut -d " " -f 2)
+  local sha256sum=$(echo ${checksums} | cut -d " " -f 3)
+
+  if [ "${#sha256sum}" -ne "64" ]; then
+    echo "Bad sha256sum from ${release_list}"
+    exit 1
+  fi
+
+  echo "${sha256sum}  ${output_file}" | sha256sum --quiet -c
+}
+
+#
+# GeneratePackageList
+#
+# Looks up package names in ${BUILD_DIR}/Packages and writes a list of URLs
+# to the output file.
+#
+GeneratePackageList() {
+  local input_file="$1"
+  local output_file="$2"
+  echo "Updating: ${output_file} from ${input_file}"
+  /bin/rm -f "${output_file}"
+  shift
+  shift
+  for pkg in $@ ; do
+    local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \
+      egrep -o "pool/.*")
+    if [ -z "${pkg_full}" ]; then
+      echo "ERROR: missing package: $pkg"
+      exit 1
+    fi
+    local pkg_nopool=$(echo "$pkg_full" | sed "s/^pool\///")
+    local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \
+      grep ^SHA256: | sed 's/^SHA256: //')
+    if [ "${#sha256sum}" -ne "64" ]; then
+      echo "Bad sha256sum from Packages"
+      exit 1
+    fi
+    echo $pkg_nopool $sha256sum >> "$output_file"
+  done
+  # sort -o does an in-place sort of this file
+  sort "$output_file" -o "$output_file"
+}
+
+#@
+#@ UpdatePackageListsAmd64
+#@
+#@ Regenerate the package lists such that they contain an up-to-date
+#@ list of URLs within the Debian archive.
(For amd64) +UpdatePackageListsAmd64() { + if [ "$HAS_ARCH_AMD64" = "0" ]; then + return + fi + GeneratePackageListAmd64 "$DEBIAN_DEP_LIST_AMD64" + StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_AMD64" +} + +#@ +#@ UpdatePackageListsI386 +#@ +#@ Regenerate the package lists such that they contain an up-to-date +#@ list of URLs within the Debian archive. (For i386) +UpdatePackageListsI386() { + if [ "$HAS_ARCH_I386" = "0" ]; then + return + fi + GeneratePackageListI386 "$DEBIAN_DEP_LIST_I386" + StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_I386" +} + +#@ +#@ UpdatePackageListsARM +#@ +#@ Regenerate the package lists such that they contain an up-to-date +#@ list of URLs within the Debian archive. (For arm) +UpdatePackageListsARM() { + if [ "$HAS_ARCH_ARM" = "0" ]; then + return + fi + GeneratePackageListARM "$DEBIAN_DEP_LIST_ARM" + StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_ARM" +} + +#@ +#@ UpdatePackageListsARM64 +#@ +#@ Regenerate the package lists such that they contain an up-to-date +#@ list of URLs within the Debian archive. (For arm64) +UpdatePackageListsARM64() { + if [ "$HAS_ARCH_ARM64" = "0" ]; then + return + fi + GeneratePackageListARM64 "$DEBIAN_DEP_LIST_ARM64" + StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_ARM64" +} + +#@ +#@ UpdatePackageListsMips +#@ +#@ Regenerate the package lists such that they contain an up-to-date +#@ list of URLs within the Debian archive. (For mips) +UpdatePackageListsMips() { + if [ "$HAS_ARCH_MIPS" = "0" ]; then + return + fi + GeneratePackageListMips "$DEBIAN_DEP_LIST_MIPS" + StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_MIPS" +} + +#@ +#@ UpdatePackageListsAll +#@ +#@ Regenerate the package lists for all architectures. +UpdatePackageListsAll() { + RunCommand UpdatePackageListsAmd64 + RunCommand UpdatePackageListsI386 + RunCommand UpdatePackageListsARM + RunCommand UpdatePackageListsARM64 + RunCommand UpdatePackageListsMips +} + +RunCommand() { + SetEnvironmentVariables "$1" + SanityCheck + "$@" +} + +if [ $# -eq 0 ] ; then + echo "ERROR: you must specify a mode on the commandline" + echo + Usage + exit 1 +elif [ "$(type -t $1)" != "function" ]; then + echo "ERROR: unknown function '$1'." >&2 + echo "For help, try:" + echo " $0 help" + exit 1 +else + ChangeDirectory + if echo $1 | grep -qs "All$"; then + "$@" + else + RunCommand "$@" + fi +fi diff --git a/build/linux/system.gyp b/build/linux/system.gyp new file mode 100644 index 00000000000..9d0c6b68698 --- /dev/null +++ b/build/linux/system.gyp @@ -0,0 +1,1270 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'variables': { + # If any of the linux_link_FOO below are set to 1, then the corresponding + # target will be linked against the FOO library (either dynamically or + # statically, depending on the pkg-config files), as opposed to loading the + # FOO library dynamically with dlopen. + 'linux_link_libgps%': 0, + 'linux_link_libpci%': 0, + 'linux_link_libspeechd%': 0, + 'linux_link_libbrlapi%': 0, + + # Used below for the various libraries. In this scope for sharing with GN. 
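+    # When one of the linux_link_* flags above is 0, the matching list below
+    # is passed to tools/generate_library_loader/generate_library_loader.py
+    # (see the libspeechd target in this file), which emits a loader class
+    # that dlopen()s the library at runtime and dlsym()s each listed symbol.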
+ 'libbrlapi_functions': [ + 'brlapi_getHandleSize', + 'brlapi_error_location', + 'brlapi_strerror', + 'brlapi__acceptKeys', + 'brlapi__openConnection', + 'brlapi__closeConnection', + 'brlapi__getDisplaySize', + 'brlapi__enterTtyModeWithPath', + 'brlapi__leaveTtyMode', + 'brlapi__writeDots', + 'brlapi__readKey', + ], + 'libgio_functions': [ + 'glib_check_version', + 'g_type_init', + 'g_settings_new', + 'g_settings_get_child', + 'g_settings_get_string', + 'g_settings_get_boolean', + 'g_settings_get_int', + 'g_settings_get_strv', + 'g_settings_list_schemas', + ], + 'libpci_functions': [ + 'pci_alloc', + 'pci_init', + 'pci_cleanup', + 'pci_scan_bus', + 'pci_fill_info', + 'pci_lookup_name', + ], + 'libudev_functions': [ + 'udev_device_get_action', + 'udev_device_get_devnode', + 'udev_device_get_parent', + 'udev_device_get_parent_with_subsystem_devtype', + 'udev_device_get_property_value', + 'udev_device_get_subsystem', + 'udev_device_get_sysattr_value', + 'udev_device_get_sysname', + 'udev_device_get_syspath', + 'udev_device_new_from_devnum', + 'udev_device_new_from_subsystem_sysname', + 'udev_device_new_from_syspath', + 'udev_device_unref', + 'udev_enumerate_add_match_subsystem', + 'udev_enumerate_get_list_entry', + 'udev_enumerate_new', + 'udev_enumerate_scan_devices', + 'udev_enumerate_unref', + 'udev_list_entry_get_next', + 'udev_list_entry_get_name', + 'udev_monitor_enable_receiving', + 'udev_monitor_filter_add_match_subsystem_devtype', + 'udev_monitor_get_fd', + 'udev_monitor_new_from_netlink', + 'udev_monitor_receive_device', + 'udev_monitor_unref', + 'udev_new', + 'udev_set_log_fn', + 'udev_set_log_priority', + 'udev_unref', + ], + }, + 'conditions': [ + [ 'chromeos==0 and use_ozone==0', { + # Hide GTK and related dependencies for Chrome OS and Ozone, so they won't get + # added back to Chrome OS and Ozone. Don't try to use GTK on Chrome OS and Ozone. + 'targets': [ + { + 'target_name': 'atk', + 'type': 'none', + 'conditions': [ + ['_toolset=="target"', { + 'direct_dependent_settings': { + 'cflags': [ + '', + # TODO(phajdan.jr): Report problem to pciutils project + # and get it fixed so that we don't need --use-extern-c. + '--use-extern-c', + '--link-directly=<(linux_link_libpci)', + '<@(libpci_functions)', + ], + 'message': 'Generating libpci library loader', + 'process_outputs_as_sources': 1, + }, + ], + }, + ], + }], + ], # conditions + 'targets': [ + { + 'target_name': 'dbus', + 'type': 'none', + 'direct_dependent_settings': { + 'cflags': [ + '=2.40 deprecate g_settings_list_schemas in favor of + # g_settings_schema_source_list_schemas. This function is not + # available on earlier versions that we still need to support + # (specifically, 2.32), so disable the warning. + # TODO(mgiuca): Remove this suppression (and variable) when we + # drop support for Ubuntu 13.10 (saucy) and earlier. Update the + # code to use g_settings_schema_source_list_schemas instead. 
+ 'GLIB_DISABLE_DEPRECATION_WARNINGS', + ], + }, + 'defines': [ + '<(gio_warning_define)', + ], + 'direct_dependent_settings': { + 'cflags': [ + '', + '--link-directly=<(linux_link_gsettings)', + '<@(libgio_functions)', + ], + 'message': 'Generating libgio library loader', + 'process_outputs_as_sources': 1, + }, + ], + }], + ], + }, + { + 'target_name': 'glib', + 'type': 'none', + 'toolsets': ['host', 'target'], + 'variables': { + 'glib_packages': 'glib-2.0 gmodule-2.0 gobject-2.0 gthread-2.0', + }, + 'conditions': [ + ['_toolset=="target"', { + 'direct_dependent_settings': { + 'cflags': [ + '', + '--link-directly=<(linux_link_libbrlapi)', + '<@(libbrlapi_functions)', + ], + 'message': 'Generating libbrlapi library loader', + 'process_outputs_as_sources': 1, + }, + ], + }, + { + 'target_name': 'libcap', + 'type': 'none', + 'link_settings': { + 'libraries': [ + '-lcap', + ], + }, + }, + { + 'target_name': 'libresolv', + 'type': 'none', + 'link_settings': { + 'libraries': [ + '-lresolv', + ], + }, + }, + { + # GN version: //third_party/speech-dispatcher + 'target_name': 'libspeechd', + 'type': 'static_library', + 'direct_dependent_settings': { + 'include_dirs': [ + '<(SHARED_INTERMEDIATE_DIR)', + ], + 'conditions': [ + ['linux_link_libspeechd==1', { + 'link_settings': { + 'libraries': [ + '-lspeechd', + ], + } + }], + ], + }, + 'include_dirs': [ + '../..', + ], + 'hard_dependency': 1, + 'actions': [ + { + 'variables': { + 'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libspeechd.h', + 'output_cc': '<(INTERMEDIATE_DIR)/libspeechd_loader.cc', + 'generator': '../../tools/generate_library_loader/generate_library_loader.py', + + # speech-dispatcher >= 0.8 installs libspeechd.h into + # speech-dispatcher/libspeechd.h, whereas speech-dispatcher < 0.8 + # puts libspeechd.h in the top-level include directory. + # Since we need to support both cases for now, we ship a copy of + # libspeechd.h in third_party/speech-dispatcher. If the user + # prefers to link against the speech-dispatcher directly, the + # `libspeechd_h_prefix' variable can be passed to gyp with a value + # such as "speech-dispatcher/" that will be prepended to + # "libspeechd.h" in the #include directive. + # TODO(phaldan.jr): Once we do not need to support + # speech-dispatcher < 0.8 we can get rid of all this (including + # third_party/speech-dispatcher) and just include + # speech-dispatcher/libspeechd.h unconditionally. 
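+              # For example, a distro build might pass
+              #   -Dlibspeechd_h_prefix=speech-dispatcher/
+              # to gyp, making the --header argument below expand to
+              # <speech-dispatcher/libspeechd.h>.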
+ 'libspeechd_h_prefix%': '', + }, + 'action_name': 'generate_libspeechd_loader', + 'inputs': [ + '<(generator)', + ], + 'outputs': [ + '<(output_h)', + '<(output_cc)', + ], + 'action': ['python', + '<(generator)', + '--name', 'LibSpeechdLoader', + '--output-h', '<(output_h)', + '--output-cc', '<(output_cc)', + '--header', '<<(libspeechd_h_prefix)libspeechd.h>', + '--bundled-header', + '"third_party/speech-dispatcher/libspeechd.h"', + '--link-directly=<(linux_link_libspeechd)', + 'spd_open', + 'spd_say', + 'spd_stop', + 'spd_close', + 'spd_pause', + 'spd_resume', + 'spd_set_notification_on', + 'spd_set_voice_rate', + 'spd_set_voice_pitch', + 'spd_list_synthesis_voices', + 'spd_set_synthesis_voice', + 'spd_list_modules', + 'spd_set_output_module', + 'spd_set_language', + ], + 'message': 'Generating libspeechd library loader', + 'process_outputs_as_sources': 1, + }, + ], + }, + { + 'target_name': 'pangocairo', + 'type': 'none', + 'toolsets': ['host', 'target'], + 'conditions': [ + ['use_pango==1 and use_cairo==1', { + 'conditions': [ + ['_toolset=="target"', { + 'direct_dependent_settings': { + 'cflags': [ + ' + + For example: remove_bundled_libraries.py third_party/mesa + + The script scans sources looking for third_party directories. + Everything that is not explicitly preserved is removed (except for + gyp files), and the script fails if any directory passed on command + line does not exist (to ensure list is kept up to date). + + This is intended to be used on sources extracted from a tarball, + not a repository. + + NOTE: by default this will not remove anything (for safety). Pass + --do-remove flag to actually remove files. + +2. Pick the script to run depending on whether you use GYP or GN: + +2a. replace_gyp_files.py + + For example: replace_gyp_files.py -Duse_system_harfbuzz=1 + + The script ignores flags other than -D for convenience. This makes it + possible to have a variable e.g. ${myconf} with all the options, and + execute: + + build/linux/unbundle/replace_gyp_files.py ${myconf} + build/gyp_chromium ${myconf} + +2b. replace_gn_files.py --system-libraries lib... + + For example: replace_gn_files.py --system-libraries libxml diff --git a/build/linux/unbundle/expat.gyp b/build/linux/unbundle/expat.gyp new file mode 100644 index 00000000000..030fb856bb7 --- /dev/null +++ b/build/linux/unbundle/expat.gyp @@ -0,0 +1,17 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'targets': [ + { + 'target_name': 'expat', + 'type': 'none', + 'link_settings': { + 'libraries': [ + '-lexpat', + ], + }, + }, + ], +} diff --git a/build/linux/unbundle/ffmpeg.gn b/build/linux/unbundle/ffmpeg.gn new file mode 100644 index 00000000000..9a4fbfd75a4 --- /dev/null +++ b/build/linux/unbundle/ffmpeg.gn @@ -0,0 +1,30 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_ffmpeg") { + packages = [ + "libavcodec", + "libavformat", + "libavutil", + ] +} + +shim_headers("ffmpeg_shim") { + root_path = "." 
+ headers = [ + "libavcodec/avcodec.h", + "libavformat/avformat.h", + "libavutil/imgutils.h", + ] +} + +source_set("ffmpeg") { + deps = [ + ":ffmpeg_shim", + ] + public_configs = [ ":system_ffmpeg" ] +} diff --git a/build/linux/unbundle/ffmpeg.gyp b/build/linux/unbundle/ffmpeg.gyp new file mode 100644 index 00000000000..b86a1c4dc71 --- /dev/null +++ b/build/linux/unbundle/ffmpeg.gyp @@ -0,0 +1,65 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'targets': [ + { + 'target_name': 'ffmpeg', + 'type': 'none', + 'direct_dependent_settings': { + 'cflags': [ + '\n' + 'int test() { return AV_CODEC_ID_OPUS; }" ' + '--on-failure -DCHROMIUM_OMIT_AV_CODEC_ID_OPUS=1)', + + '\n' + 'int test() { return AV_CODEC_ID_VP9; }" ' + '--on-failure -DCHROMIUM_OMIT_AV_CODEC_ID_VP9=1)', + + '\n' + 'int test() { return AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL; }" ' + '--on-failure -DCHROMIUM_OMIT_AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL=1)', + + '\n' + 'int test() { struct AVFrame frame;\n' + 'return av_frame_get_channels(&frame); }" ' + '--on-failure -DCHROMIUM_NO_AVFRAME_CHANNELS=1)', + ], + 'defines': [ + '__STDC_CONSTANT_MACROS', + 'USE_SYSTEM_FFMPEG', + ], + }, + 'variables': { + 'headers_root_path': '.', + 'header_filenames': [ + 'libavcodec/avcodec.h', + 'libavformat/avformat.h', + 'libavutil/imgutils.h', + ], + }, + 'includes': [ + '../../build/shim_headers.gypi', + ], + 'link_settings': { + 'ldflags': [ + ', which requires RTTI. + 'GOOGLE_PROTOBUF_NO_RTTI', + 'GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER', + ], + }, + 'link_settings': { + # Use full protobuf, because vanilla protobuf doesn't have + # our custom patch to retain unknown fields in lite mode. + 'ldflags': [ + ' 1 and split[0].startswith(exclusion): + continue + + if relpath.startswith(exclusion): + # Multiple exclusions can match the same path. Go through all of them + # and mark each one as used. + exclusion_used[exclusion] = True + excluded = True + if excluded: + continue + + # Deleting gyp files almost always leads to gyp failures. + # These files come from Chromium project, and can be replaced if needed. + if f.endswith('.gyp') or f.endswith('.gypi'): + continue + + # Same about GN files. + if f.endswith('.gn') or f.endswith('.gni'): + continue + + # Deleting .isolate files leads to gyp failures. They are usually + # not used by a distro build anyway. + # See http://www.chromium.org/developers/testing/isolated-testing + # for more info. + if f.endswith('.isolate'): + continue + + if options.do_remove: + # Delete the file - best way to ensure it's not used during build. + os.remove(path) + else: + # By default just print paths that would be removed. + print path + + exit_code = 0 + + # Fail if exclusion list contains stale entries - this helps keep it + # up to date. + for exclusion, used in exclusion_used.iteritems(): + if not used: + print '%s does not exist' % exclusion + exit_code = 1 + + if not options.do_remove: + print ('To actually remove files printed above, please pass ' + + '--do-remove flag.') + + return exit_code + + +if __name__ == '__main__': + sys.exit(DoMain(sys.argv[1:])) diff --git a/build/linux/unbundle/replace_gn_files.py b/build/linux/unbundle/replace_gn_files.py new file mode 100644 index 00000000000..9d729917be5 --- /dev/null +++ b/build/linux/unbundle/replace_gn_files.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Replaces GN files in tree with files from here that +make the build use system libraries. +""" + +from __future__ import print_function + +import argparse +import os +import shutil +import sys + + +REPLACEMENTS = { + 'ffmpeg': 'third_party/ffmpeg/BUILD.gn', + 'flac': 'third_party/flac/BUILD.gn', + 'harfbuzz-ng': 'third_party/harfbuzz-ng/BUILD.gn', + 'libevent': 'base/third_party/libevent/BUILD.gn', + 'libjpeg': 'build/secondary/third_party/libjpeg_turbo/BUILD.gn', + 'libpng': 'third_party/libpng/BUILD.gn', + 'libvpx': 'third_party/libvpx/BUILD.gn', + 'libwebp': 'third_party/libwebp/BUILD.gn', + 'libxml': 'third_party/libxml/BUILD.gn', + 'libxslt': 'third_party/libxslt/BUILD.gn', + 're2': 'third_party/re2/BUILD.gn', + 'snappy': 'third_party/snappy/BUILD.gn', + 'yasm': 'third_party/yasm/yasm_assemble.gni', + 'zlib': 'third_party/zlib/BUILD.gn', +} + + +def DoMain(argv): + my_dirname = os.path.dirname(__file__) + source_tree_root = os.path.abspath( + os.path.join(my_dirname, '..', '..', '..')) + + parser = argparse.ArgumentParser() + parser.add_argument('--system-libraries', nargs='*', default=[]) + parser.add_argument('--undo', action='store_true') + + args = parser.parse_args(argv) + + handled_libraries = set() + for lib, path in REPLACEMENTS.items(): + if lib not in args.system_libraries: + continue + handled_libraries.add(lib) + + if args.undo: + # Restore original file, and also remove the backup. + # This is meant to restore the source tree to its original state. + os.rename(os.path.join(source_tree_root, path + '.orig'), + os.path.join(source_tree_root, path)) + else: + # Create a backup copy for --undo. + shutil.copyfile(os.path.join(source_tree_root, path), + os.path.join(source_tree_root, path + '.orig')) + + # Copy the GN file from directory of this script to target path. + shutil.copyfile(os.path.join(my_dirname, '%s.gn' % lib), + os.path.join(source_tree_root, path)) + + unhandled_libraries = set(args.system_libraries) - handled_libraries + if unhandled_libraries: + print('Unrecognized system libraries requested: %s' % ', '.join( + sorted(unhandled_libraries)), file=sys.stderr) + return 1 + + return 0 + + +if __name__ == '__main__': + sys.exit(DoMain(sys.argv[1:])) diff --git a/build/linux/unbundle/replace_gyp_files.py b/build/linux/unbundle/replace_gyp_files.py new file mode 100644 index 00000000000..d067ba47386 --- /dev/null +++ b/build/linux/unbundle/replace_gyp_files.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Replaces gyp files in tree with files from here that +make the build use system libraries. 
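+
+For example (the same usage shown in the unbundle README):
+  build/linux/unbundle/replace_gyp_files.py -Duse_system_harfbuzz=1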
+""" + + +import optparse +import os.path +import shutil +import sys + + +REPLACEMENTS = { + 'use_system_expat': 'third_party/expat/expat.gyp', + 'use_system_ffmpeg': 'third_party/ffmpeg/ffmpeg.gyp', + 'use_system_flac': 'third_party/flac/flac.gyp', + 'use_system_harfbuzz': 'third_party/harfbuzz-ng/harfbuzz.gyp', + 'use_system_icu': 'third_party/icu/icu.gyp', + 'use_system_jsoncpp': 'third_party/jsoncpp/jsoncpp.gyp', + 'use_system_libevent': 'base/third_party/libevent/libevent.gyp', + 'use_system_libjpeg': 'third_party/libjpeg/libjpeg.gyp', + 'use_system_libpng': 'third_party/libpng/libpng.gyp', + 'use_system_libusb': 'third_party/libusb/libusb.gyp', + 'use_system_libvpx': 'third_party/libvpx/libvpx.gyp', + 'use_system_libwebp': 'third_party/libwebp/libwebp.gyp', + 'use_system_libxml': 'third_party/libxml/libxml.gyp', + 'use_system_libxnvctrl' : 'third_party/libXNVCtrl/libXNVCtrl.gyp', + 'use_system_libxslt': 'third_party/libxslt/libxslt.gyp', + 'use_system_opus': 'third_party/opus/opus.gyp', + 'use_system_protobuf': 'third_party/protobuf/protobuf.gyp', + 'use_system_re2': 'third_party/re2/re2.gyp', + 'use_system_snappy': 'third_party/snappy/snappy.gyp', + 'use_system_sqlite': 'third_party/sqlite/sqlite.gyp', + 'use_system_v8': 'v8/src/v8.gyp', + 'use_system_zlib': 'third_party/zlib/zlib.gyp', +} + + +def DoMain(argv): + my_dirname = os.path.dirname(__file__) + source_tree_root = os.path.abspath( + os.path.join(my_dirname, '..', '..', '..')) + + parser = optparse.OptionParser() + + # Accept arguments in gyp command-line syntax, so that the caller can re-use + # command-line for this script and gyp. + parser.add_option('-D', dest='defines', action='append') + + parser.add_option('--undo', action='store_true') + + options, args = parser.parse_args(argv) + + for flag, path in REPLACEMENTS.items(): + if '%s=1' % flag not in options.defines: + continue + + if options.undo: + # Restore original file, and also remove the backup. + # This is meant to restore the source tree to its original state. + os.rename(os.path.join(source_tree_root, path + '.orig'), + os.path.join(source_tree_root, path)) + else: + # Create a backup copy for --undo. + shutil.copyfile(os.path.join(source_tree_root, path), + os.path.join(source_tree_root, path + '.orig')) + + # Copy the gyp file from directory of this script to target path. + shutil.copyfile(os.path.join(my_dirname, os.path.basename(path)), + os.path.join(source_tree_root, path)) + + return 0 + + +if __name__ == '__main__': + sys.exit(DoMain(sys.argv)) diff --git a/build/linux/unbundle/snappy.gn b/build/linux/unbundle/snappy.gn new file mode 100644 index 00000000000..9956ef88bc0 --- /dev/null +++ b/build/linux/unbundle/snappy.gn @@ -0,0 +1,22 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/shim_headers.gni") + +shim_headers("snappy_shim") { + root_path = "src" + headers = [ + "snappy-c.h", + "snappy-sinksource.h", + "snappy-stubs-public.h", + "snappy.h", + ] +} + +source_set("snappy") { + deps = [ + ":snappy_shim", + ] + libs = [ "snappy" ] +} diff --git a/build/linux/unbundle/snappy.gyp b/build/linux/unbundle/snappy.gyp new file mode 100644 index 00000000000..ab856ed89cb --- /dev/null +++ b/build/linux/unbundle/snappy.gyp @@ -0,0 +1,29 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +{ + 'targets': [ + { + 'target_name': 'snappy', + 'type': 'none', + 'variables': { + 'headers_root_path': 'src', + 'header_filenames': [ + 'snappy-c.h', + 'snappy-sinksource.h', + 'snappy-stubs-public.h', + 'snappy.h', + ], + }, + 'includes': [ + '../../build/shim_headers.gypi', + ], + 'link_settings': { + 'libraries': [ + '-lsnappy', + ], + }, + }, + ], +} diff --git a/build/linux/unbundle/sqlite.gyp b/build/linux/unbundle/sqlite.gyp new file mode 100644 index 00000000000..918da928018 --- /dev/null +++ b/build/linux/unbundle/sqlite.gyp @@ -0,0 +1,28 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'targets': [ + { + 'target_name': 'sqlite', + 'type': 'none', + 'direct_dependent_settings': { + 'cflags': [ + '&2 + exit 1 +fi + +# TODO(glider): this doesn't work if we set CC and CXX to override the default +# Clang. +SRCROOT="${BUILT_PRODUCTS_DIR}/../.." +CLANGVER=$(python ${SRCROOT}/tools/clang/scripts/update.py --print-clang-version) +ASAN_DYLIB=${SRCROOT}/third_party/llvm-build/Release+Asserts/lib/clang/${CLANGVER}/lib/darwin/${ASAN_DYLIB_NAME} + +DYLIB_BASENAME=$(basename "${ASAN_DYLIB}") +if [[ "${DYLIB_BASENAME}" != "${ASAN_DYLIB_NAME}" ]]; then + echo "basename(${ASAN_DYLIB}) != ${ASAN_DYLIB_NAME}" >&2 + exit 1 +fi + +# Check whether the directory containing the executable binary is named +# "MacOS". In this case we're building a full-fledged OSX app and will put +# the runtime into appname.app/Contents/Libraries/. Otherwise this is probably +# an iOS gtest app, and the ASan runtime is put next to the executable. +UPPER_DIR=$(dirname "${BINARY_DIR}") +if [ "${UPPER_DIR}" == "MacOS" ]; then + LIBRARIES_DIR="${UPPER_DIR}/Libraries" + mkdir -p "${LIBRARIES_DIR}" + NEW_LC_ID_DYLIB="@executable_path/../Libraries/${ASAN_DYLIB_NAME}" +else + LIBRARIES_DIR="${BINARY_DIR}" + NEW_LC_ID_DYLIB="@executable_path/${ASAN_DYLIB_NAME}" +fi + +cp "${ASAN_DYLIB}" "${LIBRARIES_DIR}" + +# Make LC_ID_DYLIB of the runtime copy point to its location. +install_name_tool \ + -id "${NEW_LC_ID_DYLIB}" \ + "${LIBRARIES_DIR}/${ASAN_DYLIB_NAME}" + +# Fix the rpath to the runtime library recorded in the binary. +install_name_tool \ + -change "${BUILTIN_DYLIB_PATH}" \ + "${NEW_LC_ID_DYLIB}" \ + "${BINARY}" diff --git a/build/mac/copy_framework_unversioned.sh b/build/mac/copy_framework_unversioned.sh new file mode 100644 index 00000000000..380cc908407 --- /dev/null +++ b/build/mac/copy_framework_unversioned.sh @@ -0,0 +1,118 @@ +#!/bin/bash + +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Copies a framework to its new home, "unversioning" it. +# +# Normally, frameworks are versioned bundles. The contents of a framework are +# stored in a versioned directory within the bundle, and symbolic links +# provide access to the actual code and resources. See +# http://developer.apple.com/mac/library/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkAnatomy.html +# +# The symbolic links usually found in frameworks create problems. Symbolic +# links are excluded from code signatures. That means that it's possible to +# remove or retarget a symbolic link within a framework without affecting the +# seal. In Chrome's case, the outer .app bundle contains a framework where +# all application code and resources live. 
In order for the signature on the +# .app to be meaningful, it encompasses the framework. Because framework +# resources are accessed through the framework's symbolic links, this +# arrangement results in a case where the resources can be altered without +# affecting the .app signature's validity. +# +# Indirection through symbolic links also carries a runtime performance +# penalty on open() operations, although open() typically completes so quickly +# that this is not considered a major performance problem. +# +# To resolve these problems, the frameworks that ship within Chrome's .app +# bundle are unversioned. Unversioning is simple: instead of using the +# original outer .framework directory as the framework that ships within the +# .app, the inner versioned directory is used. Instead of accessing bundled +# resources through symbolic links, they are accessed directly. In normal +# situations, the only hard-coded use of the versioned directory is by dyld, +# when loading the framework's code, but this is handled through a normal +# Mach-O load command, and it is easy to adjust the load command to point to +# the unversioned framework code rather than the versioned counterpart. +# +# The resulting framework bundles aren't strictly conforming, but they work +# as well as normal versioned framework bundles. +# +# An option to skip running install_name_tool is available. By passing -I as +# the first argument to this script, install_name_tool will be skipped. This +# is only suitable for copied frameworks that will not be linked against, or +# when install_name_tool will be run on any linker output when something is +# linked against the copied framework. This option exists to allow signed +# frameworks to pass through without subjecting them to any modifications that +# would break their signatures. + +set -e + +RUN_INSTALL_NAME_TOOL=1 +if [ $# -eq 3 ] && [ "${1}" = "-I" ] ; then + shift + RUN_INSTALL_NAME_TOOL= +fi + +if [ $# -ne 2 ] ; then + echo "usage: ${0} [-I] FRAMEWORK DESTINATION_DIR" >& 2 + exit 1 +fi + +# FRAMEWORK should be a path to a versioned framework bundle, ending in +# .framework. DESTINATION_DIR is the directory that the unversioned framework +# bundle will be copied to. + +FRAMEWORK="${1}" +DESTINATION_DIR="${2}" + +FRAMEWORK_NAME="$(basename "${FRAMEWORK}")" +if [ "${FRAMEWORK_NAME: -10}" != ".framework" ] ; then + echo "${0}: ${FRAMEWORK_NAME} does not end in .framework" >& 2 + exit 1 +fi +FRAMEWORK_NAME_NOEXT="${FRAMEWORK_NAME:0:$((${#FRAMEWORK_NAME} - 10))}" + +# Find the current version. +VERSIONS="${FRAMEWORK}/Versions" +CURRENT_VERSION_LINK="${VERSIONS}/Current" +CURRENT_VERSION_ID="$(readlink "${VERSIONS}/Current")" +CURRENT_VERSION="${VERSIONS}/${CURRENT_VERSION_ID}" + +# Make sure that the framework's structure makes sense as a versioned bundle. +if [ ! -e "${CURRENT_VERSION}/${FRAMEWORK_NAME_NOEXT}" ] ; then + echo "${0}: ${FRAMEWORK_NAME} does not contain a dylib" >& 2 + exit 1 +fi + +DESTINATION="${DESTINATION_DIR}/${FRAMEWORK_NAME}" + +# Copy the versioned directory within the versioned framework to its +# destination location. +mkdir -p "${DESTINATION_DIR}" +rsync -acC --delete --exclude Headers --exclude PrivateHeaders \ + --include '*.so' "${CURRENT_VERSION}/" "${DESTINATION}" + +if [[ -n "${RUN_INSTALL_NAME_TOOL}" ]]; then + # Adjust the Mach-O LC_ID_DYLIB load command in the framework. This does not + # change the LC_LOAD_DYLIB load commands in anything that may have already + # linked against the framework. 
Not all frameworks will actually need this
+  # to be changed. Some frameworks may already be built with the proper
+  # LC_ID_DYLIB for use as an unversioned framework. Xcode users can do this
+  # by setting LD_DYLIB_INSTALL_NAME to
+  # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(WRAPPER_NAME)/$(PRODUCT_NAME)
+  # If invoking ld via gcc or g++, pass the desired path to -Wl,-install_name
+  # at link time.
+  FRAMEWORK_DYLIB="${DESTINATION}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_OLD="$(otool -l "${FRAMEWORK_DYLIB}" |
+                     grep -A10 "^ *cmd LC_ID_DYLIB$" |
+                     grep -m1 "^ *name" |
+                     sed -Ee 's/^ *name (.*) \(offset [0-9]+\)$/\1/')"
+  VERSION_PATH="/Versions/${CURRENT_VERSION_ID}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_NEW="$(echo "${LC_ID_DYLIB_OLD}" |
+                     sed -Ee "s%${VERSION_PATH}$%/${FRAMEWORK_NAME_NOEXT}%")"
+
+  if [ "${LC_ID_DYLIB_NEW}" != "${LC_ID_DYLIB_OLD}" ] ; then
+    install_name_tool -id "${LC_ID_DYLIB_NEW}" "${FRAMEWORK_DYLIB}"
+  fi
+fi
diff --git a/build/mac/edit_xibs.sh b/build/mac/edit_xibs.sh
new file mode 100644
index 00000000000..b7b749e1601
--- /dev/null
+++ b/build/mac/edit_xibs.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a convenience to run GYP for /src/chrome/chrome_nibs.gyp
+# with the Xcode generator (as you likely use ninja). Documentation:
+# http://dev.chromium.org/developers/design-documents/mac-xib-files
+
+set -e
+
+RELSRC=$(dirname "$0")/../..
+SRC=$(cd "$RELSRC" && pwd)
+export PYTHONPATH="$PYTHONPATH:$SRC/build"
+export GYP_GENERATORS=xcode
+"$SRC/tools/gyp/gyp" -I"$SRC/build/common.gypi" "$SRC/chrome/chrome_nibs.gyp"
+echo "You can now edit XIB files in Xcode using:"
+echo "  $SRC/chrome/chrome_nibs.xcodeproj"
diff --git a/build/mac/find_sdk.py b/build/mac/find_sdk.py
new file mode 100644
index 00000000000..f06fd15c168
--- /dev/null
+++ b/build/mac/find_sdk.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum SDK version to standard output.
+
+Usage:
+  python find_sdk.py 10.6  # Ignores SDKs < 10.6
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+sys.path.append(os.path.dirname(os.path.dirname(__file__)))
+import mac_toolchain
+
+from optparse import OptionParser
+
+
+def parse_version(version_str):
+  """'10.6' => [10, 6]"""
+  return map(int, re.findall(r'(\d+)', version_str))
+
+
+def main():
+  parser = OptionParser()
+  parser.add_option("--verify",
+                    action="store_true", dest="verify", default=False,
+                    help="return the sdk argument and warn if it doesn't exist")
+  parser.add_option("--sdk_path",
+                    action="store", type="string", dest="sdk_path", default="",
+                    help="user-specified SDK path; bypasses verification")
+  parser.add_option("--print_sdk_path",
+                    action="store_true", dest="print_sdk_path", default=False,
+                    help="Additionally print the path to the SDK (appears first).")
+  options, args = parser.parse_args()
+  if len(args) != 1:
+    parser.error('Please specify a minimum SDK version')
+  min_sdk_version = args[0]
+
+  # Try using the toolchain in mac_files.
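+  # (SetToolchainEnvironment() points DEVELOPER_DIR at the hermetic
+  # build/mac_files/Xcode.app checkout when one is in use; see
+  # build/mac_toolchain.py.)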
+ mac_toolchain.SetToolchainEnvironment() + + job = subprocess.Popen(['xcode-select', '-print-path'], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + out, err = job.communicate() + if job.returncode != 0: + print >> sys.stderr, out + print >> sys.stderr, err + raise Exception('Error %d running xcode-select' % job.returncode) + sdk_dir = os.path.join( + out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs') + sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)] + sdks = [s[0] for s in sdks if s] # [['10.5'], ['10.6']] => ['10.5', '10.6'] + sdks = [s for s in sdks # ['10.5', '10.6'] => ['10.6'] + if parse_version(s) >= parse_version(min_sdk_version)] + if not sdks: + raise Exception('No %s+ SDK found' % min_sdk_version) + best_sdk = sorted(sdks, key=parse_version)[0] + + if options.verify and best_sdk != min_sdk_version and not options.sdk_path: + print >> sys.stderr, '' + print >> sys.stderr, ' vvvvvvv' + print >> sys.stderr, '' + print >> sys.stderr, \ + 'This build requires the %s SDK, but it was not found on your system.' \ + % min_sdk_version + print >> sys.stderr, \ + 'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.' + print >> sys.stderr, '' + print >> sys.stderr, ' ^^^^^^^' + print >> sys.stderr, '' + sys.exit(1) + + if options.print_sdk_path: + print subprocess.check_output( + ['xcrun', '-sdk', 'macosx' + best_sdk, '--show-sdk-path']).strip() + + return best_sdk + + +if __name__ == '__main__': + if sys.platform != 'darwin': + raise Exception("This script only runs on Mac") + print main() + sys.exit(0) diff --git a/build/mac/strip_from_xcode b/build/mac/strip_from_xcode new file mode 100644 index 00000000000..c26b9fb492b --- /dev/null +++ b/build/mac/strip_from_xcode @@ -0,0 +1,62 @@ +#!/bin/bash + +# Copyright (c) 2008 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This is a handy wrapper script that figures out how to call the strip +# utility (strip_save_dsym in this case), if it even needs to be called at all, +# and then does it. This script should be called by a post-link phase in +# targets that might generate Mach-O executables, dynamic libraries, or +# loadable bundles. +# +# An example "Strip If Needed" build phase placed after "Link Binary With +# Libraries" would do: +# exec "${XCODEPROJ_DEPTH}/build/mac/strip_from_xcode" + +if [ "${CONFIGURATION}" != "Release" ] ; then + # Only strip in release mode. + exit 0 +fi + +declare -a FLAGS + +# MACH_O_TYPE is not set for a command-line tool, so check PRODUCT_TYPE too. +# Weird. +if [ "${MACH_O_TYPE}" = "mh_execute" ] || \ + [ "${PRODUCT_TYPE}" = "com.apple.product-type.tool" ] ; then + # Strip everything (no special flags). No-op. + true +elif [ "${MACH_O_TYPE}" = "mh_dylib" ] || \ + [ "${MACH_O_TYPE}" = "mh_bundle" ]; then + # Strip debugging symbols and local symbols + FLAGS[${#FLAGS[@]}]=-S + FLAGS[${#FLAGS[@]}]=-x +elif [ "${MACH_O_TYPE}" = "staticlib" ] ; then + # Don't strip static libraries. + exit 0 +else + # Warn, but don't treat this as an error. + echo $0: warning: unrecognized MACH_O_TYPE ${MACH_O_TYPE} + exit 0 +fi + +if [ -n "${STRIPFLAGS}" ] ; then + # Pick up the standard STRIPFLAGS Xcode setting, used for "Additional Strip + # Flags". 
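+  # For example, STRIPFLAGS="-S -x" would append both flags individually
+  # below (an illustrative value, not a project default).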
+  for stripflag in ${STRIPFLAGS} ; do
+    FLAGS[${#FLAGS[@]}]="${stripflag}"
+  done
+fi
+
+if [ -n "${CHROMIUM_STRIP_SAVE_FILE}" ] ; then
+  # An Xcode project can communicate a file listing symbols to be saved in
+  # this environment variable by setting it as a build setting. This isn't a
+  # standard Xcode setting. It's used in preference to STRIPFLAGS to
+  # eliminate quoting ambiguity concerns.
+  FLAGS[${#FLAGS[@]}]=-s
+  FLAGS[${#FLAGS[@]}]="${CHROMIUM_STRIP_SAVE_FILE}"
+fi
+
+exec "$(dirname ${0})/strip_save_dsym" "${FLAGS[@]}" \
+    "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/strip_save_dsym b/build/mac/strip_save_dsym
new file mode 100644
index 00000000000..c9cf2266375
--- /dev/null
+++ b/build/mac/strip_save_dsym
@@ -0,0 +1,335 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: strip_save_dsym
+#
+# strip_save_dsym is a wrapper around the standard strip utility. Given an
+# input Mach-O file, strip_save_dsym will save a copy of the file in a "fake"
+# .dSYM bundle for debugging, and then call strip to strip the Mach-O file.
+# Note that the .dSYM file is a "fake" in that it's not a self-contained
+# .dSYM bundle, it just contains a copy of the original (unstripped) Mach-O
+# file, and therefore contains references to object files on the filesystem.
+# The generated .dSYM bundle is therefore unsuitable for debugging in the
+# absence of these .o files.
+#
+# If a .dSYM already exists and has a newer timestamp than the Mach-O file,
+# this utility does nothing. That allows strip_save_dsym to be run on a file
+# that has already been stripped without trashing the .dSYM.
+#
+# Rationale: the "right" way to generate dSYM bundles, dsymutil, is incredibly
+# slow. On the other hand, doing a file copy (which is really all that
+# dsymutil does) is comparatively fast. Since we usually just want to strip
+# a release-mode executable but still be able to debug it, and we don't care
+# so much about generating a hermetic dSYM bundle, we'll prefer the file copy.
+# If a real dSYM is ever needed, it's still possible to create one by running
+# dsymutil and pointing it at the original Mach-O file inside the "fake"
+# bundle, provided that the object files are available.
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+
+# Returns a list of architectures contained in a Mach-O file. The file can be
+# a universal (fat) file, in which case there will be one list element for
+# each contained architecture, or it can be a thin single-architecture Mach-O
+# file, in which case the list will contain a single element identifying the
+# architecture. On error, returns an empty list. Determines the architecture
+# list by calling file.
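+# For a thin binary, "file -b" prints a line such as
+# "Mach-O 64-bit executable x86_64" (illustrative output; the exact wording
+# varies by OS release), from which the trailing architecture is captured.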
+def macho_archs(macho):
+  macho_types = ["executable",
+                 "dynamically linked shared library",
+                 "bundle"]
+  macho_types_re = "Mach-O (?:64-bit )?(?:" + "|".join(macho_types) + ")"
+
+  file_cmd = subprocess.Popen(["/usr/bin/file", "-b", "--", macho],
+                              stdout=subprocess.PIPE)
+
+  archs = []
+
+  type_line = file_cmd.stdout.readline()
+  type_match = re.match("^%s (.*)$" % macho_types_re, type_line)
+  if type_match:
+    archs.append(type_match.group(1))
+  else:
+    type_match = re.match("^Mach-O universal binary with (.*) architectures$",
+                          type_line)
+    if type_match:
+      for i in range(0, int(type_match.group(1))):
+        arch_line = file_cmd.stdout.readline()
+        arch_match = re.match(
+            "^.* \(for architecture (.*)\):\t%s .*$" % macho_types_re,
+            arch_line)
+        if arch_match:
+          archs.append(arch_match.group(1))
+
+  if file_cmd.wait() != 0:
+    archs = []
+
+  if len(archs) == 0:
+    print >> sys.stderr, "No architectures in %s" % macho
+
+  return archs
+
+# Returns a dictionary mapping architectures contained in the file as returned
+# by macho_archs to the LC_UUID load command for that architecture.
+# Architectures with no LC_UUID load command are omitted from the dictionary.
+# Determines the UUID value by calling otool.
+def macho_uuids(macho):
+  uuids = {}
+
+  archs = macho_archs(macho)
+  if len(archs) == 0:
+    return uuids
+
+  for arch in archs:
+    if arch == "":
+      continue
+
+    otool_cmd = subprocess.Popen(["/usr/bin/otool", "-arch", arch, "-l", "-",
+                                  macho],
+                                 stdout=subprocess.PIPE)
+    # state 0 is when nothing UUID-related has been seen yet. State 1 is
+    # entered after a load command begins, but it may not be an LC_UUID load
+    # command. States 2, 3, and 4 are intermediate states while reading an
+    # LC_UUID command. State 5 is the terminal state for a successful LC_UUID
+    # read. State 6 is the error state.
+    state = 0
+    uuid = ""
+    for otool_line in otool_cmd.stdout:
+      if state == 0:
+        if re.match("^Load command .*$", otool_line):
+          state = 1
+      elif state == 1:
+        if re.match("^ cmd LC_UUID$", otool_line):
+          state = 2
+        else:
+          state = 0
+      elif state == 2:
+        if re.match("^ cmdsize 24$", otool_line):
+          state = 3
+        else:
+          state = 6
+      elif state == 3:
+        # The UUID display format changed in the version of otool shipping
+        # with the Xcode 3.2.2 prerelease. The new format is traditional:
+        #    uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # and with Xcode 3.2.6, the line is indented one more space:
+        #     uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # The old format, from cctools-750 and older's otool, breaks the UUID
+        # up into a sequence of bytes:
+        #    uuid 0x4d 0x71 0x35 0xb2 0x9c 0x56 0xc5 0xf5
+        #         0x5f 0x49 0xa9 0x94 0x25 0x8e 0x09 0x55
+        new_uuid_match = re.match("^ {3,4}uuid (.{8}-.{4}-.{4}-.{4}-.{12})$",
+                                  otool_line)
+        if new_uuid_match:
+          uuid = new_uuid_match.group(1)
+
+          # Skip state 4, there is no second line to read.
+          state = 5
+        else:
+          old_uuid_match = re.match("^ uuid 0x(..) 0x(..) 0x(..) 0x(..) "
+                                    "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                    otool_line)
+          if old_uuid_match:
+            state = 4
+            uuid = old_uuid_match.group(1) + old_uuid_match.group(2) + \
+                   old_uuid_match.group(3) + old_uuid_match.group(4) + "-" + \
+                   old_uuid_match.group(5) + old_uuid_match.group(6) + "-" + \
+                   old_uuid_match.group(7) + old_uuid_match.group(8) + "-"
+          else:
+            state = 6
+      elif state == 4:
+        old_uuid_match = re.match("^ 0x(..) 0x(..) 0x(..) 0x(..) "
+                                  "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                  otool_line)
+        if old_uuid_match:
+          state = 5
+          uuid += old_uuid_match.group(1) + old_uuid_match.group(2) + "-" + \
+                  old_uuid_match.group(3) + old_uuid_match.group(4) + \
+                  old_uuid_match.group(5) + old_uuid_match.group(6) + \
+                  old_uuid_match.group(7) + old_uuid_match.group(8)
+        else:
+          state = 6
+
+    if otool_cmd.wait() != 0:
+      state = 6
+
+    if state == 5:
+      uuids[arch] = uuid.upper()
+
+  if len(uuids) == 0:
+    print >> sys.stderr, "No UUIDs in %s" % macho
+
+  return uuids
+
+# Given a path to a Mach-O file and possible information from the environment,
+# determines the desired path to the .dSYM.
+def dsym_path(macho):
+  # If building a bundle, the .dSYM should be placed next to the bundle. Use
+  # WRAPPER_NAME to make this determination. If called from xcodebuild,
+  # WRAPPER_NAME will be set to the name of the bundle.
+  dsym = ""
+  if "WRAPPER_NAME" in os.environ:
+    if "BUILT_PRODUCTS_DIR" in os.environ:
+      dsym = os.path.join(os.environ["BUILT_PRODUCTS_DIR"],
+                          os.environ["WRAPPER_NAME"])
+    else:
+      dsym = os.environ["WRAPPER_NAME"]
+  else:
+    dsym = macho
+
+  dsym += ".dSYM"
+
+  return dsym
+
+# Creates a fake .dSYM bundle at dsym for macho, a Mach-O image with the
+# architectures and UUIDs specified by the uuids map.
+def make_fake_dsym(macho, dsym):
+  uuids = macho_uuids(macho)
+  if len(uuids) == 0:
+    return False
+
+  dwarf_dir = os.path.join(dsym, "Contents", "Resources", "DWARF")
+  dwarf_file = os.path.join(dwarf_dir, os.path.basename(macho))
+  try:
+    os.makedirs(dwarf_dir)
+  except OSError, (err, error_string):
+    if err != errno.EEXIST:
+      raise
+  shutil.copyfile(macho, dwarf_file)
+
+  # info_template is the same as what dsymutil would have written, with the
+  # addition of the fake_dsym key.
+  info_template = \
+'''<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+	<dict>
+		<key>CFBundleDevelopmentRegion</key>
+		<string>English</string>
+		<key>CFBundleIdentifier</key>
+		<string>com.apple.xcode.dsym.%(root_name)s</string>
+		<key>CFBundleInfoDictionaryVersion</key>
+		<string>6.0</string>
+		<key>CFBundlePackageType</key>
+		<string>dSYM</string>
+		<key>CFBundleSignature</key>
+		<string>????</string>
+		<key>CFBundleShortVersionString</key>
+		<string>1.0</string>
+		<key>CFBundleVersion</key>
+		<string>1</string>
+		<key>dSYM_UUID</key>
+		<dict>
+%(uuid_dict)s		</dict>
+		<key>fake_dsym</key>
+		<true/>
+	</dict>
+</plist>
+'''
+
+  root_name = os.path.basename(dsym)[:-5]  # whatever.dSYM without .dSYM
+  uuid_dict = ""
+  for arch in sorted(uuids):
+    uuid_dict += "\t\t\t<key>" + arch + "</key>\n"\
+                 "\t\t\t<string>" + uuids[arch] + "</string>\n"
+  info_dict = {
+    "root_name": root_name,
+    "uuid_dict": uuid_dict,
+  }
+  info_contents = info_template % info_dict
+  info_file = os.path.join(dsym, "Contents", "Info.plist")
+  info_fd = open(info_file, "w")
+  info_fd.write(info_contents)
+  info_fd.close()
+
+  return True
+
+# For a Mach-O file, determines where the .dSYM bundle should be located. If
+# the bundle does not exist or has a modification time older than the Mach-O
+# file, calls make_fake_dsym to create a fake .dSYM bundle there, then strips
+# the Mach-O file and sets the modification time on the .dSYM bundle and Mach-O
+# file to be identical.
+def strip_and_make_fake_dsym(macho): + dsym = dsym_path(macho) + macho_stat = os.stat(macho) + dsym_stat = None + try: + dsym_stat = os.stat(dsym) + except OSError, (err, error_string): + if err != errno.ENOENT: + raise + + if dsym_stat is None or dsym_stat.st_mtime < macho_stat.st_mtime: + # Make a .dSYM bundle + if not make_fake_dsym(macho, dsym): + return False + + # Strip the Mach-O file + remove_dsym = True + try: + strip_cmdline = ['xcrun', 'strip'] + sys.argv[1:] + strip_cmd = subprocess.Popen(strip_cmdline) + if strip_cmd.wait() == 0: + remove_dsym = False + finally: + if remove_dsym: + shutil.rmtree(dsym) + + # Update modification time on the Mach-O file and .dSYM bundle + now = time.time() + os.utime(macho, (now, now)) + os.utime(dsym, (now, now)) + + return True + +def main(argv=None): + if argv is None: + argv = sys.argv + + # This only supports operating on one file at a time. Look at the arguments + # to strip to figure out what the source to be stripped is. Arguments are + # processed in the same way that strip does, although to reduce complexity, + # this doesn't do all of the same checking as strip. For example, strip + # has no -Z switch and would treat -Z on the command line as an error. For + # the purposes this is needed for, that's fine. + macho = None + process_switches = True + ignore_argument = False + for arg in argv[1:]: + if ignore_argument: + ignore_argument = False + continue + if process_switches: + if arg == "-": + process_switches = False + # strip has these switches accept an argument: + if arg in ["-s", "-R", "-d", "-o", "-arch"]: + ignore_argument = True + if arg[0] == "-": + continue + if macho is None: + macho = arg + else: + print >> sys.stderr, "Too many things to strip" + return 1 + + if macho is None: + print >> sys.stderr, "Nothing to strip" + return 1 + + if not strip_and_make_fake_dsym(macho): + return 1 + + return 0 + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/build/mac/tweak_info_plist.gni b/build/mac/tweak_info_plist.gni new file mode 100644 index 00000000000..d24a95d2ef7 --- /dev/null +++ b/build/mac/tweak_info_plist.gni @@ -0,0 +1,46 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Template to run the tweak_info_plist.py script on a plist. +# +# Arguments: +# +# info_plist: +# string, the plist to tweak. +# +# args: +# list of string, the arguments to pass to the tweak_info_plist.py +# script. +# +# Callers should use get_target_outputs() to get the output name. 
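+#
+# Example use (a hypothetical target; the plist path is a placeholder, and
+# the args correspond to flags defined in build/mac/tweak_info_plist.py):
+#
+#   tweak_info_plist("example_info_plist") {
+#     info_plist = "//example/app/Info.plist"
+#     args = [
+#       "--breakpad=0",
+#       "--keystone=0",
+#       "--scm=1",
+#     ]
+#   }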
+template("tweak_info_plist") { + assert(defined(invoker.info_plist), + "The info_plist must be specified in $target_name") + assert(defined(invoker.args), + "The args to tweak_info_plist.py must be specified in $target_name") + + action(target_name) { + forward_variables_from(invoker, [ "testonly" ]) + script = "//build/mac/tweak_info_plist.py" + inputs = [ + script, + "//build/util/version.py", + "//build/util/LASTCHANGE", + "//chrome/VERSION", + ] + sources = [ + invoker.info_plist, + ] + _output_name = "$target_gen_dir/${target_name}_tweaked.plist" + outputs = [ + _output_name, + ] + args = invoker.args + [ + "--plist", + rebase_path(invoker.info_plist, root_build_dir), + "--output", + rebase_path(_output_name, root_build_dir), + ] + } +} diff --git a/build/mac/tweak_info_plist.py b/build/mac/tweak_info_plist.py new file mode 100644 index 00000000000..920c62daf99 --- /dev/null +++ b/build/mac/tweak_info_plist.py @@ -0,0 +1,359 @@ +#!/usr/bin/env python + +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# +# Xcode supports build variable substitutions and CPP; sadly, that doesn't work +# because: +# +# 1. Xcode wants to do the Info.plist work before it runs any build phases, +# this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER +# we'd have to put it in another target so it runs in time. +# 2. Xcode also doesn't check to see if the header being used as a prefix for +# the Info.plist has changed. So even if we updated it, it's only looking +# at the modtime of the info.plist to see if that's changed. +# +# So, we work around all of this by making a script build phase that will run +# during the app build, and simply update the info.plist in place. This way +# by the time the app target is done, the info.plist is correct. +# + +import optparse +import os +import plistlib +import re +import subprocess +import sys +import tempfile + +TOP = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) + + +def _ConvertPlist(source_plist, output_plist, fmt): + """Convert |source_plist| to |fmt| and save as |output_plist|.""" + return subprocess.call( + ['plutil', '-convert', fmt, '-o', output_plist, source_plist]) + + +def _GetOutput(args): + """Runs a subprocess and waits for termination. Returns (stdout, returncode) + of the process. stderr is attached to the parent.""" + proc = subprocess.Popen(args, stdout=subprocess.PIPE) + (stdout, stderr) = proc.communicate() + return (stdout, proc.returncode) + + +def _GetOutputNoError(args): + """Similar to _GetOutput() but ignores stderr. If there's an error launching + the child (like file not found), the exception will be caught and (None, 1) + will be returned to mimic quiet failure.""" + try: + proc = subprocess.Popen(args, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + except OSError: + return (None, 1) + (stdout, stderr) = proc.communicate() + return (stdout, proc.returncode) + + +def _RemoveKeys(plist, *keys): + """Removes a varargs of keys from the plist.""" + for key in keys: + try: + del plist[key] + except KeyError: + pass + + +def _ApplyVersionOverrides(version, keys, overrides, separator='.'): + """Applies version overrides. + + Given a |version| string as "a.b.c.d" (assuming a default separator) with + version components named by |keys| then overrides any value that is present + in |overrides|. 
+ + >>> _ApplyVersionOverrides('a.b', ['major', 'minor'], {'minor': 'd'}) + 'a.d' + """ + if not overrides: + return version + version_values = version.split(separator) + for i, (key, value) in enumerate(zip(keys, version_values)): + if key in overrides: + version_values[i] = overrides[key] + return separator.join(version_values) + + +def _GetVersion(version_format, values, overrides=None): + """Generates a version number according to |version_format| using the values + from |values| or |overrides| if given.""" + result = version_format + for key in values: + if overrides and key in overrides: + value = overrides[key] + else: + value = values[key] + result = result.replace('@%s@' % key, value) + return result + + +def _AddVersionKeys( + plist, version_format_for_key, version=None, overrides=None): + """Adds the product version number into the plist. Returns True on success and + False on error. The error will be printed to stderr.""" + if not version: + # Pull in the Chrome version number. + VERSION_TOOL = os.path.join(TOP, 'build/util/version.py') + VERSION_FILE = os.path.join(TOP, 'chrome/VERSION') + (stdout, retval) = _GetOutput([ + VERSION_TOOL, '-f', VERSION_FILE, + '-t', '@MAJOR@.@MINOR@.@BUILD@.@PATCH@']) + + # If the command finished with a non-zero return code, then report the + # error up. + if retval != 0: + return False + + version = stdout.strip() + + # Parse the given version number, that should be in MAJOR.MINOR.BUILD.PATCH + # format (where each value is a number). Note that str.isdigit() returns + # True if the string is composed only of digits (and thus match \d+ regexp). + groups = version.split('.') + if len(groups) != 4 or not all(element.isdigit() for element in groups): + print >>sys.stderr, 'Invalid version string specified: "%s"' % version + return False + values = dict(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), groups)) + + for key in version_format_for_key: + plist[key] = _GetVersion(version_format_for_key[key], values, overrides) + + # Return with no error. + return True + + +def _DoSCMKeys(plist, add_keys): + """Adds the SCM information, visible in about:version, to property list. If + |add_keys| is True, it will insert the keys, otherwise it will remove them.""" + scm_revision = None + if add_keys: + # Pull in the Chrome revision number. + VERSION_TOOL = os.path.join(TOP, 'build/util/version.py') + LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE') + (stdout, retval) = _GetOutput([VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t', + '@LASTCHANGE@']) + if retval: + return False + scm_revision = stdout.rstrip() + + # See if the operation failed. + _RemoveKeys(plist, 'SCMRevision') + if scm_revision != None: + plist['SCMRevision'] = scm_revision + elif add_keys: + print >>sys.stderr, 'Could not determine SCM revision. This may be OK.' + + return True + + +def _AddBreakpadKeys(plist, branding, platform): + """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and + also requires the |branding| argument.""" + plist['BreakpadReportInterval'] = '3600' # Deliberately a string. + plist['BreakpadProduct'] = '%s_%s' % (branding, platform) + plist['BreakpadProductDisplay'] = branding + plist['BreakpadURL'] = 'https://clients2.google.com/cr/report' + + # These are both deliberately strings and not boolean. 
+ plist['BreakpadSendAndExit'] = 'YES' + plist['BreakpadSkipConfirm'] = 'YES' + + +def _RemoveBreakpadKeys(plist): + """Removes any set Breakpad keys.""" + _RemoveKeys(plist, + 'BreakpadURL', + 'BreakpadReportInterval', + 'BreakpadProduct', + 'BreakpadProductDisplay', + 'BreakpadVersion', + 'BreakpadSendAndExit', + 'BreakpadSkipConfirm') + + +def _TagSuffixes(): + # Keep this list sorted in the order that tag suffix components are to + # appear in a tag value. That is to say, it should be sorted per ASCII. + components = ('full',) + assert tuple(sorted(components)) == components + + components_len = len(components) + combinations = 1 << components_len + tag_suffixes = [] + for combination in xrange(0, combinations): + tag_suffix = '' + for component_index in xrange(0, components_len): + if combination & (1 << component_index): + tag_suffix += '-' + components[component_index] + tag_suffixes.append(tag_suffix) + return tag_suffixes + + +def _AddKeystoneKeys(plist, bundle_identifier): + """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and + also requires the |bundle_identifier| argument (com.example.product).""" + plist['KSVersion'] = plist['CFBundleShortVersionString'] + plist['KSProductID'] = bundle_identifier + plist['KSUpdateURL'] = 'https://tools.google.com/service/update2' + + _RemoveKeys(plist, 'KSChannelID') + for tag_suffix in _TagSuffixes(): + if tag_suffix: + plist['KSChannelID' + tag_suffix] = tag_suffix + + +def _RemoveKeystoneKeys(plist): + """Removes any set Keystone keys.""" + _RemoveKeys(plist, + 'KSVersion', + 'KSProductID', + 'KSUpdateURL') + + tag_keys = [] + for tag_suffix in _TagSuffixes(): + tag_keys.append('KSChannelID' + tag_suffix) + _RemoveKeys(plist, *tag_keys) + + +def Main(argv): + parser = optparse.OptionParser('%prog [options]') + parser.add_option('--plist', dest='plist_path', action='store', + type='string', default=None, help='The path of the plist to tweak.') + parser.add_option('--output', dest='plist_output', action='store', + type='string', default=None, help='If specified, the path to output ' + \ + 'the tweaked plist, rather than overwriting the input.') + parser.add_option('--breakpad', dest='use_breakpad', action='store', + type='int', default=False, help='Enable Breakpad [1 or 0]') + parser.add_option('--keystone', dest='use_keystone', action='store', + type='int', default=False, help='Enable Keystone [1 or 0]') + parser.add_option('--scm', dest='add_scm_info', action='store', type='int', + default=True, help='Add SCM metadata [1 or 0]') + parser.add_option('--branding', dest='branding', action='store', + type='string', default=None, help='The branding of the binary') + parser.add_option('--bundle_id', dest='bundle_identifier', + action='store', type='string', default=None, + help='The bundle id of the binary') + parser.add_option('--platform', choices=('ios', 'mac'), default='mac', + help='The target platform of the bundle') + parser.add_option('--version-overrides', action='append', + help='Key-value pair to override specific component of version ' + 'like key=value (can be passed multiple time to configure ' + 'more than one override)') + parser.add_option('--format', choices=('binary1', 'xml1', 'json'), + default='xml1', help='Format to use when writing property list ' + '(default: %(default)s)') + parser.add_option('--version', dest='version', action='store', type='string', + default=None, help='The version string [major.minor.build.patch]') + (options, args) = parser.parse_args(argv) + + if len(args) > 0: + print 
>>sys.stderr, parser.get_usage()
+    return 1
+
+  if not options.plist_path:
+    print >>sys.stderr, 'No --plist specified.'
+    return 1
+
+  # Read the plist into its parsed format. Convert the file to 'xml1' as
+  # plistlib only supports that format in Python 2.7.
+  with tempfile.NamedTemporaryFile() as temp_info_plist:
+    retcode = _ConvertPlist(options.plist_path, temp_info_plist.name, 'xml1')
+    if retcode != 0:
+      return retcode
+    plist = plistlib.readPlist(temp_info_plist.name)
+
+  # Convert overrides.
+  overrides = {}
+  if options.version_overrides:
+    for pair in options.version_overrides:
+      if not '=' in pair:
+        print >>sys.stderr, 'Invalid value for --version-overrides:', pair
+        return 1
+      key, value = pair.split('=', 1)
+      overrides[key] = value
+      if key not in ('MAJOR', 'MINOR', 'BUILD', 'PATCH'):
+        print >>sys.stderr, 'Unsupported key for --version-overrides:', key
+        return 1
+
+  if options.platform == 'mac':
+    version_format_for_key = {
+      # Add public version info so "Get Info" works.
+      'CFBundleShortVersionString': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@',
+
+      # Honor the 429496.72.95 limit. The maximum comes from splitting
+      # 2^32 - 1 into 6, 2, 2 digits. The limitation was present in Tiger,
+      # and it may have been fixed in a later OS release, but that hasn't
+      # been tested (it's easy enough to find out with "lsregister -dump").
+      # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+      # BUILD will always be an increasing value, so BUILD.PATCH gives us
+      # something unique that meets what LS wants.
+      'CFBundleVersion': '@BUILD@.@PATCH@',
+    }
+  else:
+    version_format_for_key = {
+      'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@',
+      'CFBundleVersion': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
+    }
+
+  if options.use_breakpad:
+    version_format_for_key['BreakpadVersion'] = \
+        '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
+
+  # Insert the product version.
+  if not _AddVersionKeys(
+      plist, version_format_for_key, version=options.version,
+      overrides=overrides):
+    return 2
+
+  # Add Breakpad if configured to do so.
+  if options.use_breakpad:
+    if options.branding is None:
+      print >>sys.stderr, 'Use of Breakpad requires branding.'
+      return 1
+    # Map gyp "OS" / gn "target_os" passed via the --platform parameter to
+    # the platform as known by breakpad.
+    platform = {'mac': 'Mac', 'ios': 'iOS'}[options.platform]
+    _AddBreakpadKeys(plist, options.branding, platform)
+  else:
+    _RemoveBreakpadKeys(plist)
+
+  # Add Keystone if configured to do so.
+  if options.use_keystone:
+    if options.bundle_identifier is None:
+      print >>sys.stderr, 'Use of Keystone requires the bundle id.'
+      return 1
+    _AddKeystoneKeys(plist, options.bundle_identifier)
+  else:
+    _RemoveKeystoneKeys(plist)
+
+  # Adds or removes any SCM keys.
+  if not _DoSCMKeys(plist, options.add_scm_info):
+    return 3
+
+  output_path = options.plist_path
+  if options.plist_output is not None:
+    output_path = options.plist_output
+
+  # Now that all keys have been mutated, rewrite the file.
+  with tempfile.NamedTemporaryFile() as temp_info_plist:
+    plistlib.writePlist(plist, temp_info_plist.name)
+
+    # Convert Info.plist to the format requested by the --format flag. Any
+    # format would work on Mac, but iOS requires a specific format.
+ return _ConvertPlist(temp_info_plist.name, output_path, options.format) + + +if __name__ == '__main__': + sys.exit(Main(sys.argv[1:])) diff --git a/build/mac_toolchain.py b/build/mac_toolchain.py new file mode 100644 index 00000000000..28cbd84d299 --- /dev/null +++ b/build/mac_toolchain.py @@ -0,0 +1,209 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Download necessary mac toolchain files under certain conditions. If +xcode-select is already set and points to an external folder +(e.g. /Application/Xcode.app), this script only runs if the GYP_DEFINE +|force_mac_toolchain| is set. To override the values in +|TOOLCHAIN_REVISION|-|TOOLCHAIN_SUB_REVISION| below, GYP_DEFINE +mac_toolchain_revision can be used instead. + +This script will only run on machines if /usr/bin/xcodebuild and +/usr/bin/xcode-select has been added to the sudoers list so the license can be +accepted. + +Otherwise, user input would be required to complete the script. Perhaps future +versions can be modified to allow for user input on developer machines. +""" + +import os +import plistlib +import shutil +import subprocess +import sys +import tarfile +import time +import tempfile +import urllib2 + +# This can be changed after running /build/package_mac_toolchain.py. +TOOLCHAIN_REVISION = '5B1008' +TOOLCHAIN_SUB_REVISION = 2 +TOOLCHAIN_VERSION = '%s-%s' % (TOOLCHAIN_REVISION, TOOLCHAIN_SUB_REVISION) + +BASE_DIR = os.path.abspath(os.path.dirname(__file__)) +TOOLCHAIN_BUILD_DIR = os.path.join(BASE_DIR, 'mac_files', 'Xcode.app') +STAMP_FILE = os.path.join(BASE_DIR, 'mac_files', 'toolchain_build_revision') +TOOLCHAIN_URL = 'gs://chrome-mac-sdk/' + + +def GetToolchainDirectory(): + if sys.platform == 'darwin' and not UseLocalMacSDK(): + return TOOLCHAIN_BUILD_DIR + else: + return None + + +def SetToolchainEnvironment(): + mac_toolchain_dir = GetToolchainDirectory() + if mac_toolchain_dir: + os.environ['DEVELOPER_DIR'] = mac_toolchain_dir + + +def ReadStampFile(): + """Return the contents of the stamp file, or '' if it doesn't exist.""" + try: + with open(STAMP_FILE, 'r') as f: + return f.read().rstrip() + except IOError: + return '' + + +def WriteStampFile(s): + """Write s to the stamp file.""" + EnsureDirExists(os.path.dirname(STAMP_FILE)) + with open(STAMP_FILE, 'w') as f: + f.write(s) + f.write('\n') + + +def EnsureDirExists(path): + if not os.path.exists(path): + os.makedirs(path) + + +def DownloadAndUnpack(url, output_dir): + """Decompresses |url| into a cleared |output_dir|.""" + temp_name = tempfile.mktemp(prefix='mac_toolchain') + try: + print 'Downloading new toolchain.' + subprocess.check_call(['gsutil.py', 'cp', url, temp_name]) + if os.path.exists(output_dir): + print 'Deleting old toolchain.' + shutil.rmtree(output_dir) + EnsureDirExists(output_dir) + print 'Unpacking new toolchain.' 
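+    # (The archive is expected to be a gzipped tarball of the Xcode.app
+    # contents, as produced by build/package_mac_toolchain.py.)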
+ tarfile.open(mode='r:gz', name=temp_name).extractall(path=output_dir) + finally: + if os.path.exists(temp_name): + os.unlink(temp_name) + + +def CanAccessToolchainBucket(): + """Checks whether the user has access to |TOOLCHAIN_URL|.""" + proc = subprocess.Popen(['gsutil.py', 'ls', TOOLCHAIN_URL], + stdout=subprocess.PIPE) + proc.communicate() + return proc.returncode == 0 + +def LoadPlist(path): + """Loads Plist at |path| and returns it as a dictionary.""" + fd, name = tempfile.mkstemp() + try: + subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path]) + with os.fdopen(fd, 'r') as f: + return plistlib.readPlist(f) + finally: + os.unlink(name) + + +def AcceptLicense(): + """Use xcodebuild to accept new toolchain license if necessary. Don't accept + the license if a newer license has already been accepted. This only works if + xcodebuild and xcode-select are passwordless in sudoers.""" + + # Check old license + try: + target_license_plist_path = \ + os.path.join(TOOLCHAIN_BUILD_DIR, + *['Contents','Resources','LicenseInfo.plist']) + target_license_plist = LoadPlist(target_license_plist_path) + build_type = target_license_plist['licenseType'] + build_version = target_license_plist['licenseID'] + + accepted_license_plist = LoadPlist( + '/Library/Preferences/com.apple.dt.Xcode.plist') + agreed_to_key = 'IDELast%sLicenseAgreedTo' % build_type + last_license_agreed_to = accepted_license_plist[agreed_to_key] + + # Historically all Xcode build numbers have been in the format of AANNNN, so + # a simple string compare works. If Xcode's build numbers change this may + # need a more complex compare. + if build_version <= last_license_agreed_to: + # Don't accept the license of older toolchain builds, this will break the + # license of newer builds. + return + except (subprocess.CalledProcessError, KeyError): + # If there's never been a license of type |build_type| accepted, + # |target_license_plist_path| or |agreed_to_key| may not exist. + pass + + print "Accepting license." + old_path = subprocess.Popen(['/usr/bin/xcode-select', '-p'], + stdout=subprocess.PIPE).communicate()[0].strip() + try: + build_dir = os.path.join(TOOLCHAIN_BUILD_DIR, 'Contents/Developer') + subprocess.check_call(['sudo', '/usr/bin/xcode-select', '-s', build_dir]) + subprocess.check_call(['sudo', '/usr/bin/xcodebuild', '-license', 'accept']) + finally: + subprocess.check_call(['sudo', '/usr/bin/xcode-select', '-s', old_path]) + + +def UseLocalMacSDK(): + force_pull = os.environ.has_key('FORCE_MAC_TOOLCHAIN') + + # Don't update the toolchain if there's already one installed outside of the + # expected location for a Chromium mac toolchain, unless |force_pull| is set. + proc = subprocess.Popen(['xcode-select', '-p'], stdout=subprocess.PIPE) + xcode_select_dir = proc.communicate()[0] + rc = proc.returncode + return (not force_pull and rc == 0 and + TOOLCHAIN_BUILD_DIR not in xcode_select_dir) + + +def main(): + if sys.platform != 'darwin': + return 0 + + # TODO(justincohen): Add support for GN per crbug.com/570091 + if UseLocalMacSDK(): + print 'Using local toolchain.' + return 0 + + toolchain_revision = os.environ.get('MAC_TOOLCHAIN_REVISION', + TOOLCHAIN_VERSION) + if ReadStampFile() == toolchain_revision: + print 'Toolchain (%s) is already up to date.' % toolchain_revision + AcceptLicense() + return 0 + + if not CanAccessToolchainBucket(): + print 'Cannot access toolchain bucket.' + return 0 + + # Reset the stamp file in case the build is unsuccessful. 
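+  # (An empty stamp will not match |toolchain_revision| on the next run, so
+  # the download is retried if anything below fails.)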
+ WriteStampFile('') + + toolchain_file = '%s.tgz' % toolchain_revision + toolchain_full_url = TOOLCHAIN_URL + toolchain_file + + print 'Updating toolchain to %s...' % toolchain_revision + try: + toolchain_file = 'toolchain-%s.tgz' % toolchain_revision + toolchain_full_url = TOOLCHAIN_URL + toolchain_file + DownloadAndUnpack(toolchain_full_url, TOOLCHAIN_BUILD_DIR) + AcceptLicense() + + print 'Toolchain %s unpacked.' % toolchain_revision + WriteStampFile(toolchain_revision) + return 0 + except Exception as e: + print 'Failed to download toolchain %s.' % toolchain_file + print 'Exception %s' % e + print 'Exiting.' + return 1 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/nocompile.gni b/build/nocompile.gni new file mode 100644 index 00000000000..1b292d79d19 --- /dev/null +++ b/build/nocompile.gni @@ -0,0 +1,92 @@ +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an target to create a unittest that +# invokes a set of no-compile tests. A no-compile test is a test that asserts +# a particular construct will not compile. +# +# Also see: +# http://dev.chromium.org/developers/testing/no-compile-tests +# +# To use this, create a gyp target with the following form: +# +# import("//build/nocompile.gni") +# nocompile_test("my_module_nc_unittests") { +# sources = [ +# 'nc_testset_1.nc', +# 'nc_testset_2.nc', +# ] +# } +# +# The .nc files are C++ files that contain code we wish to assert will not +# compile. Each individual test case in the file should be put in its own +# #ifdef section. The expected output should be appended with a C++-style +# comment that has a python list of regular expressions. This will likely +# be greater than 80-characters. Giving a solid expected output test is +# important so that random compile failures do not cause the test to pass. +# +# Example .nc file: +# +# #if defined(TEST_NEEDS_SEMICOLON) // [r"expected ',' or ';' at end of input"] +# +# int a = 1 +# +# #elif defined(TEST_NEEDS_CAST) // [r"invalid conversion from 'void*' to 'char*'"] +# +# void* a = NULL; +# char* b = a; +# +# #endif +# +# If we needed disable TEST_NEEDS_SEMICOLON, then change the define to: +# +# DISABLE_TEST_NEEDS_SEMICOLON +# TEST_NEEDS_CAST +# +# The lines above are parsed by a regexp so avoid getting creative with the +# formatting or ifdef logic; it will likely just not work. +# +# Implementation notes: +# The .nc files are actually processed by a python script which executes the +# compiler and generates a .cc file that is empty on success, or will have a +# series of #error lines on failure, and a set of trivially passing gunit +# TEST() functions on success. This allows us to fail at the compile step when +# something goes wrong, and know during the unittest run that the test was at +# least processed when things go right. + +import("//testing/test.gni") + +declare_args() { + # TODO(crbug.com/105388): Disabled until http://crbug.com/105388 is resolved. + enable_nocompile_tests = false +} + +if (enable_nocompile_tests) { + template("nocompile_test") { + nocompile_target = target_name + "_run_nocompile" + + action_foreach(nocompile_target) { + script = "//tools/nocompile_driver.py" + sources = invoker.sources + + result_path = "$target_gen_dir/{{source_name_part}}_nc.cc" + depfile = "${result_path}.d" + outputs = [ + result_path, + ] + args = [ + "4", # number of compilers to invoke in parallel. 
+ "{{source}}", + "-Wall -Werror -Wfatal-errors " + "-I" + + rebase_path("//", root_build_dir), + "{{output}}", + ] + } + + test(target_name) { + deps = invoker.deps + [ ":$nocompile_target" ] + sources = get_target_outputs(":$nocompile_target") + } + } +} diff --git a/build/nocompile.gypi b/build/nocompile.gypi new file mode 100644 index 00000000000..4f61add0dd9 --- /dev/null +++ b/build/nocompile.gypi @@ -0,0 +1,97 @@ +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an target to create a unittest that +# invokes a set of no-compile tests. A no-compile test is a test that asserts +# a particular construct will not compile. +# +# Also see: +# http://dev.chromium.org/developers/testing/no-compile-tests +# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'my_module_nc_unittests', +# 'type': 'executable', +# 'sources': [ +# 'nc_testset_1.nc', +# 'nc_testset_2.nc', +# ], +# 'includes': ['path/to/this/gypi/file'], +# } +# +# The .nc files are C++ files that contain code we wish to assert will not +# compile. Each individual test case in the file should be put in its own +# #ifdef section. The expected output should be appended with a C++-style +# comment that has a python list of regular expressions. This will likely +# be greater than 80-characters. Giving a solid expected output test is +# important so that random compile failures do not cause the test to pass. +# +# Example .nc file: +# +# #if defined(TEST_NEEDS_SEMICOLON) // [r"expected ',' or ';' at end of input"] +# +# int a = 1 +# +# #elif defined(TEST_NEEDS_CAST) // [r"invalid conversion from 'void*' to 'char*'"] +# +# void* a = NULL; +# char* b = a; +# +# #endif +# +# If we needed disable TEST_NEEDS_SEMICOLON, then change the define to: +# +# DISABLE_TEST_NEEDS_SEMICOLON +# TEST_NEEDS_CAST +# +# The lines above are parsed by a regexp so avoid getting creative with the +# formatting or ifdef logic; it will likely just not work. +# +# Implementation notes: +# The .nc files are actually processed by a python script which executes the +# compiler and generates a .cc file that is empty on success, or will have a +# series of #error lines on failure, and a set of trivially passing gunit +# TEST() functions on success. This allows us to fail at the compile step when +# something goes wrong, and know during the unittest run that the test was at +# least processed when things go right. + +{ + # TODO(awong): Disabled until http://crbug.com/105388 is resolved. + 'sources/': [['exclude', '\\.nc$']], + 'conditions': [ + [ 'OS!="win" and clang==1', { + 'rules': [ + { + 'variables': { + 'nocompile_driver': '<(DEPTH)/tools/nocompile_driver.py', + 'nc_result_path': ('<(INTERMEDIATE_DIR)/<(module_dir)/' + '<(RULE_INPUT_ROOT)_nc.cc'), + }, + 'rule_name': 'run_nocompile', + 'extension': 'nc', + 'inputs': [ + '<(nocompile_driver)', + ], + 'outputs': [ + '<(nc_result_path)' + ], + 'depfile': '<(nc_result_path).d', + 'action': [ + 'python', + '<(nocompile_driver)', + '4', # number of compilers to invoke in parallel. 
+ '<(RULE_INPUT_PATH)', + '-Wall -Werror -Wfatal-errors -I<(DEPTH)', + '<(nc_result_path)', + ], + 'message': 'Generating no compile results for <(RULE_INPUT_PATH)', + 'process_outputs_as_sources': 1, + }, + ], + }, { + 'sources/': [['exclude', '\\.nc$']] + }], # 'OS!="win" and clang=="1"' + ], +} + diff --git a/build/package_mac_toolchain.py b/build/package_mac_toolchain.py new file mode 100644 index 00000000000..073d2cb7f72 --- /dev/null +++ b/build/package_mac_toolchain.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Compress and upload Mac toolchain files.""" + +import argparse +import glob +import os +import plistlib +import re +import subprocess +import sys +import tarfile +import tempfile + + +TOOLCHAIN_URL = "gs://chrome-mac-sdk" + +# It's important to at least remove unused Platform folders to cut down on the +# size of the toolchain folder. There are other various unused folders that +# have been removed through trial and error. If future versions of Xcode become +# problematic it's possible this list is incorrect, and can be reduced to just +# the unused platforms. On the flip side, it's likely more directories can be +# excluded. +EXCLUDE_FOLDERS = [ +'Contents/Applications', +'Contents/Developer/Documentation', +'Contents/Developer/Platforms/AppleTVOS.platform', +'Contents/Developer/Platforms/AppleTVSimulator.platform', +'Contents/Developer/Platforms/WatchOS.platform', +'Contents/Developer/Platforms/WatchSimulator.platform', +'Contents/Developer/Platforms/iPhoneOS.platform', +'Contents/Developer/Platforms/iPhoneSimulator.platform', +'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-migrator', +'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift', +'Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/' + 'usr/share/man', +'Contents/Developer/Library/Xcode/Templates' +] + + +def main(): + """Compress |target_dir| and upload to |TOOLCHAIN_URL|""" + parser = argparse.ArgumentParser() + parser.add_argument('target_dir', + help="Xcode installation directory.") + args = parser.parse_args() + + # Verify this looks like an Xcode directory. + contents_dir = os.path.join(args.target_dir, 'Contents') + plist_file = os.path.join(contents_dir, 'version.plist') + try: + info = plistlib.readPlist(plist_file) + except: + print "Invalid Xcode dir." + return 0 + build_version = info['ProductBuildVersion'] + + # Look for previous toolchain tgz files with the same |build_version|. + wildcard_filename = '%s/toolchain-%s-*.tgz' % (TOOLCHAIN_URL, build_version) + p = subprocess.Popen(['gsutil.py', 'ls', wildcard_filename], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + output = p.communicate()[0] + next_count = 1 + if p.returncode == 0: + next_count = len(output.split('\n')) + sys.stdout.write("%s already exists (%s). " + "Do you want to create another? [y/n] " + % (build_version, next_count - 1)) + + if raw_input().lower() not in set(['yes','y', 'ye']): + print "Skipping duplicate upload." + return 0 + + os.chdir(args.target_dir) + toolchain_file_name = "toolchain-%s-%s" % (build_version, next_count) + toolchain_name = tempfile.mktemp(suffix='toolchain.tgz') + + print "Creating %s (%s)." 
% (toolchain_file_name, toolchain_name) + os.environ["COPYFILE_DISABLE"] = "1" + args = ['tar', '-cvzf', toolchain_name] + args.extend(map('--exclude={0}'.format, EXCLUDE_FOLDERS)) + args.extend(['.']) + subprocess.check_call(args) + + print "Uploading %s toolchain." % toolchain_file_name + destination_path = '%s/%s.tgz' % (TOOLCHAIN_URL, toolchain_file_name) + subprocess.check_call(['gsutil.py', 'cp', '-n', '-a', 'public-read', + toolchain_name, destination_path]) + + print "Done with %s upload." % toolchain_file_name + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/precompile.cc b/build/precompile.cc new file mode 100644 index 00000000000..db1ef6dfe55 --- /dev/null +++ b/build/precompile.cc @@ -0,0 +1,7 @@ +// Copyright (c) 2011 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Precompiled header generator for Windows builds. No include is needed +// in this file as the PCH include is forced via the "Forced Include File" +// flag in the projects generated by GYP. diff --git a/build/precompile.h b/build/precompile.h new file mode 100644 index 00000000000..50a9b87e2fb --- /dev/null +++ b/build/precompile.h @@ -0,0 +1,57 @@ +// Copyright (c) 2012 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is used as a precompiled header for both C and C++ files. So +// any C++ headers must go in the __cplusplus block below. + +#if defined(BUILD_PRECOMPILE_H_) +#error You shouldn't include the precompiled header file more than once. +#endif + +#define BUILD_PRECOMPILE_H_ + +#define _USE_MATH_DEFINES + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(__cplusplus) + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#endif // __cplusplus diff --git a/build/print_python_deps.py b/build/print_python_deps.py new file mode 100644 index 00000000000..3d0c9a8e140 --- /dev/null +++ b/build/print_python_deps.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Prints all non-system dependencies for the given module. + +The primary use-case for this script is to genererate the list of python modules +required for .isolate files. +""" + +import argparse +import imp +import os +import pipes +import sys + +# Don't use any helper modules, or else they will end up in the results. + + +_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + + +def _ComputePythonDependencies(): + """Gets the paths of imported non-system python modules. + + A path is assumed to be a "system" import if it is outside of chromium's + src/. The paths will be relative to the current directory. 
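+
+  For example, an import resolved under src/ such as build/android/pylib/
+  (a hypothetical path) is kept, while /usr/lib/python2.7/argparse.py would
+  be filtered out as a system module.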
+ """ + module_paths = (m.__file__ for m in sys.modules.values() + if m and hasattr(m, '__file__')) + + src_paths = set() + for path in module_paths: + if path == __file__: + continue + path = os.path.abspath(path) + if not path.startswith(_SRC_ROOT): + continue + + if path.endswith('.pyc'): + path = path[:-1] + src_paths.add(path) + + return src_paths + + +def _NormalizeCommandLine(options): + """Returns a string that when run from SRC_ROOT replicates the command.""" + args = ['build/print_python_deps.py'] + root = os.path.relpath(options.root, _SRC_ROOT) + if root != '.': + args.extend(('--root', root)) + if options.output: + args.extend(('--output', os.path.relpath(options.output, _SRC_ROOT))) + for whitelist in sorted(options.whitelists): + args.extend(('--whitelist', os.path.relpath(whitelist, _SRC_ROOT))) + args.append(os.path.relpath(options.module, _SRC_ROOT)) + return ' '.join(pipes.quote(x) for x in args) + + +def _FindPythonInDirectory(directory): + """Returns an iterable of all non-test python files in the given directory.""" + files = [] + for root, _dirnames, filenames in os.walk(directory): + for filename in filenames: + if filename.endswith('.py') and not filename.endswith('_test.py'): + yield os.path.join(root, filename) + + +def main(): + parser = argparse.ArgumentParser( + description='Prints all non-system dependencies for the given module.') + parser.add_argument('module', + help='The python module to analyze.') + parser.add_argument('--root', default='.', + help='Directory to make paths relative to.') + parser.add_argument('--output', + help='Write output to a file rather than stdout.') + parser.add_argument('--whitelist', default=[], action='append', + dest='whitelists', + help='Recursively include all non-test python files ' + 'within this directory. May be specified multiple times.') + options = parser.parse_args() + sys.path.append(os.path.dirname(options.module)) + imp.load_source('NAME', options.module) + + paths_set = _ComputePythonDependencies() + for path in options.whitelists: + paths_set.update(os.path.abspath(p) for p in _FindPythonInDirectory(path)) + + paths = [os.path.relpath(p, options.root) for p in paths_set] + + normalized_cmdline = _NormalizeCommandLine(options) + out = open(options.output, 'w') if options.output else sys.stdout + with out: + out.write('# Generated by running:\n') + out.write('# %s\n' % normalized_cmdline) + for path in sorted(paths): + out.write(path + '\n') + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/protoc.gypi b/build/protoc.gypi new file mode 100644 index 00000000000..7563aa234a1 --- /dev/null +++ b/build/protoc.gypi @@ -0,0 +1,164 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide a rule +# to invoke protoc in a consistent manner. For Java-targets, see +# protoc_java.gypi. +# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'my_proto_lib', +# 'type': 'static_library', +# 'sources': [ +# 'foo.proto', +# 'bar.proto', +# ], +# 'variables': { +# # Optional, see below: 'proto_in_dir': '.' +# 'proto_out_dir': 'dir/for/my_proto_lib' +# }, +# 'includes': ['path/to/this/gypi/file'], +# } +# If necessary, you may add normal .cc files to the sources list or other gyp +# dependencies. 
The proto headers are guaranteed to be generated before any +# source files, even within this target, are compiled. +# +# The 'proto_in_dir' variable must be the relative path to the +# directory containing the .proto files. If left out, it defaults to '.'. +# +# The 'proto_out_dir' variable specifies the path suffix that output +# files are generated under. Targets that gyp-depend on my_proto_lib +# will be able to include the resulting proto headers with an include +# like: +# #include "dir/for/my_proto_lib/foo.pb.h" +# +# If you need to add an EXPORT macro to a protobuf's c++ header, set the +# 'cc_generator_options' variable with the value: 'dllexport_decl=FOO_EXPORT:' +# e.g. 'dllexport_decl=BASE_EXPORT:' +# +# It is likely you also need to #include a file for the above EXPORT macro to +# work. You can do so with the 'cc_include' variable. +# e.g. 'base/base_export.h' +# +# Implementation notes: +# A proto_out_dir of foo/bar produces +# <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h} +# <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py + +{ + 'variables': { + 'protoc_wrapper': '<(DEPTH)/tools/protoc_wrapper/protoc_wrapper.py', + 'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)', + 'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)', + 'cc_generator_options%': '', + 'generate_python%': 1, + 'generate_cc%': 1, + # Name of plugin executable which generates custom cc stubs. + # If passed, generator_plugin_suffix (before .cc and .h) is also required. + 'generator_plugin%': '', + 'generator_plugin_options%': '', + 'cc_include%': '', + 'proto_in_dir%': '.', + 'conditions': [ + ['use_system_protobuf==0', { + 'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)', + }, { # use_system_protobuf==1 + 'protoc': '(java_out_dir), since that +# is the root directory of all the output. +# +# Implementation notes: +# A target_name of foo and proto-specified 'package' java.package.path produces: +# <(PRODUCT_DIR)/java_proto/foo/{java/package/path/}{Foo,Bar}.java +# where Foo and Bar are taken from 'java_outer_classname' of the protos. +# +# How the .jar-file is created is different than how protoc is used for other +# targets, and as such, this lives in its own file. + +{ + 'variables': { + 'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)android_protoc<(EXECUTABLE_SUFFIX)', + 'java_out_dir': '<(PRODUCT_DIR)/java_proto/<(_target_name)/src', + 'proto_in_dir%': '.', + 'stamp_file': '<(java_out_dir).stamp', + 'script': '<(DEPTH)/build/protoc_java.py', + + # The rest of the variables here are for the java.gypi include. + 'java_in_dir': '<(DEPTH)/build/android/empty', + 'generated_src_dirs': ['<(java_out_dir)'], + # Adding the |stamp_file| to |additional_input_paths| makes the actions in + # the include of java.gypi depend on the genproto_java action. + 'additional_input_paths': ['<(stamp_file)'], + 'run_findbugs': 0, + }, + 'actions': [ + { + 'action_name': 'genproto_java', + 'inputs': [ + '<(script)', + '<(protoc)', + '<@(_sources)', + ], + # We do not know the names of the generated files, so we use a stamp. 
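+      # The stamp is also the action's sole declared output: the build re-runs
+      # protoc whenever a .proto input is newer than the stamp, and the
+      # |additional_input_paths| entry above makes the java compilation in
+      # java.gypi wait for the stamp, so the generated sources exist before
+      # anything tries to compile them.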
+ 'outputs': [ + '<(stamp_file)', + ], + 'action': [ + '<(script)', + '--protoc=<(protoc)', + '--proto-path=<(proto_in_dir)', + '--java-out-dir=<(java_out_dir)', + '--stamp=<(stamp_file)', + '<@(_sources)', + ], + 'message': 'Generating Java code from protobuf files in <(proto_in_dir)', + }, + ], + 'dependencies': [ + '<(DEPTH)/third_party/android_protobuf/android_protobuf.gyp:android_protoc#host', + '<(DEPTH)/third_party/android_protobuf/android_protobuf.gyp:protobuf_nano_javalib', + ], + 'includes': [ 'java.gypi' ], +} diff --git a/build/protoc_java.py b/build/protoc_java.py new file mode 100644 index 00000000000..470667c1f4b --- /dev/null +++ b/build/protoc_java.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Generate java source files from protobuf files. + +This is a helper file for the genproto_java action in protoc_java.gypi. + +It performs the following steps: +1. Deletes all old sources (ensures deleted classes are not part of new jars). +2. Creates source directory. +3. Generates Java files using protoc (output into either --java-out-dir or + --srcjar). +4. Creates a new stamp file. +""" + +import os +import optparse +import shutil +import subprocess +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp")) +from util import build_utils + +def main(argv): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option("--protoc", help="Path to protoc binary.") + parser.add_option("--proto-path", help="Path to proto directory.") + parser.add_option("--java-out-dir", + help="Path to output directory for java files.") + parser.add_option("--srcjar", help="Path to output srcjar.") + parser.add_option("--stamp", help="File to touch on success.") + options, args = parser.parse_args(argv) + + build_utils.CheckOptions(options, parser, ['protoc', 'proto_path']) + if not options.java_out_dir and not options.srcjar: + print 'One of --java-out-dir or --srcjar must be specified.' + return 1 + + with build_utils.TempDir() as temp_dir: + # Specify arguments to the generator. + generator_args = ['optional_field_style=reftypes', + 'store_unknown_fields=true'] + out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir + # Generate Java files using protoc. + build_utils.CheckOutput( + [options.protoc, '--proto_path', options.proto_path, out_arg] + + args) + + if options.java_out_dir: + build_utils.DeleteDirectory(options.java_out_dir) + shutil.copytree(temp_dir, options.java_out_dir) + else: + build_utils.ZipDir(options.srcjar, temp_dir) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + args + [options.protoc] + build_utils.GetPythonDependencies()) + + if options.stamp: + build_utils.Touch(options.stamp) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/redirect_stdout.py b/build/redirect_stdout.py new file mode 100644 index 00000000000..bb94ee82f5f --- /dev/null +++ b/build/redirect_stdout.py @@ -0,0 +1,21 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import subprocess +import sys +import os + +# This script executes a command and redirects the stdout to a file. This is +# equivalent to |command... > output_file|. +# +# Usage: python redirect_stdout.py output_file command... 
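+#
+# For example, a (hypothetical) generator step such as
+#
+#   python redirect_stdout.py out/gen/version.h python tools/make_version.py
+#
+# behaves like |python tools/make_version.py > out/gen/version.h| without
+# relying on shell redirection being available.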
+
+if __name__ == '__main__':
+  # Need an output file plus at least one command argument.
+  if len(sys.argv) < 3:
+    print >> sys.stderr, "Usage: %s output_file command..." % (sys.argv[0])
+    sys.exit(1)
+
+  with open(sys.argv[1], 'w') as fp, open(os.devnull, 'w') as fnull:
+    sys.exit(subprocess.check_call(sys.argv[2:], stdout=fp, stderr=fnull))
diff --git a/build/release.gypi b/build/release.gypi
new file mode 100644
index 00000000000..9b8b11d20f3
--- /dev/null
+++ b/build/release.gypi
@@ -0,0 +1,29 @@
+{
+  'conditions': [
+    # Handle build types.
+    ['buildtype=="Dev"', {
+      'includes': ['internal/release_impl.gypi'],
+    }],
+    ['buildtype=="Dev" and incremental_chrome_dll==1', {
+      'msvs_settings': {
+        'VCLinkerTool': {
+          # Enable incremental linking and disable conflicting link options:
+          # http://msdn.microsoft.com/en-us/library/4khtbfyf.aspx
+          'LinkIncremental': '2',
+          'OptimizeReferences': '1',
+          'EnableCOMDATFolding': '1',
+          'Profile': 'false',
+        },
+      },
+    }],
+    ['buildtype=="Official"', {
+      'includes': ['internal/release_impl_official.gypi'],
+    }],
+    # TODO(bradnelson): may also need:
+    #     checksenabled
+    #     coverage
+    #     dom_stats
+    #     pgo_instrument
+    #     pgo_optimize
+  ],
+}
diff --git a/build/repack_action.gypi b/build/repack_action.gypi
new file mode 100644
index 00000000000..04b982a38bd
--- /dev/null
+++ b/build/repack_action.gypi
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit repack in a
+# consistent manner. To use this the following variables need to be
+# defined:
+#   pak_inputs: list: paths of pak files that need to be combined.
+#   pak_output: string: the output pak file path.
+
+{
+  # GYP version: //tools/grit/repack.gni
+  'variables': {
+    'repack_path': '<(DEPTH)/tools/grit/grit/format/repack.py',
+    'repack_options%': [],
+  },
+  'inputs': [
+    '<(repack_path)',
+    '<@(pak_inputs)',
+  ],
+  'outputs': [
+    '<(pak_output)'
+  ],
+  'action': [
+    'python',
+    '<(repack_path)',
+    '<@(repack_options)',
+    '<(pak_output)',
+    '<@(pak_inputs)',
+  ],
+}
diff --git a/build/rmdir_and_stamp.py b/build/rmdir_and_stamp.py
new file mode 100644
index 00000000000..8f046ea5344
--- /dev/null
+++ b/build/rmdir_and_stamp.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wipes out a directory recursively and then touches a stamp file.
+
+This odd pairing of operations is used to support build scripts which
+slurp up entire directories (e.g. build/android/javac.py when handling
+generated sources) as inputs.
+
+The general pattern of use is:
+
+  - Add a target which generates |gen_sources| into |out_path| from |inputs|.
+  - Include |stamp_file| as an input for that target or any of its rules which
+    generate files in |out_path|.
+  - Add an action which depends on |inputs| and which outputs |stamp_file|;
+    the action should run this script and pass |out_path| and |stamp_file| as
+    its arguments.
+
+The net result is that you will force |out_path| to be wiped and all
+|gen_sources| to be regenerated any time any file in |inputs| changes.
+
+See //mojo/mojom_bindings_generator.gypi for an example use case.
+
+"""
+
+import errno
+import os
+import shutil
+import sys
+
+
+def Main(dst_dir, stamp_file):
+  try:
+    shutil.rmtree(os.path.normpath(dst_dir))
+  except OSError as e:
+    # Ignore only "not found" errors.
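+    # A missing |dst_dir| simply means there is nothing to wipe yet (e.g. on
+    # the very first build); anything else is a real failure and is re-raised.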
+ if e.errno != errno.ENOENT: + raise e + with open(stamp_file, 'a'): + os.utime(stamp_file, None) + +if __name__ == '__main__': + sys.exit(Main(sys.argv[1], sys.argv[2])) diff --git a/build/sanitize-mac-build-log.sed b/build/sanitize-mac-build-log.sed new file mode 100644 index 00000000000..b4111c7b828 --- /dev/null +++ b/build/sanitize-mac-build-log.sed @@ -0,0 +1,33 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Use this sed script to reduce a Mac build log into something readable. + +# Drop uninformative lines. +/^distcc/d +/^Check dependencies/d +/^ setenv /d +/^ cd /d +/^make: Nothing to be done/d +/^$/d + +# Xcode prints a short "compiling foobar.o" line followed by the lengthy +# full command line. These deletions drop the command line. +\|^ /Developer/usr/bin/|d +\|^ /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d +\|^ /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d + +# Drop any goma command lines as well. +\|^ .*/gomacc |d + +# And, if you've overridden something from your own bin directory, remove those +# full command lines, too. +\|^ /Users/[^/]*/bin/|d + +# There's already a nice note for bindings, don't need the command line. +\|^python scripts/rule_binding\.py|d + +# Shorten the "compiling foobar.o" line. +s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2| CC \1| +s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2| CC \1| diff --git a/build/sanitize-mac-build-log.sh b/build/sanitize-mac-build-log.sh new file mode 100644 index 00000000000..df5a7af29eb --- /dev/null +++ b/build/sanitize-mac-build-log.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# Copyright (c) 2010 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed diff --git a/build/sanitize-win-build-log.sed b/build/sanitize-win-build-log.sed new file mode 100644 index 00000000000..c18e664c83a --- /dev/null +++ b/build/sanitize-win-build-log.sed @@ -0,0 +1,15 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Use this sed script to reduce a Windows build log into something +# machine-parsable. + +# Drop uninformative lines. +/The operation completed successfully\./d + +# Drop parallelization indicators on lines. +s/^[0-9]+>// + +# Shorten bindings generation lines +s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/ idl_compiler \1/ diff --git a/build/sanitize-win-build-log.sh b/build/sanitize-win-build-log.sh new file mode 100644 index 00000000000..df5a7af29eb --- /dev/null +++ b/build/sanitize-win-build-log.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# Copyright (c) 2010 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed diff --git a/build/sanitizers/BUILD.gn b/build/sanitizers/BUILD.gn new file mode 100644 index 00000000000..473200f299a --- /dev/null +++ b/build/sanitizers/BUILD.gn @@ -0,0 +1,25 @@ +# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
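+
+# Copy llvm-symbolizer from the clang package into the build output directory
+# so sanitizer reports can be symbolized on machines without a local LLVM
+# checkout.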
+
+import("//build/config/clang/clang.gni")
+
+if (is_clang) {
+  copy("copy_llvm_symbolizer") {
+    if (is_win) {
+      sources = [
+        "$clang_base_path/bin/llvm-symbolizer.exe",
+      ]
+      outputs = [
+        "$root_out_dir/llvm-symbolizer.exe",
+      ]
+    } else {
+      sources = [
+        "$clang_base_path/bin/llvm-symbolizer",
+      ]
+      outputs = [
+        "$root_out_dir/llvm-symbolizer",
+      ]
+    }
+  }
+}
diff --git a/build/sanitizers/OWNERS b/build/sanitizers/OWNERS
new file mode 100644
index 00000000000..3059b0e42aa
--- /dev/null
+++ b/build/sanitizers/OWNERS
@@ -0,0 +1,4 @@
+glider@chromium.org
+eugenis@chromium.org
+per-file tsan_suppressions.cc=*
+per-file lsan_suppressions.cc=*
diff --git a/build/sanitizers/asan_suppressions.cc b/build/sanitizers/asan_suppressions.cc
new file mode 100644
index 00000000000..df94bc89503
--- /dev/null
+++ b/build/sanitizers/asan_suppressions.cc
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for AddressSanitizer.
+// It should only be used under very limited circumstances such as suppressing
+// a report caused by an interceptor call in a system-installed library.
+
+#if defined(ADDRESS_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kASanDefaultSuppressions which contains ASan suppressions delimited by
+// newlines.
+char kASanDefaultSuppressions[] =
+// http://crbug.com/178677
+"interceptor_via_lib:libsqlite3.so\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // ADDRESS_SANITIZER
diff --git a/build/sanitizers/lsan_suppressions.cc b/build/sanitizers/lsan_suppressions.cc
new file mode 100644
index 00000000000..628589537b4
--- /dev/null
+++ b/build/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,93 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+// Intentional leak used as sanity test for Valgrind/memcheck.
+"leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+// ================ Leaks in third-party code ================
+
+// False positives in libfontconfig. http://crbug.com/39050
+"leak:libfontconfig\n"
+// eglibc-2.19/string/strdup.c creates false-positive leak reports for the
+// same reason as crbug.com/39050: when unwound at malloc time, the leak's
+// stack trace includes a call to libfontconfig, but the default stack trace
+// on the LeakSanitizer bot is too short for the libfontconfig suppression to
+// fire. http://crbug.com/605286
+"leak:__strdup\n"
+
+// Leaks in Nvidia's libGL.
+"leak:libGL.so\n"
+
+// TODO(eugenis): revisit NSS suppressions after the switch to BoringSSL
+// NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
+"leak:net::NSSCertDatabase::ImportFromPKCS12\n"
+"leak:net::NSSCertDatabase::ListCerts\n"
+"leak:net::NSSCertDatabase::DeleteCertAndKey\n"
+"leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
+// Another leak due to not shutting down NSS properly. http://crbug.com/124445
+"leak:error_get_my_stack\n"
+// The NSS suppressions above will not fire when the fast stack unwinder is
+// used, because it can't unwind through NSS libraries. Apply blanket
+// suppressions for now.
+"leak:libnssutil3\n"
+"leak:libnspr4\n"
+"leak:libnss3\n"
+"leak:libplds4\n"
+"leak:libnssckbi\n"
+
+// XRandR has several one time leaks.
+"leak:libxrandr\n"
+
+// xrandr leak. http://crbug.com/119677
+"leak:XRRFindDisplay\n"
+
+// http://crbug.com/431213, http://crbug.com/416665
+"leak:gin/object_template_builder.h\n"
+
+// Leaks in swrast_dri.so. http://crbug.com/540042
+"leak:swrast_dri.so\n"
+
+// ================ Leaks in Chromium code ================
+// PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+// Instead, commits that introduce memory leaks should be reverted. Suppressing
+// the leak is acceptable in some cases when reverting is impossible, i.e. when
+// enabling leak detection for the first time for a test target with
+// pre-existing leaks.
+
+// Small test-only leak in ppapi_unittests. http://crbug.com/258113
+"leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_Test\n"
+
+// http://crbug.com/322671
+"leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
+
+// http://crbug.com/355641
+"leak:TrayAccessibilityTest\n"
+
+// http://crbug.com/354644
+"leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
+
+// http://crbug.com/356306
+"leak:content::SetProcessTitleFromCommandLine\n"
+
+// http://crbug.com/601435
+"leak:mojo/edk/js/handle.h\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // LEAK_SANITIZER
diff --git a/build/sanitizers/sanitizer_options.cc b/build/sanitizers/sanitizer_options.cc
new file mode 100644
index 00000000000..39fa1403c97
--- /dev/null
+++ b/build/sanitizers/sanitizer_options.cc
@@ -0,0 +1,187 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+#if defined(ADDRESS_SANITIZER) && defined(OS_MACOSX)
+#include <crt_externs.h>  // for _NSGetArgc, _NSGetArgv
+#include <string.h>
+#endif  // ADDRESS_SANITIZER && OS_MACOSX
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \
+    defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \
+    defined(UNDEFINED_SANITIZER)
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
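+// no_sanitize keeps the hooks themselves free of instrumentation, and
+// "default" visibility plus "used" prevents them from being hidden or
+// garbage-collected at link time.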
+#define SANITIZER_HOOK_ATTRIBUTE                                           \
+  extern "C"                                                               \
+  __attribute__((no_sanitize("address", "memory", "thread", "undefined"))) \
+  __attribute__((visibility("default")))                                   \
+  __attribute__((used))
+#endif
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+//   malloc_context_size=5 - limit the size of stack traces collected by ASan
+//     for each malloc/free to 5 frames. These stack traces tend to accumulate
+//     very fast in applications using JIT (v8 in Chrome's case), see
+//     https://code.google.com/p/address-sanitizer/issues/detail?id=177
+//   symbolize=1 - enable in-process symbolization.
+//   legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
+//     work around libGL.so using the obsolete API, see
+//     http://crbug.com/341805. This may break if pthread_cond_t objects are
+//     accessed by both instrumented and non-instrumented binaries (e.g. if
+//     they reside in shared memory). This option is going to be deprecated in
+//     upstream AddressSanitizer and must not be used anywhere except the
+//     official builds.
+//   check_printf=1 - check the memory accesses to printf (and other formatted
+//     output routines) arguments.
+//   use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+//     for stack overflow detection.
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+//   fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
+//     to print error reports. V8 doesn't generate debug info for the JIT code,
+//     so the slow unwinder may not work properly.
+//   detect_stack_use_after_return=1 - use fake stack to delay the reuse of
+//     stack allocations and detect stack-use-after-return errors.
+#if defined(OS_LINUX)
+#if defined(GOOGLE_CHROME_BUILD)
+// Default AddressSanitizer options for the official build. These do not affect
+// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
+// Chromium builds.
+const char kAsanDefaultOptions[] =
+    "legacy_pthread_cond=1 malloc_context_size=5 "
+    "symbolize=1 check_printf=1 use_sigaltstack=1 detect_leaks=0 "
+    "strip_path_prefix=/../../ fast_unwind_on_fatal=1";
+#else
+// Default AddressSanitizer options for buildbots and non-official builds.
+const char *kAsanDefaultOptions = + "symbolize=1 check_printf=1 use_sigaltstack=1 " + "detect_leaks=0 strip_path_prefix=/../../ fast_unwind_on_fatal=1 " + "detect_stack_use_after_return=1 "; +#endif // GOOGLE_CHROME_BUILD + +#elif defined(OS_MACOSX) +const char *kAsanDefaultOptions = + "check_printf=1 use_sigaltstack=1 " + "strip_path_prefix=/../../ fast_unwind_on_fatal=1 " + "detect_stack_use_after_return=1 detect_odr_violation=0 "; +static const char kNaClDefaultOptions[] = "handle_segv=0"; +static const char kNaClFlag[] = "--type=nacl-loader"; +#endif // OS_LINUX + +#if defined(OS_LINUX) || defined(OS_MACOSX) +SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() { +#if defined(OS_MACOSX) + char*** argvp = _NSGetArgv(); + int* argcp = _NSGetArgc(); + if (!argvp || !argcp) return kAsanDefaultOptions; + char** argv = *argvp; + int argc = *argcp; + for (int i = 0; i < argc; ++i) { + if (strcmp(argv[i], kNaClFlag) == 0) { + return kNaClDefaultOptions; + } + } +#endif + return kAsanDefaultOptions; +} + +extern "C" char kASanDefaultSuppressions[]; + +SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() { + return kASanDefaultSuppressions; +} +#endif // OS_LINUX || OS_MACOSX +#endif // ADDRESS_SANITIZER + +#if defined(THREAD_SANITIZER) && defined(OS_LINUX) +// Default options for ThreadSanitizer in various configurations: +// detect_deadlocks=1 - enable deadlock (lock inversion) detection. +// second_deadlock_stack=1 - more verbose deadlock reports. +// report_signal_unsafe=0 - do not report async-signal-unsafe functions +// called from signal handlers. +// report_thread_leaks=0 - do not report unjoined threads at the end of +// the program execution. +// print_suppressions=1 - print the list of matched suppressions. +// history_size=7 - make the history buffer proportional to 2^7 (the maximum +// value) to keep more stack traces. +// strip_path_prefix=/../../ - prefixes up to and including this +// substring will be stripped from source file paths in symbolized reports. +const char kTsanDefaultOptions[] = + "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 " + "report_thread_leaks=0 print_suppressions=1 history_size=7 " + "strict_memcmp=0 strip_path_prefix=/../../ "; + +SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() { + return kTsanDefaultOptions; +} + +extern "C" char kTSanDefaultSuppressions[]; + +SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() { + return kTSanDefaultSuppressions; +} + +#endif // THREAD_SANITIZER && OS_LINUX + +#if defined(MEMORY_SANITIZER) +// Default options for MemorySanitizer: +// intercept_memcmp=0 - do not detect uninitialized memory in memcmp() calls. +// Pending cleanup, see http://crbug.com/523428 +// strip_path_prefix=/../../ - prefixes up to and including this +// substring will be stripped from source file paths in symbolized reports. +const char kMsanDefaultOptions[] = + "intercept_memcmp=0 strip_path_prefix=/../../ "; + +SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() { + return kMsanDefaultOptions; +} + +#endif // MEMORY_SANITIZER + +#if defined(LEAK_SANITIZER) +// Default options for LeakSanitizer: +// print_suppressions=1 - print the list of matched suppressions. +// strip_path_prefix=/../../ - prefixes up to and including this +// substring will be stripped from source file paths in symbolized reports. 
+const char kLsanDefaultOptions[] = + "print_suppressions=1 strip_path_prefix=/../../ "; + +SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() { + return kLsanDefaultOptions; +} + +extern "C" char kLSanDefaultSuppressions[]; + +SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() { + return kLSanDefaultSuppressions; +} + +#endif // LEAK_SANITIZER + +#if defined(UNDEFINED_SANITIZER) +// Default options for UndefinedBehaviorSanitizer: +// print_stacktrace=1 - print the stacktrace when UBSan reports an error. +const char kUbsanDefaultOptions[] = + "print_stacktrace=1 strip_path_prefix=/../../ "; + +SANITIZER_HOOK_ATTRIBUTE const char* __ubsan_default_options() { + return kUbsanDefaultOptions; +} + +#endif // UNDEFINED_SANITIZER diff --git a/build/sanitizers/sanitizers.gyp b/build/sanitizers/sanitizers.gyp new file mode 100644 index 00000000000..025348ec3f2 --- /dev/null +++ b/build/sanitizers/sanitizers.gyp @@ -0,0 +1,93 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'targets': [ + { + 'target_name': 'sanitizer_options', + 'type': 'static_library', + 'toolsets': ['host', 'target'], + 'variables': { + # Every target is going to depend on sanitizer_options, so allow + # this one to depend on itself. + 'prune_self_dependency': 1, + # Do not let 'none' targets depend on this one, they don't need to. + 'link_dependency': 1, + }, + 'sources': [ + 'sanitizer_options.cc', + ], + 'include_dirs': [ + '../..', + ], + # Some targets may want to opt-out from ASan, TSan and MSan and link + # without the corresponding runtime libraries. We drop the libc++ + # dependency and omit the compiler flags to avoid bringing instrumented + # code to those targets. + 'conditions': [ + ['use_custom_libcxx==1', { + 'dependencies!': [ + '../../buildtools/third_party/libc++/libc++.gyp:libcxx_proxy', + ], + }], + ['tsan==1', { + 'sources': [ + 'tsan_suppressions.cc', + ], + }], + ['lsan==1', { + 'sources': [ + 'lsan_suppressions.cc', + ], + }], + ['asan==1', { + 'sources': [ + 'asan_suppressions.cc', + ], + }], + ], + 'cflags/': [ + ['exclude', '-fsanitize='], + ['exclude', '-fsanitize-'], + ], + 'direct_dependent_settings': { + 'ldflags': [ + '-Wl,-u_sanitizer_options_link_helper', + ], + 'target_conditions': [ + ['_type=="executable"', { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-Wl,-u,__sanitizer_options_link_helper', + ], + }, + }], + ], + }, + }, + # GN version: //build/sanitizers:copy_llvm_symbolizer + { + # Copy llvm-symbolizer to the product dir so that LKGR bots can package it. + 'target_name': 'llvm-symbolizer', + 'type': 'none', + 'variables': { + + # Path is relative to this GYP file. + 'llvm_symbolizer_path': + '../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer<(EXECUTABLE_SUFFIX)', + }, + 'conditions': [ + ['clang==1', { + 'copies': [{ + 'destination': '<(PRODUCT_DIR)', + 'files': [ + '<(llvm_symbolizer_path)', + ], + }], + }], + ], + }, + ], +} + diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc new file mode 100644 index 00000000000..efccc862f8e --- /dev/null +++ b/build/sanitizers/tsan_suppressions.cc @@ -0,0 +1,279 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file contains the default suppressions for ThreadSanitizer. 
+// You can also pass additional suppressions via TSAN_OPTIONS: +// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to +// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2 +// for more info. + +#if defined(THREAD_SANITIZER) + +// Please make sure the code below declares a single string variable +// kTSanDefaultSuppressions contains TSan suppressions delimited by newlines. +// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2 +// for the instructions on writing suppressions. +char kTSanDefaultSuppressions[] = +// False positives in libflashplayer.so and libglib.so. Since we don't +// instrument them, we cannot reason about the synchronization in them. +"race:libflashplayer.so\n" +"race:libglib*.so\n" + +// Intentional race in ToolsSanityTest.DataRace in base_unittests. +"race:base/tools_sanity_unittest.cc\n" + +// Data race on WatchdogCounter [test-only]. +"race:base/threading/watchdog_unittest.cc\n" + +// Races in libevent, http://crbug.com/23244. +"race:libevent/event.c\n" + +// http://crbug.com/46840. +"race:base::HistogramSamples::IncreaseSum\n" +"race:base::Histogram::Add\n" +"race:base::HistogramSamples::Add\n" + +// http://crbug.com/84094. +"race:sqlite3StatusSet\n" +"race:pcache1EnforceMaxPage\n" +"race:pcache1AllocPage\n" + +// http://crbug.com/102327. +// Test-only race, won't fix. +"race:tracked_objects::ThreadData::ShutdownSingleThreadedCleanup\n" + +// http://crbug.com/120808 +"race:base/threading/watchdog.cc\n" + +// http://crbug.com/157586 +"race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n" + +// http://crbug.com/158718 +"race:third_party/ffmpeg/libavcodec/pthread.c\n" +"race:third_party/ffmpeg/libavcodec/pthread_frame.c\n" +"race:third_party/ffmpeg/libavcodec/vp8.c\n" +"race:third_party/ffmpeg/libavutil/mem.c\n" +"race:*HashFrameForTesting\n" +"race:third_party/ffmpeg/libavcodec/h264pred.c\n" +"race:media::ReleaseData\n" + +// http://crbug.com/158922 +"race:third_party/libvpx/source/libvpx/vp8/encoder/*\n" +"race:third_party/libvpx/source/libvpx/vp9/encoder/*\n" + +// http://crbug.com/189177 +"race:thread_manager\n" +"race:v8::Locker::Initialize\n" + +// http://crbug.com/239359 +"race:media::TestInputCallback::OnData\n" + +// http://crbug.com/244368 +"race:skia::BeginPlatformPaint\n" + +// http://crbug.com/244385 +"race:unixTempFileDir\n" + +// http://crbug.com/244755 +"race:v8::internal::Zone::NewExpand\n" +"race:TooLateToEnableNow\n" +"race:adjust_segment_bytes_allocated\n" + +// http://crbug.com/244774 +"race:webrtc::RTPReceiver::ProcessBitrate\n" +"race:webrtc::RTPSender::ProcessBitrate\n" +"race:webrtc::VideoCodingModuleImpl::Decode\n" +"race:webrtc::RTPSender::SendOutgoingData\n" +"race:webrtc::VP8EncoderImpl::GetEncodedPartitions\n" +"race:webrtc::VP8EncoderImpl::Encode\n" +"race:webrtc::ViEEncoder::DeliverFrame\n" +"race:webrtc::vcm::VideoReceiver::Decode\n" +"race:webrtc::VCMReceiver::FrameForDecoding\n" +"race:*trace_event_unique_catstatic*\n" + +// http://crbug.com/244856 +"race:AutoPulseLock\n" + +// http://crbug.com/246968 +"race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n" + +// http://crbug.com/246974 +"race:content::GpuWatchdogThread::CheckArmed\n" + +// http://crbug.com/257396 +"race:base::trace_event::" + "TraceEventTestFixture_TraceSamplingScope_Test::TestBody\n" + +// http://crbug.com/258479 +"race:SamplingStateScope\n" +"race:g_trace_state\n" + +// http://crbug.com/258499 +"race:third_party/skia/include/core/SkRefCnt.h\n" + +// http://crbug.com/268924 
+"race:base::g_power_monitor\n" +"race:base::PowerMonitor::PowerMonitor\n" +"race:base::PowerMonitor::AddObserver\n" +"race:base::PowerMonitor::RemoveObserver\n" +"race:base::PowerMonitor::IsOnBatteryPower\n" + +// http://crbug.com/258935 +"race:base::Thread::StopSoon\n" + +// http://crbug.com/272095 +"race:base::g_top_manager\n" + +// http://crbug.com/280466 +"race:content::WebRtcAudioCapturer::SetCapturerSource\n" + +// http://crbug.com/285242 +"race:media::PulseAudioOutputStream::SetVolume\n" + +// http://crbug.com/308590 +"race:CustomThreadWatcher::~CustomThreadWatcher\n" + +// http://crbug.com/310851 +"race:net::ProxyResolverV8Tracing::Job::~Job\n" + +// http://crbug.com/327330 +"race:PrepareTextureMailbox\n" +"race:cc::LayerTreeHost::PaintLayerContents\n" + +// http://crbug.com/476529 +"deadlock:cc::VideoLayerImpl::WillDraw\n" + +// http://crbug.com/328826 +"race:gLCDOrder\n" +"race:gLCDOrientation\n" + +// http://crbug.com/328868 +"race:PR_Lock\n" + +// http://crbug.com/333244 +"race:content::" + "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n" + +// http://crbug.com/333871 +"race:v8::internal::Interface::NewValue()::value_interface\n" +"race:v8::internal::IsMinusZero(double)::minus_zero\n" +"race:v8::internal::FastCloneShallowObjectStub::InitializeInterfaceDescriptor\n" +"race:v8::internal::KeyedLoadStubCompiler::registers\n" +"race:v8::internal::KeyedStoreStubCompiler::registers()::registers\n" +"race:v8::internal::KeyedLoadFastElementStub::InitializeInterfaceDescriptor\n" +"race:v8::internal::KeyedStoreFastElementStub::InitializeInterfaceDescriptor\n" +"race:v8::internal::LoadStubCompiler::registers\n" +"race:v8::internal::StoreStubCompiler::registers\n" +"race:v8::internal::HValue::LoopWeight\n" + +// http://crbug.com/334140 +"race:CommandLine::HasSwitch\n" +"race:CommandLine::current_process_commandline_\n" +"race:CommandLine::GetSwitchValueASCII\n" + +// http://crbug.com/338675 +"race:blink::s_platform\n" +"race:content::" + "RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n" + +// http://crbug.com/345618 +"race:WebCore::AudioDestinationNode::render\n" + +// http://crbug.com/345624 +"race:media::DataSource::set_host\n" + +// http://crbug.com/347534 +"race:v8::internal::V8::TearDown\n" + +// http://crbug.com/347538 +"race:sctp_timer_start\n" + +// http://crbug.com/347548 +"race:cricket::WebRtcVideoMediaChannel::MaybeResetVieSendCodec\n" +"race:cricket::WebRtcVideoMediaChannel::SetSendCodec\n" + +// http://crbug.com/347553 +"race:blink::WebString::reset\n" + +// http://crbug.com/348511 +"race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n" + +// http://crbug.com/348982 +"race:cricket::P2PTransportChannel::OnConnectionDestroyed\n" +"race:cricket::P2PTransportChannel::AddConnection\n" + +// http://crbug.com/348984 +"race:sctp_express_handle_sack\n" +"race:system_base_info\n" + +// https://code.google.com/p/v8/issues/detail?id=3143 +"race:v8::internal::FLAG_track_double_fields\n" + +// http://crbug.com/374135 +"race:media::AlsaWrapper::PcmWritei\n" + +// False positive in libc's tzset_internal, http://crbug.com/379738. 
+"race:tzset_internal\n"
+
+// http://crbug.com/380554
+"deadlock:g_type_add_interface_static\n"
+
+// http://crbug.com/386385
+"race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+// http://crbug.com/388730
+"race:g_next_user_script_id\n"
+
+// http://crbug.com/397022
+"deadlock:"
+"base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::TestBody\n"
+
+// http://crbug.com/415472
+"deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
+
+// http://crbug.com/490856
+"deadlock:content::TracingControllerImpl::SetEnabledOnFileThread\n"
+
+// http://crbug.com/417193
+// Suppressing both AudioContext.{cpp,h}.
+"race:modules/webaudio/AudioContext\n"
+
+// https://code.google.com/p/skia/issues/detail?id=3294
+"race:SkBaseMutex::acquire\n"
+
+// https://crbug.com/430533
+"race:TileTaskGraphRunner::Run\n"
+
+// https://crbug.com/448203
+"race:blink::RemoteFrame::detach\n"
+
+// Lock inversion in third party code, won't fix.
+// https://crbug.com/455638
+"deadlock:dbus::Bus::ShutdownAndBlock\n"
+
+// https://crbug.com/459429
+"race:randomnessPid\n"
+
+// https://crbug.com/454655
+"race:content::BrowserTestBase::PostTaskToInProcessRendererAndWait\n"
+
+// https://crbug.com/569682
+"race:blink::ThreadState::visitStackRoots\n"
+
+// http://crbug.com/582274
+"race:usrsctp_close\n"
+
+// http://crbug.com/633145
+"race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n"
+
+// http://crbug.com/638378
+"race:~TaskSchedulerWorkerPoolCheckTlsReuse\n"
+
+// http://crbug.com/638583
+"race:webrtc/modules/audio_processing/aec/aec_rdft.cc\n"
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // THREAD_SANITIZER
diff --git a/build/secondary/testing/gmock/BUILD.gn b/build/secondary/testing/gmock/BUILD.gn
new file mode 100644
index 00000000000..1a5b76af7bb
--- /dev/null
+++ b/build/secondary/testing/gmock/BUILD.gn
@@ -0,0 +1,55 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("gmock_config") {
+  # Gmock headers need to be able to find themselves.
+  include_dirs = [
+    "//testing/gmock_custom",
+    "include",
+  ]
+}
+
+static_library("gmock") {
+  testonly = true
+  sources = [
+    # Sources based on files in r173 of gmock.
+    "include/gmock/gmock-actions.h",
+    "include/gmock/gmock-cardinalities.h",
+    "include/gmock/gmock-generated-actions.h",
+    "include/gmock/gmock-generated-function-mockers.h",
+    "include/gmock/gmock-generated-matchers.h",
+    "include/gmock/gmock-generated-nice-strict.h",
+    "include/gmock/gmock-matchers.h",
+    "include/gmock/gmock-spec-builders.h",
+    "include/gmock/gmock.h",
+    "include/gmock/internal/gmock-generated-internal-utils.h",
+    "include/gmock/internal/gmock-internal-utils.h",
+    "include/gmock/internal/gmock-port.h",
+
+    #"src/gmock-all.cc",  # Not needed by our build.
+    "src/gmock-cardinalities.cc",
+    "src/gmock-internal-utils.cc",
+    "src/gmock-matchers.cc",
+    "src/gmock-spec-builders.cc",
+    "src/gmock.cc",
+  ]
+
+  # This project includes some stuff from gtest's guts.
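+  # gmock's sources include gtest internal headers directly, so gtest's
+  # include directory must be on the include path as well.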
+  include_dirs = [ "../gtest/include" ]
+
+  public_configs = [
+    ":gmock_config",
+    "//testing/gtest:gtest_config",
+  ]
+}
+
+static_library("gmock_main") {
+  testonly = true
+  sources = [
+    "src/gmock_main.cc",
+  ]
+  deps = [
+    ":gmock",
+  ]
+}
diff --git a/build/secondary/testing/gtest/BUILD.gn b/build/secondary/testing/gtest/BUILD.gn
new file mode 100644
index 00000000000..649f5c559db
--- /dev/null
+++ b/build/secondary/testing/gtest/BUILD.gn
@@ -0,0 +1,130 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build_overrides/gtest.gni")
+
+config("gtest_config") {
+  visibility = [
+    ":*",
+    "//testing/gmock:*",  # gmock also shares this config.
+  ]
+
+  defines = [
+    # In order to allow regex matches in gtest to be shared between Windows
+    # and other systems, we tell gtest to always use its internal engine.
+    "GTEST_HAS_POSIX_RE=0",
+    "GTEST_LANG_CXX11=1",
+  ]
+
+  # Gtest headers need to be able to find themselves.
+  include_dirs = [ "include" ]
+
+  if (is_win) {
+    cflags = [ "/wd4800" ]  # Forcing value to bool warning.
+  }
+}
+
+config("gtest_direct_config") {
+  visibility = [ ":*" ]
+  defines = [ "UNIT_TEST" ]
+}
+
+config("gtest_warnings") {
+  if (is_win && is_clang) {
+    # The Mutex constructor initializer list in gtest-port.cc is incorrectly
+    # ordered. See
+    # https://groups.google.com/d/msg/googletestframework/S5uSV8L2TX8/U1FaTDa6J6sJ.
+    cflags = [ "-Wno-reorder" ]
+  }
+}
+
+static_library("gtest") {
+  testonly = true
+  sources = [
+    "include/gtest/gtest-death-test.h",
+    "include/gtest/gtest-message.h",
+    "include/gtest/gtest-param-test.h",
+    "include/gtest/gtest-printers.h",
+    "include/gtest/gtest-spi.h",
+    "include/gtest/gtest-test-part.h",
+    "include/gtest/gtest-typed-test.h",
+    "include/gtest/gtest.h",
+    "include/gtest/gtest_pred_impl.h",
+    "include/gtest/internal/gtest-death-test-internal.h",
+    "include/gtest/internal/gtest-filepath.h",
+    "include/gtest/internal/gtest-internal.h",
+    "include/gtest/internal/gtest-linked_ptr.h",
+    "include/gtest/internal/gtest-param-util-generated.h",
+    "include/gtest/internal/gtest-param-util.h",
+    "include/gtest/internal/gtest-port.h",
+    "include/gtest/internal/gtest-string.h",
+    "include/gtest/internal/gtest-tuple.h",
+    "include/gtest/internal/gtest-type-util.h",
+
+    #"gtest/src/gtest-all.cc",  # Not needed by our build.
+    "src/gtest-death-test.cc",
+    "src/gtest-filepath.cc",
+    "src/gtest-internal-inl.h",
+    "src/gtest-port.cc",
+    "src/gtest-printers.cc",
+    "src/gtest-test-part.cc",
+    "src/gtest-typed-test.cc",
+    "src/gtest.cc",
+  ]
+
+  if (gtest_include_multiprocess) {
+    sources += [
+      "../multiprocess_func_list.cc",
+      "../multiprocess_func_list.h",
+    ]
+  }
+
+  if (gtest_include_platform_test) {
+    sources += [ "../platform_test.h" ]
+  }
+
+  if ((is_mac || is_ios) && gtest_include_objc_support) {
+    if (is_ios) {
+      set_sources_assignment_filter([])
+    }
+    sources += [
+      "../gtest_mac.h",
+      "../gtest_mac.mm",
+    ]
+    if (gtest_include_platform_test) {
+      sources += [ "../platform_test_mac.mm" ]
+    }
+    set_sources_assignment_filter(sources_assignment_filter)
+  }
+
+  if (is_ios && gtest_include_ios_coverage) {
+    sources += [
+      "../coverage_util_ios.cc",
+      "../coverage_util_ios.h",
+    ]
+  }
+
+  include_dirs = [ "."
] + + all_dependent_configs = [ ":gtest_config" ] + public_configs = [ ":gtest_direct_config" ] + + configs -= [ "//build/config/compiler:chromium_code" ] + configs += [ + "//build/config/compiler:no_chromium_code", + + # Must be after no_chromium_code for warning flags to be ordered correctly. + ":gtest_warnings", + ] +} + +source_set("gtest_main") { + testonly = true + sources = [ + "src/gtest_main.cc", + ] + deps = [ + ":gtest", + ] +} diff --git a/build/secondary/third_party/android_tools/BUILD.gn b/build/secondary/third_party/android_tools/BUILD.gn new file mode 100644 index 00000000000..ea1533f1950 --- /dev/null +++ b/build/secondary/third_party/android_tools/BUILD.gn @@ -0,0 +1,144 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +config("cpu_features_include") { + include_dirs = [ "$android_ndk_root/sources/android/cpufeatures" ] +} + +config("cpu_features_warnings") { + if (is_clang) { + # cpu-features.c has few unused functions on x86 b/26403333 + cflags = [ "-Wno-unused-function" ] + } +} + +# This is the GN version of +# //build/android/ndk.gyp:cpu_features +source_set("cpu_features") { + sources = [ + "$android_ndk_root/sources/android/cpufeatures/cpu-features.c", + ] + public_configs = [ ":cpu_features_include" ] + + configs -= [ "//build/config/compiler:chromium_code" ] + configs += [ + "//build/config/compiler:no_chromium_code", + + # Must be after no_chromium_code for warning flags to be ordered correctly. + ":cpu_features_warnings", + ] +} + +lib_version = "24.1.1" +lib_path = "$android_sdk_root/extras/android/m2repository/com/android/support" + +android_java_prebuilt("android_gcm_java") { + jar_path = "$android_sdk_root/extras/google/gcm/gcm-client/dist/gcm.jar" +} + +android_java_prebuilt("emma_device") { + jar_path = "$android_sdk_root/tools/lib/emma_device.jar" +} + +android_aar_prebuilt("android_support_design_java") { + deps = [ + ":android_support_v7_appcompat_java", + ] + lib_name = "design" + aar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.aar" +} + +android_aar_prebuilt("android_support_multidex_java") { + # TODO(jbudorick): remove requires_android after crbug.com/522043 is fixed. 
+ requires_android = false + aar_path = "$lib_path/multidex/1.0.1/multidex-1.0.1.aar" +} + +android_java_prebuilt("android_support_annotations_java") { + lib_name = "support-annotations" + jar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.jar" +} + +android_aar_prebuilt("android_support_v4_java") { + lib_name = "support-v4" + aar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.aar" +} + +android_aar_prebuilt("android_support_v13_java") { + deps = [ + ":android_support_annotations_java", + ":android_support_v4_java", + ] + lib_name = "support-v13" + aar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.aar" +} + +android_aar_prebuilt("android_support_vector_drawable_java") { + lib_name = "support-vector-drawable" + aar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.aar" +} + +android_aar_prebuilt("android_support_v7_appcompat_java_internal") { + lib_name = "appcompat-v7" + aar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.aar" +} + +java_group("android_support_v7_appcompat_java") { + deps = [ + ":android_support_v4_java", + ":android_support_v7_appcompat_java_internal", + ":android_support_vector_drawable_java", + ] +} + +android_aar_prebuilt("android_support_v7_mediarouter_java") { + deps = [ + ":android_support_v7_appcompat_java", + ] + lib_name = "mediarouter-v7" + aar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.aar" +} + +android_aar_prebuilt("android_support_v7_recyclerview_java") { + deps = [ + ":android_support_v7_appcompat_java", + ] + lib_name = "recyclerview-v7" + aar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.aar" +} + +android_resources("google_play_services_default_resources") { + v14_skip = true + resource_dirs = [ "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/res" ] + custom_package = "com.google.android.gms" +} + +android_java_prebuilt("google_play_services_default_java") { + deps = [ + ":android_support_annotations_java", + ":android_support_v4_java", + ":android_support_v7_mediarouter_java", + ":google_play_services_default_resources", + ] + input_jars_paths = [ "$android_sdk/optional/org.apache.http.legacy.jar" ] + proguard_preprocess = true + proguard_config = "//third_party/android_tools/proguard.flags" + jar_path = "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/libs/google-play-services.jar" +} + +android_aar_prebuilt("android_support_v17_leanback_java") { + deps = [ + ":android_support_v7_recyclerview_java", + ] + lib_name = "leanback-v17" + aar_path = "$lib_path/$lib_name/$lib_version/$lib_name-$lib_version.aar" +} + +# TODO(jbudorick): Remove this once net_java_test_support no longer needs it. +android_java_prebuilt("legacy_http_javalib") { + testonly = true + jar_path = "$android_sdk/optional/org.apache.http.legacy.jar" +} diff --git a/build/secondary/third_party/crashpad/OWNERS b/build/secondary/third_party/crashpad/OWNERS new file mode 100644 index 00000000000..1e002677a39 --- /dev/null +++ b/build/secondary/third_party/crashpad/OWNERS @@ -0,0 +1,3 @@ +mark@chromium.org +rsesek@chromium.org +scottmg@chromium.org diff --git a/build/secondary/third_party/crashpad/crashpad/client/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/client/BUILD.gn new file mode 100644 index 00000000000..1b68b1670bf --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/client/BUILD.gn @@ -0,0 +1,53 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +config("client_config") { + include_dirs = [ ".." ] +} + +static_library("client") { + sources = [ + "crash_report_database.cc", + "crash_report_database.h", + "crash_report_database_mac.mm", + "crash_report_database_win.cc", + "crashpad_client.h", + "crashpad_client_mac.cc", + "crashpad_client_win.cc", + "crashpad_info.cc", + "crashpad_info.h", + "prune_crash_reports.cc", + "prune_crash_reports.h", + "settings.cc", + "settings.h", + "simple_address_range_bag.cc", + "simple_address_range_bag.h", + "simple_string_dictionary.cc", + "simple_string_dictionary.h", + "simulate_crash.h", + "simulate_crash_mac.cc", + "simulate_crash_mac.h", + "simulate_crash_win.h", + ] + + if (is_mac) { + sources += [ + "capture_context_mac.S", + "capture_context_mac.h", + ] + } + + public_configs = [ ":client_config" ] + + deps = [ + "//base", + "//third_party/crashpad/crashpad/compat", + "//third_party/crashpad/crashpad/util", + ] + + if (is_win) { + libs = [ "rpcrt4.lib" ] + cflags = [ "/wd4201" ] # nonstandard extension used : nameless struct/union. + } +} diff --git a/build/secondary/third_party/crashpad/crashpad/compat/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/compat/BUILD.gn new file mode 100644 index 00000000000..27431f8ea39 --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/compat/BUILD.gn @@ -0,0 +1,69 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +config("compat_config") { + include_dirs = [] + + if (is_win) { + include_dirs += [ "win" ] + } else { + include_dirs += [ "non_win" ] + } + + if (is_mac) { + include_dirs += [ + "mac", + "non_cxx11_lib", + ] + } +} + +static_library("compat") { + sources = [] + if (is_mac) { + sources += [ + "mac/AvailabilityMacros.h", + "mac/kern/exc_resource.h", + "mac/mach-o/getsect.cc", + "mac/mach-o/getsect.h", + "mac/mach-o/loader.h", + "mac/mach/mach.h", + "mac/sys/resource.h", + "non_cxx11_lib/type_traits", + "non_cxx11_lib/utility", + ] + } else { + sources += [ "non_mac/mach/mach.h" ] + } + + if (is_win) { + sources += [ + "win/getopt.h", + "win/strings.cc", + "win/strings.h", + "win/sys/types.h", + "win/time.cc", + "win/time.h", + "win/winnt.h", + ] + } else { + sources += [ + "non_win/dbghelp.h", + "non_win/minwinbase.h", + "non_win/timezoneapi.h", + "non_win/verrsrc.h", + "non_win/windows.h", + "non_win/winnt.h", + ] + } + + public_configs = [ ":compat_config" ] + + deps = [] + if (is_mac) { + deps += [ "//third_party/crashpad/crashpad/third_party/apple_cctools" ] + } else if (is_win) { + deps += [ "//third_party/crashpad/crashpad/third_party/getopt" ] + } +} diff --git a/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn new file mode 100644 index 00000000000..6cd8b9f34a8 --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn @@ -0,0 +1,63 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
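+
+# The handler logic is built as a static library; the crashpad_handler
+# executable below is only a thin main() wrapper around handler_main().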
+ +static_library("handler_lib") { + sources = [ + "crash_report_upload_thread.cc", + "crash_report_upload_thread.h", + "handler_main.cc", + "handler_main.h", + "mac/crash_report_exception_handler.cc", + "mac/crash_report_exception_handler.h", + "mac/exception_handler_server.cc", + "mac/exception_handler_server.h", + "prune_crash_reports_thread.cc", + "prune_crash_reports_thread.h", + "win/crash_report_exception_handler.cc", + "win/crash_report_exception_handler.h", + ] + + include_dirs = [ ".." ] + + deps = [ + "../compat", + "../minidump", + "../snapshot", + "../tools:tool_support", + "//base", + ] + + if (is_win) { + cflags = [ "/wd4201" ] + } +} + +executable("crashpad_handler") { + sources = [ + "main.cc", + ] + + include_dirs = [ ".." ] + + deps = [ + ":handler_lib", + "../compat", + "//base", + "//build/win:default_exe_manifest", + ] + + if (is_mac && is_component_build) { + # The handler is in Chromium.app/Contents/Versions/X/Chromium Framework.framework/Helpers/ + # so set rpath up to the base. + ldflags = [ + "-rpath", + "@loader_path/../../../../../..", + ] + } + + if (is_win) { + configs -= [ "//build/config/win:console" ] + configs += [ "//build/config/win:windowed" ] + } +} diff --git a/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn new file mode 100644 index 00000000000..d18fd71c30c --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn @@ -0,0 +1,69 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +static_library("minidump") { + deps = [ + "../compat", + "../snapshot", + "../util", + "//base", + ] + + include_dirs = [ ".." 
] + + if (is_win) { + cflags = [ + "/wd4201", + "/wd4324", + ] + } + + sources = [ + "minidump_context.h", + "minidump_context_writer.cc", + "minidump_context_writer.h", + "minidump_crashpad_info_writer.cc", + "minidump_crashpad_info_writer.h", + "minidump_exception_writer.cc", + "minidump_exception_writer.h", + "minidump_extensions.cc", + "minidump_extensions.h", + "minidump_file_writer.cc", + "minidump_file_writer.h", + "minidump_handle_writer.cc", + "minidump_handle_writer.h", + "minidump_memory_info_writer.cc", + "minidump_memory_info_writer.h", + "minidump_memory_writer.cc", + "minidump_memory_writer.h", + "minidump_misc_info_writer.cc", + "minidump_misc_info_writer.h", + "minidump_module_crashpad_info_writer.cc", + "minidump_module_crashpad_info_writer.h", + "minidump_module_writer.cc", + "minidump_module_writer.h", + "minidump_rva_list_writer.cc", + "minidump_rva_list_writer.h", + "minidump_simple_string_dictionary_writer.cc", + "minidump_simple_string_dictionary_writer.h", + "minidump_stream_writer.cc", + "minidump_stream_writer.h", + "minidump_string_writer.cc", + "minidump_string_writer.h", + "minidump_system_info_writer.cc", + "minidump_system_info_writer.h", + "minidump_thread_id_map.cc", + "minidump_thread_id_map.h", + "minidump_thread_writer.cc", + "minidump_thread_writer.h", + "minidump_unloaded_module_writer.cc", + "minidump_unloaded_module_writer.h", + "minidump_user_stream_writer.cc", + "minidump_user_stream_writer.h", + "minidump_writable.cc", + "minidump_writable.h", + "minidump_writer_util.cc", + "minidump_writer_util.h", + ] +} diff --git a/build/secondary/third_party/crashpad/crashpad/snapshot/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/snapshot/BUILD.gn new file mode 100644 index 00000000000..b95acd1fba7 --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/snapshot/BUILD.gn @@ -0,0 +1,136 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +static_library("snapshot") { + deps = [ + "../client", + "../compat", + "../util", + "//base", + ] + + include_dirs = [ ".." 
] + + if (is_win) { + cflags = [ "/wd4201" ] + libs = [ "powrprof.lib" ] + } + + sources = [ + "capture_memory.cc", + "capture_memory.h", + "cpu_architecture.h", + "cpu_context.cc", + "cpu_context.h", + "crashpad_info_client_options.cc", + "crashpad_info_client_options.h", + "exception_snapshot.h", + "handle_snapshot.cc", + "handle_snapshot.h", + "mac/cpu_context_mac.cc", + "mac/cpu_context_mac.h", + "mac/exception_snapshot_mac.cc", + "mac/exception_snapshot_mac.h", + "mac/mach_o_image_annotations_reader.cc", + "mac/mach_o_image_annotations_reader.h", + "mac/mach_o_image_reader.cc", + "mac/mach_o_image_reader.h", + "mac/mach_o_image_segment_reader.cc", + "mac/mach_o_image_segment_reader.h", + "mac/mach_o_image_symbol_table_reader.cc", + "mac/mach_o_image_symbol_table_reader.h", + "mac/memory_snapshot_mac.cc", + "mac/memory_snapshot_mac.h", + "mac/module_snapshot_mac.cc", + "mac/module_snapshot_mac.h", + "mac/process_reader.cc", + "mac/process_reader.h", + "mac/process_snapshot_mac.cc", + "mac/process_snapshot_mac.h", + "mac/process_types.cc", + "mac/process_types.h", + "mac/process_types/all.proctype", + "mac/process_types/crashpad_info.proctype", + "mac/process_types/crashreporterclient.proctype", + "mac/process_types/custom.cc", + "mac/process_types/dyld_images.proctype", + "mac/process_types/flavors.h", + "mac/process_types/internal.h", + "mac/process_types/loader.proctype", + "mac/process_types/nlist.proctype", + "mac/process_types/traits.h", + "mac/system_snapshot_mac.cc", + "mac/system_snapshot_mac.h", + "mac/thread_snapshot_mac.cc", + "mac/thread_snapshot_mac.h", + "memory_snapshot.h", + "minidump/minidump_simple_string_dictionary_reader.cc", + "minidump/minidump_simple_string_dictionary_reader.h", + "minidump/minidump_string_list_reader.cc", + "minidump/minidump_string_list_reader.h", + "minidump/minidump_string_reader.cc", + "minidump/minidump_string_reader.h", + "minidump/module_snapshot_minidump.cc", + "minidump/module_snapshot_minidump.h", + "minidump/process_snapshot_minidump.cc", + "minidump/process_snapshot_minidump.h", + "module_snapshot.h", + "process_snapshot.h", + "system_snapshot.h", + "thread_snapshot.h", + "unloaded_module_snapshot.cc", + "unloaded_module_snapshot.h", + "win/capture_memory_delegate_win.cc", + "win/capture_memory_delegate_win.h", + "win/cpu_context_win.cc", + "win/cpu_context_win.h", + "win/exception_snapshot_win.cc", + "win/exception_snapshot_win.h", + "win/memory_map_region_snapshot_win.cc", + "win/memory_map_region_snapshot_win.h", + "win/memory_snapshot_win.cc", + "win/memory_snapshot_win.h", + "win/module_snapshot_win.cc", + "win/module_snapshot_win.h", + "win/pe_image_annotations_reader.cc", + "win/pe_image_annotations_reader.h", + "win/pe_image_reader.cc", + "win/pe_image_reader.h", + "win/pe_image_resource_reader.cc", + "win/pe_image_resource_reader.h", + "win/process_reader_win.cc", + "win/process_reader_win.h", + "win/process_snapshot_win.cc", + "win/process_snapshot_win.h", + "win/process_subrange_reader.cc", + "win/process_subrange_reader.h", + "win/system_snapshot_win.cc", + "win/system_snapshot_win.h", + "win/thread_snapshot_win.cc", + "win/thread_snapshot_win.h", + ] +} + +if (is_win) { + source_set("snapshot_api") { + deps = [ + ":snapshot", + "../compat", + "../util", + "//base", + ] + + include_dirs = [ ".." 
] + + cflags = [ "/wd4201" ] + + sources = [ + "api/module_annotations_win.cc", + "api/module_annotations_win.h", + ] + } +} else { + group("snapshot_api") { + } +} diff --git a/build/secondary/third_party/crashpad/crashpad/third_party/apple_cctools/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/third_party/apple_cctools/BUILD.gn new file mode 100644 index 00000000000..c0250564936 --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/third_party/apple_cctools/BUILD.gn @@ -0,0 +1,15 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +config("apple_cctools_config") { + include_dirs = [ "../.." ] +} + +source_set("apple_cctools") { + sources = [ + "cctools/include/mach-o/getsect.h", + "cctools/libmacho/getsecbyname.c", + ] + public_configs = [ ":apple_cctools_config" ] +} diff --git a/build/secondary/third_party/crashpad/crashpad/third_party/getopt/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/third_party/getopt/BUILD.gn new file mode 100644 index 00000000000..3edcfda7a9e --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/third_party/getopt/BUILD.gn @@ -0,0 +1,10 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +source_set("getopt") { + sources = [ + "getopt.cc", + "getopt.h", + ] +} diff --git a/build/secondary/third_party/crashpad/crashpad/tools/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/tools/BUILD.gn new file mode 100644 index 00000000000..2b867e26623 --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/tools/BUILD.gn @@ -0,0 +1,39 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +source_set("tool_support") { + deps = [ + "//base", + ] + + include_dirs = [ ".." ] + + if (is_win) { + cflags = [ "/wd4201" ] + } + + sources = [ + "tool_support.cc", + "tool_support.h", + ] +} + +executable("crashpad_database_util") { + sources = [ + "crashpad_database_util.cc", + ] + + include_dirs = [ ".." ] + + deps = [ + ":tool_support", + "//base", + + # Default manifest on Windows (a no-op elsewhere). + "//build/win:default_exe_manifest", + "//third_party/crashpad/crashpad/client", + "//third_party/crashpad/crashpad/compat", + "//third_party/crashpad/crashpad/util", + ] +} diff --git a/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn b/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn new file mode 100644 index 00000000000..5b09b9d65f3 --- /dev/null +++ b/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn @@ -0,0 +1,243 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +if (is_mac) { + import("//build/config/sysroot.gni") +} + +# Allows the source set to inject ldflags for targets that link to it. 
+config("util_link_config") { + if (is_mac) { + libs = [ "bsm" ] + } +} + +if (is_mac) { + action_foreach("mig") { + script = "mach/mig.py" + sources = [ + "$sysroot/usr/include/mach/exc.defs", + "$sysroot/usr/include/mach/mach_exc.defs", + "$sysroot/usr/include/mach/notify.defs", + "mach/child_port.defs", + ] + + outputs = [ + "$target_gen_dir/mach/{{source_name_part}}User.c", + "$target_gen_dir/mach/{{source_name_part}}Server.c", + "$target_gen_dir/mach/{{source_name_part}}.h", + "$target_gen_dir/mach/{{source_name_part}}Server.h", + ] + + args = [ "{{source}}" ] + args += rebase_path(outputs, root_build_dir) + } +} + +static_library("util") { + sources = [ + "file/file_io.cc", + "file/file_io.h", + "file/file_io_posix.cc", + "file/file_io_win.cc", + "file/file_reader.cc", + "file/file_reader.h", + "file/file_seeker.cc", + "file/file_seeker.h", + "file/file_writer.cc", + "file/file_writer.h", + "file/string_file.cc", + "file/string_file.h", + "mac/checked_mach_address_range.h", + "mac/launchd.h", + "mac/launchd.mm", + "mac/mac_util.cc", + "mac/mac_util.h", + "mac/service_management.cc", + "mac/service_management.h", + "mac/xattr.cc", + "mac/xattr.h", + "misc/clock.h", + "misc/clock_mac.cc", + "misc/clock_posix.cc", + "misc/clock_win.cc", + "misc/implicit_cast.h", + "misc/initialization_state.h", + "misc/initialization_state_dcheck.cc", + "misc/initialization_state_dcheck.h", + "misc/pdb_structures.cc", + "misc/pdb_structures.h", + "misc/random_string.cc", + "misc/random_string.h", + "misc/scoped_forbid_return.cc", + "misc/scoped_forbid_return.h", + "misc/symbolic_constants_common.h", + "misc/tri_state.h", + "misc/uuid.cc", + "misc/uuid.h", + "net/http_body.cc", + "net/http_body.h", + "net/http_headers.cc", + "net/http_headers.h", + "net/http_multipart_builder.cc", + "net/http_multipart_builder.h", + "net/http_transport.cc", + "net/http_transport.h", + "net/http_transport_mac.mm", + "net/http_transport_win.cc", + "numeric/checked_address_range.cc", + "numeric/checked_address_range.h", + "numeric/checked_range.h", + "numeric/in_range_cast.h", + "numeric/int128.h", + "numeric/safe_assignment.h", + "posix/close_multiple.cc", + "posix/close_multiple.h", + "posix/close_stdio.cc", + "posix/close_stdio.h", + "posix/drop_privileges.cc", + "posix/drop_privileges.h", + "posix/process_info.h", + "posix/process_info_mac.cc", + "posix/symbolic_constants_posix.cc", + "posix/symbolic_constants_posix.h", + "stdlib/aligned_allocator.cc", + "stdlib/aligned_allocator.h", + "stdlib/cxx.h", + "stdlib/map_insert.h", + "stdlib/objc.h", + "stdlib/pointer_container.h", + "stdlib/string_number_conversion.cc", + "stdlib/string_number_conversion.h", + "stdlib/strlcpy.cc", + "stdlib/strlcpy.h", + "stdlib/strnlen.cc", + "stdlib/strnlen.h", + "string/split_string.cc", + "string/split_string.h", + "synchronization/semaphore.h", + "synchronization/semaphore_mac.cc", + "synchronization/semaphore_posix.cc", + "synchronization/semaphore_win.cc", + "thread/thread.cc", + "thread/thread.h", + "thread/thread_log_messages.cc", + "thread/thread_log_messages.h", + "thread/thread_posix.cc", + "thread/thread_win.cc", + "thread/worker_thread.cc", + "thread/worker_thread.h", + "win/address_types.h", + "win/capture_context.asm", + "win/capture_context.h", + "win/checked_win_address_range.h", + "win/command_line.cc", + "win/command_line.h", + "win/critical_section_with_debug_info.cc", + "win/critical_section_with_debug_info.h", + "win/exception_handler_server.cc", + "win/exception_handler_server.h", + "win/get_function.cc", + 
"win/get_function.h", + "win/get_module_information.cc", + "win/get_module_information.h", + "win/handle.cc", + "win/handle.h", + "win/module_version.cc", + "win/module_version.h", + "win/nt_internals.cc", + "win/nt_internals.h", + "win/ntstatus_logging.cc", + "win/ntstatus_logging.h", + "win/process_info.cc", + "win/process_info.h", + "win/process_structs.h", + "win/registration_protocol_win.cc", + "win/registration_protocol_win.h", + "win/scoped_handle.cc", + "win/scoped_handle.h", + "win/scoped_local_alloc.cc", + "win/scoped_local_alloc.h", + "win/scoped_process_suspend.cc", + "win/scoped_process_suspend.h", + "win/time.cc", + "win/time.h", + "win/xp_compat.h", + ] + + if (is_mac) { + # mach/ are not globally filtered. + sources += [ + "mach/child_port_handshake.cc", + "mach/child_port_handshake.h", + "mach/child_port_server.cc", + "mach/child_port_server.h", + "mach/child_port_types.h", + "mach/composite_mach_message_server.cc", + "mach/composite_mach_message_server.h", + "mach/exc_client_variants.cc", + "mach/exc_client_variants.h", + "mach/exc_server_variants.cc", + "mach/exc_server_variants.h", + "mach/exception_behaviors.cc", + "mach/exception_behaviors.h", + "mach/exception_ports.cc", + "mach/exception_ports.h", + "mach/exception_types.cc", + "mach/exception_types.h", + "mach/mach_extensions.cc", + "mach/mach_extensions.h", + "mach/mach_message.cc", + "mach/mach_message.h", + "mach/mach_message_server.cc", + "mach/mach_message_server.h", + "mach/notify_server.cc", + "mach/notify_server.h", + "mach/scoped_task_suspend.cc", + "mach/scoped_task_suspend.h", + "mach/symbolic_constants_mach.cc", + "mach/symbolic_constants_mach.h", + "mach/task_for_pid.cc", + "mach/task_for_pid.h", + "mach/task_memory.cc", + "mach/task_memory.h", + ] + } + + # Include files from here and generated files starting with "util". + include_dirs = [ + "..", + "$root_gen_dir/third_party/crashpad/crashpad", + ] + + all_dependent_configs = [ ":util_link_config" ] + + deps = [ + "//base", + "//third_party/crashpad/crashpad/compat", + ] + + if (is_win) { + libs = [ + "rpcrt4.lib", + "winhttp.lib", + ] + cflags = [ + "/wd4201", # nonstandard extension used : nameless struct/union. + "/wd4577", # 'noexcept' used with no exception handling mode specified. + ] + + if (current_cpu == "x86") { + asmflags = [ "/safeseh" ] + } + } else if (is_mac) { + sources += get_target_outputs(":mig") + deps += [ ":mig" ] + libs = [ + "CoreFoundation.framework", + "Foundation.framework", + "IOKit.framework", + ] + } +} diff --git a/build/secondary/third_party/libjpeg_turbo/BUILD.gn b/build/secondary/third_party/libjpeg_turbo/BUILD.gn new file mode 100644 index 00000000000..941c825d279 --- /dev/null +++ b/build/secondary/third_party/libjpeg_turbo/BUILD.gn @@ -0,0 +1,224 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Do not use the targets in this file unless you need a certain libjpeg +# implementation. Use the meta target //third_party:jpeg instead. 
+ +import("//build/config/sanitizers/sanitizers.gni") +if (current_cpu == "arm") { + import("//build/config/arm.gni") +} + +assert(!is_ios, "This is not used on iOS, don't drag it in unintentionally") + +if (current_cpu == "x86" || current_cpu == "x64") { + import("//third_party/yasm/yasm_assemble.gni") + + yasm_assemble("simd_asm") { + defines = [] + + if (current_cpu == "x86") { + sources = [ + "simd/jccolor-mmx.asm", + "simd/jccolor-sse2.asm", + "simd/jcgray-mmx.asm", + "simd/jcgray-sse2.asm", + "simd/jchuff-sse2.asm", + "simd/jcsample-mmx.asm", + "simd/jcsample-sse2.asm", + "simd/jdcolor-mmx.asm", + "simd/jdcolor-sse2.asm", + "simd/jdmerge-mmx.asm", + "simd/jdmerge-sse2.asm", + "simd/jdsample-mmx.asm", + "simd/jdsample-sse2.asm", + "simd/jfdctflt-3dn.asm", + "simd/jfdctflt-sse.asm", + "simd/jfdctfst-mmx.asm", + "simd/jfdctfst-sse2.asm", + "simd/jfdctint-mmx.asm", + "simd/jfdctint-sse2.asm", + "simd/jidctflt-3dn.asm", + "simd/jidctflt-sse.asm", + "simd/jidctflt-sse2.asm", + "simd/jidctfst-mmx.asm", + "simd/jidctfst-sse2.asm", + "simd/jidctint-mmx.asm", + "simd/jidctint-sse2.asm", + "simd/jidctred-mmx.asm", + "simd/jidctred-sse2.asm", + "simd/jquant-3dn.asm", + "simd/jquant-mmx.asm", + "simd/jquant-sse.asm", + "simd/jquantf-sse2.asm", + "simd/jquanti-sse2.asm", + "simd/jsimdcpu.asm", + ] + defines += [ + "__x86__", + "PIC", + ] + } else if (current_cpu == "x64") { + sources = [ + "simd/jccolor-sse2-64.asm", + "simd/jcgray-sse2-64.asm", + "simd/jchuff-sse2-64.asm", + "simd/jcsample-sse2-64.asm", + "simd/jdcolor-sse2-64.asm", + "simd/jdmerge-sse2-64.asm", + "simd/jdsample-sse2-64.asm", + "simd/jfdctflt-sse-64.asm", + "simd/jfdctfst-sse2-64.asm", + "simd/jfdctint-sse2-64.asm", + "simd/jidctflt-sse2-64.asm", + "simd/jidctfst-sse2-64.asm", + "simd/jidctint-sse2-64.asm", + "simd/jidctred-sse2-64.asm", + "simd/jquantf-sse2-64.asm", + "simd/jquanti-sse2-64.asm", + ] + defines += [ + "__x86_64__", + "PIC", + ] + } + + if (is_win) { + defines += [ "MSVC" ] + include_dirs = [ "win" ] + if (current_cpu == "x86") { + defines += [ "WIN32" ] + } else { + defines += [ "WIN64" ] + } + } else if (is_mac || is_ios) { + defines += [ "MACHO" ] + include_dirs = [ "mac" ] + } else if (is_linux || is_android) { + defines += [ "ELF" ] + include_dirs = [ "linux" ] + } + } +} + +static_library("simd") { + if (current_cpu == "x86") { + deps = [ + ":simd_asm", + ] + sources = [ + "simd/jsimd_i386.c", + ] + } else if (current_cpu == "x64") { + deps = [ + ":simd_asm", + ] + sources = [ + "simd/jsimd_x86_64.c", + ] + } else if (current_cpu == "arm" && arm_version >= 7 && + (arm_use_neon || arm_optionally_use_neon)) { + sources = [ + "simd/jsimd_arm.c", + "simd/jsimd_arm_neon.S", + ] + } else if (current_cpu == "arm64") { + sources = [ + "simd/jsimd_arm64.c", + "simd/jsimd_arm64_neon.S", + ] + } else { + sources = [ + "jsimd_none.c", + ] + } + + if (is_win) { + cflags = [ "/wd4245" ] + } +} + +config("libjpeg_config") { + include_dirs = [ "." 
] +} + +static_library("libjpeg") { + sources = [ + "jcapimin.c", + "jcapistd.c", + "jccoefct.c", + "jccolor.c", + "jcdctmgr.c", + "jchuff.c", + "jchuff.h", + "jcinit.c", + "jcmainct.c", + "jcmarker.c", + "jcmaster.c", + "jcomapi.c", + "jconfig.h", + "jcparam.c", + "jcphuff.c", + "jcprepct.c", + "jcsample.c", + "jdapimin.c", + "jdapistd.c", + "jdatadst.c", + "jdatasrc.c", + "jdcoefct.c", + "jdcolor.c", + "jdct.h", + "jddctmgr.c", + "jdhuff.c", + "jdhuff.h", + "jdinput.c", + "jdmainct.c", + "jdmarker.c", + "jdmaster.c", + "jdmerge.c", + "jdphuff.c", + "jdpostct.c", + "jdsample.c", + "jerror.c", + "jerror.h", + "jfdctflt.c", + "jfdctfst.c", + "jfdctint.c", + "jidctflt.c", + "jidctfst.c", + "jidctint.c", + "jidctred.c", + "jinclude.h", + "jmemmgr.c", + "jmemnobs.c", + "jmemsys.h", + "jmorecfg.h", + "jpegint.h", + "jpeglib.h", + "jpeglibmangler.h", + "jquant1.c", + "jquant2.c", + "jutils.c", + "jversion.h", + ] + + defines = [ + "WITH_SIMD", + "NO_GETENV", + ] + + configs += [ ":libjpeg_config" ] + + public_configs = [ ":libjpeg_config" ] + + # MemorySanitizer doesn't support assembly code, so keep it disabled in + # MSan builds for now. + if (is_msan) { + sources += [ "jsimd_none.c" ] + } else { + deps = [ + ":simd", + ] + } +} diff --git a/build/secondary/third_party/nss/BUILD.gn b/build/secondary/third_party/nss/BUILD.gn new file mode 100644 index 00000000000..5788f3e5d6d --- /dev/null +++ b/build/secondary/third_party/nss/BUILD.gn @@ -0,0 +1,22 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") + +if (is_linux) { + # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL + # library but the system NSS libraries. Non-Linux platforms using NSS use the + # hermetic one in //third_party/nss. + # + # Generally you should depend on //crypto:platform instead of using this + # config since that will properly pick up NSS or OpenSSL depending on + # platform and build config. + pkg_config("system_nss_no_ssl_config") { + packages = [ "nss" ] + extra_args = [ + "-v", + "-lssl3", + ] + } +} diff --git a/build/secondary/tools/swarming_client/BUILD.gn b/build/secondary/tools/swarming_client/BUILD.gn new file mode 100644 index 00000000000..f4860523c7c --- /dev/null +++ b/build/secondary/tools/swarming_client/BUILD.gn @@ -0,0 +1,14 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +group("isolate_py") { + _py_files = + read_file("//build/secondary/tools/swarming_client/isolate.pydeps", + "list lines") + + # Filter out comments. 
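(For illustration: the comment filtering that set_sources_assignment_filter performs just below can be expressed in plain Python as follows. The .pydeps path is the real one from this change, but read_pydeps itself is a hypothetical helper, not part of the build.)

    # Read a .pydeps manifest and drop blank lines and "#" comments,
    # mirroring the GN source filter applied below.
    def read_pydeps(path='build/secondary/tools/swarming_client/isolate.pydeps'):
        with open(path) as manifest:
            return [line.strip() for line in manifest
                    if line.strip() and not line.startswith('#')]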
+ set_sources_assignment_filter([ "#*" ]) + sources = _py_files + data = sources +} diff --git a/build/secondary/tools/swarming_client/isolate.pydeps b/build/secondary/tools/swarming_client/isolate.pydeps new file mode 100644 index 00000000000..56770d61109 --- /dev/null +++ b/build/secondary/tools/swarming_client/isolate.pydeps @@ -0,0 +1,231 @@ +# Generated by running: +# build/print_python_deps.py --root tools/swarming_client --output build/secondary/tools/swarming_client/isolate.pydeps --whitelist tools/swarming_client/third_party tools/swarming_client/isolate.py +auth.py +cipd.py +isolate.py +isolate_format.py +isolated_format.py +isolateserver.py +libs/__init__.py +libs/arfile/__init__.py +libs/arfile/arfile.py +run_isolated.py +third_party/__init__.py +third_party/chromium/__init__.py +third_party/chromium/natsort.py +third_party/colorama/__init__.py +third_party/colorama/ansi.py +third_party/colorama/ansitowin32.py +third_party/colorama/initialise.py +third_party/colorama/win32.py +third_party/colorama/winterm.py +third_party/depot_tools/__init__.py +third_party/depot_tools/auto_stub.py +third_party/depot_tools/fix_encoding.py +third_party/depot_tools/subcommand.py +third_party/google/__init__.py +third_party/google/protobuf/__init__.py +third_party/google/protobuf/compiler/__init__.py +third_party/google/protobuf/compiler/plugin_pb2.py +third_party/google/protobuf/descriptor.py +third_party/google/protobuf/descriptor_database.py +third_party/google/protobuf/descriptor_pb2.py +third_party/google/protobuf/descriptor_pool.py +third_party/google/protobuf/internal/__init__.py +third_party/google/protobuf/internal/api_implementation.py +third_party/google/protobuf/internal/containers.py +third_party/google/protobuf/internal/cpp_message.py +third_party/google/protobuf/internal/decoder.py +third_party/google/protobuf/internal/encoder.py +third_party/google/protobuf/internal/enum_type_wrapper.py +third_party/google/protobuf/internal/message_listener.py +third_party/google/protobuf/internal/python_message.py +third_party/google/protobuf/internal/type_checkers.py +third_party/google/protobuf/internal/wire_format.py +third_party/google/protobuf/message.py +third_party/google/protobuf/message_factory.py +third_party/google/protobuf/reflection.py +third_party/google/protobuf/service.py +third_party/google/protobuf/service_reflection.py +third_party/google/protobuf/symbol_database.py +third_party/google/protobuf/text_encoding.py +third_party/google/protobuf/text_format.py +third_party/googleapiclient/__init__.py +third_party/googleapiclient/channel.py +third_party/googleapiclient/discovery.py +third_party/googleapiclient/discovery_cache/__init__.py +third_party/googleapiclient/discovery_cache/appengine_memcache.py +third_party/googleapiclient/discovery_cache/base.py +third_party/googleapiclient/discovery_cache/file_cache.py +third_party/googleapiclient/errors.py +third_party/googleapiclient/http.py +third_party/googleapiclient/mimeparse.py +third_party/googleapiclient/model.py +third_party/googleapiclient/sample_tools.py +third_party/googleapiclient/schema.py +third_party/httplib2/__init__.py +third_party/httplib2/iri2uri.py +third_party/httplib2/socks.py +third_party/infra_libs/__init__.py +third_party/infra_libs/app.py +third_party/infra_libs/authentication.py +third_party/infra_libs/event_mon/__init__.py +third_party/infra_libs/event_mon/checkouts.py +third_party/infra_libs/event_mon/config.py +third_party/infra_libs/event_mon/monitoring.py +third_party/infra_libs/event_mon/protos/__init__.py 
+third_party/infra_libs/event_mon/protos/chrome_infra_log_pb2.py +third_party/infra_libs/event_mon/protos/goma_stats_pb2.py +third_party/infra_libs/event_mon/protos/log_request_lite_pb2.py +third_party/infra_libs/event_mon/router.py +third_party/infra_libs/experiments.py +third_party/infra_libs/httplib2_utils.py +third_party/infra_libs/infra_types/__init__.py +third_party/infra_libs/infra_types/infra_types.py +third_party/infra_libs/instrumented_requests.py +third_party/infra_libs/logs/__init__.py +third_party/infra_libs/logs/logs.py +third_party/infra_libs/memoize.py +third_party/infra_libs/time_functions/__init__.py +third_party/infra_libs/time_functions/parser.py +third_party/infra_libs/time_functions/testing.py +third_party/infra_libs/time_functions/timestamp.py +third_party/infra_libs/time_functions/zulu.py +third_party/infra_libs/ts_mon/__init__.py +third_party/infra_libs/ts_mon/common/__init__.py +third_party/infra_libs/ts_mon/common/distribution.py +third_party/infra_libs/ts_mon/common/errors.py +third_party/infra_libs/ts_mon/common/helpers.py +third_party/infra_libs/ts_mon/common/http_metrics.py +third_party/infra_libs/ts_mon/common/interface.py +third_party/infra_libs/ts_mon/common/metric_store.py +third_party/infra_libs/ts_mon/common/metrics.py +third_party/infra_libs/ts_mon/common/monitors.py +third_party/infra_libs/ts_mon/common/pb_to_popo.py +third_party/infra_libs/ts_mon/common/standard_metrics.py +third_party/infra_libs/ts_mon/common/targets.py +third_party/infra_libs/ts_mon/config.py +third_party/infra_libs/ts_mon/protos/__init__.py +third_party/infra_libs/ts_mon/protos/acquisition_network_device_pb2.py +third_party/infra_libs/ts_mon/protos/acquisition_task_pb2.py +third_party/infra_libs/ts_mon/protos/metrics_pb2.py +third_party/infra_libs/utils.py +third_party/oauth2client/__init__.py +third_party/oauth2client/_helpers.py +third_party/oauth2client/_openssl_crypt.py +third_party/oauth2client/_pycrypto_crypt.py +third_party/oauth2client/client.py +third_party/oauth2client/clientsecrets.py +third_party/oauth2client/crypt.py +third_party/oauth2client/file.py +third_party/oauth2client/gce.py +third_party/oauth2client/keyring_storage.py +third_party/oauth2client/locked_file.py +third_party/oauth2client/multistore_file.py +third_party/oauth2client/service_account.py +third_party/oauth2client/tools.py +third_party/oauth2client/util.py +third_party/oauth2client/xsrfutil.py +third_party/pyasn1/pyasn1/__init__.py +third_party/pyasn1/pyasn1/codec/__init__.py +third_party/pyasn1/pyasn1/codec/ber/__init__.py +third_party/pyasn1/pyasn1/codec/ber/decoder.py +third_party/pyasn1/pyasn1/codec/ber/encoder.py +third_party/pyasn1/pyasn1/codec/ber/eoo.py +third_party/pyasn1/pyasn1/codec/cer/__init__.py +third_party/pyasn1/pyasn1/codec/cer/decoder.py +third_party/pyasn1/pyasn1/codec/cer/encoder.py +third_party/pyasn1/pyasn1/codec/der/__init__.py +third_party/pyasn1/pyasn1/codec/der/decoder.py +third_party/pyasn1/pyasn1/codec/der/encoder.py +third_party/pyasn1/pyasn1/compat/__init__.py +third_party/pyasn1/pyasn1/compat/binary.py +third_party/pyasn1/pyasn1/compat/octets.py +third_party/pyasn1/pyasn1/debug.py +third_party/pyasn1/pyasn1/error.py +third_party/pyasn1/pyasn1/type/__init__.py +third_party/pyasn1/pyasn1/type/base.py +third_party/pyasn1/pyasn1/type/char.py +third_party/pyasn1/pyasn1/type/constraint.py +third_party/pyasn1/pyasn1/type/error.py +third_party/pyasn1/pyasn1/type/namedtype.py +third_party/pyasn1/pyasn1/type/namedval.py +third_party/pyasn1/pyasn1/type/tag.py 
+third_party/pyasn1/pyasn1/type/tagmap.py +third_party/pyasn1/pyasn1/type/univ.py +third_party/pyasn1/pyasn1/type/useful.py +third_party/requests/__init__.py +third_party/requests/adapters.py +third_party/requests/api.py +third_party/requests/auth.py +third_party/requests/certs.py +third_party/requests/compat.py +third_party/requests/cookies.py +third_party/requests/exceptions.py +third_party/requests/hooks.py +third_party/requests/models.py +third_party/requests/packages/__init__.py +third_party/requests/packages/urllib3/__init__.py +third_party/requests/packages/urllib3/_collections.py +third_party/requests/packages/urllib3/connection.py +third_party/requests/packages/urllib3/connectionpool.py +third_party/requests/packages/urllib3/contrib/__init__.py +third_party/requests/packages/urllib3/contrib/appengine.py +third_party/requests/packages/urllib3/contrib/ntlmpool.py +third_party/requests/packages/urllib3/contrib/pyopenssl.py +third_party/requests/packages/urllib3/exceptions.py +third_party/requests/packages/urllib3/fields.py +third_party/requests/packages/urllib3/filepost.py +third_party/requests/packages/urllib3/packages/__init__.py +third_party/requests/packages/urllib3/packages/ordered_dict.py +third_party/requests/packages/urllib3/packages/six.py +third_party/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py +third_party/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py +third_party/requests/packages/urllib3/poolmanager.py +third_party/requests/packages/urllib3/request.py +third_party/requests/packages/urllib3/response.py +third_party/requests/packages/urllib3/util/__init__.py +third_party/requests/packages/urllib3/util/connection.py +third_party/requests/packages/urllib3/util/request.py +third_party/requests/packages/urllib3/util/response.py +third_party/requests/packages/urllib3/util/retry.py +third_party/requests/packages/urllib3/util/ssl_.py +third_party/requests/packages/urllib3/util/timeout.py +third_party/requests/packages/urllib3/util/url.py +third_party/requests/sessions.py +third_party/requests/status_codes.py +third_party/requests/structures.py +third_party/requests/utils.py +third_party/rsa/rsa/__init__.py +third_party/rsa/rsa/_compat.py +third_party/rsa/rsa/_version133.py +third_party/rsa/rsa/_version200.py +third_party/rsa/rsa/asn1.py +third_party/rsa/rsa/bigfile.py +third_party/rsa/rsa/cli.py +third_party/rsa/rsa/common.py +third_party/rsa/rsa/core.py +third_party/rsa/rsa/key.py +third_party/rsa/rsa/parallel.py +third_party/rsa/rsa/pem.py +third_party/rsa/rsa/pkcs1.py +third_party/rsa/rsa/prime.py +third_party/rsa/rsa/randnum.py +third_party/rsa/rsa/transform.py +third_party/rsa/rsa/util.py +third_party/rsa/rsa/varblock.py +third_party/six/__init__.py +third_party/uritemplate/__init__.py +utils/__init__.py +utils/file_path.py +utils/fs.py +utils/large.py +utils/logging_utils.py +utils/lru.py +utils/net.py +utils/oauth.py +utils/on_error.py +utils/subprocess42.py +utils/threading_utils.py +utils/tools.py +utils/zip_package.py diff --git a/build/set_clang_warning_flags.gypi b/build/set_clang_warning_flags.gypi new file mode 100644 index 00000000000..f6d7aea700d --- /dev/null +++ b/build/set_clang_warning_flags.gypi @@ -0,0 +1,58 @@ +# Copyright (c) 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included to set clang-specific compiler flags. 
+# To use this, the following variables can be defined: +# clang_warning_flags: list: Compiler flags to pass to clang. +# clang_warning_flags_unset: list: Compiler flags to not pass to clang. +# +# Only use this in third-party code. In chromium_code, fix your code to not +# warn instead! +# +# Note that the gypi file is included in target_defaults, so it does not need +# to be explicitly included. +# +# Warning flags set by this will be used on all platforms. If you want to set +# warning flags on only some platforms, you have to do so manually. +# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'my_target', +# 'variables': { +# 'clang_warning_flags': ['-Wno-awesome-warning'], +# 'clang_warning_flags_unset': ['-Wpreviously-set-flag'], +# } +# } + +{ + 'variables': { + 'clang_warning_flags_unset%': [], # Provide a default value. + }, + 'conditions': [ + ['clang==1', { + # This uses >@ instead of <@ to also see clang_warning_flags set in + # targets directly, not just the clang_warning_flags in target_defaults. + 'cflags': [ '>@(clang_warning_flags)' ], + 'cflags!': [ '>@(clang_warning_flags_unset)' ], + 'xcode_settings': { + 'WARNING_CFLAGS': ['>@(clang_warning_flags)'], + 'WARNING_CFLAGS!': ['>@(clang_warning_flags_unset)'], + }, + 'msvs_settings': { + 'VCCLCompilerTool': { + 'AdditionalOptions': [ '>@(clang_warning_flags)' ], + 'AdditionalOptions!': [ '>@(clang_warning_flags_unset)' ], + }, + }, + }], + ['clang==0 and host_clang==1', { + 'target_conditions': [ + ['_toolset=="host"', { + 'cflags': [ '>@(clang_warning_flags)' ], + 'cflags!': [ '>@(clang_warning_flags_unset)' ], + }], + ], + }], + ], +} diff --git a/build/shim_headers.gni b/build/shim_headers.gni new file mode 100644 index 00000000000..a37bd4a1d8d --- /dev/null +++ b/build/shim_headers.gni @@ -0,0 +1,34 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +template("shim_headers") { + action_name = "gen_${target_name}" + config_name = "${target_name}_config" + shim_headers_path = "${root_gen_dir}/shim_headers/${target_name}" + + config(config_name) { + include_dirs = [ shim_headers_path ] + } + + action(action_name) { + script = "//tools/generate_shim_headers/generate_shim_headers.py" + args = [ + "--generate", + "--headers-root", + rebase_path(invoker.root_path), + "--output-directory", + rebase_path(shim_headers_path), + ] + invoker.headers + + outputs = process_file_template(invoker.headers, + "${shim_headers_path}/{{source_file_part}}") + } + + group(target_name) { + deps = [ + ":${action_name}", + ] + all_dependent_configs = [ ":${config_name}" ] + } +} diff --git a/build/shim_headers.gypi b/build/shim_headers.gypi new file mode 100644 index 00000000000..56d8d3a7196 --- /dev/null +++ b/build/shim_headers.gypi @@ -0,0 +1,60 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to handle shim headers +# in a consistent manner.
To use this, the following variables need to be +# defined: +# headers_root_path: string: path to directory containing headers +# header_filenames: list: list of header file names + +{ + 'variables': { + 'shim_headers_path': '<(SHARED_INTERMEDIATE_DIR)/shim_headers/<(_target_name)/<(_toolset)', + 'shim_generator_additional_args%': [], + }, + 'include_dirs++': [ + '<(shim_headers_path)', + ], + 'all_dependent_settings': { + # Repeating this with different numbers of plusses is unfortunately required + # to make sure that even if this include is inside nested conditions/etc, it + # still gets inserted at the beginning of the include_dirs list. See + # http://crbug.com/263818 for details. + 'include_dirs+++': [ + '<(shim_headers_path)', + ], + 'include_dirs++++': [ + '<(shim_headers_path)', + ], + 'include_dirs+++++': [ + '<(shim_headers_path)', + ], + }, + 'actions': [ + { + 'variables': { + 'generator_path': '<(DEPTH)/tools/generate_shim_headers/generate_shim_headers.py', + 'generator_args': [ + '--headers-root', '<(headers_root_path)', + '--output-directory', '<(shim_headers_path)', + '<@(shim_generator_additional_args)', + '<@(header_filenames)', + ], + }, + 'action_name': 'generate_<(_target_name)_shim_headers', + 'inputs': [ + '<(generator_path)', + ], + 'outputs': [ ' \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi" + + rspfile_content = "{{inputs_newline}}" + + description = "SOLINK {{output}}" + + # Use this for {{output_extension}} expansions unless a target manually + # overrides it (in which case {{output_extension}} will be what the target + # specifies). + default_output_dir = "{{root_out_dir}}" + default_output_extension = ".dylib" + + output_prefix = "lib" + + # Since the above commands only update the .TOC file when it changes, ask + # Ninja to check if the timestamp actually changed to know if downstream + # dependencies should be recompiled. + restat = true + + # Tell GN about the output files. It will link to the dylib but use the + # tocname for dependency management. + outputs = [ + dylib, + tocname, + ] + link_output = dylib + depend_output = tocname + + if (_enable_dsyms) { + outputs += dsym_output + } + if (_save_unstripped_output) { + outputs += [ _unstripped_output ] + } + } + + tool("solink_module") { + sofile = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # e.g. "./libfoo.so" + rspfile = sofile + ".rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + link_command = "$linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" -Wl,-filelist,\"$rspfile\"" + if (is_component_build) { + link_command += " -Wl,-install_name,@rpath/{{target_output_name}}{{output_extension}}" + } + link_command += dsym_switch + link_command += " {{solibs}} {{libs}}" + command = link_command + + rspfile_content = "{{inputs_newline}}" + + description = "SOLINK_MODULE {{output}}" + + # Use this for {{output_extension}} expansions unless a target manually + # overrides it (in which case {{output_extension}} will be what the target + # specifies).
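(An aside on the "solink" rule above: the .TOC dance is easier to follow outside of shell quoting. Below is a minimal Python sketch of the same update-if-changed pattern, with invented file names and an extract_toc callable standing in for the otool/nm pipeline; the module rule then continues.)

    import filecmp, os, shutil

    def update_toc(dylib, tocname, extract_toc):
        # Regenerate the table of contents (install name + exported symbols).
        tmp = tocname + '.tmp'
        with open(tmp, 'w') as out:
            out.write(extract_toc(dylib))
        # Publish the new .TOC only when its contents changed; leaving the
        # old mtime alone lets Ninja's restat=true skip downstream relinks.
        if os.path.exists(tocname) and filecmp.cmp(tmp, tocname):
            os.remove(tmp)
        else:
            shutil.move(tmp, tocname)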
+ default_output_dir = "{{root_out_dir}}" + default_output_extension = ".so" + + outputs = [ + sofile, + ] + + if (_enable_dsyms) { + outputs += dsym_output + } + if (_save_unstripped_output) { + outputs += [ _unstripped_output ] + } + } + + tool("link") { + outfile = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$outfile.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + # Note about --filelist: Apple's linker reads the file list file and + # interprets each newline-separated chunk of text as a file name. It + # doesn't do the things one would expect from the shell like unescaping + # or handling quotes. In contrast, when Ninja finds a file name with + # spaces, it single-quotes them in $inputs_newline as it would normally + # do for command-line arguments. Thus any source names with spaces, or + # label names with spaces (which GN bases the output paths on) will be + # corrupted by this process. Don't use spaces for source files or labels. + command = "$linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{solibs}} {{libs}}" + description = "LINK $outfile" + rspfile_content = "{{inputs_newline}}" + outputs = [ + outfile, + ] + + if (_enable_dsyms) { + outputs += dsym_output + } + if (_save_unstripped_output) { + outputs += [ _unstripped_output ] + } + + default_output_dir = "{{root_out_dir}}" + } + + # These two are really entirely generic, but have to be repeated in + # each toolchain because GN doesn't allow a template to be used here. + # See //build/toolchain/toolchain.gni for details. + tool("stamp") { + command = stamp_command + description = stamp_description + } + tool("copy") { + command = copy_command + description = copy_description + } + + tool("copy_bundle_data") { + # copy_command uses a hardlink if possible but this does not work with + # directories. If source is a directory, instead use "pax" to create + # the same tree structure using hardlinks to individual files (this + # preserves symbolic links too) as recommended in the replies to the + # question at http://serverfault.com/q/209888/43689 ("cp -al" isn't + # available on macOS). + # + # According to the man page for pax, the commands to use to clone + # olddir to newdir using pax are the following: + # + # $ mkdir newdir + # $ cd olddir + # $ pax -rwl . ../newdir + # + # The _copydir command does exactly that but uses an absolute path + # constructed using shell variable $OLDPWD (automatically set when + # cd is used) as computing the relative path is a bit complex and + # using pwd would require a sub-shell to be created. + _copydir = "mkdir -p {{output}} && cd {{source}} && " + + "pax -rwl .
\"\$OLDPWD\"/{{output}}" + command = "rm -rf {{output}} && if [[ -d {{source}} ]]; then " + + _copydir + "; else " + copy_command + "; fi" + + description = "COPY_BUNDLE_DATA {{source}} {{output}}" + pool = ":bundle_pool($default_toolchain)" + } + tool("compile_xcassets") { + _tool = rebase_path("//build/toolchain/mac/compile_xcassets.py", + root_build_dir) + if (is_ios) { + _sdk_name = ios_sdk_name + _min_deployment_target = ios_deployment_target + } else { + _sdk_name = mac_sdk_name + _min_deployment_target = mac_deployment_target + } + command = "rm -f {{output}} && " + + "TOOL_VERSION=${tool_versions.compile_xcassets} " + + "python $_tool -p $_sdk_name -t $_min_deployment_target " + + "-T {{bundle_product_type}} -o {{output}} {{inputs}}" + + description = "COMPILE_XCASSETS {{output}}" + pool = ":bundle_pool($default_toolchain)" + } + } +} + +mac_toolchain("clang_arm") { + toolchain_args = { + current_cpu = "arm" + current_os = "mac" + } +} + +mac_toolchain("clang_x64") { + toolchain_args = { + current_cpu = "x64" + current_os = "mac" + } +} + +if (is_ios) { + mac_toolchain("ios_clang_arm") { + toolchain_args = { + current_cpu = "arm" + current_os = "ios" + } + } + + mac_toolchain("ios_clang_arm64") { + toolchain_args = { + current_cpu = "arm64" + current_os = "ios" + } + } + + mac_toolchain("ios_clang_x86") { + toolchain_args = { + current_cpu = "x86" + current_os = "ios" + } + } + + mac_toolchain("ios_clang_x64") { + toolchain_args = { + current_cpu = "x64" + current_os = "ios" + } + } +} diff --git a/build/toolchain/mac/compile_xcassets.py b/build/toolchain/mac/compile_xcassets.py new file mode 100644 index 00000000000..ac0742eb56f --- /dev/null +++ b/build/toolchain/mac/compile_xcassets.py @@ -0,0 +1,105 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import subprocess +import sys + + +def CompileXCAssets( + output, platform, product_type, min_deployment_target, inputs): + """Compile the .xcassets bundles to an asset catalog using actool. + + Args: + output: absolute path to the containing bundle + platform: the targetted platform + product_type: the bundle type + min_deployment_target: minimum deployment target + inputs: list of absolute paths to .xcassets bundles + """ + command = [ + 'xcrun', 'actool', '--output-format=human-readable-text', + '--compress-pngs', '--notices', '--warnings', '--errors', + '--platform', platform, '--minimum-deployment-target', + min_deployment_target, + ] + + if product_type != '': + command.extend(['--product-type', product_type]) + + if platform == 'macosx': + command.extend(['--target-device', 'mac']) + else: + command.extend(['--target-device', 'iphone', '--target-device', 'ipad']) + + # actool crashes if paths are relative, so convert input and output paths + # to absolute paths. + command.extend(['--compile', os.path.dirname(os.path.abspath(output))]) + command.extend(map(os.path.abspath, inputs)) + + # Run actool and redirect stdout and stderr to the same pipe (as actool + # is confused about what should go to stderr/stdout). 
+ process = subprocess.Popen( + command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + stdout, _ = process.communicate() + + if process.returncode: + sys.stderr.write(stdout) + sys.exit(process.returncode) + + # In case of success, the output looks like the following: + # /* com.apple.actool.compilation-results */ + # /Full/Path/To/Bundle.app/Assets.car + # + # Ignore any lines in the output matching those (last line is an empty line) + # and consider that the build failed if the output contains any other lines. + for line in stdout.splitlines(): + if not line: + continue + if line == '/* com.apple.actool.compilation-results */': + continue + if line == os.path.abspath(output): + continue + sys.stderr.write(stdout) + sys.exit(1) + + +def Main(): + parser = argparse.ArgumentParser( + description='compile assets catalog for a bundle') + parser.add_argument( + '--platform', '-p', required=True, + choices=('macosx', 'iphoneos', 'iphonesimulator'), + help='target platform for the compiled assets catalog') + parser.add_argument( + '--minimum-deployment-target', '-t', required=True, + help='minimum deployment target for the compiled assets catalog') + parser.add_argument( + '--output', '-o', required=True, + help='path to the compiled assets catalog') + parser.add_argument( + '--product-type', '-T', + help='type of the containing bundle') + parser.add_argument( + 'inputs', nargs='+', + help='path to input assets catalog sources') + args = parser.parse_args() + + if os.path.basename(args.output) != 'Assets.car': + sys.stderr.write( + 'output should be path to compiled asset catalog, not ' + 'to the containing bundle: %s\n' % (args.output,)) + sys.exit(1) + + CompileXCAssets( + args.output, + args.platform, + args.product_type, + args.minimum_deployment_target, + args.inputs) + + +if __name__ == '__main__': + sys.exit(Main()) diff --git a/build/toolchain/mac/filter_libtool.py b/build/toolchain/mac/filter_libtool.py new file mode 100644 index 00000000000..91ccc9799ba --- /dev/null +++ b/build/toolchain/mac/filter_libtool.py @@ -0,0 +1,42 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import re +import subprocess +import sys + +# This script executes libtool and filters out logspam lines like: +# '/path/to/libtool: file: foo.o has no symbols' + +def Main(cmd_list): + libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?' + r'file: .* has no symbols$') + libtool_re5 = re.compile( + r'^.*libtool: warning for library: ' + + r'.* the table of contents is empty ' + + r'\(no object file members in the library define global symbols\)$') + env = os.environ.copy() + # Ref: + # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c + # The problem with this flag is that it resets the file mtime on the file to + # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. + env['ZERO_AR_DATE'] = '1' + libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) + _, err = libtoolout.communicate() + for line in err.splitlines(): + if not libtool_re.match(line) and not libtool_re5.match(line): + print >>sys.stderr, line + # Unconditionally touch the output .a file on the command line if present + # and the command succeeded. A bit hacky.
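(As a concrete reference for the two regexes above, the libtool logspam they are meant to swallow looks like the following; the paths and archive names are samples, not captured output.)

    suppressed_samples = [
        '/usr/bin/libtool: file: foo.o has no symbols',
        '/usr/bin/libtool: warning for library: libfoo.a the table of '
        'contents is empty (no object file members in the library define '
        'global symbols)',
    ]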
+ if not libtoolout.returncode: + for i in range(len(cmd_list) - 1): + if cmd_list[i] == '-o' and cmd_list[i+1].endswith('.a'): + os.utime(cmd_list[i+1], None) + break + return libtoolout.returncode + + +if __name__ == '__main__': + sys.exit(Main(sys.argv[1:])) diff --git a/build/toolchain/mac/get_tool_mtime.py b/build/toolchain/mac/get_tool_mtime.py new file mode 100644 index 00000000000..4106344b821 --- /dev/null +++ b/build/toolchain/mac/get_tool_mtime.py @@ -0,0 +1,17 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import sys + +# Usage: python get_tool_mtime.py path/to/file1.py path/to/file2.py +# +# Prints a GN scope with the variable name being the basename sans-extension +# and the value being the file modification time. A variable is emitted for +# each file argument on the command line. + +if __name__ == '__main__': + for f in sys.argv[1:]: + variable = os.path.splitext(os.path.basename(f))[0] + print '%s = %d' % (variable, os.path.getmtime(f)) diff --git a/build/toolchain/mac/linker_driver.py b/build/toolchain/mac/linker_driver.py new file mode 100644 index 00000000000..40676f6427d --- /dev/null +++ b/build/toolchain/mac/linker_driver.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python + +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import os.path +import shutil +import subprocess +import sys + +# The linker_driver.py is responsible for forwarding a linker invocation to +# the compiler driver, while processing special arguments itself. +# +# Usage: linker_driver.py clang++ main.o -L. -llib -o prog -Wcrl,dsym,out +# +# On Mac, the logical step of linking is handled by three discrete tools to +# perform the image link, debug info link, and strip. The linker_driver.py +# combines these three steps into a single tool. +# +# The command passed to the linker_driver.py should be the compiler driver +# invocation for the linker. It is first invoked unaltered (except for the +# removal of the special driver arguments, described below). Then the driver +# performs additional actions, based on these arguments: +# +# -Wcrl,dsym,<dsym_path_prefix> +# After invoking the linker, this will run `dsymutil` on the linker's +# output, producing a dSYM bundle, stored at <dsym_path_prefix>. As an +# example, if the linker driver were invoked with: +# "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..." +# The resulting dSYM would be out/gn/libbar.dylib.dSYM/. +# +# -Wcrl,unstripped,<unstripped_path_prefix> +# After invoking the linker, and before strip, this will save a copy of +# the unstripped linker output in the directory <unstripped_path_prefix>. +# +# -Wcrl,strip,<strip_arguments> +# After invoking the linker, and optionally dsymutil, this will run +# the strip command on the linker's output. <strip_arguments> are +# comma-separated arguments to be passed to the strip command. + +def Main(args): +  """Main function for the linker driver. Separates out the arguments for + the main compiler driver and the linker driver, then invokes all the + required tools. + + Args: + args: list of string, Arguments to the script. + """ + + if len(args) < 2: + raise RuntimeError("Usage: linker_driver.py [linker-invocation]") + + # Collect arguments to the linker driver (this script) and remove them from + # the arguments being passed to the compiler driver.
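(The split described in that comment is simple enough to show standalone. A sketch with an invented argument list; the script's actual implementation follows.)

    args = ['clang++', 'main.o', '-o', 'libbar.dylib', '-Wcrl,dsym,out/gn']
    driver_args = [a for a in args if a.startswith('-Wcrl,')]
    compiler_args = [a for a in args if not a.startswith('-Wcrl,')]
    # driver_args   -> ['-Wcrl,dsym,out/gn']          (handled by this script)
    # compiler_args -> ['clang++', 'main.o', '-o', 'libbar.dylib']  (forwarded)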
+ linker_driver_actions = {} + compiler_driver_args = [] + for arg in args[1:]: + if arg.startswith(_LINKER_DRIVER_ARG_PREFIX): + # Convert driver actions into a map of name => lambda to invoke. + driver_action = ProcessLinkerDriverArg(arg) + assert driver_action[0] not in linker_driver_actions + linker_driver_actions[driver_action[0]] = driver_action[1] + else: + compiler_driver_args.append(arg) + + linker_driver_outputs = [_FindLinkerOutput(compiler_driver_args)] + + try: + # Run the linker by invoking the compiler driver. + subprocess.check_call(compiler_driver_args) + + # Run the linker driver actions, in the order specified by the actions list. + for action in _LINKER_DRIVER_ACTIONS: + name = action[0] + if name in linker_driver_actions: + linker_driver_outputs += linker_driver_actions[name](args) + except: + # If a linker driver action failed, remove all the outputs to make the + # build step atomic. + map(_RemovePath, linker_driver_outputs) + + # Re-report the original failure. + raise + + +def ProcessLinkerDriverArg(arg): + """Processes a linker driver argument and returns a tuple containing the + name and unary lambda to invoke for that linker driver action. + + Args: + arg: string, The linker driver argument. + + Returns: + A 2-tuple: + 0: The driver action name, as in _LINKER_DRIVER_ACTIONS. + 1: A 1-ary lambda that takes the full list of arguments passed to + Main(). The lambda should call the linker driver action that + corresponds to the argument and return a list of outputs from the + action. + """ + if not arg.startswith(_LINKER_DRIVER_ARG_PREFIX): + raise ValueError('%s is not a linker driver argument' % (arg,)) + + sub_arg = arg[len(_LINKER_DRIVER_ARG_PREFIX):] + + for driver_action in _LINKER_DRIVER_ACTIONS: + (name, action) = driver_action + if sub_arg.startswith(name): + return (name, + lambda full_args: action(sub_arg[len(name):], full_args)) + + raise ValueError('Unknown linker driver argument: %s' % (arg,)) + + +def RunDsymUtil(dsym_path_prefix, full_args): + """Linker driver action for -Wcrl,dsym,<dsym_path_prefix>. Invokes dsymutil + on the linker's output and produces a dSYM bundle under |dsym_path_prefix|. + + Args: + dsym_path_prefix: string, The path at which the dsymutil output should be + located. + full_args: list of string, Full argument list for the linker driver. + + Returns: + list of string, Build step outputs. + """ + if not len(dsym_path_prefix): + raise ValueError('Unspecified dSYM output file') + + linker_out = _FindLinkerOutput(full_args) + (head, tail) = os.path.split(linker_out) + dsym_out = os.path.join(dsym_path_prefix, tail + '.dSYM') + + # Remove old dSYMs before invoking dsymutil. + _RemovePath(dsym_out) + subprocess.check_call(['xcrun', 'dsymutil', '-o', dsym_out, linker_out]) + return [dsym_out] + + +def RunSaveUnstripped(unstripped_path_prefix, full_args): + """Linker driver action for -Wcrl,unstripped,<unstripped_path_prefix>. Copies + the linker output to |unstripped_path_prefix| before stripping. + + Args: + unstripped_path_prefix: string, The path at which the unstripped output + should be located. + full_args: list of string, Full argument list for the linker driver. + + Returns: + list of string, Build step outputs.
+ """ + if not len(unstripped_path_prefix): + raise ValueError('Unspecified unstripped output file') + + linker_out = _FindLinkerOutput(full_args) + (head, tail) = os.path.split(linker_out) + unstripped_out = os.path.join(unstripped_path_prefix, tail + '.unstripped') + + shutil.copyfile(linker_out, unstripped_out) + return [unstripped_out] + + +def RunStrip(strip_args_string, full_args): + """Linker driver action for -Wcrl,strip,. + + Args: + strip_args_string: string, Comma-separated arguments for `strip`. + full_args: list of string, Full arguments for the linker driver. + + Returns: + list of string, Build step outputs. + """ + strip_command = ['xcrun', 'strip'] + if len(strip_args_string) > 0: + strip_command += strip_args_string.split(',') + strip_command.append(_FindLinkerOutput(full_args)) + subprocess.check_call(strip_command) + return [] + + +def _FindLinkerOutput(full_args): + """Finds the output of the linker by looking for the output flag in its + argument list. As this is a required linker argument, raises an error if it + cannot be found. + """ + # The linker_driver.py script may be used to wrap either the compiler linker + # (uses -o to configure the output) or lipo (uses -output to configure the + # output). Since wrapping the compiler linker is the most likely possibility + # use try/except and fallback to checking for -output if -o is not found. + try: + output_flag_index = full_args.index('-o') + except ValueError: + output_flag_index = full_args.index('-output') + return full_args[output_flag_index + 1] + + +def _RemovePath(path): + """Removes the file or directory at |path| if it exists.""" + if os.path.exists(path): + if os.path.isdir(path): + shutil.rmtree(path) + else: + os.unlink(path) + + +_LINKER_DRIVER_ARG_PREFIX = '-Wcrl,' + +"""List of linker driver actions. The sort order of this list affects the +order in which the actions are invoked. The first item in the tuple is the +argument's -Wcrl, and the second is the function to invoke. +""" +_LINKER_DRIVER_ACTIONS = [ + ('dsym,', RunDsymUtil), + ('unstripped,', RunSaveUnstripped), + ('strip,', RunStrip), +] + + +if __name__ == '__main__': + Main(sys.argv) + sys.exit(0) diff --git a/build/toolchain/nacl/BUILD.gn b/build/toolchain/nacl/BUILD.gn new file mode 100644 index 00000000000..78748bd40ed --- /dev/null +++ b/build/toolchain/nacl/BUILD.gn @@ -0,0 +1,263 @@ +# Copyright (c) 2014 The Native Client Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sysroot.gni") +import("//build/config/nacl/config.gni") +import("//build/toolchain/nacl_toolchain.gni") + +# Add the toolchain revision as a preprocessor define so that sources are +# rebuilt when a toolchain is updated. +# Idea we could use the toolchain deps feature, but currently that feature is +# bugged and does not trigger a rebuild. +# https://code.google.com/p/chromium/issues/detail?id=431880 +# Calls to get the toolchain revision are relatively slow, so do them all in a +# single batch to amortize python startup, etc. +revisions = exec_script("//native_client/build/get_toolchain_revision.py", + [ + "nacl_x86_glibc", + "nacl_arm_glibc", + "pnacl_newlib", + ], + "trim list lines") +nacl_x86_glibc_rev = revisions[0] +nacl_arm_glibc_rev = revisions[1] + +pnacl_newlib_rev = revisions[2] + +if (host_os == "win") { + toolsuffix = ".exe" +} else { + toolsuffix = "" +} + +# The PNaCl toolchain tools are all wrapper scripts rather than binary +# executables. 
On POSIX systems, nobody cares what kind of executable
+# file you are. But on Windows, scripts (.bat files) cannot be run
+# directly and need the Windows shell (cmd.exe) specified explicitly.
+if (host_os == "win") {
+  # NOTE! The //build/toolchain/gcc_*_wrapper.py scripts recognize
+  # this exact prefix string, so they must be updated if this string
+  # is changed in any way.
+  scriptprefix = "cmd /c call "
+  scriptsuffix = ".bat"
+} else {
+  scriptprefix = ""
+  scriptsuffix = ""
+}
+
+# When the compilers are run via goma or ccache rather than directly by
+# GN/Ninja, the goma/ccache wrapper handles .bat files but gets confused
+# by being given the scriptprefix.
+if (host_os == "win" && !use_goma && cc_wrapper == "") {
+  compiler_scriptprefix = scriptprefix
+} else {
+  compiler_scriptprefix = ""
+}
+
+template("pnacl_toolchain") {
+  assert(defined(invoker.executable_extension),
+         "Must define executable_extension")
+
+  nacl_toolchain(target_name) {
+    toolchain_package = "pnacl_newlib"
+    toolchain_revision = pnacl_newlib_rev
+    toolprefix =
+        rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/pnacl-",
+                    root_build_dir)
+
+    cc = compiler_scriptprefix + toolprefix + "clang" + scriptsuffix
+    cxx = compiler_scriptprefix + toolprefix + "clang++" + scriptsuffix
+    ar = scriptprefix + toolprefix + "ar" + scriptsuffix
+    readelf = scriptprefix + toolprefix + "readelf" + scriptsuffix
+    nm = scriptprefix + toolprefix + "nm" + scriptsuffix
+    if (defined(invoker.strip)) {
+      strip = scriptprefix + toolprefix + invoker.strip + scriptsuffix
+    }
+
+    # Note this is not the usual "ld = cxx" because "ld" uses are
+    # never run via goma, so this needs scriptprefix.
+    ld = scriptprefix + toolprefix + "clang++" + scriptsuffix
+
+    executable_extension = invoker.executable_extension
+
+    toolchain_args = {
+      is_clang = true
+      current_cpu = "pnacl"
+    }
+  }
+}
+
+pnacl_toolchain("newlib_pnacl") {
+  executable_extension = ".pexe"
+
+  # The pnacl-finalize tool turns a .pexe.debug file into a .pexe file.
+  # It's very similar in purpose to the traditional "strip" utility: it
+  # turns what comes out of the linker into what you actually want to
+  # distribute and run. PNaCl doesn't have a "strip"-like utility that
+  # you ever actually want to use other than pnacl-finalize, so just
+  # make pnacl-finalize the strip tool rather than adding an additional
+  # step like "postlink" to run pnacl-finalize.
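+  # With the template above this resolves to the pnacl-finalize wrapper
+  # script, e.g. (illustrative) "cmd /c call .../bin/pnacl-finalize.bat" on a
+  # Windows host and ".../bin/pnacl-finalize" elsewhere.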
+ strip = "finalize" +} + +pnacl_toolchain("newlib_pnacl_nonsfi") { + executable_extension = "" + strip = "strip" +} + +template("nacl_glibc_toolchain") { + toolchain_cpu = target_name + assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple") + assert(defined(invoker.toolchain_package), "Must define toolchain_package") + assert(defined(invoker.toolchain_revision), "Must define toolchain_revision") + forward_variables_from(invoker, + [ + "toolchain_package", + "toolchain_revision", + ]) + + toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" + + invoker.toolchain_tuple + "-", + root_build_dir) + + nacl_toolchain("glibc_" + toolchain_cpu) { + cc = toolprefix + "gcc" + toolsuffix + cxx = toolprefix + "g++" + toolsuffix + ar = toolprefix + "ar" + toolsuffix + ld = cxx + readelf = toolprefix + "readelf" + toolsuffix + nm = toolprefix + "nm" + toolsuffix + strip = toolprefix + "strip" + toolsuffix + + toolchain_args = { + current_cpu = toolchain_cpu + is_clang = false + is_nacl_glibc = true + } + } +} + +nacl_glibc_toolchain("x86") { + toolchain_package = "nacl_x86_glibc" + toolchain_revision = nacl_x86_glibc_rev + + # Rely on the :compiler_cpu_abi config adding the -m32 flag here rather + # than using the i686-nacl binary directly. This is a because i686-nacl-gcc + # is a shell script wrapper around x86_64-nacl-gcc and goma has trouble with + # compiler executables that are shell scripts (so the i686 'compiler' is not + # currently in goma). + toolchain_tuple = "x86_64-nacl" +} + +nacl_glibc_toolchain("x64") { + toolchain_package = "nacl_x86_glibc" + toolchain_revision = nacl_x86_glibc_rev + toolchain_tuple = "x86_64-nacl" +} + +nacl_glibc_toolchain("arm") { + toolchain_package = "nacl_arm_glibc" + toolchain_revision = nacl_arm_glibc_rev + toolchain_tuple = "arm-nacl" +} + +template("nacl_clang_toolchain") { + toolchain_cpu = target_name + assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple") + + toolchain_package = "pnacl_newlib" + toolchain_revision = pnacl_newlib_rev + toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" + + invoker.toolchain_tuple + "-", + root_build_dir) + + nacl_toolchain("clang_newlib_" + toolchain_cpu) { + cc = toolprefix + "clang" + toolsuffix + cxx = toolprefix + "clang++" + toolsuffix + ar = toolprefix + "ar" + toolsuffix + ld = cxx + readelf = toolprefix + "readelf" + toolsuffix + nm = toolprefix + "nm" + toolsuffix + strip = toolprefix + "strip" + toolsuffix + + toolchain_args = { + current_cpu = toolchain_cpu + is_clang = true + } + } +} + +template("nacl_irt_toolchain") { + toolchain_cpu = target_name + assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple") + + toolchain_package = "pnacl_newlib" + toolchain_revision = pnacl_newlib_rev + toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" + + invoker.toolchain_tuple + "-", + root_build_dir) + + link_irt = rebase_path("//native_client/build/link_irt.py", root_build_dir) + + tls_edit_label = + "//native_client/src/tools/tls_edit:tls_edit($host_toolchain)" + host_toolchain_out_dir = + rebase_path(get_label_info(tls_edit_label, "root_out_dir"), + root_build_dir) + tls_edit = "${host_toolchain_out_dir}/tls_edit" + + nacl_toolchain("irt_" + toolchain_cpu) { + cc = toolprefix + "clang" + toolsuffix + cxx = toolprefix + "clang++" + toolsuffix + ar = toolprefix + "ar" + toolsuffix + readelf = toolprefix + "readelf" + toolsuffix + nm = toolprefix + "nm" + toolsuffix + strip = toolprefix + "strip" + 
toolsuffix + + # Some IRT implementations (notably, Chromium's) contain C++ code, + # so we need to link w/ the C++ linker. + ld = "${python_path} ${link_irt} --tls-edit=${tls_edit} --link-cmd=${cxx} --readelf-cmd=${readelf}" + + toolchain_args = { + current_cpu = toolchain_cpu + is_clang = true + + # Always build the IRT with full debugging symbols, regardless of + # how Chromium itself is being built (or other NaCl executables). + symbol_level = 2 + } + + # TODO(ncbray): depend on link script + deps = [ + tls_edit_label, + ] + } +} + +template("nacl_clang_toolchains") { + assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple") + nacl_clang_toolchain(target_name) { + toolchain_tuple = invoker.toolchain_tuple + } + nacl_irt_toolchain(target_name) { + toolchain_tuple = invoker.toolchain_tuple + } +} + +nacl_clang_toolchains("x86") { + # Rely on :compiler_cpu_abi adding -m32. See nacl_x86_glibc above. + toolchain_tuple = "x86_64-nacl" +} + +nacl_clang_toolchains("x64") { + toolchain_tuple = "x86_64-nacl" +} + +nacl_clang_toolchains("arm") { + toolchain_tuple = "arm-nacl" +} + +nacl_clang_toolchains("mipsel") { + toolchain_tuple = "mipsel-nacl" +} diff --git a/build/toolchain/nacl_toolchain.gni b/build/toolchain/nacl_toolchain.gni new file mode 100644 index 00000000000..eb6ffcce0c0 --- /dev/null +++ b/build/toolchain/nacl_toolchain.gni @@ -0,0 +1,55 @@ +# Copyright (c) 2014 The Native Client Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/nacl/config.gni") +import("//build/toolchain/gcc_toolchain.gni") + +# This template defines a NaCl toolchain. +# +# It requires the following variables specifying the executables to run: +# - cc +# - cxx +# - ar +# - ld + +template("nacl_toolchain") { + assert(defined(invoker.cc), "nacl_toolchain() must specify a \"cc\" value") + assert(defined(invoker.cxx), "nacl_toolchain() must specify a \"cxx\" value") + assert(defined(invoker.ar), "nacl_toolchain() must specify a \"ar\" value") + assert(defined(invoker.ld), "nacl_toolchain() must specify a \"ld\" value") + gcc_toolchain(target_name) { + if (defined(invoker.executable_extension)) { + executable_extension = invoker.executable_extension + } else { + executable_extension = ".nexe" + } + rebuild_define = "NACL_TC_REV=" + invoker.toolchain_revision + + forward_variables_from(invoker, + [ + "ar", + "cc", + "cxx", + "deps", + "ld", + "link_outputs", + "nm", + "readelf", + "strip", + ]) + + toolchain_args = { + # Use all values set on the invoker's toolchain_args. + forward_variables_from(invoker.toolchain_args, "*") + + current_os = "nacl" + + # We do not support component builds with the NaCl toolchains. + is_component_build = false + + # We do not support tcmalloc in the NaCl toolchains. + use_allocator = "none" + } + } +} diff --git a/build/toolchain/toolchain.gni b/build/toolchain/toolchain.gni new file mode 100644 index 00000000000..6fedfccdf4e --- /dev/null +++ b/build/toolchain/toolchain.gni @@ -0,0 +1,105 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Toolchain-related configuration that may be needed outside the context of the +# toolchain() rules themselves. + +import("//build/config/chrome_build.gni") + +declare_args() { + # Enable Link Time Optimization in optimized builds (output programs run + # faster, but linking is up to 5-20x slower). 
+ # Note: use target_os == "linux" rather than is_linux so that it does not + # apply to host_toolchain when target_os="android". + allow_posix_link_time_opt = + target_os == "linux" && !is_chromeos && target_cpu == "x64" && + is_chrome_branded && is_official_build + + # Set to true to use lld, the LLVM linker. This flag may be used on Windows + # with the shipped LLVM toolchain, or on Linux with a self-built top-of-tree + # LLVM toolchain (see llvm_force_head_revision in + # build/config/compiler/BUILD.gn). + use_lld = is_win && host_os != "win" + + # If this is set to true, or if LLVM_FORCE_HEAD_REVISION is set to 1 + # in the environment, we use the revision in the llvm repo to determine + # the CLANG_REVISION to use, instead of the version hard-coded into + # //tools/clang/scripts/update.py. This should only be used in + # conjunction with setting LLVM_FORCE_HEAD_REVISION in the + # environment when `gclient runhooks` is run as well. + llvm_force_head_revision = false + + # Compile with Xcode version of clang instead of hermetic version shipped + # with the build. Used on iOS to ship official builds (as they are built + # with the version of clang shipped with Xcode). + use_xcode_clang = is_ios && is_official_build +} + +declare_args() { + if (is_clang) { + # Clang compiler version. Clang files are placed at version-dependent paths. + clang_version = "4.0.0" + } +} + +# Check target_os here instead of is_ios as this file is loaded for secondary +# toolchain (host toolchain in particular) but the argument is the same for +# all toolchains. +assert(!use_xcode_clang || target_os == "ios", + "Using Xcode's clang is only supported in iOS builds") + +# Subdirectory within root_out_dir for shared library files. +# TODO(agrieve): GYP sets this to "lib" for Linux & Android, but this won't work +# in GN until support for loadable_module() is added. +# See: https://codereview.chromium.org/1236503002/ +if (is_linux) { + shlib_subdir = "lib" +} else { + shlib_subdir = "." +} + +# Root out dir for shared library files. +root_shlib_dir = root_out_dir +if (shlib_subdir != ".") { + root_shlib_dir += "/$shlib_subdir" +} + +# Extension for shared library files (including leading dot). +if (is_mac || is_ios) { + shlib_extension = ".dylib" +} else if (is_android && is_component_build) { + # By appending .cr, we prevent name collisions with libraries already + # loaded by the Android zygote. + shlib_extension = ".cr.so" +} else if (is_posix) { + shlib_extension = ".so" +} else if (is_win) { + shlib_extension = ".dll" +} else { + assert(false, "Platform not supported") +} + +# Prefix for shared library files. +if (is_posix) { + shlib_prefix = "lib" +} else { + shlib_prefix = "" +} + +# While other "tool"s in a toolchain are specific to the target of that +# toolchain, the "stamp" and "copy" tools are really generic to the host; +# but each toolchain must define them separately. GN doesn't allow a +# template instantiation inside a toolchain definition, so some boilerplate +# has to be repeated in each toolchain to define these two tools. These +# four variables reduce the duplication in that boilerplate. 
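+#
+# For illustration, a toolchain() definition is expected to consume these
+# variables roughly as follows (the same pattern appears in the
+# msvc_toolchain template later in this patch):
+#
+#   tool("stamp") {
+#     command = stamp_command
+#     description = stamp_description
+#   }
+#   tool("copy") {
+#     command = copy_command
+#     description = copy_description
+#   }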
+stamp_description = "STAMP {{output}}" +copy_description = "COPY {{source}} {{output}}" +if (host_os == "win") { + stamp_command = "$python_path gyp-win-tool stamp {{output}}" + copy_command = + "$python_path gyp-win-tool recursive-mirror {{source}} {{output}}" +} else { + stamp_command = "touch {{output}}" + copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})" +} diff --git a/build/toolchain/win/BUILD.gn b/build/toolchain/win/BUILD.gn new file mode 100644 index 00000000000..0549a6b698d --- /dev/null +++ b/build/toolchain/win/BUILD.gn @@ -0,0 +1,431 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/win/visual_studio_version.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/toolchain.gni") + +# Should only be running on Windows. +assert(is_win) + +# Setup the Visual Studio state. +# +# Its arguments are the VS path and the compiler wrapper tool. It will write +# "environment.x86" and "environment.x64" to the build directory and return a +# list to us. +gyp_win_tool_path = + rebase_path("//tools/gyp/pylib/gyp/win_tool.py", root_build_dir) + +if (use_goma) { + goma_prefix = "$goma_dir/gomacc.exe " +} else { + goma_prefix = "" +} + +# Copy the VS runtime DLL for the default toolchain to the root build directory +# so things will run. +if (current_toolchain == default_toolchain) { + if (is_debug) { + configuration_name = "Debug" + } else { + configuration_name = "Release" + } + exec_script("../../vs_toolchain.py", + [ + "copy_dlls", + rebase_path(root_build_dir), + configuration_name, + target_cpu, + ]) +} + +# Parameters: +# environment: File name of environment file. +# +# You would also define a toolchain_args variable with at least these set: +# current_cpu: current_cpu to pass as a build arg +# current_os: current_os to pass as a build arg +template("msvc_toolchain") { + toolchain(target_name) { + # When invoking this toolchain not as the default one, these args will be + # passed to the build. They are ignored when this is the default toolchain. + assert(defined(invoker.toolchain_args)) + toolchain_args = { + if (defined(invoker.toolchain_args)) { + forward_variables_from(invoker.toolchain_args, "*") + } + + # This value needs to be passed through unchanged. + host_toolchain = host_toolchain + + current_os = "win" + } + + # Make these apply to all tools below. + lib_switch = "" + lib_dir_switch = "/LIBPATH:" + + # Object files go in this directory. + object_subdir = "{{target_out_dir}}/{{label_name}}" + + env = invoker.environment + + # When the invoker has explicitly overridden use_goma or cc_wrapper in the + # toolchain args, use those values, otherwise default to the global one. + # This works because the only reasonable override that toolchains might + # supply for these values are to force-disable them. + if (defined(toolchain_args.is_clang)) { + toolchain_uses_clang = toolchain_args.is_clang + } else { + toolchain_uses_clang = is_clang + } + + if (toolchain_uses_clang && host_os != "win") { + # This toolchain definition uses response files for compilations. GN uses + # the quoting rules of the host OS, while clang-cl always defaults to + # cmd.exe quoting rules for parsing response files. 
Tell clang-cl to use + # POSIX quoting rules, so it can understand what GN generates. + cl = "${invoker.cl} --rsp-quoting=posix" + } else { + cl = invoker.cl + } + + if (use_lld) { + if (host_os == "win") { + lld_link = "lld-link.exe" + } else { + lld_link = "lld-link" + } + prefix = rebase_path("$clang_base_path/bin", root_build_dir) + + # lld-link includes a replacement for lib.exe that can produce thin + # archives and understands bitcode (for lto builds). + lib = "$prefix/$lld_link /lib /llvmlibthin" + link = "$prefix/$lld_link" + } else { + lib = "lib.exe" + link = "link.exe" + } + + # If possible, pass system includes as flags to the compiler. When that's + # not possible, load a full environment file (containing %INCLUDE% and + # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just + # passing in a list of include directories isn't enough. + if (defined(invoker.sys_include_flags)) { + env_wrapper = "" + sys_include_flags = "${invoker.sys_include_flags} " # Note trailing space. + } else { + # clang-cl doesn't need this env hoop, so omit it there. + assert(!toolchain_uses_clang) + env_wrapper = "ninja -t msvc -e $env -- " # Note trailing space. + sys_include_flags = "" + } + + tool("cc") { + rspfile = "{{output}}.rsp" + precompiled_header_type = "msvc" + pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb" + + # Label names may have spaces in them so the pdbname must be quoted. The + # source and output don't need to be quoted because GN knows they're a + # full file name and will quote automatically when necessary. + command = "$env_wrapper$cl /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd\"$pdbname\"" + depsformat = "msvc" + description = "CC {{output}}" + outputs = [ + "$object_subdir/{{source_name_part}}.obj", + ] + rspfile_content = "$sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}" + } + + tool("cxx") { + rspfile = "{{output}}.rsp" + precompiled_header_type = "msvc" + + # The PDB name needs to be different between C and C++ compiled files. + pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb" + + # See comment in CC tool about quoting. + command = "$env_wrapper$cl /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd\"$pdbname\"" + depsformat = "msvc" + description = "CXX {{output}}" + outputs = [ + "$object_subdir/{{source_name_part}}.obj", + ] + rspfile_content = "$sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}" + } + + tool("rc") { + command = "$python_path gyp-win-tool rc-wrapper $env rc.exe {{defines}} {{include_dirs}} /fo{{output}} {{source}}" + outputs = [ + "$object_subdir/{{source_name_part}}.res", + ] + description = "RC {{output}}" + } + + tool("asm") { + if (toolchain_args.current_cpu == "x64") { + ml = "ml64.exe" + } else { + ml = "ml.exe" + } + command = "$python_path gyp-win-tool asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} /c /Fo{{output}} {{source}}" + description = "ASM {{output}}" + outputs = [ + "$object_subdir/{{source_name_part}}.obj", + ] + } + + tool("alink") { + rspfile = "{{output}}.rsp" + command = "$python_path gyp-win-tool link-wrapper $env False $lib /nologo {{arflags}} /OUT:{{output}} @$rspfile" + description = "LIB {{output}}" + outputs = [ + # Ignore {{output_extension}} and always use .lib, there's no reason to + # allow targets to override this extension on Windows. 
+ "{{output_dir}}/{{target_output_name}}.lib", + ] + default_output_extension = ".lib" + default_output_dir = "{{target_out_dir}}" + + # The use of inputs_newline is to work around a fixed per-line buffer + # size in the linker. + rspfile_content = "{{inputs_newline}}" + } + + tool("solink") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # e.g. foo.dll + libname = "${dllname}.lib" # e.g. foo.dll.lib + pdbname = "${dllname}.pdb" + rspfile = "${dllname}.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + command = "$python_path gyp-win-tool link-wrapper $env False $link /nologo /IMPLIB:$libname /DLL /OUT:$dllname /PDB:$pdbname @$rspfile" + + default_output_extension = ".dll" + default_output_dir = "{{root_out_dir}}" + description = "LINK(DLL) {{output}}" + outputs = [ + dllname, + libname, + ] + link_output = libname + depend_output = libname + runtime_outputs = [ dllname ] + if (symbol_level != 0) { + outputs += [ pdbname ] + runtime_outputs += [ pdbname ] + } + + # Since the above commands only updates the .lib file when it changes, ask + # Ninja to check if the timestamp actually changed to know if downstream + # dependencies should be recompiled. + restat = true + + # The use of inputs_newline is to work around a fixed per-line buffer + # size in the linker. + rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}" + } + + tool("solink_module") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # e.g. foo.dll + pdbname = "${dllname}.pdb" + rspfile = "${dllname}.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + command = "$python_path gyp-win-tool link-wrapper $env False $link /nologo /DLL /OUT:$dllname /PDB:$pdbname @$rspfile" + + default_output_extension = ".dll" + default_output_dir = "{{root_out_dir}}" + description = "LINK_MODULE(DLL) {{output}}" + outputs = [ + dllname, + ] + if (symbol_level != 0) { + outputs += [ pdbname ] + } + runtime_outputs = outputs + + # The use of inputs_newline is to work around a fixed per-line buffer + # size in the linker. + rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}" + } + + tool("link") { + exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + pdbname = "$exename.pdb" + rspfile = "$exename.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + command = "$python_path gyp-win-tool link-wrapper $env False $link /nologo /OUT:$exename /PDB:$pdbname @$rspfile" + + default_output_extension = ".exe" + default_output_dir = "{{root_out_dir}}" + description = "LINK {{output}}" + outputs = [ + exename, + ] + if (symbol_level != 0) { + outputs += [ pdbname ] + } + runtime_outputs = outputs + + # The use of inputs_newline is to work around a fixed per-line buffer + # size in the linker. + rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}" + } + + # These two are really entirely generic, but have to be repeated in + # each toolchain because GN doesn't allow a template to be used here. + # See //build/toolchain/toolchain.gni for details. + tool("stamp") { + command = stamp_command + description = stamp_description + } + tool("copy") { + command = copy_command + description = copy_description + } + } +} + +if (is_clang) { + sys_include_prefix = "-imsvc" +} else { + # MSVC doesn't have the concept of system headers. + sys_include_prefix = "/I" +} + +if (host_os == "win") { + clang_cl = "clang-cl.exe" +} else { + clang_cl = "clang-cl" +} + +# 32-bit toolchains. 
Only define these when the target architecture is 32-bit +# since we don't do any 32-bit cross compiles when targeting 64-bit (the +# build does generate some 64-bit stuff from 32-bit target builds). +if (target_cpu == "x86") { + x86_toolchain_data = exec_script("setup_toolchain.py", + [ + visual_studio_path, + gyp_win_tool_path, + windows_sdk_path, + visual_studio_runtime_dirs, + "x86", + "${sys_include_prefix}", + ], + "scope") + + msvc_toolchain("x86") { + environment = "environment.x86" + cl = "${goma_prefix}\"${x86_toolchain_data.vc_bin_dir}/cl.exe\"" + toolchain_args = { + current_cpu = "x86" + is_clang = false + } + } + + msvc_toolchain("clang_x86") { + environment = "environment.x86" + prefix = rebase_path("$clang_base_path/bin", root_build_dir) + cl = "${goma_prefix}$prefix/${clang_cl}" + sys_include_flags = "${x86_toolchain_data.include_flags}" + + toolchain_args = { + current_cpu = "x86" + is_clang = true + } + } +} + +# 64-bit toolchains. +x64_toolchain_data = exec_script("setup_toolchain.py", + [ + visual_studio_path, + gyp_win_tool_path, + windows_sdk_path, + visual_studio_runtime_dirs, + "x64", + "${sys_include_prefix}", + ], + "scope") + +template("win_x64_toolchains") { + msvc_toolchain(target_name) { + environment = "environment.x64" + cl = "${goma_prefix}\"${x64_toolchain_data.vc_bin_dir}/cl.exe\"" + + toolchain_args = { + if (defined(invoker.toolchain_args)) { + forward_variables_from(invoker.toolchain_args, "*") + } + is_clang = false + current_cpu = "x64" + } + } + + msvc_toolchain("clang_" + target_name) { + environment = "environment.x64" + prefix = rebase_path("$clang_base_path/bin", root_build_dir) + cl = "${goma_prefix}$prefix/${clang_cl}" + sys_include_flags = "${x64_toolchain_data.include_flags}" + + toolchain_args = { + if (defined(invoker.toolchain_args)) { + forward_variables_from(invoker.toolchain_args, "*") + } + is_clang = true + current_cpu = "x64" + } + } +} + +win_x64_toolchains("x64") { + toolchain_args = { + # Use the defaults. + } +} + +# The nacl_win64 toolchain is nearly identical to the plain x64 toolchain. +# It's used solely for building nacl64.exe (//components/nacl/broker:nacl64). +# The only reason it's a separate toolchain is so that it can force +# is_component_build to false in the toolchain_args() block, because +# building nacl64.exe in component style does not work. +win_x64_toolchains("nacl_win64") { + toolchain_args = { + is_component_build = false + } +} + +# WinRT toolchains. Only define these when targeting them. +# +# NOTE: This is currently broken because it references vc_bin_dir. brettw@ +# changed this around a bit, and I don't know what this should be set to +# in terms of what setup_toolchain returns for a certain CPU architecture. +if (target_os == "winrt_81" || target_os == "winrt_81_phone" || + target_os == "winrt_10") { + msvc_toolchain("winrt_x86") { + environment = "environment.winrt_x86" + cl = "${goma_prefix}\"${vc_bin_dir}/cl.exe\"" + + toolchain_args = { + is_clang = false + current_cpu = "x86" + } + } + + msvc_toolchain("winrt_x64") { + environment = "environment.winrt_x64" + cl = "${goma_prefix}\"${vc_bin_dir}/cl.exe\"" + + toolchain_args = { + is_clang = false + current_cpu = "x64" + } + } +} diff --git a/build/toolchain/win/midl.gni b/build/toolchain/win/midl.gni new file mode 100644 index 00000000000..ff5899c8c78 --- /dev/null +++ b/build/toolchain/win/midl.gni @@ -0,0 +1,104 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +assert(is_win) + +import("//build/config/win/visual_studio_version.gni") + +# This template defines a rule to invoke the MS IDL compiler. The generated +# source code will be compiled and linked into targets that depend on this. +# +# Parameters +# +# sources +# List of .idl file to process. +# +# out_dir (optional) +# Directory to write the generated files to. Defaults to target_gen_dir. +# +# deps (optional) +# visibility (optional) + +template("midl") { + action_name = "${target_name}_idl_action" + source_set_name = target_name + + assert(defined(invoker.sources), "Source must be defined for $target_name") + + if (defined(invoker.out_dir)) { + out_dir = invoker.out_dir + } else { + out_dir = target_gen_dir + } + + header_file = "{{source_name_part}}.h" + dlldata_file = "{{source_name_part}}.dlldata.c" + interface_identifier_file = "{{source_name_part}}_i.c" + proxy_file = "{{source_name_part}}_p.c" + type_library_file = "{{source_name_part}}.tlb" + + action_foreach(action_name) { + visibility = [ ":$source_set_name" ] + + # This functionality is handled by the win-tool because the GYP build has + # MIDL support built-in. + # TODO(brettw) move this to a separate MIDL wrapper script for better + # clarity once GYP support is not needed. + script = "$root_build_dir/gyp-win-tool" + + sources = invoker.sources + + # Note that .tlb is not included in the outputs as it is not always + # generated depending on the content of the input idl file. + outputs = [ + "$out_dir/$header_file", + "$out_dir/$dlldata_file", + "$out_dir/$interface_identifier_file", + "$out_dir/$proxy_file", + ] + + if (current_cpu == "x86") { + win_tool_arch = "environment.x86" + idl_target_platform = "win32" + } else if (current_cpu == "x64") { + win_tool_arch = "environment.x64" + idl_target_platform = "x64" + } else { + assert(false, "Need environment for this arch") + } + + args = [ + "midl-wrapper", + win_tool_arch, + rebase_path(out_dir, root_build_dir), + type_library_file, + header_file, + dlldata_file, + interface_identifier_file, + proxy_file, + "{{source}}", + "/char", + "signed", + "/env", + idl_target_platform, + "/Oicf", + ] + + forward_variables_from(invoker, [ "deps" ]) + } + + source_set(target_name) { + forward_variables_from(invoker, [ "visibility" ]) + + # We only compile the IID files from the IDL tool rather than all outputs. + sources = process_file_template(invoker.sources, + [ "$out_dir/$interface_identifier_file" ]) + + public_deps = [ + ":$action_name", + ] + + configs += [ "//build/config/win:midl_warnings" ] + } +} diff --git a/build/toolchain/win/setup_toolchain.py b/build/toolchain/win/setup_toolchain.py new file mode 100644 index 00000000000..0d0975dfe3a --- /dev/null +++ b/build/toolchain/win/setup_toolchain.py @@ -0,0 +1,232 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Copies the given "win tool" (which the toolchain uses to wrap compiler +# invocations) and the environment blocks for the 32-bit and 64-bit builds on +# Windows to the build directory. +# +# The arguments are the visual studio install location and the location of the +# win tool. The script assumes that the root build directory is the current dir +# and the files will be written to the current directory. 
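+#
+# Example invocation (illustrative values; in practice GN passes these
+# arguments via exec_script from build/toolchain/win/BUILD.gn):
+#
+#   python setup_toolchain.py "c:\Program Files (x86)\Microsoft Visual Studio 14.0" \
+#       ../../tools/gyp/pylib/gyp/win_tool.py "c:\win_sdk" "c:\vs\runtime_dirs" \
+#       x64 -imsvc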
+ +import errno +import json +import os +import re +import subprocess +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) +import gn_helpers + +SCRIPT_DIR = os.path.dirname(__file__) + +def _ExtractImportantEnvironment(output_of_set): + """Extracts environment variables required for the toolchain to run from + a textual dump output by the cmd.exe 'set' command.""" + envvars_to_save = ( + 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma. + 'include', + 'lib', + 'libpath', + 'path', + 'pathext', + 'systemroot', + 'temp', + 'tmp', + ) + env = {} + # This occasionally happens and leads to misleading SYSTEMROOT error messages + # if not caught here. + if output_of_set.count('=') == 0: + raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set) + for line in output_of_set.splitlines(): + for envvar in envvars_to_save: + if re.match(envvar + '=', line.lower()): + var, setting = line.split('=', 1) + if envvar == 'path': + # Our own rules (for running gyp-win-tool) and other actions in + # Chromium rely on python being in the path. Add the path to this + # python here so that if it's not in the path when ninja is run + # later, python will still be found. + setting = os.path.dirname(sys.executable) + os.pathsep + setting + env[var.upper()] = setting + break + if sys.platform in ('win32', 'cygwin'): + for required in ('SYSTEMROOT', 'TEMP', 'TMP'): + if required not in env: + raise Exception('Environment variable "%s" ' + 'required to be set to valid path' % required) + return env + + +def _DetectVisualStudioPath(): + """Return path to the GYP_MSVS_VERSION of Visual Studio. + """ + + # Use the code in build/vs_toolchain.py to avoid duplicating code. + chromium_dir = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..', '..')) + sys.path.append(os.path.join(chromium_dir, 'build')) + import vs_toolchain + return vs_toolchain.DetectVisualStudioPath() + + +def _LoadEnvFromBat(args): + """Given a bat command, runs it and returns env vars set by it.""" + args = args[:] + args.extend(('&&', 'set')) + popen = subprocess.Popen( + args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + variables, _ = popen.communicate() + if popen.returncode != 0: + raise Exception('"%s" failed with error %d' % (args, popen.returncode)) + return variables + + +def _LoadToolchainEnv(cpu, sdk_dir): + """Returns a dictionary with environment variables that must be set while + running binaries from the toolchain (e.g. INCLUDE and PATH for cl.exe).""" + # Check if we are running in the SDK command line environment and use + # the setup script from the SDK if so. |cpu| should be either + # 'x86' or 'x64'. + assert cpu in ('x86', 'x64') + if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir: + # Load environment from json file. + env = os.path.normpath(os.path.join(sdk_dir, 'bin/SetEnv.%s.json' % cpu)) + env = json.load(open(env))['env'] + for k in env: + entries = [os.path.join(*([os.path.join(sdk_dir, 'bin')] + e)) + for e in env[k]] + # clang-cl wants INCLUDE to be ;-separated even on non-Windows, + # lld-link wants LIB to be ;-separated even on non-Windows. Path gets :. + # The separator for INCLUDE here must match the one used in main() below. + sep = os.pathsep if k == 'PATH' else ';' + env[k] = sep.join(entries) + # PATH is a bit of a special case, it's in addition to the current PATH. + env['PATH'] = env['PATH'] + os.pathsep + os.environ['PATH'] + # Augment with the current env to pick up TEMP and friends. 
+    for k in os.environ:
+      if k not in env:
+        env[k] = os.environ[k]
+
+    varlines = []
+    for k in sorted(env.keys()):
+      varlines.append('%s=%s' % (str(k), str(env[k])))
+    variables = '\n'.join(varlines)
+
+    # Check that the json file contained the same environment as the .cmd file.
+    if sys.platform in ('win32', 'cygwin'):
+      script = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.cmd'))
+      assert _ExtractImportantEnvironment(variables) == \
+          _ExtractImportantEnvironment(_LoadEnvFromBat([script, '/' + cpu]))
+  else:
+    if 'GYP_MSVS_OVERRIDE_PATH' not in os.environ:
+      os.environ['GYP_MSVS_OVERRIDE_PATH'] = _DetectVisualStudioPath()
+    # We only support x64-hosted tools.
+    script_path = os.path.normpath(os.path.join(
+        os.environ['GYP_MSVS_OVERRIDE_PATH'],
+        'VC/vcvarsall.bat'))
+    if not os.path.exists(script_path):
+      raise Exception('%s is missing - make sure VC++ tools are installed.' %
+                      script_path)
+    args = [script_path, 'amd64_x86' if cpu == 'x86' else 'amd64']
+    variables = _LoadEnvFromBat(args)
+  return _ExtractImportantEnvironment(variables)
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+  """Format as an 'environment block' directly suitable for CreateProcess.
+  Briefly this is a list of key=value\0, terminated by an additional \0. See
+  CreateProcess documentation for more details."""
+  block = ''
+  nul = '\0'
+  for key, value in envvar_dict.iteritems():
+    block += key + '=' + value + nul
+  block += nul
+  return block
+
+
+def _CopyTool(source_path):
+  """Copies the given tool to the current directory, including a warning not
+  to edit it."""
+  with open(source_path) as source_file:
+    tool_source = source_file.readlines()
+
+  # Add header and write it out to the current directory (which should be the
+  # root build dir). Don't write the file if a matching file already exists
+  # because that causes a cascade of unnecessary rebuilds.
+  match = False
+  contents = ''.join([tool_source[0],
+                      '# Generated by setup_toolchain.py do not edit.\n'] +
+                     tool_source[1:])
+  out_path = 'gyp-win-tool'
+  try:
+    with open(out_path, 'rb') as read_tool_file:
+      existing_contents = read_tool_file.read()
+    if existing_contents == contents:
+      match = True
+  except:
+    pass
+  if not match:
+    with open(out_path, 'wb') as write_tool_file:
+      write_tool_file.write(contents)
+
+
+def main():
+  if len(sys.argv) != 7:
+    print('Usage setup_toolchain.py '
+          '<visual studio path> <win tool path> <win sdk path> '
+          '<runtime dirs> <target_cpu> <include_prefix>')
+    sys.exit(2)
+  tool_source = sys.argv[2]
+  win_sdk_path = sys.argv[3]
+  runtime_dirs = sys.argv[4]
+  target_cpu = sys.argv[5]
+  include_prefix = sys.argv[6]
+
+  _CopyTool(tool_source)
+
+  cpus = ('x86', 'x64')
+  assert target_cpu in cpus
+  vc_bin_dir = ''
+  include = ''
+
+  # TODO(scottmg|goma): Do we need an equivalent of
+  # ninja_use_custom_environment_files?
+
+  for cpu in cpus:
+    # Extract environment variables for subprocesses.
+    env = _LoadToolchainEnv(cpu, win_sdk_path)
+    env['PATH'] = runtime_dirs + os.pathsep + env['PATH']
+
+    if cpu == target_cpu:
+      for path in env['PATH'].split(os.pathsep):
+        if os.path.exists(os.path.join(path, 'cl.exe')):
+          vc_bin_dir = os.path.realpath(path)
+          break
+      # The separator for INCLUDE here must match the one used in
+      # _LoadToolchainEnv() above.
+      include = [include_prefix + p for p in env['INCLUDE'].split(';') if p]
+      include = ' '.join(['"' + i.replace('"', r'\"') + '"' for i in include])
+
+    env_block = _FormatAsEnvironmentBlock(env)
+    with open('environment.' + cpu, 'wb') as f:
+      f.write(env_block)
+
+    # Create a store app version of the environment.
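+    # e.g. (illustrative) LIB=c:\vs\VC\LIB;c:\sdk\lib becomes
+    # LIB=c:\vs\VC\LIB\STORE;c:\sdk\lib.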
+ if 'LIB' in env: + env['LIB'] = env['LIB'] .replace(r'\VC\LIB', r'\VC\LIB\STORE') + if 'LIBPATH' in env: + env['LIBPATH'] = env['LIBPATH'].replace(r'\VC\LIB', r'\VC\LIB\STORE') + env_block = _FormatAsEnvironmentBlock(env) + with open('environment.winrt_' + cpu, 'wb') as f: + f.write(env_block) + + assert vc_bin_dir + print 'vc_bin_dir = ' + gn_helpers.ToGNString(vc_bin_dir) + assert include + print 'include_flags = ' + gn_helpers.ToGNString(include) + +if __name__ == '__main__': + main() diff --git a/build/tree_truth.sh b/build/tree_truth.sh new file mode 100644 index 00000000000..617092dc8a4 --- /dev/null +++ b/build/tree_truth.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Script for printing recent commits in a buildbot run. + +# Return the sha1 of the given tag. If not present, return "". +# $1: path to repo +# $2: tag name +tt_sha1_for_tag() { + oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null) + if [ $? -eq 0 ] ; then + echo $oneline + fi +} + +# Return the sha1 of HEAD, or "" +# $1: path to repo +tt_sha1_for_head() { + ( cd $1 && git log HEAD -n1 --format='%H' | cat ) +} + +# For the given repo, set tag to HEAD. +# $1: path to repo +# $2: tag name +tt_tag_head() { + ( cd $1 && git tag -f $2 ) +} + +# For the given repo, delete the tag. +# $1: path to repo +# $2: tag name +tt_delete_tag() { + ( cd $1 && git tag -d $2 ) +} + +# For the given repo, set tag to "three commits ago" (for testing). +# $1: path to repo +# $2: tag name +tt_tag_three_ago() { + local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}') + ( cd $1 && git tag -f $2 $sh ) +} + +# List the commits between the given tag and HEAD. +# If the tag does not exist, only list the last few. +# If the tag is at HEAD, list nothing. +# Output format has distinct build steps for repos with changes. +# $1: path to repo +# $2: tag name +# $3: simple/short repo name to use for display +tt_list_commits() { + local tag_sha1=$(tt_sha1_for_tag $1 $2) + local head_sha1=$(tt_sha1_for_head $1) + local display_name=$(echo $3 | sed 's#/#_#g') + if [ "${tag_sha1}" = "${head_sha1}" ] ; then + return + fi + if [ "${tag_sha1}" = "" ] ; then + echo "@@@BUILD_STEP Recent commits in repo $display_name@@@" + echo "NOTE: git tag was not found so we have no baseline." + echo "Here are some recent commits, but they may not be new for this build." + ( cd $1 && git log -n 10 --stat | cat) + else + echo "@@@BUILD_STEP New commits in repo $display_name@@@" + ( cd $1 && git log -n 500 $2..HEAD --stat | cat) + fi +} + +# Clean out the tree truth tags in all repos. For testing. +tt_clean_all() { + for project in $@; do + tt_delete_tag $CHROME_SRC/../$project tree_truth + done +} + +# Print tree truth for all clank repos. +tt_print_all() { + for project in $@; do + local full_path=$CHROME_SRC/../$project + tt_list_commits $full_path tree_truth $project + tt_tag_head $full_path tree_truth + done +} + +# Print a summary of the last 10 commits for each repo. 
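+# $@: list of short project names (e.g., illustratively, "src" or "clank"),
+#     each resolved relative to $CHROME_SRC/..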
+tt_brief_summary() {
+  echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
+  for project in $@; do
+    echo $project:
+    local full_path=$CHROME_SRC/../$project
+    (cd $full_path && git log -n 10 --format="  %H %s %an, %ad" | cat)
+    echo "================================================================="
+  done
+}
+
+CHROME_SRC=$1
+shift
+PROJECT_LIST=$@
+tt_brief_summary $PROJECT_LIST
+tt_print_all $PROJECT_LIST
diff --git a/build/update-linux-sandbox.sh b/build/update-linux-sandbox.sh
new file mode 100644
index 00000000000..fa2d1077af6
--- /dev/null
+++ b/build/update-linux-sandbox.sh
@@ -0,0 +1,76 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+BUILDTYPE="${BUILDTYPE:-Debug}"
+CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}"
+CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
+CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
+CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
+
+TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
+if [ $? -ne 0 ]; then
+  echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
+  exit 1
+fi
+
+# Make sure the path is not on NFS.
+if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
+  echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
+  exit 1
+fi
+
+installsandbox() {
+  echo "(using sudo so you may be asked for your password)"
+  sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+    "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+  return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+  echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+  echo "if you are building in Release mode"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+  echo "Could not find ${CHROME_SANDBOX_BUILD_PATH}"
+  echo -n "BUILDTYPE is $BUILDTYPE, use \"BUILDTYPE=<value> ${0}\" to override "
+  echo "after you build the chrome_sandbox target"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+  echo "installing it now."
+  installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+  exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+  echo "Your installed setuid sandbox is too old, installing it now."
+  if ! installsandbox; then
+    echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+    exit 1
+  fi
+else
+  echo "Your setuid sandbox is up to date"
+  if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+    echo -n "Make sure you have \"export "
+    echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+    echo "somewhere in your .bashrc"
+    echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+  fi
+fi
diff --git a/build/util/BUILD.gn b/build/util/BUILD.gn
new file mode 100644
index 00000000000..ad6f2ea7815
--- /dev/null
+++ b/build/util/BUILD.gn
@@ -0,0 +1,47 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
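+
+# The targets below wrap version.py, which replaces @KEY@ references in a
+# template with KEY=VALUE pairs read from the given input file(s). For
+# example (illustrative values), with MAJOR=55, MINOR=0, BUILD=2883 and
+# PATCH=87 in //chrome/VERSION, the chrome_version_json target below writes:
+#   {"full-quoted": "\"55.0.2883.87\""}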
+ +action("webkit_version") { + script = "version.py" + + lastchange_file = "LASTCHANGE.blink" + + template_file = "webkit_version.h.in" + inputs = [ + lastchange_file, + template_file, + ] + + output_file = "$target_gen_dir/webkit_version.h" + outputs = [ + output_file, + ] + + args = [ + "-f", + rebase_path(lastchange_file, root_build_dir), + rebase_path(template_file, root_build_dir), + rebase_path(output_file, root_build_dir), + ] +} + +action("chrome_version_json") { + script = "version.py" + _chrome_version_path = "//chrome/VERSION" + inputs = [ + _chrome_version_path, + ] + _output_file = "$root_gen_dir/CHROME_VERSION.json" + outputs = [ + _output_file, + ] + args = [ + "--file", + rebase_path(_chrome_version_path, root_build_dir), + "--template", + "{\"full-quoted\": \"\\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\\"\"}", + "--output", + rebase_path(_output_file, root_build_dir), + ] +} diff --git a/build/util/LASTCHANGE b/build/util/LASTCHANGE new file mode 100644 index 00000000000..9c0671487e5 --- /dev/null +++ b/build/util/LASTCHANGE @@ -0,0 +1 @@ +LASTCHANGE=b5f4f758e70e919abde72a1b0470b8ea55916aad diff --git a/build/util/LASTCHANGE.blink b/build/util/LASTCHANGE.blink new file mode 100644 index 00000000000..8a7464ba1c2 --- /dev/null +++ b/build/util/LASTCHANGE.blink @@ -0,0 +1 @@ +LASTCHANGE=5b2f167ccdaed3f1a90ec1084715fbb91b8d25b0 diff --git a/build/util/branding.gni b/build/util/branding.gni new file mode 100644 index 00000000000..c38d2a9fc72 --- /dev/null +++ b/build/util/branding.gni @@ -0,0 +1,39 @@ +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This exposes the Chrome branding as GN variables for use in build files. +# +# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively. +# However, it is far better to write an action to generate a file at +# build-time with the information you need. This allows better dependency +# checking and GN will run faster. +# +# These values should only be used if you REALLY need to depend on them at +# build-time, for example, in the computation of output file names. + +import("//build/config/chrome_build.gni") + +_branding_dictionary_template = "full_name = \"@PRODUCT_FULLNAME@\" " + + "short_name = \"@PRODUCT_SHORTNAME@\" " + + "bundle_id = \"@MAC_BUNDLE_ID@\" " + + "creator_code = \"@MAC_CREATOR_CODE@\" " + +_branding_file = "//chrome/app/theme/$branding_path_component/BRANDING" +_result = exec_script("version.py", + [ + "-f", + rebase_path(_branding_file, root_build_dir), + "-t", + _branding_dictionary_template, + ], + "scope", + [ _branding_file ]) + +chrome_product_full_name = _result.full_name +chrome_product_short_name = _result.short_name + +if (is_mac) { + chrome_mac_bundle_id = _result.bundle_id + chrome_mac_creator_code = _result.creator_code +} diff --git a/build/util/java_action.gni b/build/util/java_action.gni new file mode 100644 index 00000000000..d9ca472c150 --- /dev/null +++ b/build/util/java_action.gni @@ -0,0 +1,101 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +jarrunner = "//build/util/java_action.py" + +# Declare a target that runs a java command a single time. +# +# This target type allows you to run a java command a single time to produce +# one or more output files. 
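+# For example (hypothetical target and paths):
+#
+#   java_action("run_foo_jar") {
+#     script = "//third_party/foo/foo.jar"
+#     outputs = [ "$target_gen_dir/foo.stamp" ]
+#     args = [ "--stamp", rebase_path("$target_gen_dir/foo.stamp", root_build_dir) ]
+#   }
+#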
If you want to run a java command for each of a +# set of input files, see "java_action_foreach". +# +# See "gn help action" for more information on how to use this target. This +# template is based on the "action" and supports the same variables. +template("java_action") { + assert(defined(invoker.script), + "Need script in $target_name listing the .jar file to run.") + assert(defined(invoker.outputs), + "Need outputs in $target_name listing the generated outputs.") + + jarscript = invoker.script + action(target_name) { + script = jarrunner + + inputs = [ + jarscript, + ] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + + args = [ + "-jar", + rebase_path(jarscript, root_build_dir), + ] + if (defined(invoker.args)) { + args += invoker.args + } + + forward_variables_from(invoker, + [ + "console", + "data", + "data_deps", + "depfile", + "deps", + "outputs", + "sources", + "visibility", + ]) + } +} + +# Declare a target that runs a java command over a set of files. +# +# This target type allows you to run a java command once-per-file over a set of +# sources. If you want to run a java command once that takes many files as +# input, see "java_action". +# +# See "gn help action_foreach" for more information on how to use this target. +# This template is based on the "action_foreach" supports the same variables. +template("java_action_foreach") { + assert(defined(invoker.script), + "Need script in $target_name listing the .jar file to run.") + assert(defined(invoker.outputs), + "Need outputs in $target_name listing the generated outputs.") + assert(defined(invoker.sources), + "Need sources in $target_name listing the target inputs.") + + jarscript = invoker.script + action_foreach(target_name) { + script = jarrunner + + inputs = [ + jarscript, + ] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + + args = [ + "-jar", + rebase_path(jarscript, root_build_dir), + ] + if (defined(invoker.args)) { + args += invoker.args + } + + forward_variables_from(invoker, + [ + "console", + "data", + "data_deps", + "depfile", + "deps", + "outputs", + "sources", + "visibility", + ]) + } +} diff --git a/build/util/java_action.py b/build/util/java_action.py new file mode 100644 index 00000000000..abf084cc943 --- /dev/null +++ b/build/util/java_action.py @@ -0,0 +1,82 @@ +#!/usr/bin/python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Wrapper script to run java command as action with gn.""" + +import os +import subprocess +import sys + +EXIT_SUCCESS = 0 +EXIT_FAILURE = 1 + + +def IsExecutable(path): + """Returns whether file at |path| exists and is executable. + + Args: + path: absolute or relative path to test. + + Returns: + True if the file at |path| exists, False otherwise. + """ + return os.path.isfile(path) and os.access(path, os.X_OK) + + +def FindCommand(command): + """Looks up for |command| in PATH. + + Args: + command: name of the command to lookup, if command is a relative or + absolute path (i.e. contains some path separator) then only that + path will be tested. + + Returns: + Full path to command or None if the command was not found. + + On Windows, this respects the PATHEXT environment variable when the + command name does not have an extension. 
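+
+  Example (illustrative):
+    FindCommand('java') -> '/usr/bin/java' on a POSIX system with java on
+    PATH, or None when it cannot be found.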
+ """ + fpath, _ = os.path.split(command) + if fpath: + if IsExecutable(command): + return command + + if sys.platform == 'win32': + # On Windows, if the command does not have an extension, cmd.exe will + # try all extensions from PATHEXT when resolving the full path. + command, ext = os.path.splitext(command) + if not ext: + exts = os.environ['PATHEXT'].split(os.path.pathsep) + else: + exts = [ext] + else: + exts = [''] + + for path in os.environ['PATH'].split(os.path.pathsep): + for ext in exts: + path = os.path.join(path, command) + ext + if IsExecutable(path): + return path + + return None + + +def main(): + java_path = FindCommand('java') + if not java_path: + sys.stderr.write('java: command not found\n') + sys.exit(EXIT_FAILURE) + + args = sys.argv[1:] + if len(args) < 2 or args[0] != '-jar': + sys.stderr.write('usage: %s -jar JARPATH [java_args]...\n' % sys.argv[0]) + sys.exit(EXIT_FAILURE) + + return subprocess.check_call([java_path] + args) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/util/lastchange.py b/build/util/lastchange.py new file mode 100644 index 00000000000..c81e0bd52ec --- /dev/null +++ b/build/util/lastchange.py @@ -0,0 +1,316 @@ +#!/usr/bin/env python +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +lastchange.py -- Chromium revision fetching utility. +""" + +import re +import optparse +import os +import subprocess +import sys + +_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL) + +class VersionInfo(object): + def __init__(self, url, revision): + self.url = url + self.revision = revision + + +def FetchSVNRevision(directory, svn_url_regex): + """ + Fetch the Subversion branch and revision for a given directory. + + Errors are swallowed. + + Returns: + A VersionInfo object or None on error. + """ + try: + proc = subprocess.Popen(['svn', 'info'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=directory, + shell=(sys.platform=='win32')) + except OSError: + # command is apparently either not installed or not executable. + return None + if not proc: + return None + + attrs = {} + for line in proc.stdout: + line = line.strip() + if not line: + continue + key, val = line.split(': ', 1) + attrs[key] = val + + try: + match = svn_url_regex.search(attrs['URL']) + if match: + url = match.group(2) + else: + url = '' + revision = attrs['Revision'] + except KeyError: + return None + + return VersionInfo(url, revision) + + +def RunGitCommand(directory, command): + """ + Launches git subcommand. + + Errors are swallowed. + + Returns: + A process object or None. + """ + command = ['git'] + command + # Force shell usage under cygwin. This is a workaround for + # mysterious loss of cwd while invoking cygwin's git. + # We can't just pass shell=True to Popen, as under win32 this will + # cause CMD to be used, while we explicitly want a cygwin shell. + if sys.platform == 'cygwin': + command = ['sh', '-c', ' '.join(command)] + try: + proc = subprocess.Popen(command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=directory, + shell=(sys.platform=='win32')) + return proc + except OSError: + return None + + +def FetchGitRevision(directory, hash_only): + """ + Fetch the Git hash for a given directory. + + Errors are swallowed. + + Returns: + A VersionInfo object or None on error. 
+ """ + hsh = '' + git_args = ['log', '-1', '--format=%H'] + if hash_only: + git_args.append('--grep=^Cr-Commit-Position:') + proc = RunGitCommand(directory, git_args) + if proc: + output = proc.communicate()[0].strip() + if proc.returncode == 0 and output: + hsh = output + if not hsh: + return None + pos = '' + proc = RunGitCommand(directory, ['cat-file', 'commit', hsh]) + if proc: + output = proc.communicate()[0] + if proc.returncode == 0 and output: + for line in reversed(output.splitlines()): + if line.startswith('Cr-Commit-Position:'): + pos = line.rsplit()[-1].strip() + break + if hash_only or not pos: + return VersionInfo('git', hsh) + return VersionInfo('git', '%s-%s' % (hsh, pos)) + + +def FetchGitSVNURLAndRevision(directory, svn_url_regex, go_deeper): + """ + Fetch the Subversion URL and revision through Git. + + Errors are swallowed. + + Returns: + A tuple containing the Subversion URL and revision. + """ + git_args = ['log', '-1', '--format=%b'] + if go_deeper: + git_args.append('--grep=git-svn-id') + proc = RunGitCommand(directory, git_args) + if proc: + output = proc.communicate()[0].strip() + if proc.returncode == 0 and output: + # Extract the latest SVN revision and the SVN URL. + # The target line is the last "git-svn-id: ..." line like this: + # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316.... + match = _GIT_SVN_ID_REGEX.search(output) + if match: + revision = match.group(2) + url_match = svn_url_regex.search(match.group(1)) + if url_match: + url = url_match.group(2) + else: + url = '' + return url, revision + return None, None + + +def FetchGitSVNRevision(directory, svn_url_regex, go_deeper): + """ + Fetch the Git-SVN identifier for the local tree. + + Errors are swallowed. + """ + url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex, go_deeper) + if url and revision: + return VersionInfo(url, revision) + return None + + +def FetchVersionInfo(default_lastchange, directory=None, + directory_regex_prior_to_src_url='chrome|blink|svn', + go_deeper=False, hash_only=False): + """ + Returns the last change (in the form of a branch, revision tuple), + from some appropriate revision control system. + """ + svn_url_regex = re.compile( + r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)') + + version_info = (FetchSVNRevision(directory, svn_url_regex) or + FetchGitSVNRevision(directory, svn_url_regex, go_deeper) or + FetchGitRevision(directory, hash_only)) + if not version_info: + if default_lastchange and os.path.exists(default_lastchange): + revision = open(default_lastchange, 'r').read().strip() + version_info = VersionInfo(None, revision) + else: + version_info = VersionInfo(None, None) + return version_info + +def GetHeaderGuard(path): + """ + Returns the header #define guard for the given file path. + This treats everything after the last instance of "src/" as being a + relevant part of the guard. If there is no "src/", then the entire path + is used. + """ + src_index = path.rfind('src/') + if src_index != -1: + guard = path[src_index + 4:] + else: + guard = path + guard = guard.upper() + return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_' + +def GetHeaderContents(path, define, version): + """ + Returns what the contents of the header file should be that indicate the given + revision. Note that the #define is specified as a string, even though it's + currently always a SVN revision number, in case we need to move to git hashes. 
+ """ + header_guard = GetHeaderGuard(path) + + header_contents = """/* Generated by lastchange.py, do not edit.*/ + +#ifndef %(header_guard)s +#define %(header_guard)s + +#define %(define)s "%(version)s" + +#endif // %(header_guard)s +""" + header_contents = header_contents % { 'header_guard': header_guard, + 'define': define, + 'version': version } + return header_contents + +def WriteIfChanged(file_name, contents): + """ + Writes the specified contents to the specified file_name + iff the contents are different than the current contents. + """ + try: + old_contents = open(file_name, 'r').read() + except EnvironmentError: + pass + else: + if contents == old_contents: + return + os.unlink(file_name) + open(file_name, 'w').write(contents) + + +def main(argv=None): + if argv is None: + argv = sys.argv + + parser = optparse.OptionParser(usage="lastchange.py [options]") + parser.add_option("-d", "--default-lastchange", metavar="FILE", + help="Default last change input FILE.") + parser.add_option("-m", "--version-macro", + help="Name of C #define when using --header. Defaults to " + + "LAST_CHANGE.", + default="LAST_CHANGE") + parser.add_option("-o", "--output", metavar="FILE", + help="Write last change to FILE. " + + "Can be combined with --header to write both files.") + parser.add_option("", "--header", metavar="FILE", + help="Write last change to FILE as a C/C++ header. " + + "Can be combined with --output to write both files.") + parser.add_option("--revision-only", action='store_true', + help="Just print the SVN revision number. Overrides any " + + "file-output-related options.") + parser.add_option("-s", "--source-dir", metavar="DIR", + help="Use repository in the given directory.") + parser.add_option("--git-svn-go-deeper", action='store_true', + help="In a Git-SVN repo, dig down to the last committed " + + "SVN change (historic behaviour).") + parser.add_option("--git-hash-only", action="store_true", + help="In a Git repo with commit positions, report only " + + "the hash of the latest commit with a position.") + opts, args = parser.parse_args(argv[1:]) + + out_file = opts.output + header = opts.header + + while len(args) and out_file is None: + if out_file is None: + out_file = args.pop(0) + if args: + sys.stderr.write('Unexpected arguments: %r\n\n' % args) + parser.print_help() + sys.exit(2) + + if opts.source_dir: + src_dir = opts.source_dir + else: + src_dir = os.path.dirname(os.path.abspath(__file__)) + + version_info = FetchVersionInfo(opts.default_lastchange, + directory=src_dir, + go_deeper=opts.git_svn_go_deeper, + hash_only=opts.git_hash_only) + + if version_info.revision == None: + version_info.revision = '0' + + if opts.revision_only: + print version_info.revision + else: + contents = "LASTCHANGE=%s\n" % version_info.revision + if not out_file and not opts.header: + sys.stdout.write(contents) + else: + if out_file: + WriteIfChanged(out_file, contents) + if header: + WriteIfChanged(header, + GetHeaderContents(header, opts.version_macro, + version_info.revision)) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/util/lib/common/PRESUBMIT.py b/build/util/lib/common/PRESUBMIT.py new file mode 100644 index 00000000000..fca962f1caa --- /dev/null +++ b/build/util/lib/common/PRESUBMIT.py @@ -0,0 +1,16 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +def _RunTests(input_api, output_api): + return (input_api.canned_checks.RunUnitTestsInDirectory( + input_api, output_api, '.', whitelist=[r'.+_test.py$'])) + + +def CheckChangeOnUpload(input_api, output_api): + return _RunTests(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return _RunTests(input_api, output_api) diff --git a/build/util/lib/common/__init__.py b/build/util/lib/common/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/build/util/lib/common/perf_result_data_type.py b/build/util/lib/common/perf_result_data_type.py new file mode 100644 index 00000000000..67b550a46c0 --- /dev/null +++ b/build/util/lib/common/perf_result_data_type.py @@ -0,0 +1,20 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +DEFAULT = 'default' +UNIMPORTANT = 'unimportant' +HISTOGRAM = 'histogram' +UNIMPORTANT_HISTOGRAM = 'unimportant-histogram' +INFORMATIONAL = 'informational' + +ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM, + INFORMATIONAL] + + +def IsValidType(datatype): + return datatype in ALL_TYPES + + +def IsHistogram(datatype): + return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM) diff --git a/build/util/lib/common/perf_tests_results_helper.py b/build/util/lib/common/perf_tests_results_helper.py new file mode 100644 index 00000000000..6cb058b2df3 --- /dev/null +++ b/build/util/lib/common/perf_tests_results_helper.py @@ -0,0 +1,166 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import re +import sys + +import json +import logging +import math + +import perf_result_data_type + + +# Mapping from result type to test output +RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ', + perf_result_data_type.DEFAULT: '*RESULT ', + perf_result_data_type.INFORMATIONAL: '', + perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ', + perf_result_data_type.HISTOGRAM: '*HISTOGRAM '} + + +def _EscapePerfResult(s): + """Escapes |s| for use in a perf result.""" + return re.sub('[\:|=/#&,]', '_', s) + + +def FlattenList(values): + """Returns a simple list without sub-lists.""" + ret = [] + for entry in values: + if isinstance(entry, list): + ret.extend(FlattenList(entry)) + else: + ret.append(entry) + return ret + + +def GeomMeanAndStdDevFromHistogram(histogram_json): + histogram = json.loads(histogram_json) + # Handle empty histograms gracefully. + if not 'buckets' in histogram: + return 0.0, 0.0 + count = 0 + sum_of_logs = 0 + for bucket in histogram['buckets']: + if 'high' in bucket: + bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0 + else: + bucket['mean'] = bucket['low'] + if bucket['mean'] > 0: + sum_of_logs += math.log(bucket['mean']) * bucket['count'] + count += bucket['count'] + + if count == 0: + return 0.0, 0.0 + + sum_of_squares = 0 + geom_mean = math.exp(sum_of_logs / count) + for bucket in histogram['buckets']: + if bucket['mean'] > 0: + sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count'] + return geom_mean, math.sqrt(sum_of_squares / count) + + +def _ValueToString(v): + # Special case for floats so we don't print using scientific notation. 
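+  # Illustrative comparison (assuming CPython's default formatting):
+  #   str(0.0000001)    -> '1e-07'     (scientific notation)
+  #   '%f' % 0.0000001  -> '0.000000'  (fixed-point)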
+  if isinstance(v, float):
+    return '%f' % v
+  else:
+    return str(v)
+
+
+def _MeanAndStdDevFromList(values):
+  avg = None
+  sd = None
+  if len(values) > 1:
+    try:
+      value = '[%s]' % ','.join([_ValueToString(v) for v in values])
+      avg = sum([float(v) for v in values]) / len(values)
+      sqdiffs = [(float(v) - avg) ** 2 for v in values]
+      variance = sum(sqdiffs) / (len(values) - 1)
+      sd = math.sqrt(variance)
+    except ValueError:
+      value = ', '.join(values)
+  else:
+    value = values[0]
+  return value, avg, sd
+
+
+def PrintPages(page_list):
+  """Prints list of pages to stdout in the format required by perf tests."""
+  print 'Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])
+
+
+def PrintPerfResult(measurement, trace, values, units,
+                    result_type=perf_result_data_type.DEFAULT,
+                    print_to_stdout=True):
+  """Prints numerical data to stdout in the format required by perf tests.
+
+  The string args may be empty but they must not contain any colons (:) or
+  equals signs (=).
+  This is parsed by the buildbot using:
+  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py
+
+  Args:
+    measurement: A description of the quantity being measured, e.g. "vm_peak".
+        On the dashboard, this maps to a particular graph. Mandatory.
+    trace: A description of the particular data point, e.g. "reference".
+        On the dashboard, this maps to a particular "line" in the graph.
+        Mandatory.
+    values: A list of numeric measured values. An N-dimensional list will be
+        flattened and treated as a simple list.
+    units: A description of the units of measure, e.g. "bytes".
+    result_type: Accepts values of perf_result_data_type.ALL_TYPES.
+    print_to_stdout: If True, prints the output in stdout instead of returning
+        the output to caller.
+
+  Returns:
+    String of the formatted perf result.
+  """
+  assert perf_result_data_type.IsValidType(result_type), \
+      'result type: %s is invalid' % result_type
+
+  trace_name = _EscapePerfResult(trace)
+
+  if (result_type == perf_result_data_type.UNIMPORTANT or
+      result_type == perf_result_data_type.DEFAULT or
+      result_type == perf_result_data_type.INFORMATIONAL):
+    assert isinstance(values, list)
+    assert '/' not in measurement
+    flattened_values = FlattenList(values)
+    assert len(flattened_values)
+    value, avg, sd = _MeanAndStdDevFromList(flattened_values)
+    output = '%s%s: %s%s%s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        # Do not show the equal sign if the trace is empty. This usually
+        # happens when the measurement alone is clear enough to describe
+        # the result.
+        '= ' if trace_name else '',
+        value,
+        units)
+  else:
+    assert perf_result_data_type.IsHistogram(result_type)
+    assert isinstance(values, list)
+    # Histograms can only be printed individually; there is no computation
+    # across different histograms.
+    assert len(values) == 1
+    value = values[0]
+    output = '%s%s: %s= %s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        value,
+        units)
+    avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+  if avg:
+    output += '\nAvg %s: %f%s' % (measurement, avg, units)
+  if sd:
+    output += '\nSd %s: %f%s' % (measurement, sd, units)
+  if print_to_stdout:
+    print output
+    sys.stdout.flush()
+  return output
diff --git a/build/util/lib/common/unittest_util.py b/build/util/lib/common/unittest_util.py
new file mode 100644
index 00000000000..010252b28eb
--- /dev/null
+++ b/build/util/lib/common/unittest_util.py
@@ -0,0 +1,149 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utilities for dealing with the python unittest module.""" + +import fnmatch +import sys +import unittest + + +class _TextTestResult(unittest._TextTestResult): + """A test result class that can print formatted text results to a stream. + + Results printed in conformance with gtest output format, like: + [ RUN ] autofill.AutofillTest.testAutofillInvalid: "test desc." + [ OK ] autofill.AutofillTest.testAutofillInvalid + [ RUN ] autofill.AutofillTest.testFillProfile: "test desc." + [ OK ] autofill.AutofillTest.testFillProfile + [ RUN ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test." + [ OK ] autofill.AutofillTest.testFillProfileCrazyCharacters + """ + def __init__(self, stream, descriptions, verbosity): + unittest._TextTestResult.__init__(self, stream, descriptions, verbosity) + self._fails = set() + + def _GetTestURI(self, test): + return '%s.%s.%s' % (test.__class__.__module__, + test.__class__.__name__, + test._testMethodName) + + def getDescription(self, test): + return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription()) + + def startTest(self, test): + unittest.TestResult.startTest(self, test) + self.stream.writeln('[ RUN ] %s' % self.getDescription(test)) + + def addSuccess(self, test): + unittest.TestResult.addSuccess(self, test) + self.stream.writeln('[ OK ] %s' % self._GetTestURI(test)) + + def addError(self, test, err): + unittest.TestResult.addError(self, test, err) + self.stream.writeln('[ ERROR ] %s' % self._GetTestURI(test)) + self._fails.add(self._GetTestURI(test)) + + def addFailure(self, test, err): + unittest.TestResult.addFailure(self, test, err) + self.stream.writeln('[ FAILED ] %s' % self._GetTestURI(test)) + self._fails.add(self._GetTestURI(test)) + + def getRetestFilter(self): + return ':'.join(self._fails) + + +class TextTestRunner(unittest.TextTestRunner): + """Test Runner for displaying test results in textual format. + + Results are displayed in conformance with google test output. + """ + + def __init__(self, verbosity=1): + unittest.TextTestRunner.__init__(self, stream=sys.stderr, + verbosity=verbosity) + + def _makeResult(self): + return _TextTestResult(self.stream, self.descriptions, self.verbosity) + + +def GetTestsFromSuite(suite): + """Returns all the tests from a given test suite.""" + tests = [] + for x in suite: + if isinstance(x, unittest.TestSuite): + tests += GetTestsFromSuite(x) + else: + tests += [x] + return tests + + +def GetTestNamesFromSuite(suite): + """Returns a list of every test name in the given suite.""" + return map(lambda x: GetTestName(x), GetTestsFromSuite(suite)) + + +def GetTestName(test): + """Gets the test name of the given unittest test.""" + return '.'.join([test.__class__.__module__, + test.__class__.__name__, + test._testMethodName]) + + +def FilterTestSuite(suite, gtest_filter): + """Returns a new filtered tests suite based on the given gtest filter. + + See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md + for gtest_filter specification. + """ + return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter)) + + +def FilterTests(all_tests, gtest_filter): + """Filter a list of tests based on the given gtest filter. + + Args: + all_tests: List of tests (unittest.TestSuite) + gtest_filter: Filter to apply. + + Returns: + Filtered subset of the given list of tests. 
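+
+  Example (illustrative): with tests named Foo.One, Foo.Two and Bar.One,
+  the filter "Foo.*-Foo.Two" keeps only Foo.One; ":" separates patterns
+  within a group, and everything after "-" is treated as a negative pattern.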
+ """ + test_names = [GetTestName(test) for test in all_tests] + filtered_names = FilterTestNames(test_names, gtest_filter) + return [test for test in all_tests if GetTestName(test) in filtered_names] + + +def FilterTestNames(all_tests, gtest_filter): + """Filter a list of test names based on the given gtest filter. + + See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md + for gtest_filter specification. + + Args: + all_tests: List of test names. + gtest_filter: Filter to apply. + + Returns: + Filtered subset of the given list of test names. + """ + pattern_groups = gtest_filter.split('-') + positive_patterns = ['*'] + if pattern_groups[0]: + positive_patterns = pattern_groups[0].split(':') + negative_patterns = [] + if len(pattern_groups) > 1: + negative_patterns = pattern_groups[1].split(':') + + tests = [] + test_set = set() + for pattern in positive_patterns: + pattern_tests = [ + test for test in all_tests + if (fnmatch.fnmatch(test, pattern) + and not any(fnmatch.fnmatch(test, p) for p in negative_patterns) + and test not in test_set)] + tests.extend(pattern_tests) + test_set.update(pattern_tests) + return tests diff --git a/build/util/lib/common/unittest_util_test.py b/build/util/lib/common/unittest_util_test.py new file mode 100644 index 00000000000..1514c9b6d4c --- /dev/null +++ b/build/util/lib/common/unittest_util_test.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=protected-access + +import logging +import sys +import unittest +import unittest_util + + +class FilterTestNamesTest(unittest.TestCase): + + possible_list = ["Foo.One", + "Foo.Two", + "Foo.Three", + "Bar.One", + "Bar.Two", + "Bar.Three", + "Quux.One", + "Quux.Two", + "Quux.Three"] + + def testMatchAll(self): + x = unittest_util.FilterTestNames(self.possible_list, "*") + self.assertEquals(x, self.possible_list) + + def testMatchPartial(self): + x = unittest_util.FilterTestNames(self.possible_list, "Foo.*") + self.assertEquals(x, ["Foo.One", "Foo.Two", "Foo.Three"]) + + def testMatchFull(self): + x = unittest_util.FilterTestNames(self.possible_list, "Foo.Two") + self.assertEquals(x, ["Foo.Two"]) + + def testMatchTwo(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*") + self.assertEquals(x, ["Bar.One", + "Bar.Two", + "Bar.Three", + "Foo.One", + "Foo.Two", + "Foo.Three"]) + + def testMatchWithNegative(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*-*.Three") + self.assertEquals(x, ["Bar.One", + "Bar.Two", + "Foo.One", + "Foo.Two"]) + + def testMatchOverlapping(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:*.Two") + self.assertEquals(x, ["Bar.One", + "Bar.Two", + "Bar.Three", + "Foo.Two", + "Quux.Two"]) + + +if __name__ == '__main__': + logging.getLogger().setLevel(logging.DEBUG) + unittest.main(verbosity=2) diff --git a/build/util/lib/common/util.py b/build/util/lib/common/util.py new file mode 100644 index 00000000000..a415b1f534b --- /dev/null +++ b/build/util/lib/common/util.py @@ -0,0 +1,151 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Generic utilities for all python scripts.""" + +import atexit +import httplib +import os +import signal +import stat +import subprocess +import sys +import tempfile +import urlparse + + +def GetPlatformName(): + """Return a string to be used in paths for the platform.""" + if IsWindows(): + return 'win' + if IsMac(): + return 'mac' + if IsLinux(): + return 'linux' + raise NotImplementedError('Unknown platform "%s".' % sys.platform) + + +def IsWindows(): + return sys.platform == 'cygwin' or sys.platform.startswith('win') + + +def IsLinux(): + return sys.platform.startswith('linux') + + +def IsMac(): + return sys.platform.startswith('darwin') + + +def _DeleteDir(path): + """Deletes a directory recursively, which must exist.""" + # Don't use shutil.rmtree because it can't delete read-only files on Win. + for root, dirs, files in os.walk(path, topdown=False): + for name in files: + filename = os.path.join(root, name) + os.chmod(filename, stat.S_IWRITE) + os.remove(filename) + for name in dirs: + os.rmdir(os.path.join(root, name)) + os.rmdir(path) + + +def Delete(path): + """Deletes the given file or directory (recursively), which must exist.""" + if os.path.isdir(path): + _DeleteDir(path) + else: + os.remove(path) + + +def MaybeDelete(path): + """Deletes the given file or directory (recurisvely), if it exists.""" + if os.path.exists(path): + Delete(path) + + +def MakeTempDir(parent_dir=None): + """Creates a temporary directory and returns an absolute path to it. + + The temporary directory is automatically deleted when the python interpreter + exits normally. + + Args: + parent_dir: the directory to create the temp dir in. If None, the system + temp dir is used. + + Returns: + The absolute path to the temporary directory. + """ + path = tempfile.mkdtemp(dir=parent_dir) + atexit.register(MaybeDelete, path) + return path + + +def Unzip(zip_path, output_dir): + """Unzips the given zip file using a system installed unzip tool. + + Args: + zip_path: zip file to unzip. + output_dir: directory to unzip the contents of the zip file. The directory + must exist. + + Raises: + RuntimeError if the unzip operation fails. + """ + if IsWindows(): + unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y'] + else: + unzip_cmd = ['unzip', '-o'] + unzip_cmd += [zip_path] + if RunCommand(unzip_cmd, output_dir) != 0: + raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir)) + + +def Kill(pid): + """Terminate the given pid.""" + if IsWindows(): + subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)]) + else: + os.kill(pid, signal.SIGTERM) + + +def RunCommand(cmd, cwd=None): + """Runs the given command and returns the exit code. + + Args: + cmd: list of command arguments. + cwd: working directory to execute the command, or None if the current + working directory should be used. + + Returns: + The exit code of the command. + """ + process = subprocess.Popen(cmd, cwd=cwd) + process.wait() + return process.returncode + + +def DoesUrlExist(url): + """Determines whether a resource exists at the given URL. + + Args: + url: URL to be verified. + + Returns: + True if url exists, otherwise False. + """ + parsed = urlparse.urlparse(url) + try: + conn = httplib.HTTPConnection(parsed.netloc) + conn.request('HEAD', parsed.path) + response = conn.getresponse() + except (socket.gaierror, socket.error): + return False + finally: + conn.close() + # Follow both permanent (301) and temporary (302) redirects. 
+ if response.status == 302 or response.status == 301: + return DoesUrlExist(response.getheader('location')) + return response.status == 200 diff --git a/build/util/version.gni b/build/util/version.gni new file mode 100644 index 00000000000..dd28d7a4c6f --- /dev/null +++ b/build/util/version.gni @@ -0,0 +1,59 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This exposes the Chrome version as GN variables for use in build files. +# +# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively. +# However, it is far better to write an action (or use the process_version +# wrapper in chrome/version.gni) to generate a file at build-time with the +# information you need. This allows better dependency checking and GN will +# run faster. +# +# These values should only be used if you REALLY need to depend on them at +# build-time, for example, in the computation of output file names. + +# Give version.py a pattern that will expand to a GN scope consisting of +# all values we need at once. +_version_dictionary_template = "full = \"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\" " + + "major = \"@MAJOR@\" minor = \"@MINOR@\" " + + "build = \"@BUILD@\" patch = \"@PATCH@\" " + +# The file containing the Chrome version number. +chrome_version_file = "//chrome/VERSION" + +_result = exec_script("version.py", + [ + "-f", + rebase_path(chrome_version_file, root_build_dir), + "-t", + _version_dictionary_template, + ], + "scope", + [ chrome_version_file ]) + +# Full version. For example "45.0.12321.0" +chrome_version_full = _result.full + +# The consituent parts of the full version. +chrome_version_major = _result.major +chrome_version_minor = _result.minor +chrome_version_build = _result.build +chrome_version_patch = _result.patch + +if (is_mac) { + _result = exec_script("version.py", + [ + "-f", + rebase_path(chrome_version_file, root_build_dir), + "-t", + "@BUILD@.@PATCH_HI@.@PATCH_LO@", + "-e", + "PATCH_HI=int(PATCH)/256", + "-e", + "PATCH_LO=int(PATCH)%256", + ], + "trim string", + [ chrome_version_file ]) + chrome_dylib_version = _result +} diff --git a/build/util/version.gypi b/build/util/version.gypi new file mode 100644 index 00000000000..327a5c2e5f5 --- /dev/null +++ b/build/util/version.gypi @@ -0,0 +1,20 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'variables': { + 'variables': { + 'version_py_path': '<(DEPTH)/build/util/version.py', + 'version_path': '<(DEPTH)/chrome/VERSION', + 'lastchange_path': '<(DEPTH)/build/util/LASTCHANGE', + }, + 'version_py_path': '<(version_py_path)', + 'version_path': '<(version_path)', + 'lastchange_path': '<(lastchange_path)', + 'version_full': + '= 0.01)): + if verbose: + print 'Copying %s to %s...' % (source, target) + if os.path.exists(target): + # Make the file writable so that we can delete it now. + os.chmod(target, stat.S_IWRITE) + os.unlink(target) + shutil.copy2(source, target) + # Make the file writable so that we can overwrite or delete it later. 
+ os.chmod(target, stat.S_IWRITE) + + +def _CopyRuntime2013(target_dir, source_dir, dll_pattern): + """Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't + exist, but the target directory does exist.""" + for file_part in ('p', 'r'): + dll = dll_pattern % file_part + target = os.path.join(target_dir, dll) + source = os.path.join(source_dir, dll) + _CopyRuntimeImpl(target, source) + + +def _CopyRuntime2015(target_dir, source_dir, dll_pattern, suffix): + """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't + exist, but the target directory does exist.""" + for file_part in ('msvcp', 'vccorlib', 'vcruntime'): + dll = dll_pattern % file_part + target = os.path.join(target_dir, dll) + source = os.path.join(source_dir, dll) + _CopyRuntimeImpl(target, source) + # OS installs of Visual Studio (and all installs of Windows 10) put the + # universal CRT files in c:\Windows\System32\downlevel - look for them there + # to support DEPOT_TOOLS_WIN_TOOLCHAIN=0. + if os.path.exists(os.path.join(source_dir, 'downlevel')): + ucrt_src_glob = os.path.join(source_dir, 'downlevel', 'api-ms-win-*.dll') + else: + ucrt_src_glob = os.path.join(source_dir, 'api-ms-win-*.dll') + ucrt_files = glob.glob(ucrt_src_glob) + assert len(ucrt_files) > 0 + for ucrt_src_file in ucrt_files: + file_part = os.path.basename(ucrt_src_file) + ucrt_dst_file = os.path.join(target_dir, file_part) + _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False) + _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix), + os.path.join(source_dir, 'ucrtbase' + suffix)) + + +def _CopyRuntime(target_dir, source_dir, target_cpu, debug): + """Copy the VS runtime DLLs, only if the target doesn't exist, but the target + directory does exist. Handles VS 2013 and VS 2015.""" + suffix = "d.dll" if debug else ".dll" + if GetVisualStudioVersion() == '2015': + _CopyRuntime2015(target_dir, source_dir, '%s140' + suffix, suffix) + else: + _CopyRuntime2013(target_dir, source_dir, 'msvc%s120' + suffix) + + # Copy the PGO runtime library to the release directories. + if not debug and os.environ.get('GYP_MSVS_OVERRIDE_PATH'): + pgo_x86_runtime_dir = os.path.join(os.environ.get('GYP_MSVS_OVERRIDE_PATH'), + 'VC', 'bin') + pgo_x64_runtime_dir = os.path.join(pgo_x86_runtime_dir, 'amd64') + pgo_runtime_dll = 'pgort' + _VersionNumber() + '.dll' + if target_cpu == "x86": + source_x86 = os.path.join(pgo_x86_runtime_dir, pgo_runtime_dll) + if os.path.exists(source_x86): + _CopyRuntimeImpl(os.path.join(target_dir, pgo_runtime_dll), source_x86) + elif target_cpu == "x64": + source_x64 = os.path.join(pgo_x64_runtime_dir, pgo_runtime_dll) + if os.path.exists(source_x64): + _CopyRuntimeImpl(os.path.join(target_dir, pgo_runtime_dll), + source_x64) + else: + raise NotImplementedError("Unexpected target_cpu value:" + target_cpu) + + +def CopyVsRuntimeDlls(output_dir, runtime_dirs): + """Copies the VS runtime DLLs from the given |runtime_dirs| to the output + directory so that even if not system-installed, built binaries are likely to + be able to run. + + This needs to be run after gyp has been run so that the expected target + output directories are already created. + + This is used for the GYP build and gclient runhooks. 
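+
+  For illustration (directory names from the code below): with
+  output_dir="out", the runtime DLLs land in out\Debug, out\Release,
+  out\Debug_x64, out\Release_x64, and the nacl64 directories out\Debug\x64
+  and out\Release\x64.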
+ """ + x86, x64 = runtime_dirs + out_debug = os.path.join(output_dir, 'Debug') + out_debug_nacl64 = os.path.join(output_dir, 'Debug', 'x64') + out_release = os.path.join(output_dir, 'Release') + out_release_nacl64 = os.path.join(output_dir, 'Release', 'x64') + out_debug_x64 = os.path.join(output_dir, 'Debug_x64') + out_release_x64 = os.path.join(output_dir, 'Release_x64') + + if os.path.exists(out_debug) and not os.path.exists(out_debug_nacl64): + os.makedirs(out_debug_nacl64) + if os.path.exists(out_release) and not os.path.exists(out_release_nacl64): + os.makedirs(out_release_nacl64) + _CopyRuntime(out_debug, x86, "x86", debug=True) + _CopyRuntime(out_release, x86, "x86", debug=False) + _CopyRuntime(out_debug_x64, x64, "x64", debug=True) + _CopyRuntime(out_release_x64, x64, "x64", debug=False) + _CopyRuntime(out_debug_nacl64, x64, "x64", debug=True) + _CopyRuntime(out_release_nacl64, x64, "x64", debug=False) + + +def CopyDlls(target_dir, configuration, target_cpu): + """Copy the VS runtime DLLs into the requested directory as needed. + + configuration is one of 'Debug' or 'Release'. + target_cpu is one of 'x86' or 'x64'. + + The debug configuration gets both the debug and release DLLs; the + release config only the latter. + + This is used for the GN build. + """ + vs_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs() + if not vs_runtime_dll_dirs: + return + + x64_runtime, x86_runtime = vs_runtime_dll_dirs + runtime_dir = x64_runtime if target_cpu == 'x64' else x86_runtime + _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False) + if configuration == 'Debug': + _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True) + + +def _GetDesiredVsToolchainHashes(): + """Load a list of SHA1s corresponding to the toolchains that we want installed + to build with.""" + if GetVisualStudioVersion() == '2015': + if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN_PRERELEASE', '0'))): + # Update 3 final with patches. + return ['d5dc33b15d1b2c086f2f6632e2fd15882f80dbd3'] + else: + # Update 2. + return ['95ddda401ec5678f15eeed01d2bee08fcbc5ee97'] + else: + return ['03a4e939cd325d6bc5216af41b92d02dda1366a6'] + + +def ShouldUpdateToolchain(): + """Check if the toolchain should be upgraded.""" + if not os.path.exists(json_data_file): + return True + with open(json_data_file, 'r') as tempf: + toolchain_data = json.load(tempf) + version = toolchain_data['version'] + env_version = GetVisualStudioVersion() + # If there's a mismatch between the version set in the environment and the one + # in the json file then the toolchain should be updated. + return version != env_version + + +def Update(force=False): + """Requests an update of the toolchain to the specific hashes we have at + this revision. The update outputs a .json of the various configuration + information required to pass to gyp which we use in |GetToolchainDir()|. + """ + if force != False and force != '--force': + print >>sys.stderr, 'Unknown parameter "%s"' % force + return 1 + if force == '--force' or os.path.exists(json_data_file): + force = True + + depot_tools_win_toolchain = \ + bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1'))) + if ((sys.platform in ('win32', 'cygwin') or force) and + depot_tools_win_toolchain): + import find_depot_tools + depot_tools_path = find_depot_tools.add_depot_tools_to_path() + # Necessary so that get_toolchain_if_necessary.py will put the VS toolkit + # in the correct directory. 
+ os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion() + get_toolchain_args = [ + sys.executable, + os.path.join(depot_tools_path, + 'win_toolchain', + 'get_toolchain_if_necessary.py'), + '--output-json', json_data_file, + ] + _GetDesiredVsToolchainHashes() + if force: + get_toolchain_args.append('--force') + subprocess.check_call(get_toolchain_args) + + return 0 + + +def NormalizePath(path): + while path.endswith("\\"): + path = path[:-1] + return path + + +def GetToolchainDir(): + """Gets location information about the current toolchain (must have been + previously updated by 'update'). This is used for the GN build.""" + runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs() + + # If WINDOWSSDKDIR is not set, search the default SDK path and set it. + if not 'WINDOWSSDKDIR' in os.environ: + default_sdk_path = 'C:\\Program Files (x86)\\Windows Kits\\10' + if os.path.isdir(default_sdk_path): + os.environ['WINDOWSSDKDIR'] = default_sdk_path + + print '''vs_path = "%s" +sdk_path = "%s" +vs_version = "%s" +wdk_dir = "%s" +runtime_dirs = "%s" +''' % ( + NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH']), + NormalizePath(os.environ['WINDOWSSDKDIR']), + GetVisualStudioVersion(), + NormalizePath(os.environ.get('WDK_DIR', '')), + os.path.pathsep.join(runtime_dll_dirs or ['None'])) + + +def main(): + commands = { + 'update': Update, + 'get_toolchain_dir': GetToolchainDir, + 'copy_dlls': CopyDlls, + } + if len(sys.argv) < 2 or sys.argv[1] not in commands: + print >>sys.stderr, 'Expected one of: %s' % ', '.join(commands) + return 1 + return commands[sys.argv[1]](*sys.argv[2:]) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt new file mode 100644 index 00000000000..022620424e1 --- /dev/null +++ b/build/whitespace_file.txt @@ -0,0 +1,167 @@ +Copyright 2014 The Chromium Authors. All rights reserved. +Use of this useless file is governed by a BSD-style license that can be +found in the LICENSE file. + + +This file is used for making non-code changes to trigger buildbot cycles. Make +any modification below this line. + +====================================================================== + +Let's make a story. Add zero+ sentences for every commit: + +CHÄPTER 1: +It was a dark and blinky night; the rain fell in torrents -- except at +occasional intervals, when it was checked by a violent gust of wind which +swept up the streets (for it is in London that our scene lies), rattling along +the housetops, and fiercely agitating the scanty flame of the lamps that +struggled against the elements. A hooded figure emerged. + +It was a Domo-Kun. + +"What took you so long?", inquired his wife. + +Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the +waffles you brought him?" "You know him, he's not one to forego a waffle, +no matter how burnt," he snickered. + +The pause was filled with the sound of compile errors. + +CHAPTER 2: +The jelly was as dark as night, and just as runny. +The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles +with his fork, watching the runny jelly spread and pool across his plate, +like the blood of a dying fawn. "It reminds me of that time --" he started, as +his wife cut in quickly: "-- please. I can't bear to hear it.". A flury of +images coming from the past flowed through his mind. + +"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly +overhead, barely disturbing the thick cigarette smoke. 
No doubt was left about +when the fan was last cleaned. + +There was a poignant pause. + +CHAPTER 3: +Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he +began feeling sick. He thought out loud to himself, "No, he wouldn't have done +that to me." He considered that perhaps he shouldn't have pushed so hard. +Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable +horror that had occurred just the week before. + +Next time, there won't be any sushi. Why sushi with waffles anyway? It's like +adorning breakfast cereal with halibut -- shameful. + +CHAPTER 4: +The taste of stale sushi in his mouth the next morning was unbearable. He +wondered where the sushi came from as he attempted to wash the taste away with +a bottle of 3000¥ sake. He tries to recall the cook's face. Purple? Probably. + +CHAPTER 5: +Many tears later, Mr. Usagi would laugh at the memory of the earnest, +well-intentioned Domo-Kun. Another day in the life. That is when he realized that +life goes on. + +TRUISMS (1978-1983) +JENNY HOLZER +A LITTLE KNOWLEDGE CAN GO A LONG WAY +A LOT OF PROFESSIONALS ARE CRACKPOTS +A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER +A NAME MEANS A LOT JUST BY ITSELF +A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD +A RELAXED MAN IS NOT NECESSARILY A BETTER MAN +NO ONE SHOULD EVER USE SVN +AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS +IT IS MANS FATE TO OUTSMART HIMSELF +BEING SURE OF YOURSELF MEANS YOU'RE A FOOL +AM NOT +ARE TOO +IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY +AND THEN, TIME LEAPT BACKWARDS +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT +I'm really tempted to change something above the line. +Reeccciiiipppppeeeeeesssssss!!!!!!!!! +PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION. +WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY + +I can feel the heat closing in, feel them out there making their moves... +What could possibly go wrong? We've already ate our cake. + +Stand Still. Pause Clocks. We can make the World Stop. +WUBWUBWUBWUBWUB + +I want a 1917 build and you will give me what I want. + +This sentence is false. + +Beauty is in the eyes of a Beholder. + +I'm the best at space. + +The first time Yossarian saw the chaplain, he fell madly in love with him. +* +* +* +Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for +the time it did me. There is a wisdom that is woe; but there is a woe that is +madness. And there is a Catskill eagle in some souls that can alike dive down +into the blackest gorges, and soar out of them again and become invisible in +the sunny spaces. And even if he for ever flies within the gorge, that gorge +is in the mountains; so that even in his lowest swoop the mountain eagle is +still higher than other birds upon the plain, even though they soar. +* +* +* + +I'm here to commit lines and drop rhymes +* +This is a line to test and try uploading a cl. + +And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on +the Android bots, and it was good. Except on one bot, where it was bad. And +lo, the change was reverted, and GCC went back to 4.6, where code is slower +and less optimized. And verily did it break the build, because artifacts had +been created with 4.8, and alignment was no longer the same, and a great +sadness descended upon the Android GN buildbot, and it did refuseth to build +any more. But the sheriffs thought to themselves: Placebo! 
Let us clobber the +bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many +seasons. And so they modified the whitespace file with these immortal lines, +and visited it upon the bots, that great destruction might be wrought upon +their outdated binaries. In clobberus, veritas. + +As the git approaches, light begins to shine through the SCM thrice again... +However, the git, is, after all, quite stupid. + +Suddenly Domo-Kun found itself in a room filled with dazzling mirrors. + +A herd of wild gits appears! Time for CQ :D +And one more for sizes.py... + +What's an overmarketed dietary supplement expressing sadness, relief, +tiredness, or a similar feeling.? Ah-Sigh-ee. + +It was love at first sight. The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him. + +Cool whitespace change for git-cl land + +Oh god the bots are red! I'm blind! Mmmm, cronuts. + +If you stand on your head, you will get footprints in your hair. + +sigh +sigher +pick up cls + +In the BUILD we trust. +^_^ + +In the masters we don't. +In the tryservers, we don't either. +In the CQ sometimes. +Auto-generated by git-eject-upstream (http://goo.gl/cIHsYR) +My sandwiches are like my children: I love them all. +No, really, I couldn't eat another bit. +When I hunger I think of you, and a pastrami sandwich. +Don't make a terrible mistake! + +We probably should actually make a chrome chromebook. + diff --git a/build/win/BUILD.gn b/build/win/BUILD.gn new file mode 100644 index 00000000000..466e7ee84bf --- /dev/null +++ b/build/win/BUILD.gn @@ -0,0 +1,16 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/win/manifest.gni") + +# Depending on this target will cause the manifests for Chrome's default +# Windows and common control compatibility and elevation for executables. +windows_manifest("default_exe_manifest") { + sources = [ + as_invoker_manifest, + common_controls_manifest, + default_compatibility_manifest, + ] + type = "exe" +} diff --git a/build/win/as_invoker.manifest b/build/win/as_invoker.manifest new file mode 100644 index 00000000000..df046fdf684 --- /dev/null +++ b/build/win/as_invoker.manifest @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/build/win/asan.gyp b/build/win/asan.gyp new file mode 100644 index 00000000000..d938426e136 --- /dev/null +++ b/build/win/asan.gyp @@ -0,0 +1,30 @@ +# Copyright (c) 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'targets': [ + { + 'target_name': 'asan_dynamic_runtime', + 'type': 'none', + 'variables': { + # Every target is going to depend on asan_dynamic_runtime, so allow + # this one to depend on itself. + 'prune_self_dependency': 1, + }, + 'conditions': [ + ['OS=="win"', { + 'copies': [ + { + 'destination': '<(PRODUCT_DIR)', + 'files': [ + # Path is relative to this GYP file. 
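+                # (The entry below points at the dynamic ASan runtime DLL
+                # shipped inside the clang package; the exact file name and
+                # version subdirectory are assumptions here, e.g. something
+                # like lib/clang/<version>/lib/windows/clang_rt.asan_dynamic-i386.dll.)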
+ '<(DEPTH)/<(make_clang_dir)/lib/clang/ + + + + + + + diff --git a/build/win/compatibility.manifest b/build/win/compatibility.manifest new file mode 100644 index 00000000000..10d10da3826 --- /dev/null +++ b/build/win/compatibility.manifest @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/build/win/copy_cdb_to_output.py b/build/win/copy_cdb_to_output.py new file mode 100644 index 00000000000..ea6ab3f4b30 --- /dev/null +++ b/build/win/copy_cdb_to_output.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import hashlib +import os +import shutil +import sys + +script_dir = os.path.dirname(os.path.realpath(__file__)) +src_build_dir = os.path.abspath(os.path.join(script_dir, os.pardir)) +sys.path.insert(0, src_build_dir) + +import vs_toolchain + + +def _HexDigest(file_name): + hasher = hashlib.sha256() + afile = open(file_name, 'rb') + blocksize = 65536 + buf = afile.read(blocksize) + while len(buf) > 0: + hasher.update(buf) + buf = afile.read(blocksize) + afile.close() + return hasher.hexdigest() + + +def _CopyImpl(file_name, target_dir, source_dir, verbose=False): + """Copy |source| to |target| if it doesn't already exist or if it + needs to be updated. + """ + target = os.path.join(target_dir, file_name) + source = os.path.join(source_dir, file_name) + if (os.path.isdir(os.path.dirname(target)) and + ((not os.path.isfile(target)) or + _HexDigest(source) != _HexDigest(target))): + if verbose: + print 'Copying %s to %s...' % (source, target) + if os.path.exists(target): + os.unlink(target) + shutil.copy(source, target) + + +def _ConditionalMkdir(output_dir): + if not os.path.isdir(output_dir): + os.makedirs(output_dir) + + +def _CopyCDBToOutput(output_dir, target_arch): + """Copies the Windows debugging executable cdb.exe to the output + directory, which is created if it does not exist. The output + directory, and target architecture that should be copied, are + passed. Supported values for the target architecture are the GYP + values "ia32" and "x64" and the GN values "x86" and "x64". + """ + _ConditionalMkdir(output_dir) + vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs() + # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case + # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run. + win_sdk_dir = os.path.normpath( + os.environ.get('WINDOWSSDKDIR', + 'C:\\Program Files (x86)\\Windows Kits\\10')) + if target_arch == 'ia32' or target_arch == 'x86': + src_arch = 'x86' + elif target_arch == 'x64': + src_arch = 'x64' + else: + print 'copy_cdb_to_output.py: unknown target_arch %s' % target_arch + sys.exit(1) + # We need to copy multiple files, so cache the computed source directory. + src_dir = os.path.join(win_sdk_dir, 'Debuggers', src_arch) + # We need to copy some helper DLLs to get access to the !uniqstack + # command to dump all threads' stacks. + src_winext_dir = os.path.join(src_dir, 'winext') + dst_winext_dir = os.path.join(output_dir, 'winext') + src_winxp_dir = os.path.join(src_dir, 'winxp') + dst_winxp_dir = os.path.join(output_dir, 'winxp') + _ConditionalMkdir(dst_winext_dir) + _ConditionalMkdir(dst_winxp_dir) + # Note that the outputs from the "copy_cdb_to_output" target need to + # be kept in sync with this list. 
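+  # Illustrative invocation (matching main() below):
+  #   python copy_cdb_to_output.py out\Release x64
+  # which copies cdb.exe and its helper DLLs into out\Release.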
+  _CopyImpl('cdb.exe', output_dir, src_dir)
+  _CopyImpl('dbgeng.dll', output_dir, src_dir)
+  _CopyImpl('dbghelp.dll', output_dir, src_dir)
+  _CopyImpl('dbgmodel.dll', output_dir, src_dir)
+  _CopyImpl('ext.dll', dst_winext_dir, src_winext_dir)
+  _CopyImpl('uext.dll', dst_winext_dir, src_winext_dir)
+  _CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir)
+  _CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir)
+  return 0
+
+
+def main():
+  if len(sys.argv) < 3:
+    print >>sys.stderr, 'Usage: copy_cdb_to_output.py <output_dir> ' + \
+        '<target_arch>'
+    return 1
+  return _CopyCDBToOutput(sys.argv[1], sys.argv[2])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/win/message_compiler.gni b/build/win/message_compiler.gni
new file mode 100644
index 00000000000..814eb6521e1
--- /dev/null
+++ b/build/win/message_compiler.gni
@@ -0,0 +1,89 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win, "This only runs on Windows.")
+
+# Runs mc.exe over a list of sources. The outputs (a header and rc file) are
+# placed in the target gen dir, and compiled.
+#
+# sources
+#   List of message files to process.
+#
+# user_mode_logging (optional bool)
+#   Generates user-mode logging code. Defaults to false (no logging code).
+#
+# compile_generated_code (optional, defaults = true)
+#   If unset or true, the generated code will be compiled and linked into
+#   targets that depend on it. If set to false, the .h and .rc files will only
+#   be generated.
+#
+# deps, public_deps, visibility
+#   Normal meaning.
+template("message_compiler") {
+  if (defined(invoker.compile_generated_code) &&
+      !invoker.compile_generated_code) {
+    compile_generated_code = false
+    action_name = target_name
+  } else {
+    compile_generated_code = true
+    action_name = "${target_name}_mc"
+    source_set_name = target_name
+  }
+
+  action_foreach(action_name) {
+    if (compile_generated_code) {
+      visibility = [ ":$source_set_name" ]
+    } else {
+      forward_variables_from(invoker, [ "visibility" ])
+    }
+
+    script = "//build/win/message_compiler.py"
+
+    outputs = [
+      "$target_gen_dir/{{source_name_part}}.h",
+      "$target_gen_dir/{{source_name_part}}.rc",
+    ]
+
+    args = [
+      # The first argument is the environment file saved to the build
+      # directory. This is required because the Windows toolchain setup saves
+      # the VC paths and such so that running "mc.exe" will work with the
+      # configured toolchain. This file is in the root build dir.
+      "environment.$current_cpu",
+
+      # Where to put the header.
+      "-h",
+      rebase_path(target_gen_dir, root_build_dir),
+
+      # Where to put the .rc file.
+      "-r",
+      rebase_path(target_gen_dir, root_build_dir),
+
+      # Input is Unicode.
+      "-u",
+    ]
+    if (defined(invoker.user_mode_logging) && invoker.user_mode_logging) {
+      args += [ "-um" ]
+    }
+    args += [ "{{source}}" ]
+
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "public_deps",
+                             "sources",
+                           ])
+  }
+
+  if (compile_generated_code) {
+    # Compile the generated rc file.
+    source_set(source_set_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      sources = get_target_outputs(":$action_name")
+      deps = [
+        ":$action_name",
+      ]
+    }
+  }
+}
diff --git a/build/win/message_compiler.py b/build/win/message_compiler.py
new file mode 100644
index 00000000000..86aba4f9300
--- /dev/null
+++ b/build/win/message_compiler.py
@@ -0,0 +1,28 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs the Microsoft Message Compiler (mc.exe). This Python adapter is for the
+# GN build, which can only run Python and not native binaries.
+#
+# Usage: message_compiler.py <environment_file> [<args to mc.exe>*]
+
+import subprocess
+import sys
+
+# Read the environment block from the file. This is stored in the format used
+# by CreateProcess. Drop last 2 NULs, one for list terminator, one for trailing
+# vs. separator.
+env_pairs = open(sys.argv[1]).read()[:-2].split('\0')
+env_dict = dict([item.split('=', 1) for item in env_pairs])
+
+# mc writes to stderr, so this explicitly redirects to stdout and eats it.
+try:
+  # This needs shell=True to search the path in env_dict for the mc executable.
+  subprocess.check_output(["mc.exe"] + sys.argv[2:],
+                          env=env_dict,
+                          stderr=subprocess.STDOUT,
+                          shell=True)
+except subprocess.CalledProcessError as e:
+  print e.output
+  sys.exit(e.returncode)
diff --git a/build/win/reorder-imports.py b/build/win/reorder-imports.py
new file mode 100644
index 00000000000..822354dab6a
--- /dev/null
+++ b/build/win/reorder-imports.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+def reorder_imports(input_dir, output_dir, architecture):
+  """Run swapimport.exe on the initial nw.exe, and write to the output
+  directory. Also copy over any related files that might be needed
+  (pdbs, manifests etc.).
+  """
+
+  input_image = os.path.join(input_dir, 'nw.exe')
+  output_image = os.path.join(output_dir, 'nw.exe')
+
+  swap_exe = os.path.join(
+      __file__,
+      '..\\..\\..\\third_party\\syzygy\\binaries\\exe\\swapimport.exe')
+
+  args = [swap_exe, '--input-image=%s' % input_image,
+          '--output-image=%s' % output_image, '--overwrite', '--no-logo']
+
+  if architecture == 'x64':
+    args.append('--x64')
+
+  args.append('nw_elf.dll')
+
+  subprocess.check_call(args)
+
+  for fname in glob.iglob(os.path.join(input_dir, 'nw.exe.*')):
+    shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
+  return 0
+
+
+def main(argv):
+  usage = 'reorder_imports.py -i <input_dir> -o <output_dir> -a <architecture>'
+  parser = optparse.OptionParser(usage=usage)
+  parser.add_option('-i', '--input', help='reorder nw.exe in DIR',
+                    metavar='DIR')
+  parser.add_option('-o', '--output', help='write new nw.exe to DIR',
+                    metavar='DIR')
+  parser.add_option('-a', '--arch', help='architecture of build (optional)',
+                    default='ia32')
+  opts, args = parser.parse_args()
+
+  if not opts.input or not opts.output:
+    parser.error('Please provide an input and output directory')
+  return reorder_imports(opts.input, opts.output, opts.arch)
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/win/require_administrator.manifest b/build/win/require_administrator.manifest
new file mode 100644
index 00000000000..4142e733424
--- /dev/null
+++ b/build/win/require_administrator.manifest
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
diff --git a/build/win/syzygy/BUILD.gn b/build/win/syzygy/BUILD.gn
new file mode 100644
index 00000000000..1d26b5a6e7c
--- /dev/null
+++ b/build/win/syzygy/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
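+
+# Illustrative usage (hypothetical target): a BUILD file matched by the
+# visibility list below can pull the binaries in with
+#
+#   data_deps = [ "//build/win/syzygy:copy_syzyasan_binaries" ]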
+ +copy("copy_syzyasan_binaries") { + visibility = [ + "//chrome/*", + "//content/*", + ] + + source_dir = "//third_party/syzygy/binaries/exe" + + sources = [ + "$source_dir/agent_logger.exe", + "$source_dir/minidump_symbolizer.py", + "$source_dir/syzyasan_rtl.dll", + "$source_dir/syzyasan_rtl.dll.pdb", + ] + + outputs = [ + "$root_out_dir/syzygy/{{source_file_part}}", + ] +} diff --git a/build/win/syzygy/OWNERS b/build/win/syzygy/OWNERS new file mode 100644 index 00000000000..eb5a1c7c7ef --- /dev/null +++ b/build/win/syzygy/OWNERS @@ -0,0 +1,3 @@ +chrisha@chromium.org +sebmarchand@chromiun.org +siggi@chromium.org diff --git a/build/win/syzygy/instrument.py b/build/win/syzygy/instrument.py new file mode 100644 index 00000000000..e103facf0dd --- /dev/null +++ b/build/win/syzygy/instrument.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""A utility script to help building Syzygy-instrumented Chrome binaries.""" + +import glob +import logging +import optparse +import os +import shutil +import subprocess +import sys + + +# The default directory containing the Syzygy toolchain. +_DEFAULT_SYZYGY_DIR = os.path.abspath(os.path.join( + os.path.dirname(__file__), '../../..', + 'third_party/syzygy/binaries/exe/')) + +# Basenames of various tools. +_INSTRUMENT_EXE = 'instrument.exe' +_GENFILTER_EXE = 'genfilter.exe' + +_LOGGER = logging.getLogger() + + +def _Shell(*cmd, **kw): + """Shells out to "cmd". Returns a tuple of cmd's stdout, stderr.""" + _LOGGER.info('Running command "%s".', cmd) + prog = subprocess.Popen(cmd, **kw) + + stdout, stderr = prog.communicate() + if prog.returncode != 0: + raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode)) + + return stdout, stderr + + +def _CompileFilter(syzygy_dir, executable, symbol, filter_file, + output_filter_file): + """Compiles the provided filter writing the compiled filter file to + output_filter_file. + """ + cmd = [os.path.abspath(os.path.join(syzygy_dir, _GENFILTER_EXE)), + '--action=compile', + '--input-image=%s' % executable, + '--input-pdb=%s' % symbol, + '--output-file=%s' % output_filter_file, + '--overwrite', + os.path.abspath(filter_file)] + + _Shell(*cmd) + if not os.path.exists(output_filter_file): + raise RuntimeError('Compiled filter file missing: %s' % output_filter_file) + return + + +def _InstrumentBinary(syzygy_dir, mode, executable, symbol, dst_dir, + filter_file, allocation_filter_file): + """Instruments the executable found in input_dir, and writes the resultant + instrumented executable and symbol files to dst_dir. + """ + cmd = [os.path.abspath(os.path.join(syzygy_dir, _INSTRUMENT_EXE)), + '--overwrite', + '--mode=%s' % mode, + '--debug-friendly', + '--input-image=%s' % executable, + '--input-pdb=%s' % symbol, + '--output-image=%s' % os.path.abspath( + os.path.join(dst_dir, os.path.basename(executable))), + '--output-pdb=%s' % os.path.abspath( + os.path.join(dst_dir, os.path.basename(symbol)))] + + if mode == "asan": + cmd.append('--no-augment-pdb') + # Disable some of the new SysyASAN features. We're seeing an increase in + # crash rates and are wondering if they are to blame. + cmd.append( + '--asan-rtl-options="--enable_feature_randomization ' + '--prevent_duplicate_corruption_crashes"') + + # If any filters were specified then pass them on to the instrumenter. 
+ if filter_file: + cmd.append('--filter=%s' % os.path.abspath(filter_file)) + if allocation_filter_file: + cmd.append('--allocation-filter-config-file=%s' % + os.path.abspath(allocation_filter_file)) + + return _Shell(*cmd) + + +def main(options): + # Make sure the destination directory exists. + if not os.path.isdir(options.destination_dir): + _LOGGER.info('Creating destination directory "%s".', + options.destination_dir) + os.makedirs(options.destination_dir) + + # Compile the filter if one was provided. + if options.filter: + _CompileFilter(options.syzygy_dir, + options.input_executable, + options.input_symbol, + options.filter, + options.output_filter_file) + + # Instruments the binaries into the destination directory. + _InstrumentBinary(options.syzygy_dir, + options.mode, + options.input_executable, + options.input_symbol, + options.destination_dir, + options.output_filter_file, + options.allocation_filter_file) + + +def _ParseOptions(): + option_parser = optparse.OptionParser() + option_parser.add_option('--input_executable', + help='The path to the input executable.') + option_parser.add_option('--input_symbol', + help='The path to the input symbol file.') + option_parser.add_option('--mode', + help='Specifies which instrumentation mode is to be used.') + option_parser.add_option('--syzygy-dir', default=_DEFAULT_SYZYGY_DIR, + help='Instrumenter executable to use, defaults to "%default".') + option_parser.add_option('-d', '--destination_dir', + help='Destination directory for instrumented files.') + option_parser.add_option('--filter', + help='An optional filter. This will be compiled and passed to the ' + 'instrumentation executable.') + option_parser.add_option('--output-filter-file', + help='The path where the compiled filter will be written. This is ' + 'required if --filter is specified.') + option_parser.add_option('--allocation-filter-file', + help='The path to the SyzyASAN allocation filter to use.') + options, args = option_parser.parse_args() + + if not options.mode: + option_parser.error('You must provide an instrumentation mode.') + if not options.input_executable: + option_parser.error('You must provide an input executable.') + if not options.input_symbol: + option_parser.error('You must provide an input symbol file.') + if not options.destination_dir: + option_parser.error('You must provide a destination directory.') + if options.filter and not options.output_filter_file: + option_parser.error('You must provide a filter output file.') + + return options + + +if '__main__' == __name__: + logging.basicConfig(level=logging.INFO) + sys.exit(main(_ParseOptions())) diff --git a/build/win/syzygy/reorder.py b/build/win/syzygy/reorder.py new file mode 100644 index 00000000000..ba62cc77062 --- /dev/null +++ b/build/win/syzygy/reorder.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""A utility script to help building Syzygy-reordered Chrome binaries.""" + +import logging +import optparse +import os +import subprocess +import sys + + +# The default relink executable to use to reorder binaries. +_DEFAULT_RELINKER = os.path.join( + os.path.join(os.path.dirname(__file__), '../../..'), + 'third_party/syzygy/binaries/exe/relink.exe') + +_LOGGER = logging.getLogger() + +# We use the same seed for all random reorderings to get a deterministic build. 
+_RANDOM_SEED = 1347344
+
+
+def _Shell(*cmd, **kw):
+  """Shells out to "cmd". Returns a tuple of cmd's stdout and stderr."""
+  _LOGGER.info('Running command "%s".', cmd)
+  prog = subprocess.Popen(cmd, **kw)
+
+  stdout, stderr = prog.communicate()
+  if prog.returncode != 0:
+    raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
+
+  return stdout, stderr
+
+
+def _ReorderBinary(relink_exe, executable, symbol, destination_dir):
+  """Reorders the provided executable, and writes the resulting reordered
+  executable and symbol files to destination_dir.
+
+  If a file named <executable>-order.json exists, imposes that order on the
+  output binaries, otherwise orders them randomly.
+  """
+  cmd = [relink_exe,
+         '--overwrite',
+         '--input-image=%s' % executable,
+         '--input-pdb=%s' % symbol,
+         '--output-image=%s' % os.path.abspath(
+             os.path.join(destination_dir, os.path.basename(executable))),
+         '--output-pdb=%s' % os.path.abspath(
+             os.path.join(destination_dir, os.path.basename(symbol))),]
+
+  # Check whether there's an order file available for the executable.
+  order_file = '%s-order.json' % executable
+  if os.path.exists(order_file):
+    # The ordering file exists, let's use that.
+    _LOGGER.info('Reordering "%s" according to "%s".',
+                 os.path.basename(executable),
+                 os.path.basename(order_file))
+    cmd.append('--order-file=%s' % order_file)
+  else:
+    # No ordering file, so randomize the output.
+    _LOGGER.info('Randomly reordering "%s".', executable)
+    cmd.append('--seed=%d' % _RANDOM_SEED)
+
+  return _Shell(*cmd)
+
+
+def main(options):
+  logging.basicConfig(level=logging.INFO)
+
+  # Make sure the destination directory exists.
+  if not os.path.isdir(options.destination_dir):
+    _LOGGER.info('Creating destination directory "%s".',
+                 options.destination_dir)
+    os.makedirs(options.destination_dir)
+
+  # Reorder the binaries into the destination directory.
+  _ReorderBinary(options.relinker,
+                 options.input_executable,
+                 options.input_symbol,
+                 options.destination_dir)
+
+
+def _ParseOptions():
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--input_executable',
+      help='The path to the input executable.')
+  option_parser.add_option('--input_symbol',
+      help='The path to the input symbol file.')
+  option_parser.add_option('--relinker', default=_DEFAULT_RELINKER,
+      help='Relinker executable to use, defaults to "%s".' % _DEFAULT_RELINKER)
+  option_parser.add_option('-d', '--destination_dir',
+      help='Destination directory for reordered files, defaults to the '
+           'subdirectory "reordered" next to the input executable.')
+  options, args = option_parser.parse_args()
+
+  if not options.input_executable:
+    option_parser.error('You must provide an input executable.')
+  if not options.input_symbol:
+    option_parser.error('You must provide an input symbol file.')
+
+  if not options.destination_dir:
+    # There is no --output_dir option, so derive the default from the input
+    # executable's location.
+    options.destination_dir = os.path.join(
+        os.path.dirname(options.input_executable), 'reordered')
+
+  return options
+
+
+if '__main__' == __name__:
+  sys.exit(main(_ParseOptions()))
diff --git a/build/win/syzygy/syzyasan-allocation-filter.txt b/build/win/syzygy/syzyasan-allocation-filter.txt
new file mode 100644
index 00000000000..8dbf87baf4e
--- /dev/null
+++ b/build/win/syzygy/syzyasan-allocation-filter.txt
@@ -0,0 +1,14 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file describes suspicious allocation sites that are to be treated
+// specially and served from the page heap.
+// It is used at instrumentation time to ensure that allocations from the
+// specified stack traces are redirected to the page heap at runtime.
+//
+// See chrome_syzygy.gypi and instrument.py for more details.
+{
+  "hooks": {
+    "func": []
+  }
+}
diff --git a/build/win/syzygy/syzyasan-instrumentation-filter.txt b/build/win/syzygy/syzyasan-instrumentation-filter.txt
new file mode 100644
index 00000000000..51af1bfc964
--- /dev/null
+++ b/build/win/syzygy/syzyasan-instrumentation-filter.txt
@@ -0,0 +1,10 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This file describes filtering rules that will be applied when applying
+# Syzygy ASAN instrumentation to chrome.dll. It is intended to be used for
+# disabling instrumentation of functions with known and deferred bugs, allowing
+# functional instrumented builds of Chrome to be produced in the meantime.
+#
+# See chrome_syzygy.gypi and instrument.py for more details.
diff --git a/build/win/syzygy/syzygy.gni b/build/win/syzygy/syzygy.gni
new file mode 100644
index 00000000000..1ae2ebef828
--- /dev/null
+++ b/build/win/syzygy/syzygy.gni
@@ -0,0 +1,137 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+# Where the output binaries will be placed.
+syzygy_dest_dir = "$root_out_dir/syzygy"
+
+# Generates a Syzygy optimize target.
+#
+# binary_name (required)
+#   Name of the binary to be instrumented, with no extension or path. This
+#   binary_name is assumed to be in the output directory and must be
+#   generated by a dependency of this target.
+#
+# deps (required)
+#   Normal meaning.
+#
+# data_deps
+#   Normal meaning.
+template("syzygy_optimize") {
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    script = "//build/win/syzygy/reorder.py"
+
+    binary_name = invoker.binary_name
+    input_dll = "$root_out_dir/$binary_name"
+    input_pdb = "$root_out_dir/$binary_name.pdb"
+
+    inputs = [
+      input_dll,
+      #input_pdb,
+    ]
+
+    outputs = [
+      "$syzygy_dest_dir/$binary_name",
+      "$syzygy_dest_dir/$binary_name.pdb",
+    ]
+
+    args = [
+      "--input_executable",
+      rebase_path(input_dll, root_build_dir),
+      "--input_symbol",
+      rebase_path(input_pdb, root_build_dir),
+      "--destination_dir",
+      rebase_path(syzygy_dest_dir, root_build_dir),
+    ]
+
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "data_deps",
+                             "public_deps",
+                           ])
+  }
+}
+
+# Instruments a binary with SyzyAsan.
+#
+# binary_name (required)
+#   Name of the binary to be instrumented, with no extension or path. This
+#   binary_name is assumed to be in the output directory and must be
+#   generated by a dependency of this target.
+#
+# dest_dir (required)
+#   The destination directory where the instrumented image should be
+#   written.
+#
+# deps (required)
+#   Normal meaning.
+#
+# public_deps
+#   Normal meaning.
+#
+# data_deps
+#   Normal meaning.
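+#
+# Example invocation (a hypothetical target; the binary_name and deps shown
+# here are illustrative and not taken from this change):
+#
+#   syzygy_asan("chrome_dll_syzygy") {
+#     binary_name = "chrome.dll"
+#     dest_dir = "$root_out_dir/syzygy"
+#     deps = [ ":chrome_dll" ]
+#   }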
+template("syzygy_asan") {
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    script = "//build/win/syzygy/instrument.py"
+
+    filter = "//build/win/syzygy/syzyasan-instrumentation-filter.txt"
+
+    binary_name = invoker.binary_name
+    dest_dir = invoker.dest_dir
+    input_image = "$root_out_dir/$binary_name"
+    input_pdb = "$root_out_dir/$binary_name.pdb"
+
+    inputs = [
+      filter,
+      input_image,
+
+      #input_pdb,
+    ]
+
+    output_filter = "$dest_dir/win-syzyasan-filter-$binary_name.txt.json"
+
+    outputs = [
+      "$dest_dir/$binary_name",
+      "$dest_dir/$binary_name.pdb",
+      output_filter,
+    ]
+
+    args = [
+      "--mode",
+      "asan",
+      "--input_executable",
+      rebase_path(input_image, root_build_dir),
+      "--input_symbol",
+      rebase_path(input_pdb, root_build_dir),
+      "--filter",
+      rebase_path(filter, root_build_dir),
+      "--output-filter-file",
+      rebase_path(output_filter, root_build_dir),
+      "--destination_dir",
+      rebase_path(dest_dir, root_build_dir),
+    ]
+
+    deps = [
+      "//build/win/syzygy:copy_syzyasan_binaries",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+  }
+}
diff --git a/build/win/use_ansi_codes.py b/build/win/use_ansi_codes.py
new file mode 100644
index 00000000000..cff5f43d25a
--- /dev/null
+++ b/build/win/use_ansi_codes.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints whether the terminal is likely to understand ANSI codes."""
+
+import os
+
+# Add more terminals here as needed.
+print 'ANSICON' in os.environ
diff --git a/build/win_is_xtree_patched.py b/build/win_is_xtree_patched.py
new file mode 100644
index 00000000000..3f1994ff9d1
--- /dev/null
+++ b/build/win_is_xtree_patched.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Determines if the VS xtree header has been patched to disable C4702."""
+
+import os
+
+
+def IsPatched():
+  # TODO(scottmg): For now, just return if we're using the packaged toolchain
+  # script (because we know it's patched). Another case could be added here to
+  # query the active VS installation and actually check the contents of xtree.
+  # http://crbug.com/346399.
+  return int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1)) == 1
+
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return "1" if IsPatched() else "0"
+
+
+if __name__ == '__main__':
+  print DoMain([])
diff --git a/build/win_precompile.gypi b/build/win_precompile.gypi
new file mode 100644
index 00000000000..8849eb10532
--- /dev/null
+++ b/build/win_precompile.gypi
@@ -0,0 +1,21 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file to make targets in your .gyp use the default
+# precompiled header on Windows. This is done in debug builds only, as the
+# official builders blow up (run out of memory) if precompiled headers are
+# used for release builds.
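+#
+# Example (a hypothetical caller; the include path depends on where the
+# including .gyp file lives):
+#
+#   {
+#     'includes': [ '../build/win_precompile.gypi' ],
+#     'targets': [ ... ],
+#   }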
+
+{
+  'conditions': [
+    ['OS=="win" and chromium_win_pch==1', {
+      'target_defaults': {
+        'msvs_precompiled_header': 'build/precompile.h',
+        'msvs_precompiled_source': '<(DEPTH)/build/precompile.cc',
+        'sources': ['<(DEPTH)/build/precompile.cc'],
+        'include_dirs': [ '<(DEPTH)' ],
+      }
+    }],
+  ],
+}
diff --git a/build/write_build_date_header.py b/build/write_build_date_header.py
new file mode 100644
index 00000000000..6fe514fd788
--- /dev/null
+++ b/build/write_build_date_header.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes a file that contains a define that approximates the build date.
+
+build_type impacts the timestamp generated:
+- default: the build date is set to the most recent first Sunday of a month at
+  5:00am. The reason is that it is a time where invalidating the build cache
+  shouldn't have major repercussions (due to lower load).
+- official: the build date is set to the current date at 5:00am, or the day
+  before if the current time is before 5:00am.
+Either way, it is guaranteed to be in the past and always in UTC.
+
+It is also possible to explicitly set a build date to be used.
+"""
+
+import argparse
+import calendar
+import datetime
+import doctest
+import os
+import sys
+
+
+def GetFirstSundayOfMonth(year, month):
+  """Returns the first Sunday of the given month of the given year.
+
+  >>> GetFirstSundayOfMonth(2016, 2)
+  7
+  >>> GetFirstSundayOfMonth(2016, 3)
+  6
+  >>> GetFirstSundayOfMonth(2000, 1)
+  2
+  """
+  weeks = calendar.Calendar().monthdays2calendar(year, month)
+  # Return the first day in the first week that is a Sunday.
+  return [date_day[0] for date_day in weeks[0] if date_day[1] == 6][0]
+
+
+def GetBuildDate(build_type, utc_now):
+  """Gets the approximate build date given the specific build type.
+
+  >>> GetBuildDate('default', datetime.datetime(2016, 2, 6, 1, 2, 3))
+  'Jan 03 2016 01:02:03'
+  >>> GetBuildDate('default', datetime.datetime(2016, 2, 7, 5))
+  'Feb 07 2016 05:00:00'
+  >>> GetBuildDate('default', datetime.datetime(2016, 2, 8, 5))
+  'Feb 07 2016 05:00:00'
+  """
+  day = utc_now.day
+  month = utc_now.month
+  year = utc_now.year
+  if build_type != 'official':
+    first_sunday = GetFirstSundayOfMonth(year, month)
+    # If our build is after the first Sunday, we've already refreshed our build
+    # cache on a quiet day, so just use that day.
+    # Otherwise, take the first Sunday of the previous month.
+    if day >= first_sunday:
+      day = first_sunday
+    else:
+      month -= 1
+      if month == 0:
+        month = 12
+        year -= 1
+      day = GetFirstSundayOfMonth(year, month)
+  now = datetime.datetime(
+      year, month, day, utc_now.hour, utc_now.minute, utc_now.second)
+  return '{:%b %d %Y %H:%M:%S}'.format(now)
+
+
+def main():
+  if doctest.testmod()[0]:
+    return 1
+  argument_parser = argparse.ArgumentParser(
+      description=sys.modules[__name__].__doc__,
+      formatter_class=argparse.RawDescriptionHelpFormatter)
+  argument_parser.add_argument('output_file', help='The file to write to')
+  argument_parser.add_argument(
+      'build_type', help='The type of build', choices=('official', 'default'))
+  argument_parser.add_argument(
+      'build_date_override', nargs='?',
+      help='Optional override for the build date. Format must be '
+           '\'Mmm DD YYYY HH:MM:SS\'')
+  args = argument_parser.parse_args()
+
+  if args.build_date_override:
+    # Format is expected to be "Mmm DD YYYY HH:MM:SS".
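+    # An illustrative value would be "Sep 04 2016 05:00:00"; the override is
+    # used verbatim and is not validated here.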
+    build_date = args.build_date_override
+  else:
+    now = datetime.datetime.utcnow()
+    if now.hour < 5:
+      # The time is locked at 5:00 am in UTC to cause the build cache
+      # invalidation to not happen exactly at midnight. Use the same
+      # calculation as the day before.
+      # See //base/build_time.cc.
+      now = now - datetime.timedelta(days=1)
+    now = datetime.datetime(now.year, now.month, now.day, 5, 0, 0)
+    build_date = GetBuildDate(args.build_type, now)
+
+  output = ('// Generated by //build/write_build_date_header.py\n'
+            '#ifndef BUILD_DATE\n'
+            '#define BUILD_DATE "{}"\n'
+            '#endif // BUILD_DATE\n'.format(build_date))
+
+  current_contents = ''
+  if os.path.isfile(args.output_file):
+    with open(args.output_file, 'r') as current_file:
+      current_contents = current_file.read()
+
+  if current_contents != output:
+    with open(args.output_file, 'w') as output_file:
+      output_file.write(output)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/write_buildflag_header.py b/build/write_buildflag_header.py
new file mode 100644
index 00000000000..d46cfc89a9d
--- /dev/null
+++ b/build/write_buildflag_header.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This writes headers for build flags. See buildflag_header.gni for usage of
+# this system as a whole.
+#
+# The parameters are passed in a response file so we don't have to worry
+# about command line lengths. The name of the response file is passed on the
+# command line.
+#
+# The format of the response file is:
+#    [--flags <list of one or more flag values>]
+
+import optparse
+import os
+import shlex
+import sys
+
+
+class Options:
+  def __init__(self, output, rulename, header_guard, flags):
+    self.output = output
+    self.rulename = rulename
+    self.header_guard = header_guard
+    self.flags = flags
+
+
+def GetOptions():
+  parser = optparse.OptionParser()
+  parser.add_option('--output', help="Output header name inside --gen-dir.")
+  parser.add_option('--rulename',
+                    help="Helpful name of build rule for including in the " +
+                         "comment at the top of the file.")
+  parser.add_option('--gen-dir',
+                    help="Path to root of generated file directory tree.")
+  parser.add_option('--definitions',
+                    help="Name of the response file containing the flags.")
+  cmdline_options, cmdline_flags = parser.parse_args()
+
+  # Compute the header guard by replacing some chars with _ and upper-casing.
+  header_guard = cmdline_options.output.upper()
+  header_guard = \
+      header_guard.replace('/', '_').replace('\\', '_').replace('.', '_')
+  header_guard += '_'
+
+  # The actual output file is inside the gen dir.
+  output = os.path.join(cmdline_options.gen_dir, cmdline_options.output)
+
+  # Definition files are newline-separated in GYP and shell-formatted in GN;
+  # shlex can parse both of these.
+  with open(cmdline_options.definitions, 'r') as def_file:
+    defs = shlex.split(def_file.read())
+  flags_index = defs.index('--flags')
+
+  # Everything after --flags is a flag; true/false are remapped to 1/0, and
+  # everything else is passed through.
+  flags = []
+  for flag in defs[flags_index + 1:]:
+    equals_index = flag.index('=')
+    key = flag[:equals_index]
+    value = flag[equals_index + 1:]
+
+    # Canonicalize and validate the value.
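+    # For example (illustrative flag names): 'ENABLE_DOOM_MELON=true' becomes
+    # the pair ('ENABLE_DOOM_MELON', '1'), while 'SOME_LEVEL=2' passes
+    # through unchanged as ('SOME_LEVEL', '2').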
+ if value == 'true': + value = '1' + elif value == 'false': + value = '0' + flags.append((key, str(value))) + + return Options(output=output, + rulename=cmdline_options.rulename, + header_guard=header_guard, + flags=flags) + + +def WriteHeader(options): + with open(options.output, 'w') as output_file: + output_file.write("// Generated by build/write_buildflag_header.py\n") + if options.rulename: + output_file.write('// From "' + options.rulename + '"\n') + + output_file.write('\n#ifndef %s\n' % options.header_guard) + output_file.write('#define %s\n\n' % options.header_guard) + output_file.write('#include "build/buildflag.h"\n\n') + + for pair in options.flags: + output_file.write('#define BUILDFLAG_INTERNAL_%s() (%s)\n' % pair) + + output_file.write('\n#endif // %s\n' % options.header_guard) + + +options = GetOptions() +WriteHeader(options) diff --git a/common.gypi b/common.gypi deleted file mode 100644 index d1411ed1c6f..00000000000 --- a/common.gypi +++ /dev/null @@ -1,403 +0,0 @@ -{ - 'variables': { - 'asan%': 0, - 'werror': '', # Turn off -Werror in V8 build. - 'visibility%': 'hidden', # V8's visibility setting - 'target_arch%': 'ia32', # set v8's target architecture - 'host_arch%': 'ia32', # set v8's host architecture - 'want_separate_host_toolset%': 0, # V8 should not build target and host - 'library%': 'static_library', # allow override to 'shared_library' for DLL/.so builds - 'component%': 'static_library', # NB. these names match with what V8 expects - 'msvs_multi_core_compile': '0', # we do enable multicore compiles, but not using the V8 way - 'python%': 'python', - - 'node_shared%': 'false', - 'force_dynamic_crt%': 0, - 'node_use_v8_platform%': 'true', - 'node_use_bundled_v8%': 'true', - 'node_module_version%': '', - - 'node_tag%': '', - 'uv_library%': 'static_library', - - 'openssl_fips%': '', - - # Default to -O0 for debug builds. - 'v8_optimized_debug%': 0, - - # Enable disassembler for `--print-code` v8 options - 'v8_enable_disassembler': 1, - - # Don't bake anything extra into the snapshot. - 'v8_use_external_startup_data%': 0, - - # Don't use ICU data file (icudtl.dat) from V8, we use our own. 
- 'icu_use_data_file_flag%': 0, - - 'conditions': [ - ['OS == "win"', { - 'os_posix': 0, - 'v8_postmortem_support%': 'false', - }, { - 'os_posix': 1, - 'v8_postmortem_support%': 'true', - }], - ['GENERATOR == "ninja" or OS== "mac"', { - 'OBJ_DIR': '<(PRODUCT_DIR)/obj', - 'V8_BASE': '<(PRODUCT_DIR)/libv8_base.a', - }, { - 'OBJ_DIR': '<(PRODUCT_DIR)/obj.target', - 'V8_BASE': '<(PRODUCT_DIR)/obj.target/deps/v8/src/libv8_base.a', - }], - ['openssl_fips != ""', { - 'OPENSSL_PRODUCT': 'libcrypto.a', - }, { - 'OPENSSL_PRODUCT': 'libopenssl.a', - }], - ['OS=="mac"', { - 'clang%': 1, - }, { - 'clang%': 0, - }], - ], - }, - - 'target_defaults': { - 'default_configuration': 'Release', - 'configurations': { - 'Debug': { - 'variables': { - 'v8_enable_handle_zapping': 1, - }, - 'defines': [ 'DEBUG', '_DEBUG' ], - 'cflags': [ '-g', '-O0' ], - 'conditions': [ - ['target_arch=="x64"', { - 'msvs_configuration_platform': 'x64', - }], - ['OS=="aix"', { - 'cflags': [ '-gxcoff' ], - 'ldflags': [ '-Wl,-bbigtoc' ], - }], - ['OS == "android"', { - 'cflags': [ '-fPIE' ], - 'ldflags': [ '-fPIE', '-pie' ] - }], - ['node_shared=="true"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 3, # MultiThreadedDebugDLL (/MDd) - } - } - }], - ['node_shared=="false"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 1 # MultiThreadedDebug (/MTd) - } - } - }] - ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'Optimization': 0, # /Od, no optimization - 'MinimalRebuild': 'false', - 'OmitFramePointers': 'false', - 'BasicRuntimeChecks': 3, # /RTC1 - }, - 'VCLinkerTool': { - 'LinkIncremental': 2, # enable incremental linking - }, - }, - 'xcode_settings': { - 'GCC_OPTIMIZATION_LEVEL': '0', # stop gyp from defaulting to -Os - }, - }, - 'Release': { - 'variables': { - 'v8_enable_handle_zapping': 0, - }, - 'cflags': [ '-O3' ], - 'conditions': [ - ['target_arch=="x64"', { - 'msvs_configuration_platform': 'x64', - }], - ['OS=="solaris"', { - # pull in V8's postmortem metadata - 'ldflags': [ '-Wl,-z,allextract' ] - }], - ['OS!="mac" and OS!="win"', { - 'cflags': [ '-fno-omit-frame-pointer' ], - }], - ['OS == "android"', { - 'cflags': [ '-fPIE' ], - 'ldflags': [ '-fPIE', '-pie' ] - }], - ['node_shared=="true"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 2 # MultiThreadedDLL (/MD) - } - } - }], - ['node_shared=="false"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 0 # MultiThreaded (/MT) - } - } - }] - ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'Optimization': 3, # /Ox, full optimization - 'FavorSizeOrSpeed': 1, # /Ot, favour speed over size - 'InlineFunctionExpansion': 2, # /Ob2, inline anything eligible - 'WholeProgramOptimization': 'true', # /GL, whole program optimization, needed for LTCG - 'OmitFramePointers': 'true', - 'EnableFunctionLevelLinking': 'true', - 'EnableIntrinsicFunctions': 'true', - 'RuntimeTypeInfo': 'false', - 'AdditionalOptions': [ - '/MP', # compile across multiple CPUs - ], - }, - 'VCLibrarianTool': { - 'AdditionalOptions': [ - '/LTCG', # link time code generation - ], - }, - 'VCLinkerTool': { - 'LinkTimeCodeGeneration': 1, # link-time code generation - 'OptimizeReferences': 2, # /OPT:REF - 'EnableCOMDATFolding': 2, # /OPT:ICF - 'LinkIncremental': 1, # disable incremental linking - }, - }, - } - }, - # Forcibly disable -Werror. We support a wide range of compilers, it's - # simply not feasible to squelch all warnings, never mind that the - # libraries in deps/ are not under our control. 
- 'cflags!': ['-Werror'], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'StringPooling': 'true', # pool string literals - 'DebugInformationFormat': 3, # Generate a PDB - 'WarningLevel': 3, - 'BufferSecurityCheck': 'true', - 'ExceptionHandling': 0, # /EHsc - 'SuppressStartupBanner': 'true', - 'WarnAsError': 'false', - }, - 'VCLibrarianTool': { - }, - 'VCLinkerTool': { - 'conditions': [ - ['target_arch=="ia32"', { - 'TargetMachine' : 1, # /MACHINE:X86 - 'target_conditions': [ - ['_type=="executable"', { - 'AdditionalOptions': [ '/SubSystem:Console,"5.01"' ], - }], - ], - }], - ['target_arch=="x64"', { - 'TargetMachine' : 17, # /MACHINE:AMD64 - 'target_conditions': [ - ['_type=="executable"', { - 'AdditionalOptions': [ '/SubSystem:Console,"5.02"' ], - }], - ], - }], - ], - 'GenerateDebugInformation': 'true', - 'GenerateMapFile': 'true', # /MAP - 'MapExports': 'true', # /MAPINFO:EXPORTS - 'RandomizedBaseAddress': 2, # enable ASLR - 'DataExecutionPrevention': 2, # enable DEP - 'AllowIsolation': 'true', - 'SuppressStartupBanner': 'true', - }, - }, - 'msvs_disabled_warnings': [4351, 4355, 4800], - 'conditions': [ - ['asan == 1 and OS != "mac"', { - 'cflags+': [ - '-fno-omit-frame-pointer', - '-fsanitize=address', - '-DLEAK_SANITIZER' - ], - 'cflags!': [ '-fomit-frame-pointer' ], - 'ldflags': [ '-fsanitize=address' ], - }], - ['asan == 1 and OS == "mac"', { - 'xcode_settings': { - 'OTHER_CFLAGS+': [ - '-fno-omit-frame-pointer', - '-gline-tables-only', - '-fsanitize=address', - '-DLEAK_SANITIZER' - ], - 'OTHER_CFLAGS!': [ - '-fomit-frame-pointer', - ], - }, - 'target_conditions': [ - ['_type!="static_library"', { - 'xcode_settings': {'OTHER_LDFLAGS': ['-fsanitize=address']}, - }], - ], - }], - ['OS == "win"', { - 'msvs_cygwin_shell': 0, # prevent actions from trying to use cygwin - 'defines': [ - 'WIN32', - # we don't really want VC++ warning us about - # how dangerous C functions are... - '_CRT_SECURE_NO_DEPRECATE', - # ... or that C implementations shouldn't use - # POSIX names - '_CRT_NONSTDC_NO_DEPRECATE', - # Make sure the STL doesn't try to use exceptions - '_HAS_EXCEPTIONS=0', - 'BUILDING_V8_SHARED=1', - 'BUILDING_UV_SHARED=1', - ], - }], - [ 'OS in "linux freebsd openbsd solaris aix"', { - 'cflags': [ '-pthread', ], - 'ldflags': [ '-pthread' ], - }], - [ 'OS in "linux freebsd openbsd solaris android aix"', { - 'cflags': [ '-Wall', '-Wextra', '-Wno-unused-parameter', ], - 'cflags_cc': [ '-fno-rtti', '-fno-exceptions', '-std=gnu++0x' ], - 'ldflags': [ '-rdynamic' ], - 'target_conditions': [ - # The 1990s toolchain on SmartOS can't handle thin archives. 
- ['_type=="static_library" and OS=="solaris"', { - 'standalone_static_library': 1, - }], - ], - 'conditions': [ - [ 'target_arch=="ia32"', { - 'cflags': [ '-m32' ], - 'ldflags': [ '-m32' ], - }], - [ 'target_arch=="x32"', { - 'cflags': [ '-mx32' ], - 'ldflags': [ '-mx32' ], - }], - [ 'target_arch=="x64"', { - 'cflags': [ '-m64' ], - 'ldflags': [ '-m64' ], - }], - [ 'target_arch=="ppc" and OS!="aix"', { - 'cflags': [ '-m32' ], - 'ldflags': [ '-m32' ], - }], - [ 'target_arch=="ppc64" and OS!="aix"', { - 'cflags': [ '-m64', '-mminimal-toc' ], - 'ldflags': [ '-m64' ], - }], - [ 'target_arch=="s390"', { - 'cflags': [ '-m31' ], - 'ldflags': [ '-m31' ], - }], - [ 'target_arch=="s390x"', { - 'cflags': [ '-m64' ], - 'ldflags': [ '-m64' ], - }], - [ 'OS=="solaris"', { - 'cflags': [ '-pthreads' ], - 'ldflags': [ '-pthreads' ], - 'cflags!': [ '-pthread' ], - 'ldflags!': [ '-pthread' ], - }], - [ 'OS=="aix"', { - 'conditions': [ - [ 'target_arch=="ppc"', { - 'ldflags': [ '-Wl,-bmaxdata:0x60000000/dsa' ], - }], - [ 'target_arch=="ppc64"', { - 'cflags': [ '-maix64' ], - 'ldflags': [ '-maix64' ], - }], - ], - 'ldflags': [ '-Wl,-bbigtoc' ], - 'ldflags!': [ '-rdynamic' ], - }], - [ 'node_shared=="true"', { - 'cflags': [ '-fPIC' ], - }], - ], - }], - ['OS=="android"', { - 'target_conditions': [ - ['_toolset=="target"', { - 'defines': [ '_GLIBCXX_USE_C99_MATH' ], - 'libraries': [ '-llog' ], - }], - ], - }], - ['OS=="mac"', { - 'defines': ['_DARWIN_USE_64_BIT_INODE=1'], - 'xcode_settings': { - 'ALWAYS_SEARCH_USER_PATHS': 'NO', - 'GCC_CW_ASM_SYNTAX': 'NO', # No -fasm-blocks - 'GCC_DYNAMIC_NO_PIC': 'NO', # No -mdynamic-no-pic - # (Equivalent to -fPIC) - 'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', # -fno-exceptions - 'GCC_ENABLE_CPP_RTTI': 'NO', # -fno-rtti - 'GCC_ENABLE_PASCAL_STRINGS': 'NO', # No -mpascal-strings - 'GCC_THREADSAFE_STATICS': 'NO', # -fno-threadsafe-statics - 'PREBINDING': 'NO', # No -Wl,-prebind - 'MACOSX_DEPLOYMENT_TARGET': '10.7', # -mmacosx-version-min=10.7 - 'USE_HEADERMAP': 'NO', - 'OTHER_CFLAGS': [ - '-fno-strict-aliasing', - ], - 'WARNING_CFLAGS': [ - '-Wall', - '-Wendif-labels', - '-W', - '-Wno-unused-parameter', - ], - }, - 'target_conditions': [ - ['_type!="static_library"', { - 'xcode_settings': { - 'OTHER_LDFLAGS': [ - '-Wl,-no_pie', - '-Wl,-search_paths_first', - ], - }, - }], - ], - 'conditions': [ - ['target_arch=="ia32"', { - 'xcode_settings': {'ARCHS': ['i386']}, - }], - ['target_arch=="x64"', { - 'xcode_settings': {'ARCHS': ['x86_64']}, - }], - ['clang==1', { - 'xcode_settings': { - 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0', - 'CLANG_CXX_LANGUAGE_STANDARD': 'gnu++0x', # -std=gnu++0x - 'CLANG_CXX_LIBRARY': 'libc++', - }, - }], - ], - }], - ['OS=="freebsd" and node_use_dtrace=="true"', { - 'libraries': [ '-lelf' ], - }], - ['OS=="freebsd"', { - 'ldflags': [ - '-Wl,--export-dynamic', - ], - }] - ], - } -} diff --git a/config.gypi b/config.gypi new file mode 100644 index 00000000000..9ed89461b31 --- /dev/null +++ b/config.gypi @@ -0,0 +1,39 @@ +# Do not edit. Generated by the configure script. 
+{ 'target_defaults': { 'cflags': [], + 'default_configuration': 'Release', + 'defines': ['OPENSSL_NO_SSL2=1'], + 'include_dirs': [], + 'libraries': []}, + 'variables': { 'clang': 1, + 'gcc_version': 49, + 'host_arch': 'x64', + 'icu_small': 'false', + 'node_install_npm': 'true', + 'node_prefix': '', + 'node_shared': 'true', + 'node_shared_http_parser': 'false', + 'node_shared_libuv': 'false', + 'node_shared_openssl': 'false', + 'node_shared_v8': 'false', + 'node_shared_zlib': 'false', + 'node_tag': '', + 'node_release_urlbase': '', + 'node_byteorder': 'little', + 'node_use_dtrace': 'false', + 'node_use_etw': 'false', + 'node_use_mdb': 'false', + 'node_use_openssl': 'true', + 'node_use_perfctr': 'false', + 'openssl_no_asm': 0, + 'python': '/usr/bin/python', + 'target_arch': 'x64', + 'uv_library': 'static_library', + 'uv_parent_path': '/deps/uv/', + 'uv_use_dtrace': 'false', + 'v8_enable_gdbjit': 0, + 'v8_enable_i18n_support': 0, + 'v8_no_strict_aliasing': 1, + 'v8_optimized_debug': 0, + 'v8_random_seed': 0, + 'v8_use_snapshot': 'true', + 'want_separate_host_toolset': 0}} diff --git a/deps/cares/cares.gyp b/deps/cares/cares.gyp index 158cef39b51..fc634b7b4d9 100644 --- a/deps/cares/cares.gyp +++ b/deps/cares/cares.gyp @@ -1,4 +1,7 @@ { + 'variables': { + 'library%': 'static_library', + }, 'target_defaults': { 'conditions': [ ['OS!="win"', { @@ -112,6 +115,7 @@ }], [ 'OS=="win"', { 'include_dirs': [ 'config/win32' ], + 'msvs_disabled_warnings': [4133, 4018], 'sources': [ 'src/config-win32.h', 'src/windows_port.c', diff --git a/deps/cares/src/ares_create_query.c b/deps/cares/src/ares_create_query.c index 7f4c52d41df..6eb921b5e84 100644 --- a/deps/cares/src/ares_create_query.c +++ b/deps/cares/src/ares_create_query.c @@ -16,6 +16,11 @@ #include "ares_setup.h" +#ifndef _WIN32 +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Warray-bounds" +#endif + #ifdef HAVE_NETINET_IN_H # include #endif @@ -200,3 +205,7 @@ int ares_create_query(const char *name, int dnsclass, int type, return ARES_SUCCESS; } + +#ifndef _WIN32 +#pragma clang diagnostic pop +#endif diff --git a/deps/cares/src/ares_process.c b/deps/cares/src/ares_process.c index 0325f5191d8..8cab077556a 100644 --- a/deps/cares/src/ares_process.c +++ b/deps/cares/src/ares_process.c @@ -460,7 +460,7 @@ static void read_udp_packets(ares_channel channel, fd_set *read_fds, else fromlen = sizeof(from.sa6); count = (ssize_t)recvfrom(server->udp_socket, (void *)buf, - sizeof(buf), 0, &from.sa, &fromlen); + sizeof(buf), 0, &from.sa, (socklen_t *)&fromlen); #else count = sread(server->udp_socket, buf, sizeof(buf)); #endif diff --git a/deps/http_parser/http_parser.c b/deps/http_parser/http_parser.c index 719617549d0..72bf757ddbc 100644 --- a/deps/http_parser/http_parser.c +++ b/deps/http_parser/http_parser.c @@ -2391,7 +2391,7 @@ http_parser_parse_url(const char *buf, size_t buflen, int is_connect, break; default: - assert(!"Unexpected state"); + assert(0 && "Unexpected state"); return 1; } diff --git a/deps/http_parser/http_parser.gyp b/deps/http_parser/http_parser.gyp index ef34ecaeaea..93d3259e38f 100644 --- a/deps/http_parser/http_parser.gyp +++ b/deps/http_parser/http_parser.gyp @@ -13,20 +13,10 @@ 'Debug': { 'defines': [ 'DEBUG', '_DEBUG' ], 'cflags': [ '-Wall', '-Wextra', '-O0', '-g', '-ftrapv' ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 1, # static debug - }, - }, }, 'Release': { 'defines': [ 'NDEBUG' ], 'cflags': [ '-Wall', '-Wextra', '-O3' ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 
'RuntimeLibrary': 0, # static release - }, - }, } }, 'msvs_settings': { @@ -39,6 +29,9 @@ }, }, 'conditions': [ + ['clang==1', { + 'cflags': ['-Wno-error=string-conversion'], + }], ['OS == "win"', { 'defines': [ 'WIN32' diff --git a/deps/openssl/asm/arm-void-gas/modes/ghash-armv4.S b/deps/openssl/asm/arm-void-gas/modes/ghash-armv4.S index 6f699dbd870..c54f5149974 100644 --- a/deps/openssl/asm/arm-void-gas/modes/ghash-armv4.S +++ b/deps/openssl/asm/arm-void-gas/modes/ghash-armv4.S @@ -3,11 +3,6 @@ .text .code 32 -#ifdef __clang__ -#define ldrplb ldrbpl -#define ldrneb ldrbne -#endif - .type rem_4bit,%object .align 5 rem_4bit: diff --git a/deps/openssl/openssl.gyp b/deps/openssl/openssl.gyp index ae5c980e06a..d5a0800c0e7 100644 --- a/deps/openssl/openssl.gyp +++ b/deps/openssl/openssl.gyp @@ -4,18 +4,22 @@ { 'variables': { - 'is_clang': 0, + 'is_clang': 1, 'gcc_version': 0, 'openssl_no_asm%': 0, 'llvm_version%': 0, 'xcode_version%': 0, 'gas_version%': 0, - 'openssl_fips%': 'false', + 'openssl_fips%': '', + 'node_byteorder%': 'little', + 'conditions': [ + ['OS=="mac"', { 'openssl_no_asm%': 1 } ], + ], }, 'targets': [ { 'target_name': 'openssl', - 'type': '<(library)', + 'type': 'static_library', 'includes': ['openssl.gypi'], 'sources': ['<@(openssl_sources)'], 'sources/': [ @@ -149,12 +153,15 @@ 'includes': ['openssl.gypi'], 'include_dirs': ['<@(openssl_default_include_dirs)'], 'defines': ['<@(openssl_default_defines_all)'], + 'cflags!': ['-fvisibility=hidden'], + 'cflags_cc!': ['-fvisibility-inlines-hidden'], 'conditions': [ ['OS=="win"', { 'defines': ['<@(openssl_default_defines_win)'], 'link_settings': { 'libraries': ['<@(openssl_default_libraries_win)'], }, + 'msvs_disabled_warnings': [4311], }, { 'defines': ['<@(openssl_default_defines_not_win)'], 'cflags': ['-Wno-missing-field-initializers'], @@ -167,7 +174,7 @@ ] }], ['is_clang==1 or gcc_version>=43', { - 'cflags': ['-Wno-old-style-declaration'], + 'cflags': ['-Wno-error=unused-command-line-argument', '-Wno-error=parentheses-equality', '-no-integrated-as'], }], ['OS=="solaris"', { 'defines': ['__EXTENSIONS__'], diff --git a/deps/openssl/openssl/ssl/d1_srvr.c b/deps/openssl/openssl/ssl/d1_srvr.c index bc875b53c9a..0c59629880a 100644 --- a/deps/openssl/openssl/ssl/d1_srvr.c +++ b/deps/openssl/openssl/ssl/d1_srvr.c @@ -126,6 +126,7 @@ # include #endif + static const SSL_METHOD *dtls1_get_server_method(int ver); static int dtls1_send_hello_verify_request(SSL *s); diff --git a/deps/openssl/openssl/ssl/s3_srvr.c b/deps/openssl/openssl/ssl/s3_srvr.c index 01ccd5d2ae7..6fc38146b18 100644 --- a/deps/openssl/openssl/ssl/s3_srvr.c +++ b/deps/openssl/openssl/ssl/s3_srvr.c @@ -170,6 +170,7 @@ #endif #include + #ifndef OPENSSL_NO_SSL3_METHOD static const SSL_METHOD *ssl3_get_server_method(int ver); diff --git a/deps/uv/include/uv.h b/deps/uv/include/uv.h index baa0b28124b..0208a02e6c8 100644 --- a/deps/uv/include/uv.h +++ b/deps/uv/include/uv.h @@ -28,6 +28,8 @@ extern "C" { #endif #ifdef _WIN32 +#pragma warning(push) +#pragma warning(disable: 4201) /* Windows - set up dll import/export decorators. */ # if defined(BUILDING_UV_SHARED) /* Building shared library. 
*/ @@ -257,6 +259,7 @@ UV_EXTERN int uv_replace_allocator(uv_malloc_func malloc_func, uv_free_func free_func); UV_EXTERN uv_loop_t* uv_default_loop(void); +UV_EXTERN void uv_init_nw(int); UV_EXTERN int uv_loop_init(uv_loop_t* loop); UV_EXTERN int uv_loop_close(uv_loop_t* loop); /* @@ -766,6 +769,9 @@ UV_EXTERN int uv_async_init(uv_loop_t*, uv_async_t* async, uv_async_cb async_cb); UV_EXTERN int uv_async_send(uv_async_t* async); +#ifdef _WIN32 +UV_EXTERN int uv_async_send_nw(uv_async_t* async); +#endif /* @@ -1465,6 +1471,7 @@ struct uv_loop_s { void* active_reqs[2]; /* Internal flag to signal loop stop. */ unsigned int stop_flag; + void* keventfunc; UV_LOOP_PRIVATE_FIELDS }; @@ -1489,6 +1496,10 @@ struct uv_loop_s { #undef UV_LOOP_PRIVATE_FIELDS #undef UV_LOOP_PRIVATE_PLATFORM_FIELDS +#ifdef _WIN32 +#pragma warning(pop) +#endif + #ifdef __cplusplus } #endif diff --git a/deps/uv/src/unix/fs.c b/deps/uv/src/unix/fs.c index 085970a06d9..f5a02d03c30 100644 --- a/deps/uv/src/unix/fs.c +++ b/deps/uv/src/unix/fs.c @@ -46,6 +46,9 @@ #include #include +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Warray-bounds" + #if defined(__DragonFly__) || \ defined(__FreeBSD__) || \ defined(__OpenBSD__) || \ @@ -1322,3 +1325,4 @@ void uv_fs_req_cleanup(uv_fs_t* req) { uv__free(req->ptr); req->ptr = NULL; } +#pragma clang diagnostic pop diff --git a/deps/uv/src/unix/getaddrinfo.c b/deps/uv/src/unix/getaddrinfo.c index 2049aea2f38..66be10b5b59 100644 --- a/deps/uv/src/unix/getaddrinfo.c +++ b/deps/uv/src/unix/getaddrinfo.c @@ -88,7 +88,7 @@ int uv__getaddrinfo_translate_error(int sys_err) { case EAI_SYSTEM: return -errno; #endif } - assert(!"unknown EAI_* error code"); + assert(!(int)("unknown EAI_* error code")); abort(); return 0; /* Pacify compiler. */ } diff --git a/deps/uv/src/unix/kqueue.c b/deps/uv/src/unix/kqueue.c index fffd4626f17..a310dee0ff7 100644 --- a/deps/uv/src/unix/kqueue.c +++ b/deps/uv/src/unix/kqueue.c @@ -36,6 +36,8 @@ static void uv__fs_event(uv_loop_t* loop, uv__io_t* w, unsigned int fflags); +typedef int (*keventfunc_t)(int kq, const struct kevent *changelist, int nchanges, + struct kevent *eventlist, int nevents, const struct timespec *timeout); int uv__kqueue_init(uv_loop_t* loop) { loop->backend_fd = kqueue(); @@ -87,6 +89,8 @@ void uv__io_poll(uv_loop_t* loop, int timeout) { int op; int i; + keventfunc_t keventfunc = loop->keventfunc ? 
(keventfunc_t)loop->keventfunc : &kevent; + if (loop->nfds == 0) { assert(QUEUE_EMPTY(&loop->watcher_queue)); return; @@ -119,7 +123,7 @@ void uv__io_poll(uv_loop_t* loop, int timeout) { EV_SET(events + nevents, w->fd, filter, op, fflags, 0, 0); if (++nevents == ARRAY_SIZE(events)) { - if (kevent(loop->backend_fd, events, nevents, NULL, 0, NULL)) + if (keventfunc(loop->backend_fd, events, nevents, NULL, 0, NULL)) abort(); nevents = 0; } @@ -129,7 +133,7 @@ void uv__io_poll(uv_loop_t* loop, int timeout) { EV_SET(events + nevents, w->fd, EVFILT_WRITE, EV_ADD, 0, 0, 0); if (++nevents == ARRAY_SIZE(events)) { - if (kevent(loop->backend_fd, events, nevents, NULL, 0, NULL)) + if (keventfunc(loop->backend_fd, events, nevents, NULL, 0, NULL)) abort(); nevents = 0; } @@ -158,7 +162,7 @@ void uv__io_poll(uv_loop_t* loop, int timeout) { if (pset != NULL) pthread_sigmask(SIG_BLOCK, pset, NULL); - nfds = kevent(loop->backend_fd, + nfds = keventfunc(loop->backend_fd, events, nevents, events, @@ -175,7 +179,7 @@ void uv__io_poll(uv_loop_t* loop, int timeout) { SAVE_ERRNO(uv__update_time(loop)); if (nfds == 0) { - assert(timeout != -1); + //assert(timeout != -1); return; } @@ -213,7 +217,7 @@ void uv__io_poll(uv_loop_t* loop, int timeout) { struct kevent events[1]; EV_SET(events + 0, fd, ev->filter, EV_DELETE, 0, 0, 0); - if (kevent(loop->backend_fd, events, 1, NULL, 0, NULL)) + if (keventfunc(loop->backend_fd, events, 1, NULL, 0, NULL)) if (errno != EBADF && errno != ENOENT) abort(); @@ -238,7 +242,7 @@ void uv__io_poll(uv_loop_t* loop, int timeout) { /* TODO batch up */ struct kevent events[1]; EV_SET(events + 0, fd, ev->filter, EV_DELETE, 0, 0, 0); - if (kevent(loop->backend_fd, events, 1, NULL, 0, NULL)) + if (keventfunc(loop->backend_fd, events, 1, NULL, 0, NULL)) if (errno != ENOENT) abort(); } @@ -252,7 +256,7 @@ void uv__io_poll(uv_loop_t* loop, int timeout) { /* TODO batch up */ struct kevent events[1]; EV_SET(events + 0, fd, ev->filter, EV_DELETE, 0, 0, 0); - if (kevent(loop->backend_fd, events, 1, NULL, 0, NULL)) + if (keventfunc(loop->backend_fd, events, 1, NULL, 0, NULL)) if (errno != ENOENT) abort(); } @@ -342,7 +346,8 @@ static void uv__fs_event(uv_loop_t* loop, uv__io_t* w, unsigned int fflags) { /* MAXPATHLEN == PATH_MAX but the former is what XNU calls it internally. */ char pathbuf[MAXPATHLEN]; #endif - + keventfunc_t keventfunc = loop->keventfunc ? 
(keventfunc_t)loop->keventfunc : &kevent; + handle = container_of(w, uv_fs_event_t, event_watcher); if (fflags & (NOTE_ATTRIB | NOTE_EXTEND)) @@ -370,7 +375,7 @@ static void uv__fs_event(uv_loop_t* loop, uv__io_t* w, unsigned int fflags) { EV_SET(&ev, w->fd, EVFILT_VNODE, EV_ADD | EV_ONESHOT, fflags, 0, 0); - if (kevent(loop->backend_fd, &ev, 1, NULL, 0, NULL)) + if (keventfunc(loop->backend_fd, &ev, 1, NULL, 0, NULL)) abort(); } diff --git a/deps/uv/src/unix/loop.c b/deps/uv/src/unix/loop.c index 92e96f09ed0..df1fc7f2d7f 100644 --- a/deps/uv/src/unix/loop.c +++ b/deps/uv/src/unix/loop.c @@ -56,6 +56,8 @@ int uv_loop_init(uv_loop_t* loop) { loop->backend_fd = -1; loop->emfile_fd = -1; + loop->keventfunc = NULL; + loop->timer_counter = 0; loop->stop_flag = 0; diff --git a/deps/uv/src/unix/proctitle.c b/deps/uv/src/unix/proctitle.c index 19214e5ec97..ed6f9d906b1 100644 --- a/deps/uv/src/unix/proctitle.c +++ b/deps/uv/src/unix/proctitle.c @@ -35,6 +35,14 @@ static struct { char** uv_setup_args(int argc, char** argv) { + if (argc > 0) { + process_title.len = strlen(argv[0]); + process_title.str = uv__malloc(process_title.len + 1); + memcpy(process_title.str, argv[0], process_title.len + 1); + } + return argv; +#if 0 + char** new_argv; size_t size; char* s; @@ -71,6 +79,7 @@ char** uv_setup_args(int argc, char** argv) { new_argv[i] = NULL; return new_argv; +#endif } diff --git a/deps/uv/src/uv-common.c b/deps/uv/src/uv-common.c index ba26446915a..5a3c8a36258 100644 --- a/deps/uv/src/uv-common.c +++ b/deps/uv/src/uv-common.c @@ -578,19 +578,43 @@ int uv_loop_configure(uv_loop_t* loop, uv_loop_option option, ...) { } +static uv_key_t thread_ctx_key; +static int thread_ctx_initiated = 0; +static int g_worker_support = 0; + static uv_loop_t default_loop_struct; static uv_loop_t* default_loop_ptr; +void uv_init_nw(int worker_support) { + g_worker_support = worker_support; +} uv_loop_t* uv_default_loop(void) { - if (default_loop_ptr != NULL) + if (!g_worker_support) { + if (default_loop_ptr != NULL) + return default_loop_ptr; + if (uv_loop_init(&default_loop_struct)) + return NULL; + default_loop_ptr = &default_loop_struct; return default_loop_ptr; + } else { + uv_loop_t* loop; + if (!thread_ctx_initiated) { + thread_ctx_initiated = 1; + uv_key_create(&thread_ctx_key); + } + loop = (uv_loop_t*)uv_key_get(&thread_ctx_key); + if (loop != NULL) + return loop; - if (uv_loop_init(&default_loop_struct)) - return NULL; + loop = malloc(sizeof(uv_loop_t)); + memset(loop, 0, sizeof(uv_loop_t)); + if (uv_loop_init(loop)) + return NULL; - default_loop_ptr = &default_loop_struct; - return default_loop_ptr; + uv_key_set(&thread_ctx_key, loop); + return loop; + } } @@ -628,8 +652,15 @@ int uv_loop_close(uv_loop_t* loop) { #ifndef NDEBUG memset(loop, -1, sizeof(*loop)); #endif - if (loop == default_loop_ptr) - default_loop_ptr = NULL; + if (g_worker_support) { + uv_loop_t* default_loop_ptr; + default_loop_ptr = (uv_loop_t*)uv_key_get(&thread_ctx_key); + if (loop == default_loop_ptr) + uv_key_set(&thread_ctx_key, NULL); + } else { + if (loop == default_loop_ptr) + default_loop_ptr = NULL; + } return 0; } @@ -639,7 +670,10 @@ void uv_loop_delete(uv_loop_t* loop) { uv_loop_t* default_loop; int err; - default_loop = default_loop_ptr; + if (g_worker_support) + default_loop = (uv_loop_t*)uv_key_get(&thread_ctx_key); + else + default_loop = default_loop_ptr; err = uv_loop_close(loop); (void) err; /* Squelch compiler warnings. 
*/ diff --git a/deps/uv/src/win/async.c b/deps/uv/src/win/async.c index ad240ab8972..11cce85512f 100644 --- a/deps/uv/src/win/async.c +++ b/deps/uv/src/win/async.c @@ -84,6 +84,24 @@ int uv_async_send(uv_async_t* handle) { } +int uv_async_send_nw(uv_async_t* handle) { + uv_loop_t* loop = handle->loop; + + if (handle->type != UV_ASYNC) { + /* Can't set errno because that's not thread-safe. */ + return -1; + } + + /* The user should make sure never to call uv_async_send to a closing */ + /* or closed handle. */ + assert(!(handle->flags & UV__HANDLE_CLOSING)); + + POST_COMPLETION_FOR_REQ(loop, &handle->async_req); + + return 0; +} + + void uv_process_async_wakeup_req(uv_loop_t* loop, uv_async_t* handle, uv_req_t* req) { assert(handle->type == UV_ASYNC); diff --git a/deps/uv/src/win/core.c b/deps/uv/src/win/core.c index ba306ebc083..def23a2a544 100644 --- a/deps/uv/src/win/core.c +++ b/deps/uv/src/win/core.c @@ -98,7 +98,7 @@ static void uv_init(void) { * FDs even though they return the proper error code in the release build. */ #if defined(_DEBUG) && (defined(_MSC_VER) || defined(__MINGW64_VERSION_MAJOR)) - _CrtSetReportHook(uv__crt_dbg_report_handler); + //_CrtSetReportHook(uv__crt_dbg_report_handler); #endif /* Fetch winapi function pointers. This must be done first because other @@ -374,10 +374,11 @@ int uv_run(uv_loop_t *loop, uv_run_mode mode) { int r; int ran_pending; void (*poll)(uv_loop_t* loop, DWORD timeout); - +#if 1 if (pGetQueuedCompletionStatusEx) poll = &uv_poll_ex; else +#endif poll = &uv_poll; r = uv__loop_alive(loop); diff --git a/deps/uv/src/win/process.c b/deps/uv/src/win/process.c index 855c3740816..792ab0ffe63 100644 --- a/deps/uv/src/win/process.c +++ b/deps/uv/src/win/process.c @@ -1075,6 +1075,8 @@ int uv_spawn(uv_loop_t* loop, process_flags |= DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP; } + process_flags |= CREATE_NO_WINDOW; + if (!CreateProcessW(application_path, arguments, NULL, diff --git a/deps/uv/src/win/req-inl.h b/deps/uv/src/win/req-inl.h index b5e502eef55..17425870148 100644 --- a/deps/uv/src/win/req-inl.h +++ b/deps/uv/src/win/req-inl.h @@ -81,6 +81,7 @@ INLINE static void uv_req_init(uv_loop_t* loop, uv_req_t* req) { req->type = UV_UNKNOWN_REQ; + req->next_req = NULL; SET_REQ_SUCCESS(req); } @@ -91,7 +92,8 @@ INLINE static uv_req_t* uv_overlapped_to_req(OVERLAPPED* overlapped) { INLINE static void uv_insert_pending_req(uv_loop_t* loop, uv_req_t* req) { - req->next_req = NULL; + if (req->next_req) + return; if (loop->pending_reqs_tail) { #ifdef _DEBUG /* Ensure the request is not already in the queue, or the queue @@ -156,6 +158,7 @@ INLINE static int uv_process_reqs(uv_loop_t* loop) { while (next != NULL) { req = next; next = req->next_req != first ? 
req->next_req : NULL; + req->next_req = NULL; switch (req->type) { case UV_READ: diff --git a/deps/uv/uv.gyp b/deps/uv/uv.gyp index 2fdd59ac784..4c4603ce1ea 100644 --- a/deps/uv/uv.gyp +++ b/deps/uv/uv.gyp @@ -1,4 +1,7 @@ { + 'variables': { + 'uv_library%': 'static_library', + }, 'target_defaults': { 'conditions': [ ['OS != "win"', { @@ -18,9 +21,10 @@ ], 'xcode_settings': { 'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden - 'WARNING_CFLAGS': [ '-Wall', '-Wextra', '-Wno-unused-parameter' ], + 'WARNING_CFLAGS': [ '-Wall', '-Wextra', '-Wno-unused-parameter' , '-Wno-error=gnu-folding-constant', '-Wno-varargs'], 'OTHER_CFLAGS': [ '-g', '--std=gnu89', '-pedantic' ], - } + }, + 'msvs_disabled_warnings': [4267, 4477], }, 'targets': [ @@ -31,6 +35,7 @@ 'include', 'src/', ], + 'defines': [ 'BUILDING_UV_SHARED=1' ], 'direct_dependent_settings': { 'include_dirs': [ 'include' ], 'conditions': [ @@ -126,6 +131,7 @@ '-Wall', '-Wextra', '-Wno-unused-parameter', + '-Wno-varargs', ], 'sources': [ 'include/uv-unix.h', @@ -271,7 +277,7 @@ [ 'OS in "ios mac freebsd dragonflybsd openbsd netbsd".split()', { 'sources': [ 'src/unix/kqueue.c' ], }], - ['uv_library=="shared_library"', { + ['component=="shared_library"', { 'defines': [ 'BUILDING_UV_SHARED=1' ] }], ] diff --git a/lib/dummystream.js b/lib/dummystream.js new file mode 100644 index 00000000000..e7a6efbe528 --- /dev/null +++ b/lib/dummystream.js @@ -0,0 +1,17 @@ +module.exports = DummyStream; + +var W = require('_stream_writable'); +var util = require('util'); +util.inherits(DummyStream, W); + +function DummyStream() { + W.apply(this, arguments); + this.buffer = []; + this.written = 0; +} + +DummyStream.prototype._write = function(chunk, encoding, cb) { + this.buffer.push(chunk.toString()); + this.written += chunk.length; + cb(); +}; diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js index 8b8d066ab03..bf9fa1fec41 100644 --- a/lib/internal/bootstrap_node.js +++ b/lib/internal/bootstrap_node.js @@ -29,7 +29,10 @@ setupGlobalVariables(); if (!process._noBrowserGlobals) { setupGlobalTimeouts(); + if (process.__nwjs) { + }else{ setupGlobalConsole(); + } } const _process = NativeModule.require('internal/process'); @@ -57,6 +60,20 @@ }); process.argv[0] = process.execPath; + if (process.__nwjs) { + var Module = NativeModule.require('module'); + var module = new Module('.', null); + global.process.mainModule = module; + module._compile('global.module = module;\n' + + 'global.require = global.__nw_require = require;\n', 'nw-emulate-node'); + if (process.argv[1]) { + var path = NativeModule.require('path'); + process.argv[1] = path.resolve(process.argv[1]); + preloadModules(); + Module.runMain(); + } + return; + } // There are various modes that Node can run in. The most common two // are running from a script and running the REPL - but there are a few // others like the debugger or running --eval arguments. Here we decide @@ -286,15 +303,6 @@ // If someone handled it, then great. otherwise, die in C++ land // since that means that we'll exit the process, emit the 'exit' event if (!caught) { - try { - if (!process._exiting) { - process._exiting = true; - process.emit('exit', 1); - } - } catch (er) { - // nothing to be done about it at this point. 
- } - // if we handled an error, then make sure any ticks get processed } else { NativeModule.require('timers').setImmediate(process._tickCallback); diff --git a/lib/internal/process/stdio.js b/lib/internal/process/stdio.js index c8f36c5df60..48352e225c1 100644 --- a/lib/internal/process/stdio.js +++ b/lib/internal/process/stdio.js @@ -7,7 +7,10 @@ function setupStdio() { function getStdout() { if (stdout) return stdout; - stdout = createWritableStdioStream(1); + if (process.platform === 'win32') + stdout = createWritableDummyStream(1); + else + stdout = createWritableStdioStream(1); stdout.destroy = stdout.destroySoon = function(er) { er = er || new Error('process.stdout cannot be closed.'); stdout.emit('error', er); @@ -20,7 +23,10 @@ function setupStdio() { function getStderr() { if (stderr) return stderr; - stderr = createWritableStdioStream(2); + if (process.platform === 'win32') + stderr = createWritableDummyStream(2); + else + stderr = createWritableStdioStream(2); stderr.destroy = stderr.destroySoon = function(er) { er = er || new Error('process.stderr cannot be closed.'); stderr.emit('error', er); @@ -131,6 +137,18 @@ function setupStdio() { }; } +function createWritableDummyStream(fd) { + var DummyStream = require('dummystream'); + var stream = new DummyStream(); + stream.fd = fd; + + stream._isStdio = true; + stream.isTTY = false; + + return stream; + +} + function createWritableStdioStream(fd) { var stream; const tty_wrap = process.binding('tty_wrap'); diff --git a/lib/module.js b/lib/module.js index 2d886d8a52e..90d628d67b5 100644 --- a/lib/module.js +++ b/lib/module.js @@ -11,6 +11,19 @@ const internalModuleReadFile = process.binding('fs').internalModuleReadFile; const internalModuleStat = process.binding('fs').internalModuleStat; const preserveSymlinks = !!process.binding('config').preserveSymlinks; +function extensionUrl(str) { + if (typeof str !== 'string') { + throw new Error('Expected a string'); + } + + var pathName = path.resolve(str).replace(/\\/g, '/'); + + var relative = path.relative(global.__dirname, pathName); + + var ret = encodeURI('chrome-extension://' + global.__nwjs_ext_id + '/' + relative); + return ret; +}; + // If obj.hasOwnProperty has been overridden, then calling // obj.hasOwnProperty(prop) will break. 
// See: https://github.com/joyent/node/issues/1707 @@ -81,6 +94,12 @@ function readPackage(requestPath) { } const jsonPath = path.resolve(requestPath, 'package.json'); + if (global.__nwjs_cv) { + var url = extensionUrl(jsonPath); + var request = new global.XMLHttpRequest(); + request.open('GET', url, false); + request.send(null); + } const json = internalModuleReadFile(path._makeLong(jsonPath)); if (json === undefined) { @@ -434,6 +453,11 @@ Module._load = function(request, parent, isMain) { if (isMain) { process.mainModule = module; module.id = '.'; + if (process.__nwjs) { + // require() in DOM needs this module as parent + module._compile('global.module = module;\n' + + 'global.require = require;\n', 'nw-emulate-node'); + } } Module._cache[filename] = module; @@ -578,6 +602,12 @@ Module.prototype._compile = function(content, filename) { // Native extension for .js Module._extensions['.js'] = function(module, filename) { + if (global.__nwjs_cv) { + var url = extensionUrl(filename); + var request = new global.XMLHttpRequest(); + request.open('GET', url, false); + request.send(null); + } var content = fs.readFileSync(filename, 'utf8'); module._compile(internalModule.stripBOM(content), filename); }; @@ -585,6 +615,12 @@ Module._extensions['.js'] = function(module, filename) { // Native extension for .json Module._extensions['.json'] = function(module, filename) { + if (global.__nwjs_cv) { + var url = extensionUrl(filename); + var request = new global.XMLHttpRequest(); + request.open('GET', url, false); + request.send(null); + } var content = fs.readFileSync(filename, 'utf8'); try { module.exports = JSON.parse(internalModule.stripBOM(content)); @@ -597,6 +633,12 @@ Module._extensions['.json'] = function(module, filename) { //Native extension for .node Module._extensions['.node'] = function(module, filename) { + if (global.__nwjs_cv) { + var url = extensionUrl(filename); + var request = new global.XMLHttpRequest(); + request.open('GET', url, false); + request.send(null); + } return process.dlopen(module, path._makeLong(filename)); }; diff --git a/lib/url.js b/lib/url.js index 201ebfedcc5..18a60c8b8f6 100644 --- a/lib/url.js +++ b/lib/url.js @@ -1,11 +1,11 @@ 'use strict'; function importPunycode() { - try { - return process.binding('icu'); - } catch (e) { + // try { + // return process.binding('icu'); + // } catch (e) { return require('punycode'); - } +// } } const { toASCII } = importPunycode(); diff --git a/node.gyp b/node.gyp index fa98547e09b..1c628753e92 100644 --- a/node.gyp +++ b/node.gyp @@ -1,5 +1,6 @@ { 'variables': { + 'icu_gyp_path%': '../icu/icu.gyp', 'v8_use_snapshot%': 'false', 'node_use_dtrace%': 'false', 'node_use_lttng%': 'false', @@ -7,19 +8,31 @@ 'node_use_perfctr%': 'false', 'node_no_browser_globals%': 'false', 'node_use_v8_platform%': 'true', - 'node_use_bundled_v8%': 'true', - 'node_shared%': 'false', 'force_dynamic_crt%': 0, + 'node_use_bundled_v8': 'false', + 'node_shared': 'true', + 'v8_inspector': 'false', + 'node_enable_d8': 'false', 'node_module_version%': '', 'node_shared_zlib%': 'false', 'node_shared_http_parser%': 'false', 'node_shared_cares%': 'false', 'node_shared_libuv%': 'false', - 'node_use_openssl%': 'true', - 'node_shared_openssl%': 'false', + 'node_use_openssl': 'true', + 'node_shared_openssl': 'false', + 'openssl_fips': '', 'node_v8_options%': '', 'node_enable_v8_vtunejit%': 'false', 'node_core_target_name%': 'node', + 'node_target_type%': 'shared_library', + 'node_tag%': '', + 'node_release_urlbase%': '', + 'node_byteorder%': 'little', + 'python%': 
'python', + 'icu_small%': 'false', + 'v8_postmortem_support%' : 'false', + 'V8_BASE%': '<(PRODUCT_DIR)/../nw/obj/v8/libv8_libbase.a', + 'V8_PLTFRM%': '<(PRODUCT_DIR)/../nw/obj/v8/libv8_libplatform.a', 'library_files': [ 'lib/internal/bootstrap_node.js', 'lib/_debug_agent.js', @@ -34,6 +47,7 @@ 'lib/dgram.js', 'lib/dns.js', 'lib/domain.js', + 'lib/dummystream.js', 'lib/events.js', 'lib/fs.js', 'lib/http.js', @@ -122,22 +136,51 @@ ], }, + 'includes': [ + '../../build/util/version.gypi', + ], + 'targets': [ { 'target_name': '<(node_core_target_name)', 'type': '<(node_target_type)', - 'dependencies': [ 'node_js2c#host', + ##'../../v8/tools/gyp/v8.gyp:v8', + #'../../v8/src/v8.gyp:v8_libplatform', + #'../../chrome/chrome.gyp:chrome_dll', ], + 'msvs_disabled_warnings': [4146, 4267, 4003, 4065, 4477], + + 'xcode_settings': { + 'WARNING_CFLAGS': [ '-Wno-error=deprecated-declarations' ], + 'LD_RUNPATH_SEARCH_PATHS': [ '@loader_path/../../../../../../..', ], + }, + 'include_dirs': [ 'src', + 'deps/openssl/openssl/include', + #'../boringssl/src/include', 'tools/msvs/genfiles', 'deps/uv/src/ares', '<(SHARED_INTERMEDIATE_DIR)', # for node_natives.h + '../../v8', # include/v8_platform.h + '../../v8/include' ], + 'direct_dependent_settings': { + 'include_dirs': [ + '../../v8/include', + 'deps/uv/include', + 'deps/cares/include', + ], + 'defines': [ + 'BUILDING_NW_NODE=1', + ], + + }, + 'sources': [ 'src/debug-agent.cc', 'src/async-wrap.cc', @@ -220,8 +263,8 @@ 'src/util.cc', 'src/string_search.cc', 'deps/http_parser/http_parser.h', - 'deps/v8/include/v8.h', - 'deps/v8/include/v8-debug.h', + #'deps/v8/include/v8.h', + #'deps/v8/include/v8-debug.h', '<(SHARED_INTERMEDIATE_DIR)/node_natives.h', # javascript files to make for an even more pleasant IDE experience '<@(library_files)', @@ -235,6 +278,10 @@ 'NODE_WANT_INTERNALS=1', # Warn when using deprecated V8 APIs. 
'V8_DEPRECATION_WARNINGS=1', + 'BUILDING_NW_NODE=1', + 'V8_SHARED', + 'USING_V8_SHARED', + 'V8_USE_EXTERNAL_STARTUP_DATA' ], @@ -261,8 +308,8 @@ }], [ 'node_use_bundled_v8=="true"', { 'dependencies': [ - 'deps/v8/src/v8.gyp:v8', - 'deps/v8/src/v8.gyp:v8_libplatform' + #'deps/v8/src/v8.gyp:v8', + #'deps/v8/src/v8.gyp:v8_libplatform' ], }], [ 'node_use_v8_platform=="true"', { @@ -291,11 +338,22 @@ 'NODE_RELEASE_URLBASE="<(node_release_urlbase)"', ] }], + ['node_target_type=="shared_library"', { + 'direct_dependent_settings': { + 'defines': [ + 'USING_UV_SHARED=1', + 'BUILDING_NODE_EXTENSION=1', + ], + }, + }], + ['clang==1', { + 'cflags': ['-Wno-error=missing-declarations', '-Wno-error=array-bounds'], + }], [ 'v8_enable_i18n_support==1', { 'defines': [ 'NODE_HAVE_I18N_SUPPORT=1' ], 'dependencies': [ - '<(icu_gyp_path):icui18n', - '<(icu_gyp_path):icuuc', + '../icu/icu.gyp:icui18n', + '../icu/icu.gyp:icuuc', ], 'conditions': [ [ 'icu_small=="true"', { @@ -353,9 +411,9 @@ [ 'node_shared_openssl=="false"', { 'dependencies': [ './deps/openssl/openssl.gyp:openssl', - + #'../boringssl/boringssl.gyp:boringssl', # For tests - './deps/openssl/openssl.gyp:openssl-cli', + #'./deps/openssl/openssl.gyp:openssl-cli', ], # Do not let unused OpenSSL symbols to slip away 'conditions': [ @@ -364,16 +422,16 @@ [ 'node_target_type!="static_library"', { 'xcode_settings': { 'OTHER_LDFLAGS': [ - '-Wl,-force_load,<(PRODUCT_DIR)/<(OPENSSL_PRODUCT)', + #'-Wl,-force_load,<(PRODUCT_DIR)/<(OPENSSL_PRODUCT)', ], }, 'conditions': [ ['OS in "linux freebsd" and node_shared=="false"', { 'ldflags': [ - '-Wl,--whole-archive,' - '<(PRODUCT_DIR)/obj.target/deps/openssl/' - '<(OPENSSL_PRODUCT)', - '-Wl,--no-whole-archive', + #'-Wl,--whole-archive,' + # '<(PRODUCT_DIR)/obj.target/deps/openssl/' + # '<(OPENSSL_PRODUCT)', + #'-Wl,--no-whole-archive', ], }], # openssl.def is based on zlib.def, zlib symbols @@ -466,7 +524,7 @@ 'defines': [ 'NODE_NO_BROWSER_GLOBALS' ], } ], [ 'node_use_bundled_v8=="true" and v8_postmortem_support=="true"', { - 'dependencies': [ 'deps/v8/src/v8.gyp:postmortem-metadata' ], + 'dependencies': [ '../../v8/src/v8.gyp:postmortem-metadata' ], 'conditions': [ # -force_load is not applicable for the static library [ 'node_target_type!="static_library"', { @@ -497,7 +555,7 @@ [ 'OS=="win"', { 'sources': [ 'src/backtrace_win32.cc', - 'src/res/node.rc', + #'src/res/node.rc', ], 'defines!': [ 'NODE_PLATFORM="win"', @@ -508,11 +566,14 @@ 'NODE_PLATFORM="win32"', '_UNICODE=1', ], - 'libraries': [ '-lpsapi.lib' ] + 'libraries': [ '-lpsapi.lib', '<(PRODUCT_DIR)/../nw/obj/v8/v8_libbase.lib', '<(PRODUCT_DIR)/../nw/obj/v8/v8_libplatform.lib', '<(PRODUCT_DIR)/../nw/nw.dll.lib' ] }, { # POSIX 'defines': [ '__POSIX__' ], 'sources': [ 'src/backtrace_posix.cc' ], }], + [ 'OS=="win" and component=="shared_library"', { + 'libraries': [ '<(PRODUCT_DIR)/../nw/v8.dll.lib' ] + }], [ 'OS=="mac"', { # linking Corefoundation is needed since certain OSX debugging tools # like Instruments require it for some features @@ -524,6 +585,18 @@ # we need to use node's preferred "darwin" rather than gyp's preferred "mac" 'NODE_PLATFORM="darwin"', ], + 'postbuilds': [ + { + 'postbuild_name': 'Fix Framework Link', + 'action': [ + 'install_name_tool', + '-change', + '@executable_path/../Versions/<(version_full)/<(mac_product_name) Framework.framework/<(mac_product_name) Framework', + '@executable_path/../../../<(mac_product_name) Framework.framework/<(mac_product_name) Framework', + '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}' + ], + }, + ], }], [ 
      [ 'OS=="freebsd"', {
        'libraries': [
@@ -550,10 +623,61 @@
           'NODE_PLATFORM="sunos"',
         ],
       }],
-      [ '(OS=="freebsd" or OS=="linux") and node_shared=="false"', {
-        'ldflags': [ '-Wl,-z,noexecstack',
-                     '-Wl,--whole-archive <(V8_BASE)',
-                     '-Wl,--no-whole-archive' ]
+      [ 'OS=="linux"', {
+        'cflags': [ "-Wno-unused-result" ],
+      }],
+      [ 'OS=="linux" and component == "shared_library"', {
+        'ldflags': [ '-L<(PRODUCT_DIR)/../nw/lib/', '-lv8',
+                     '-Wl,--whole-archive <(V8_BASE)',
+                     '<(V8_PLTFRM)',
+                     '-Wl,--no-whole-archive' ]
+      }],
+      [ 'OS=="linux" and component != "shared_library"', {
+        'ldflags': [ '-L<(PRODUCT_DIR)/../nw/lib/', '-lnw',
+                     '-Wl,--whole-archive <(V8_BASE)',
+                     '<(V8_PLTFRM)',
+                     '-Wl,--no-whole-archive' ]
+      }],
+      [ 'OS=="mac" and component == "shared_library"', {
+        'xcode_settings': {
+          'OTHER_LDFLAGS': [
+            '-L<(PRODUCT_DIR)/../nw/', '-lv8',
+            '<(PRODUCT_DIR)/../nw/nwjs\ Framework.framework/nwjs\ Framework',
+            '-Wl,-force_load <(V8_BASE)',
+            '-Wl,-force_load <(V8_PLTFRM)',
+          ],
+        },
+        'postbuilds': [
+          {
+            'postbuild_name': 'Fix iculib Link',
+            'action': [
+              'install_name_tool',
+              '-change',
+              '/usr/local/lib/libicuuc.dylib',
+              '@rpath/libicuuc.dylib',
+              '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
+            ],
+          },
+          {
+            'postbuild_name': 'Fix iculib Link2',
+            'action': [
+              'install_name_tool',
+              '-change',
+              '/usr/local/lib/libicui18n.dylib',
+              '@rpath/libicui18n.dylib',
+              '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
+            ],
+          },
+        ],
+      }],
+      [ 'OS=="mac" and component != "shared_library"', {
+        'xcode_settings': {
+          'OTHER_LDFLAGS': [
+            '<(PRODUCT_DIR)/../nw/nwjs\ Framework.framework/nwjs\ Framework',
+            '-Wl,-force_load <(V8_BASE)',
+            '-Wl,-force_load <(V8_PLTFRM)',
+          ],
+        },
       }],
       [ 'OS=="sunos"', {
         'ldflags': [ '-Wl,-M,/usr/lib/ld/map.noexstk' ],
@@ -856,10 +980,14 @@
     {
       'target_name': 'cctest',
       'type': 'executable',
-      'dependencies': [ 'deps/gtest/gtest.gyp:gtest' ],
+      'dependencies': [
+        #'../../testing/gtest.gyp:gtest',
+        '../../v8/src/v8.gyp:v8',
+        '../../v8/src/v8.gyp:v8_libplatform'
+      ],
       'include_dirs': [
         'src',
-        'deps/v8/include'
+        '../../v8/include'
       ],
       'defines': [
         # gtest's ASSERT macros conflict with our own.
@@ -901,13 +1029,13 @@
         }],
         [ 'node_use_v8_platform=="true"', {
           'dependencies': [
-            'deps/v8/src/v8.gyp:v8_libplatform',
+            #'deps/v8/src/v8.gyp:v8_libplatform',
           ],
         }],
         [ 'node_use_bundled_v8=="true"', {
           'dependencies': [
-            'deps/v8/src/v8.gyp:v8',
-            'deps/v8/src/v8.gyp:v8_libplatform'
+            #'deps/v8/src/v8.gyp:v8',
+            #'deps/v8/src/v8.gyp:v8_libplatform'
           ],
         }],
       ]
diff --git a/src/env.cc b/src/env.cc
index 8efe13816c0..638fd25a3d3 100644
--- a/src/env.cc
+++ b/src/env.cc
@@ -152,4 +152,27 @@ void Environment::PrintSyncTrace() const {
   fflush(stderr);
 }

+
+bool Environment::KickNextTick(Environment::AsyncCallbackScope* scope) {
+  TickInfo* info = tick_info();
+
+  if (scope->in_makecallback()) {
+    return true;
+  }
+
+  if (info->length() == 0) {
+    isolate()->RunMicrotasks();
+  }
+
+  if (info->length() == 0) {
+    info->set_index(0);
+    return true;
+  }
+
+  Local<Value> ret =
+      tick_callback_function()->Call(process_object(), 0, nullptr);
+
+  return !ret.IsEmpty();
+}
+
 }  // namespace node
diff --git a/src/env.h b/src/env.h
index c040c1a7533..0d50b6b439a 100644
--- a/src/env.h
+++ b/src/env.h
@@ -463,6 +463,8 @@ class Environment {
   inline int64_t get_async_wrap_uid();

+  bool KickNextTick(AsyncCallbackScope* scope);
+
   inline uint32_t* heap_statistics_buffer() const;
   inline void set_heap_statistics_buffer(uint32_t* pointer);
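The KickNextTick helper added to env.cc above factors the nextTick/microtask pumping out of MakeCallback (see the node.cc hunk below) so the NW.js glue code can drive it directly. A minimal sketch of the calling pattern, assuming a live node::Environment* named env; these are the same scopes g_call_tick_callback uses later in this patch:

    // Pump pending nextTick callbacks and microtasks for `env`.
    void PumpTicks(node::Environment* env) {
      v8::HandleScope handle_scope(env->isolate());
      v8::Context::Scope context_scope(env->context());
      // KickNextTick is a no-op while a MakeCallback frame is on the
      // stack; AsyncCallbackScope is how it detects that.
      node::Environment::AsyncCallbackScope callback_scope(env);
      env->KickNextTick(&callback_scope);
    }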
diff --git a/src/node.cc b/src/node.cc
index be9cd6442c5..23d22b7570c 100644
--- a/src/node.cc
+++ b/src/node.cc
@@ -8,6 +8,11 @@
 #include "node_internals.h"
 #include "node_revert.h"

+#include
+
+#include
+#include "node_webkit.h"
+
 #if defined HAVE_PERFCTR
 #include "node_counters.h"
 #endif
@@ -94,6 +99,44 @@ typedef int mode_t;
 extern char **environ;
 #endif

+NODE_MODULE_REF(cares_wrap)
+NODE_MODULE_REF(fs_event_wrap)
+NODE_MODULE_REF(stream_wrap)
+NODE_MODULE_REF(buffer)
+NODE_MODULE_REF(config)
+NODE_MODULE_REF(contextify)
+NODE_MODULE_REF(crypto)
+NODE_MODULE_REF(fs)
+NODE_MODULE_REF(http_parser)
+NODE_MODULE_REF(os)
+NODE_MODULE_REF(v8)
+NODE_MODULE_REF(zlib)
+NODE_MODULE_REF(pipe_wrap)
+NODE_MODULE_REF(process_wrap)
+NODE_MODULE_REF(signal_wrap)
+NODE_MODULE_REF(spawn_sync)
+NODE_MODULE_REF(tcp_wrap)
+NODE_MODULE_REF(timer_wrap)
+NODE_MODULE_REF(tls_wrap)
+NODE_MODULE_REF(tty_wrap)
+NODE_MODULE_REF(udp_wrap)
+NODE_MODULE_REF(util)
+NODE_MODULE_REF(uv)
+
+static uv_key_t thread_ctx_key;
+static int thread_ctx_created = 0;
+static int g_worker_support = 0;
+
+struct thread_ctx_st {
+  node::Environment* env;
+  node::node_module* modpending;
+  node::node_module* modlist_builtin;
+  node::node_module* modlist_linked;
+  node::node_module* modlist_addon;
+  int handle_counter;
+  int quit_flag;
+};
+
 namespace node {

 using v8::Array;
@@ -155,10 +198,15 @@ static int v8_thread_pool_size = v8_default_thread_pool_size;
 static bool prof_process = false;
 static bool v8_is_profiling = false;
 static bool node_is_initialized = false;
-static node_module* modpending;
-static node_module* modlist_builtin;
-static node_module* modlist_linked;
-static node_module* modlist_addon;
+static bool node_is_nwjs = false;
+
+
+
+NODE_EXTERN v8::Persistent<v8::Context> g_context;
+NODE_EXTERN v8::Persistent<v8::Context> g_dom_context;
+static UVRunFn g_nw_uv_run = nullptr;
+static NWTickCallback g_nw_tick_callback = nullptr;
+static const char* g_native_blob_path = nullptr;

 #if defined(NODE_HAVE_I18N_SUPPORT)
 // Path to ICU data (for i18n / Intl)
@@ -989,6 +1037,7 @@ void* ArrayBufferAllocator::Allocate(size_t size) {
   return node::UncheckedMalloc(size);
 }

+#if 0
 static bool DomainHasErrorHandler(const Environment* env,
                                   const Local<Object>& domain) {
   HandleScope scope(env->isolate());
@@ -1034,8 +1083,10 @@ static bool DomainsStackHasErrorHandler(const Environment* env) {

   return false;
 }
+#endif

+#if 0
 static bool ShouldAbortOnUncaughtException(Isolate* isolate) {
   HandleScope scope(isolate);
@@ -1048,7 +1099,7 @@ static bool ShouldAbortOnUncaughtException(Isolate* isolate) {

   return isEmittingTopLevelDomainError || !DomainsStackHasErrorHandler(env);
 }
-
+#endif

 void SetupDomainUse(const FunctionCallbackInfo<Value>& args) {
   Environment* env = Environment::GetCurrent(args);
@@ -1158,7 +1209,7 @@ void SetupPromises(const FunctionCallbackInfo<Value>& args) {

   CHECK(args[0]->IsFunction());

-  isolate->SetPromiseRejectCallback(PromiseRejectCallback);
+  //isolate->SetPromiseRejectCallback(PromiseRejectCallback);
   env->set_promise_reject_function(args[0].As<Function>());

   env->process_object()->Delete(
@@ -1258,23 +1309,7 @@ Local<Value> MakeCallback(Environment* env,
     }
   }

-  if (callback_scope.in_makecallback()) {
-    return ret;
-  }
-
-  Environment::TickInfo* tick_info = env->tick_info();
-
-  if (tick_info->length() == 0) {
-    env->isolate()->RunMicrotasks();
-  }
-
-  Local<Object> process = env->process_object();
-
-  if (tick_info->length() == 0) {
-    tick_info->set_index(0);
-  }
-
-  if (env->tick_callback_function()->Call(process, 0, nullptr).IsEmpty()) {
+  if (!env->KickNextTick(&callback_scope)) {
     return Undefined(env->isolate());
   }

@@ -2320,26 +2355,42 @@ void CPUUsage(const FunctionCallbackInfo<Value>& args) {
 }

 extern "C" void node_module_register(void* m) {
-  struct node_module* mp = reinterpret_cast<struct node_module*>(m);
+  struct node_module* mp;
+  if (g_worker_support) {
+    mp = (struct node_module*)malloc(sizeof(struct node_module));
+    memcpy(mp, m, sizeof(struct node_module));
+  } else
+    mp = reinterpret_cast<struct node_module*>(m);
+  if (!thread_ctx_created) {
+    thread_ctx_created = 1;
+    uv_key_create(&thread_ctx_key);
+  }
+  thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  if (!tls_ctx) {
+    tls_ctx = (struct thread_ctx_st*)malloc(sizeof(struct thread_ctx_st));
+    memset(tls_ctx, 0, sizeof(struct thread_ctx_st));
+    uv_key_set(&thread_ctx_key, tls_ctx);
+  }

   if (mp->nm_flags & NM_F_BUILTIN) {
-    mp->nm_link = modlist_builtin;
-    modlist_builtin = mp;
+    mp->nm_link = tls_ctx->modlist_builtin;
+    tls_ctx->modlist_builtin = mp;
   } else if (!node_is_initialized) {
     // "Linked" modules are included as part of the node project.
     // Like builtins they are registered *before* node::Init runs.
     mp->nm_flags = NM_F_LINKED;
-    mp->nm_link = modlist_linked;
-    modlist_linked = mp;
+    mp->nm_link = tls_ctx->modlist_linked;
+    tls_ctx->modlist_linked = mp;
   } else {
-    modpending = mp;
+    tls_ctx->modpending = mp;
   }
 }
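node_module_register now keys every module list off thread-local storage, so each web-worker thread gets its own registry instead of the old file-level statics. A stripped-down sketch of the uv_key idiom used throughout this patch; my_ctx is a hypothetical stand-in for thread_ctx_st, and uv_once replaces the patch's thread_ctx_created flag only to keep the sketch self-contained:

    #include <stdlib.h>
    #include <uv.h>

    struct my_ctx {           // hypothetical per-thread state
      int handle_counter;
    };

    static uv_key_t key;      // one process-wide slot, per-thread values
    static uv_once_t once = UV_ONCE_INIT;

    static void make_key() { uv_key_create(&key); }

    static my_ctx* get_ctx() {
      uv_once(&once, make_key);  // create the key exactly once
      my_ctx* ctx = static_cast<my_ctx*>(uv_key_get(&key));
      if (ctx == nullptr) {      // first touch on this thread
        ctx = static_cast<my_ctx*>(calloc(1, sizeof(*ctx)));
        uv_key_set(&key, ctx);
      }
      return ctx;
    }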
 struct node_module* get_builtin_module(const char* name) {
   struct node_module* mp;
+  thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);

-  for (mp = modlist_builtin; mp != nullptr; mp = mp->nm_link) {
+  for (mp = tls_ctx->modlist_builtin; mp != nullptr; mp = mp->nm_link) {
     if (strcmp(mp->nm_modname, name) == 0)
       break;
   }
@@ -2350,8 +2401,9 @@ struct node_module* get_builtin_module(const char* name) {

 struct node_module* get_linked_module(const char* name) {
   struct node_module* mp;
+  thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);

-  for (mp = modlist_linked; mp != nullptr; mp = mp->nm_link) {
+  for (mp = tls_ctx->modlist_linked; mp != nullptr; mp = mp->nm_link) {
     if (strcmp(mp->nm_modname, name) == 0)
       break;
   }
@@ -2371,8 +2423,9 @@ typedef void (UV_DYNAMIC* extInit)(Local<Object> exports);
 void DLOpen(const FunctionCallbackInfo<Value>& args) {
   Environment* env = Environment::GetCurrent(args);
   uv_lib_t lib;
+  thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);

-  CHECK_EQ(modpending, nullptr);
+  CHECK_EQ(tls_ctx->modpending, nullptr);

   if (args.Length() != 2) {
     env->ThrowError("process.dlopen takes exactly 2 arguments.");
@@ -2386,8 +2439,8 @@ void DLOpen(const FunctionCallbackInfo<Value>& args) {
   // Objects containing v14 or later modules will have registered themselves
   // on the pending list.  Activate all of them now.  At present, only one
   // module per object is supported.
-  node_module* const mp = modpending;
-  modpending = nullptr;
+  node_module* const mp = tls_ctx->modpending;
+  tls_ctx->modpending = nullptr;

   if (is_dlopen_error) {
     Local<String> errmsg = OneByteString(env->isolate(), uv_dlerror(&lib));
@@ -2430,8 +2483,8 @@ void DLOpen(const FunctionCallbackInfo<Value>& args) {
   }

   mp->nm_dso_handle = lib.handle;
-  mp->nm_link = modlist_addon;
-  modlist_addon = mp;
+  mp->nm_link = tls_ctx->modlist_addon;
+  tls_ctx->modlist_addon = mp;

   Local<String> exports_string = env->exports_string();
   Local<Object> exports = module->Get(exports_string)->ToObject(env->isolate());
@@ -2475,6 +2528,8 @@ void FatalException(Isolate* isolate,
   HandleScope scope(isolate);

   Environment* env = Environment::GetCurrent(isolate);
+  if (!env) //FIXME: check why env is null #4912
+    return;
   Local<Object> process_object = env->process_object();
   Local<String> fatal_exception_string = env->fatal_exception_string();
   Local<Function> fatal_exception_function =
@@ -2510,7 +2565,7 @@ void FatalException(Isolate* isolate,
     }
   }

-  if (exit_code) {
+  if (!node_is_nwjs && exit_code) {
 #if HAVE_INSPECTOR
     if (use_inspector) {
       env->inspector_agent()->FatalException(error, message);
@@ -2529,7 +2584,7 @@ void FatalException(Isolate* isolate, const TryCatch& try_catch) {
 }

-void OnMessage(Local<Message> message, Local<Value> error) {
+NODE_EXTERN void OnMessage(Local<Message> message, Local<Value> error) {
   // The current version of V8 sends messages for errors only
   // (thus `error` is always set).
   FatalException(Isolate::GetCurrent(), error, message);
@@ -2988,6 +3043,9 @@ void SetupProcessObject(Environment* env,
                              ProcessTitleSetter,
                              env->as_external()).FromJust());

+  if (node_is_nwjs)
+    READONLY_PROPERTY(process, "__nwjs", Integer::New(env->isolate(), 1));
+
   // process.version
   READONLY_PROPERTY(process,
                     "version",
@@ -3376,7 +3434,8 @@ void LoadEnvironment(Environment* env) {
   HandleScope handle_scope(env->isolate());

   env->isolate()->SetFatalErrorHandler(node::OnFatalError);
-  env->isolate()->AddMessageListener(OnMessage);
+  //if (!node_is_nwjs)
+    env->isolate()->AddMessageListener(OnMessage);

   atexit(AtProcessExit);

@@ -4185,6 +4244,7 @@ void Init(int* argc,
   // Initialize prog_start_time to get relative uptime.
   prog_start_time = static_cast<double>(uv_now(uv_default_loop()));

+  if (!node_is_nwjs) {
   // Make inherited handles noninheritable.
   uv_disable_stdio_inheritance();

@@ -4195,6 +4255,8 @@ void Init(int* argc,
                 DispatchDebugMessagesAsyncCallback));
   uv_unref(reinterpret_cast<uv_handle_t*>(&dispatch_debug_messages_async));

+  } //node_is_nwjs
+
 #if defined(NODE_V8_OPTIONS)
   // Should come before the call to V8::SetFlagsFromCommandLine()
   // so the user can disable a flag --foo at run-time by passing
@@ -4261,9 +4323,11 @@ void Init(int* argc,
     const char no_typed_array_heap[] = "--typed_array_max_size_in_heap=0";
     V8::SetFlagsFromString(no_typed_array_heap, sizeof(no_typed_array_heap) - 1);

+  if (!node_is_nwjs) {
   if (!use_debug_agent) {
     RegisterDebugSignalHandler();
   }
+  } //node_is_nwjs

   // We should set node_is_initialized here instead of in node::Start,
   // otherwise embedders using node::Init to initialize everything will not be
@@ -4375,7 +4439,9 @@ static void StartNodeInstance(void* arg) {
   NodeInstanceData* instance_data = static_cast<NodeInstanceData*>(arg);

   Isolate::CreateParams params;
   ArrayBufferAllocator array_buffer_allocator;
+  if (!node_is_nwjs) {
   params.array_buffer_allocator = &array_buffer_allocator;
+  }
 #ifdef NODE_ENABLE_VTUNE_PROFILING
   params.code_event_handler = vTune::GetVtuneCodeEventHandler();
 #endif
@@ -4397,8 +4463,7 @@ static void StartNodeInstance(void* arg) {
     Locker locker(isolate);
     Isolate::Scope isolate_scope(isolate);
     HandleScope handle_scope(isolate);
-    IsolateData isolate_data(isolate, instance_data->event_loop(),
-                             array_buffer_allocator.zero_fill_field());
+    IsolateData isolate_data(isolate, instance_data->event_loop());
     Local<Context> context = Context::New(isolate);
     Context::Scope context_scope(context);
     Environment env(&isolate_data, context);
@@ -4408,9 +4473,10 @@ static void StartNodeInstance(void* arg) {
                            instance_data->exec_argv(),
                            v8_is_profiling);

+#if 0
     isolate->SetAbortOnUncaughtExceptionCallback(
         ShouldAbortOnUncaughtException);
-
+#endif
     // Start debug agent when argv has --debug
     if (instance_data->use_debug_agent()) {
       const char* path = instance_data->argc() > 1
@@ -4477,6 +4543,115 @@ static void StartNodeInstance(void* arg) {
     isolate = nullptr;
   }

+#ifdef V8_USE_EXTERNAL_STARTUP_DATA
+// Helper class to load the startup data files from disk.
+//
+// This is meant as a convenience for stand-alone binaries like d8, cctest,
+// unittest. A V8 embedder would likely either handle startup data on their
+// own or just disable the feature if they don't want to handle it at all,
+// while tools like cctest need to work in either configuration. Hence this is
+// not meant for inclusion in the general v8 library.
+class StartupDataHandler {
+ public:
+  // Load startup data, and call the v8::V8::Set*DataBlob API functions.
+  //
+  // natives_blob and snapshot_blob will be loaded relative to exec_path,
+  // which would usually be the equivalent of argv[0].
+  StartupDataHandler(const char* exec_path, const char* natives_blob,
+                     const char* snapshot_blob);
+  ~StartupDataHandler();
+
+ private:
+  static char* RelativePath(char** buffer, const char* exec_path,
+                            const char* name);
+
+  void LoadFromFiles(const char* natives_blob, const char* snapshot_blob);
+
+  void Load(const char* blob_file, v8::StartupData* startup_data,
+            void (*setter_fn)(v8::StartupData*));
+
+  v8::StartupData natives_;
+  v8::StartupData snapshot_;
+
+  // Disallow copy & assign.
+  StartupDataHandler(const StartupDataHandler& other);
+  void operator=(const StartupDataHandler& other);
+};
+
+StartupDataHandler::StartupDataHandler(const char* exec_path,
+                                       const char* natives_blob,
+                                       const char* snapshot_blob) {
+  // If we have (at least one) explicitly given blob, use those.
+  // If not, use the default blob locations next to the d8 binary.
+  if (natives_blob || snapshot_blob) {
+    LoadFromFiles(natives_blob, snapshot_blob);
+  } else {
+    char* natives;
+    char* snapshot;
+    LoadFromFiles(RelativePath(&natives, exec_path, "natives_blob.bin"),
+                  RelativePath(&snapshot, exec_path, "snapshot_blob.bin"));
+
+    free(natives);
+    free(snapshot);
+  }
+}
+
+
+StartupDataHandler::~StartupDataHandler() {
+  delete[] natives_.data;
+  delete[] snapshot_.data;
+}
+
+
+char* StartupDataHandler::RelativePath(char** buffer, const char* exec_path,
+                                       const char* name) {
+  const char* last_slash = strrchr(exec_path, '/');
+  if (last_slash) {
+    int after_slash = last_slash - exec_path + 1;
+    int name_length = static_cast<int>(strlen(name));
+    *buffer = reinterpret_cast<char*>(calloc(after_slash + name_length + 1, 1));
+    strncpy(*buffer, exec_path, after_slash);
+    strncat(*buffer, name, name_length);
+  } else {
+    *buffer = strdup(name);
+  }
+  return *buffer;
+}
+
+
+void StartupDataHandler::LoadFromFiles(const char* natives_blob,
+                                       const char* snapshot_blob) {
+  Load(natives_blob, &natives_, v8::V8::SetNativesDataBlob);
+  Load(snapshot_blob, &snapshot_, v8::V8::SetSnapshotDataBlob);
+}
+
+
+void StartupDataHandler::Load(const char* blob_file,
+                              v8::StartupData* startup_data,
+                              void (*setter_fn)(v8::StartupData*)) {
+  startup_data->data = NULL;
+  startup_data->raw_size = 0;
+
+  if (!blob_file) return;
+
+  FILE* file = fopen(blob_file, "rb");
+  if (!file) return;
+
+  fseek(file, 0, SEEK_END);
+  startup_data->raw_size = ftell(file);
+  rewind(file);
+
+  startup_data->data = new char[startup_data->raw_size];
+  int read_size = static_cast<int>(fread(const_cast<char*>(startup_data->data),
+                                         1, startup_data->raw_size, file));
+  fclose(file);
+
+  if (startup_data->raw_size == read_size) (*setter_fn)(startup_data);
+}
+
+#endif  // V8_USE_EXTERNAL_STARTUP_DATA
+
+
 int Start(int argc, char** argv) {
   PlatformInit();

@@ -4502,7 +4677,17 @@ int Start(int argc, char** argv) {
   V8::SetEntropySource(crypto::EntropySource);
 #endif

+#ifdef V8_USE_EXTERNAL_STARTUP_DATA
+  //StartupDataHandler startup_data(argv[0], nullptr, nullptr);
+#if defined(__APPLE__)
+  V8::InitializeExternalStartupData(g_native_blob_path);
+#else
+  V8::InitializeExternalStartupData(argv[0]);
+#endif
+#endif
+
   v8_platform.Initialize(v8_thread_pool_size);
+
   V8::Initialize();

   v8_initialized = true;
@@ -4529,5 +4714,391 @@ int Start(int argc, char** argv) {
   return exit_code;
 }

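StartupDataHandler is V8's stock helper for stand-alone hosts (its own comment says as much); the patched Start() above ends up calling V8::InitializeExternalStartupData instead, with the macOS blob path supplied through g_set_blob_path further down. For reference, a hedged sketch of how a stand-alone binary would use the class, assuming natives_blob.bin and snapshot_blob.bin sit next to the executable:

    // Only meaningful when V8 was built with external startup data.
    int main(int argc, char** argv) {
    #ifdef V8_USE_EXTERNAL_STARTUP_DATA
      // Finds natives_blob.bin / snapshot_blob.bin next to argv[0] and
      // feeds them to V8::SetNativesDataBlob / V8::SetSnapshotDataBlob.
      StartupDataHandler startup_data(argv[0], nullptr, nullptr);
    #endif
      // ... v8::V8::Initialize() and the rest of startup follow ...
      return 0;
    }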
+void ref_node_modules() {
+NODE_MODULE_REF2(cares_wrap)
+NODE_MODULE_REF2(fs_event_wrap)
+NODE_MODULE_REF2(stream_wrap)
+NODE_MODULE_REF2(buffer)
+NODE_MODULE_REF2(config)
+NODE_MODULE_REF2(contextify)
+NODE_MODULE_REF2(crypto)
+NODE_MODULE_REF2(fs)
+NODE_MODULE_REF2(http_parser)
+NODE_MODULE_REF2(os)
+NODE_MODULE_REF2(v8)
+NODE_MODULE_REF2(zlib)
+NODE_MODULE_REF2(pipe_wrap)
+NODE_MODULE_REF2(process_wrap)
+NODE_MODULE_REF2(signal_wrap)
+NODE_MODULE_REF2(spawn_sync)
+NODE_MODULE_REF2(tcp_wrap)
+NODE_MODULE_REF2(timer_wrap)
+NODE_MODULE_REF2(tls_wrap)
+NODE_MODULE_REF2(tty_wrap)
+NODE_MODULE_REF2(udp_wrap)
+NODE_MODULE_REF2(util)
+NODE_MODULE_REF2(uv)
+}
+
+NODE_EXTERN v8::Handle<v8::Value> CallNWTickCallback(Environment* env, const v8::Handle<v8::Value> ret) {
+  return (*g_nw_tick_callback)(env, ret);
+}
+
 }  // namespace node
+
+extern "C" {
+void wakeup_callback(uv_async_t* handle) {
+  // do nothing, just make libuv exit loop.
+}
+
+void idle_callback(uv_idle_t* handle) {
+  // do nothing, just make libuv exit loop.
+}
+
+void timer_callback(uv_timer_t* timer) {
+  // libuv would block unexpectedly with a zero-timeout timer;
+  // this is a workaround for libuv bug #574:
+  // https://github.com/joyent/libuv/issues/574
+  uv_idle_start(static_cast<uv_idle_t*>(timer->data), idle_callback);
+}
+
+
+NODE_EXTERN int g_uv_run(void* loop, int mode) {
+  return uv_run((uv_loop_t*)loop, (uv_run_mode)mode);
+}
+
+NODE_EXTERN void g_set_uv_run(UVRunFn uv_run_fn) {
+  node::g_nw_uv_run = uv_run_fn;
+}
+
+NODE_EXTERN int g_node_start(int argc, char** argv) {
+  return node::Start(argc, argv);
+}
+
+NODE_EXTERN void g_set_blob_path(const char* path) {
+  node::g_native_blob_path = path;
+}
+
+NODE_EXTERN void g_msg_pump_nest_enter(msg_pump_context_t* ctx) {
+  ctx->loop = uv_loop_new();
+
+  ctx->wakeup_events->push_back((uv_async_t*)ctx->wakeup_event);
+  ctx->wakeup_event = new uv_async_t;
+  uv_async_init((uv_loop_t*)ctx->loop, (uv_async_t*)ctx->wakeup_event, wakeup_callback);
+}
+
+NODE_EXTERN void g_msg_pump_pre_loop(msg_pump_context_t* ctx) {
+  ctx->idle_handle = new uv_idle_t;
+  uv_idle_init((uv_loop_t*)ctx->loop, (uv_idle_t*)ctx->idle_handle);
+
+  ctx->delay_timer = new uv_timer_t;
+  ((uv_timer_t*)ctx->delay_timer)->data = ctx->idle_handle;
+  uv_timer_init((uv_loop_t*)ctx->loop, (uv_timer_t*)ctx->delay_timer);
+}
+
+NODE_EXTERN void g_msg_pump_did_work(msg_pump_context_t* ctx) {
+  thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  if (tls_ctx && tls_ctx->env) {
+    v8::Isolate* isolate = tls_ctx->env->isolate();
+    v8::HandleScope handleScope(isolate);
+    v8::Context::Scope cscope(tls_ctx->env->context());
+    (*node::g_nw_uv_run)((uv_loop_t*)ctx->loop, UV_RUN_NOWAIT);
+    node::CallNWTickCallback(tls_ctx->env, v8::Undefined(isolate));
+  }
+}
+
+NODE_EXTERN void g_msg_pump_need_work(msg_pump_context_t* ctx) {
+  struct thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  if (tls_ctx && tls_ctx->env) {
+    tls_ctx->env->context()->Enter();
+  }
+  (*node::g_nw_uv_run)((uv_loop_t*)ctx->loop, UV_RUN_ONCE);
+  if (tls_ctx && tls_ctx->env) {
+    tls_ctx->env->context()->Exit();
+  }
+}
+
+NODE_EXTERN void g_msg_pump_delay_work(msg_pump_context_t* ctx, int sec) {
+  struct thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  if (tls_ctx && tls_ctx->env) {
+    tls_ctx->env->context()->Enter();
+  }
+  uv_timer_start((uv_timer_t*)ctx->delay_timer, timer_callback, sec, 0);
+  (*node::g_nw_uv_run)((uv_loop_t*)ctx->loop, UV_RUN_ONCE);
+  uv_idle_stop((uv_idle_t*)ctx->idle_handle);
+  uv_timer_stop((uv_timer_t*)ctx->delay_timer);
+  if (tls_ctx && tls_ctx->env) {
+    tls_ctx->env->context()->Exit();
+  }
+}
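timer_callback plus the idle handle above work around libuv issue #574, where a zero-timeout timer could leave uv_run parked in the backend poll; starting an idle handle from the timer guarantees the next loop iteration wakes up. The same trick in isolation, as a self-contained libuv program:

    #include <stdio.h>
    #include <uv.h>

    static void on_idle(uv_idle_t* idle) {
      printf("woke up\n");   // the idle handle forced a loop wakeup
      uv_idle_stop(idle);
    }

    static void on_timer(uv_timer_t* timer) {
      // Start the idle handle instead of doing work directly, so a
      // zero-timeout timer cannot leave uv_run blocked (libuv #574).
      uv_idle_start(static_cast<uv_idle_t*>(timer->data), on_idle);
    }

    int main() {
      uv_loop_t* loop = uv_default_loop();
      uv_idle_t idle;
      uv_timer_t timer;
      uv_idle_init(loop, &idle);
      uv_timer_init(loop, &timer);
      timer.data = &idle;
      uv_timer_start(&timer, on_timer, 0, 0);  // zero-timeout, no repeat
      uv_run(loop, UV_RUN_DEFAULT);
      return 0;
    }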
+NODE_EXTERN void g_msg_pump_nest_leave(msg_pump_context_t* ctx) {
+  uv_close((uv_handle_t*)(ctx->wakeup_event), NULL);
+  // Delete external loop.
+  uv_loop_close((uv_loop_t*)ctx->loop);
+  free((uv_loop_t*)ctx->loop);
+  ctx->loop = nullptr;
+  // Restore previous async handle.
+  delete (uv_async_t*)ctx->wakeup_event;
+  ctx->wakeup_event = ctx->wakeup_events->back();
+  ctx->wakeup_events->pop_back();
+}
+
+NODE_EXTERN uv_loop_t* g_uv_default_loop() {
+  return uv_default_loop();
+}
+
+NODE_EXTERN void g_msg_pump_clean_ctx(msg_pump_context_t* ctx) {
+  uv_close((uv_handle_t*)ctx->idle_handle, NULL);
+  uv_run(uv_default_loop(), UV_RUN_NOWAIT);
+  delete (uv_idle_t*)ctx->idle_handle;
+  ctx->idle_handle = nullptr;
+
+  uv_close((uv_handle_t*)ctx->delay_timer, NULL);
+  uv_run(uv_default_loop(), UV_RUN_NOWAIT);
+  delete (uv_timer_t*)ctx->delay_timer;
+  ctx->delay_timer = nullptr;
+}
+
+NODE_EXTERN void g_msg_pump_sched_work(uv_async_t* wakeup_event) {
+#ifdef _WIN32
+  uv_async_send_nw(wakeup_event);
+#else
+  uv_async_send(wakeup_event);
+#endif
+}
+
+NODE_EXTERN void g_msg_pump_ctor(uv_async_t** wakeup_event, int worker_support) {
+  uv_init_nw(worker_support);
+  g_worker_support = worker_support;
+  *wakeup_event = new uv_async_t;
+  uv_async_init(uv_default_loop(), *wakeup_event, wakeup_callback);
+  node::g_nw_uv_run = (UVRunFn)uv_run;
+}
+
+NODE_EXTERN void g_msg_pump_dtor(uv_async_t** wakeup_event) {
+  uv_close((uv_handle_t*)*wakeup_event, NULL);
+  uv_run(uv_default_loop(), UV_RUN_NOWAIT);
+  delete *wakeup_event;
+  *wakeup_event = nullptr;
+  uv_loop_close(uv_default_loop());
+}
+
+NODE_EXTERN bool g_is_node_initialized() {
+  return node::node_is_initialized;
+}
+
+NODE_EXTERN void g_call_tick_callback(node::Environment* env) {
+  v8::HandleScope scope(env->isolate());
+  v8::Context::Scope context_scope(env->context());
+  node::Environment::AsyncCallbackScope callback_scope(env);
+
+  env->KickNextTick(&callback_scope);
+}
+
+// copied beginning of Start() until v8::Initialize()
+NODE_EXTERN void g_setup_nwnode(int argc, char** argv, bool worker) {
+  node::node_is_initialized = true;
+  node::node_is_nwjs = true;
+  node::node_isolate = v8::Isolate::GetCurrent();
+}
+
+static void walk_cb(uv_handle_t* handle, void* arg) {
+  struct thread_ctx_st* tls_ctx = (struct thread_ctx_st*)arg;
+  if (uv_is_active(handle))
+    tls_ctx->handle_counter++;
+}
+
+static void quit_timer_cb(uv_timer_t* timer) {
+  struct thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  assert(tls_ctx);
+  tls_ctx->quit_flag = 1;
+  //std::cerr << "quit timer timeout";
+}
+
+NODE_EXTERN void g_stop_nw_instance() {
+  if (!g_worker_support)
+    return;
+  struct thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  assert(tls_ctx);
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  bool more;
+  uv_timer_t quit_timer;
+  uv_loop_t* loop = tls_ctx->env->event_loop();
+  uv_timer_init(loop, &quit_timer);
+  uv_timer_start(&quit_timer, quit_timer_cb, 10000, 0);
+  do {
+    tls_ctx->handle_counter = 0;
+    uv_walk(loop, walk_cb, tls_ctx);
+    //std::cerr << "handles: " << tls_ctx->handle_counter;
+    // quit timer and async handle for loop wakeup
+    if (tls_ctx->handle_counter <= 2)
+      more = false;
+    else
+      //uv_print_active_handles(tls_ctx->env->event_loop(), stderr);
+      more = uv_run(loop, UV_RUN_ONCE);
+    if (more == false) {
+      node::EmitBeforeExit(tls_ctx->env);
+
+      // Emit `beforeExit` if the loop became alive either after emitting
+      // event, or after running some callbacks.
+      more = uv_loop_alive(loop);
+      if (uv_run(loop, UV_RUN_NOWAIT) != 0)
+        more = true;
+      tls_ctx->handle_counter = 0;
+      uv_walk(loop, walk_cb, tls_ctx);
+      //std::cerr << "handles: " << tls_ctx->handle_counter;
+      if (tls_ctx->handle_counter <= 2)
+        more = false;
+    }
+  } while (more == true && !tls_ctx->quit_flag);
+  uv_timer_stop(&quit_timer);
+
+  struct node::node_module* mp, *mp2;
+  for (mp = tls_ctx->modlist_builtin; mp != nullptr;) {
+    mp2 = mp->nm_link;
+    free(mp);
+    mp = mp2;
+  }
+  for (mp = tls_ctx->modlist_linked; mp != nullptr;) {
+    mp2 = mp->nm_link;
+    free(mp);
+    mp = mp2;
+  }
+  node::FreeEnvironment(tls_ctx->env);
+  free(tls_ctx);
+  uv_key_set(&thread_ctx_key, NULL);
+  //std::cerr << "QUIT LOOP" << std::endl;
+}
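g_stop_nw_instance decides whether the loop is drained by walking every handle and counting the active ones; the `<= 2` threshold accounts for the instance's own quit timer and wakeup async handle. The counting idiom on its own, matching walk_cb above:

    #include <uv.h>

    static void count_active(uv_handle_t* handle, void* arg) {
      int* n = static_cast<int*>(arg);
      if (uv_is_active(handle))
        ++(*n);
    }

    // Returns how many active handles are keeping `loop` alive.
    static int active_handle_count(uv_loop_t* loop) {
      int n = 0;
      uv_walk(loop, count_active, &n);
      return n;
    }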
+NODE_EXTERN void g_start_nw_instance(int argc, char *argv[], v8::Handle<v8::Context> context) {
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handle_scope(isolate);
+  v8::Context::Scope context_scope(context);
+
+  argv = uv_setup_args(argc, argv);
+
+  struct thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  if (!tls_ctx) {
+    tls_ctx = (struct thread_ctx_st*)malloc(sizeof(struct thread_ctx_st));
+    memset(tls_ctx, 0, sizeof(struct thread_ctx_st));
+    uv_key_set(&thread_ctx_key, tls_ctx);
+    node::ref_node_modules();
+  }
+  node::IsolateData* isolate_data = node::CreateIsolateData(isolate, uv_default_loop());
+  tls_ctx->env = node::CreateEnvironment(isolate_data, context, argc, argv, 0, nullptr);
+  node::LoadEnvironment(tls_ctx->env);
+}
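g_start_nw_instance is essentially the embedder recipe of this Node line: CreateIsolateData on the target loop, CreateEnvironment against an existing v8::Context, then LoadEnvironment to run the bootstrap. A condensed sketch using the public entry points this tree's src/node.h declares; error handling and the thread_ctx bookkeeping are omitted:

    // Boot a Node environment inside an already-live V8 context.
    node::Environment* BootNode(v8::Isolate* isolate,
                                v8::Local<v8::Context> context,
                                int argc, char** argv) {
      v8::HandleScope handle_scope(isolate);
      v8::Context::Scope context_scope(context);

      node::IsolateData* isolate_data =
          node::CreateIsolateData(isolate, uv_default_loop());
      node::Environment* env = node::CreateEnvironment(
          isolate_data, context, argc, argv, 0 /* exec_argc */, nullptr);
      node::LoadEnvironment(env);
      return env;
    }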
+NODE_EXTERN void g_set_nw_tick_callback(NWTickCallback tick_callback) {
+  node::g_nw_tick_callback = tick_callback;
+}
+
+NODE_EXTERN void* g_get_node_env() {
+  struct thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  return tls_ctx->env;
+}
+
+NODE_EXTERN void g_get_node_context(v8::Local<v8::Context>* ret) {
+  *ret = v8::Local<v8::Context>::New(v8::Isolate::GetCurrent(), node::g_context);
+}
+
+NODE_EXTERN void g_set_node_context(v8::Isolate* isolate, v8::Local<v8::Context>* context) {
+  node::g_context.Reset(isolate, *context);
+}
+
+NODE_EXTERN void* g_get_current_env(v8::Handle<v8::Context> context) {
+  return node::Environment::GetCurrent(context);
+}
+
+NODE_EXTERN void g_emit_exit(node::Environment* env) {
+  node::EmitExit(env);
+}
+
+NODE_EXTERN void g_run_at_exit(node::Environment* env) {
+  node::RunAtExit(env);
+}
+
+NODE_EXTERN void g_promise_reject_callback(v8::PromiseRejectMessage* data) {
+  node::PromiseRejectCallback(*data);
+}
+
+NODE_EXTERN void g_uv_init_nw(int worker) {
+  uv_init_nw(worker);
+}
+
+#ifdef __APPLE__
+
+void UvNoOp(uv_async_t* handle) {
+}
+
+NODE_EXTERN void g_msg_pump_ctor_osx(msg_pump_context_t* ctx, void* EmbedThreadRunner, void* kevent_hook, void* data, int worker_support) {
+  uv_init_nw(worker_support);
+  g_worker_support = worker_support;
+  // Add dummy handle for libuv, otherwise libuv would quit when there is
+  // nothing to do.
+  ctx->dummy_uv_handle = new uv_async_t;
+  uv_async_init(uv_default_loop(), (uv_async_t*)ctx->dummy_uv_handle, UvNoOp);
+
+  // Start worker that will interrupt main loop when having uv events.
+  ctx->embed_sem = new uv_sem_t;
+  uv_sem_init((uv_sem_t*)ctx->embed_sem, 0);
+  ctx->embed_thread = new uv_thread_t;
+  uv_thread_create((uv_thread_t*)ctx->embed_thread, (uv_thread_cb)EmbedThreadRunner, data);
+
+  uv_loop_t* uvloop = uv_default_loop();
+  uvloop->keventfunc = kevent_hook;
+
+  ctx->loop = uvloop;
+
+  // Execute loop for once.
+  uv_run(uv_default_loop(), UV_RUN_NOWAIT);
+  node::g_nw_uv_run = (UVRunFn)uv_run;
+}
+
+NODE_EXTERN void g_msg_pump_dtor_osx(msg_pump_context_t* ctx) {
+  uv_thread_join((uv_thread_t*)ctx->embed_thread);
+
+  delete (uv_async_t*)ctx->dummy_uv_handle;
+  ctx->dummy_uv_handle = nullptr;
+
+  delete (uv_sem_t*)ctx->embed_sem;
+  ctx->embed_sem = nullptr;
+
+  delete (uv_thread_t*)ctx->embed_thread;
+  ctx->embed_thread = nullptr;
+}
+
+NODE_EXTERN int g_nw_uvrun_nowait() {
+  return (*node::g_nw_uv_run)(uv_default_loop(), UV_RUN_NOWAIT);
+}
+
+NODE_EXTERN int g_uv_runloop_once() {
+  struct thread_ctx_st* tls_ctx = (struct thread_ctx_st*)uv_key_get(&thread_ctx_key);
+  if (tls_ctx && tls_ctx->env) {
+    v8::Isolate* isolate = tls_ctx->env->isolate();
+    v8::HandleScope handleScope(isolate);
+    v8::Context::Scope cscope(tls_ctx->env->context());
+    return (*node::g_nw_uv_run)(uv_default_loop(), UV_RUN_ONCE);
+  }
+  return (*node::g_nw_uv_run)(uv_default_loop(), UV_RUN_ONCE);
+}
+
+NODE_EXTERN int g_uv_backend_timeout() {
+  return uv_backend_timeout(uv_default_loop());
+}
+
+NODE_EXTERN void g_uv_sem_post(msg_pump_context_t* ctx) {
+  uv_sem_post((uv_sem_t*)ctx->embed_sem);
+}
+
+NODE_EXTERN int g_uv_backend_fd() {
+  return uv_backend_fd(uv_default_loop());
+}
+
+NODE_EXTERN void g_uv_sem_wait(msg_pump_context_t* ctx) {
+  uv_sem_wait((uv_sem_t*)ctx->embed_sem);
+}
+#endif
+}
diff --git a/src/node.h b/src/node.h
index 873551fa338..97d6f1cdb55 100644
--- a/src/node.h
+++ b/src/node.h
@@ -8,7 +8,7 @@
 #   define NODE_EXTERN __declspec(dllimport)
 # endif
 #else
-# define NODE_EXTERN /* nothing */
+# define NODE_EXTERN __attribute__((visibility("default")))
 #endif

 #ifdef BUILDING_NODE_EXTENSION
@@ -295,10 +295,6 @@ NODE_DEPRECATED("Use ParseEncoding(isolate, ...)",
 NODE_EXTERN void FatalException(v8::Isolate* isolate,
                                 const v8::TryCatch& try_catch);

-NODE_DEPRECATED("Use FatalException(isolate, ...)",
-                inline void FatalException(const v8::TryCatch& try_catch) {
-  return FatalException(v8::Isolate::GetCurrent(), try_catch);
-})

 // Don't call with encoding=UCS2.
 NODE_EXTERN v8::Local<v8::Value> Encode(v8::Isolate* isolate,
@@ -449,6 +445,13 @@ extern "C" NODE_EXTERN void node_module_register(void* mod);
     }                                                                 \
   }

+#define NODE_MODULE_REF(modname) \
+  extern void _node_ref_ ## modname();
+
+#define NODE_MODULE_REF2(modname) \
+  _node_ref_ ## modname();
+
+
 #define NODE_MODULE_CONTEXT_AWARE_X(modname, regfunc, priv, flags)    \
   extern "C" {                                                        \
     static node::node_module _module =                                \
@@ -466,6 +469,9 @@ extern "C" NODE_EXTERN void node_module_register(void* mod);
     NODE_C_CTOR(_register_ ## modname) {                              \
       node_module_register(&_module);                                 \
     }                                                                 \
+  }                                                                   \
+  void _node_ref_ ## modname() {                                      \
+    node_module_register(&_module);                                   \
   }

 #define NODE_MODULE(modname, regfunc) \
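The NODE_MODULE_REF / NODE_MODULE_REF2 pair exists so builtin modules survive linking when node is built into a shared library, and so worker threads can re-register them: NODE_MODULE_CONTEXT_AWARE_X now also emits a _node_ref_<modname>() function, NODE_MODULE_REF declares it, and ref_node_modules() calls each one. Hand-expanded for a single module, with hypothetical stub types standing in for the real ones, the mechanism looks roughly like this:

    #include <cstdio>

    struct node_module_stub { const char* name; };   // stand-in type

    static node_module_stub _module = { "fs" };

    static void node_module_register_stub(node_module_stub* m) {
      std::printf("registered %s\n", m->name);
    }

    // NODE_MODULE_REF(fs) expands to this declaration...
    void _node_ref_fs();

    // ...and the module's translation unit defines it next to the
    // usual constructor-based registration:
    void _node_ref_fs() { node_module_register_stub(&_module); }

    int main() {
      // NODE_MODULE_REF2(fs): the explicit call both pins the object
      // file at link time and re-registers the module on this thread.
      _node_ref_fs();
      return 0;
    }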
diff --git a/src/node_buffer.cc b/src/node_buffer.cc
index 07a4106642b..0d7d1cde16e 100644
--- a/src/node_buffer.cc
+++ b/src/node_buffer.cc
@@ -55,10 +55,12 @@ bool zero_fill_all_buffers = false;

 namespace {

+#if 0
 inline void* BufferMalloc(size_t length) {
   return zero_fill_all_buffers ? node::UncheckedCalloc(length) :
                                  node::UncheckedMalloc(length);
 }
+#endif

 }  // namespace

@@ -239,7 +241,7 @@ MaybeLocal<Object> New(Isolate* isolate,
   char* data = nullptr;

   if (length > 0) {
-    data = static_cast<char*>(BufferMalloc(length));
+    data = static_cast<char*>(isolate->array_buffer_allocator()->Allocate(length));

     if (data == nullptr)
       return Local<Object>();
@@ -248,11 +250,17 @@ MaybeLocal<Object> New(Isolate* isolate,
     CHECK(actual <= length);

     if (actual == 0) {
-      free(data);
+      isolate->array_buffer_allocator()->Free(data, length);
       data = nullptr;
-    } else if (actual < length) {
+    }
+#if 0 //FIXME #4357: this costs some extra bytes here. It shouldn't be
+      //significant because of the length calculation in
+      //StringBytes::Size(); the v8 buffer allocator doesn't support
+      //reallocation.
+    else if (actual < length) {
       data = node::Realloc(data, actual);
     }
+#endif
   }

   Local<Object> buf;
@@ -260,7 +268,7 @@ MaybeLocal<Object> New(Isolate* isolate,
     return scope.Escape(buf);

   // Object failed to be created. Clean up resources.
-  free(data);
+  isolate->array_buffer_allocator()->Free(data, length);
   return Local<Object>();
 }

@@ -284,7 +292,7 @@ MaybeLocal<Object> New(Environment* env, size_t length) {
   void* data;
   if (length > 0) {
-    data = BufferMalloc(length);
+    data = env->isolate()->array_buffer_allocator()->Allocate(length);
     if (data == nullptr)
       return Local<Object>();
   } else {
@@ -303,7 +311,7 @@ MaybeLocal<Object> New(Environment* env, size_t length) {
     return scope.Escape(ui);

   // Object failed to be created. Clean up resources.
-  free(data);
+  env->isolate()->array_buffer_allocator()->Free(data, length);
   return Local<Object>();
 }

@@ -329,7 +337,7 @@ MaybeLocal<Object> Copy(Environment* env, const char* data, size_t length) {
   void* new_data;
   if (length > 0) {
     CHECK_NE(data, nullptr);
-    new_data = node::UncheckedMalloc(length);
+    new_data = env->isolate()->array_buffer_allocator()->Allocate(length);
     if (new_data == nullptr)
       return Local<Object>();
     memcpy(new_data, data, length);
@@ -349,7 +357,7 @@ MaybeLocal<Object> Copy(Environment* env, const char* data, size_t length) {
     return scope.Escape(ui);

   // Object failed to be created. Clean up resources.
-  free(new_data);
+  env->isolate()->array_buffer_allocator()->Free(new_data, length);
   return Local<Object>();
 }

@@ -428,7 +436,6 @@ MaybeLocal<Object> New(Environment* env, char* data, size_t length) {
   return Local<Object>();
 }

-
 void CreateFromString(const FunctionCallbackInfo<Value>& args) {
   CHECK(args[0]->IsString());
   CHECK(args[1]->IsString());
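The node_buffer.cc changes above route every Buffer backing store through the isolate's ArrayBuffer allocator instead of malloc/free, which has one sharp edge: Free() must be given the original allocation length, since the allocator interface tracks no sizes itself, and that is also why the shrinking realloc paths are disabled. The pairing rule in isolation; note the array_buffer_allocator() accessor on v8::Isolate is the one this patched tree uses and may not exist in stock V8:

    // Allocate and release a Buffer backing store through the isolate's
    // allocator. `length` must be identical in both calls.
    char* AllocBacking(v8::Isolate* isolate, size_t length) {
      return static_cast<char*>(
          isolate->array_buffer_allocator()->Allocate(length));
    }

    void FreeBacking(v8::Isolate* isolate, char* data, size_t length) {
      // Passing a different length than was allocated (or mixing in
      // plain free()) is invalid here.
      isolate->array_buffer_allocator()->Free(data, length);
    }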
diff --git a/src/node_buffer.h b/src/node_buffer.h
index 686450d984e..400fb0276d7 100644
--- a/src/node_buffer.h
+++ b/src/node_buffer.h
@@ -13,7 +13,10 @@ namespace Buffer {
 static const unsigned int kMaxLength =
     sizeof(int32_t) == sizeof(intptr_t) ? 0x3fffffff : 0x7fffffff;

+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wignored-attributes"
 NODE_EXTERN typedef void (*FreeCallback)(char* data, void* hint);
+#pragma clang diagnostic pop

 NODE_EXTERN bool HasInstance(v8::Local<v8::Value> val);
 NODE_EXTERN bool HasInstance(v8::Local<v8::Object> val);
diff --git a/src/node_crypto.cc b/src/node_crypto.cc
index 25cb003d39e..98ff6fbacc0 100644
--- a/src/node_crypto.cc
+++ b/src/node_crypto.cc
@@ -1460,7 +1460,7 @@ static Local<Object> X509ToObject(Environment* env, X509* cert) {
     (void) BIO_reset(bio);

     BN_ULONG exponent_word = BN_get_word(rsa->e);
-    BIO_printf(bio, "0x%lx", exponent_word);
+    BIO_printf(bio, "0x%lx", (unsigned long)exponent_word);

     BIO_get_mem_ptr(bio, &mem);
     info->Set(env->exponent_string(),
@@ -3326,7 +3326,7 @@ bool CipherBase::GetAuthTag(char** out, unsigned int* out_len) const {
   if (initialised_ || kind_ != kCipher || !auth_tag_)
     return false;
   *out_len = auth_tag_len_;
-  *out = node::Malloc(auth_tag_len_);
+  *out = static_cast<char*>(env()->isolate()->array_buffer_allocator()->Allocate(auth_tag_len_));
   memcpy(*out, auth_tag_, auth_tag_len_);
   return true;
 }
@@ -4901,7 +4901,7 @@ void ECDH::ComputeSecret(const FunctionCallbackInfo<Value>& args) {
   // NOTE: field_size is in bits
   int field_size = EC_GROUP_get_degree(ecdh->group_);
   size_t out_len = (field_size + 7) / 8;
-  char* out = node::Malloc(out_len);
+  char* out = static_cast<char*>(env->isolate()->array_buffer_allocator()->Allocate(out_len));

   int r = ECDH_compute_key(out, out_len, pub, ecdh->key_, nullptr);
   EC_POINT_free(pub);
@@ -4936,7 +4936,7 @@ void ECDH::GetPublicKey(const FunctionCallbackInfo<Value>& args) {
   if (size == 0)
     return env->ThrowError("Failed to get public key length");

-  unsigned char* out = node::Malloc(size);
+  unsigned char* out = static_cast<unsigned char*>(env->isolate()->array_buffer_allocator()->Allocate(size));

   int r = EC_POINT_point2oct(ecdh->group_, pub, form, out, size, nullptr);
   if (r != size) {
@@ -4961,7 +4961,7 @@ void ECDH::GetPrivateKey(const FunctionCallbackInfo<Value>& args) {
     return env->ThrowError("Failed to get ECDH private key");

   int size = BN_num_bytes(b);
-  unsigned char* out = node::Malloc(size);
+  unsigned char* out = static_cast<unsigned char*>(env->isolate()->array_buffer_allocator()->Allocate(size));

   if (size != BN_bn2bin(b, out)) {
     free(out);
@@ -5351,7 +5351,7 @@ class RandomBytesRequest : public AsyncWrap {
       : AsyncWrap(env, object, AsyncWrap::PROVIDER_CRYPTO),
         error_(0),
         size_(size),
-        data_(node::Malloc(size)) {
+        data_(static_cast<char*>(env->isolate()->array_buffer_allocator()->Allocate(size))) {
     Wrap(object, this);
   }

@@ -5373,7 +5373,7 @@ class RandomBytesRequest : public AsyncWrap {
   }

   inline void release() {
-    free(data_);
+    env()->isolate()->array_buffer_allocator()->Free(data_, size_);
     size_ = 0;
   }

diff --git a/src/node_internals.h b/src/node_internals.h
index 99b0cec2df2..61fffb21b9d 100644
--- a/src/node_internals.h
+++ b/src/node_internals.h
@@ -55,6 +55,7 @@ inline v8::Local<TypeName> PersistentToLocal(
     v8::Isolate* isolate,
     const v8::Persistent<TypeName>& persistent);

+NODE_EXTERN v8::Handle<v8::Value> CallTickCallback(Environment* env, const v8::Handle<v8::Value> ret);
 // Call with valid HandleScope and while inside Context scope.
 v8::Local<v8::Value> MakeCallback(Environment* env,
                                   v8::Local<v8::Object> recv,
diff --git a/src/node_webkit.h b/src/node_webkit.h
new file mode 100644
index 00000000000..2375157f723
--- /dev/null
+++ b/src/node_webkit.h
@@ -0,0 +1,51 @@
+#ifndef _NW_NODE_INTERFACE_H
+#define _NW_NODE_INTERFACE_H
+
+#include <vector>
+
+typedef struct _msg_pump_context_t {
+#if defined(__APPLE__)
+  void* embed_thread;
+
+  // Semaphore to wait for main loop in the polling thread.
+  void* embed_sem;
+
+  // Dummy handle to make uv's loop not quit.
+  void* dummy_uv_handle;
+#endif
+  void* loop;
+  std::vector<uv_async_t*>* wakeup_events;
+  void* wakeup_event;
+  void* idle_handle;
+  void* delay_timer;
+} msg_pump_context_t;
+
+typedef bool (*IsNodeInitializedFn)();
+typedef void (*CallTickCallbackFn)(void* env);
+typedef v8::Handle<v8::Value> (*NWTickCallback)(void* env, const v8::Handle<v8::Value> ret);
+typedef void (*SetupNWNodeFn)(int argc, char **argv, bool);
+typedef void (*GetNodeContextFn)(void*);
+typedef void (*SetNodeContextFn)(v8::Isolate* isolate, void* ctx);
+typedef void (*SetNWTickCallbackFn)(NWTickCallback tick_callback);
+typedef void (*StartNWInstanceFn)(int argc, char *argv[], v8::Handle<v8::Context> ctx);
+typedef void* (*GetNodeEnvFn)();
+typedef void* (*GetCurrentEnvironmentFn)(v8::Handle<v8::Context> context);
+typedef int (*EmitExitFn)(void* env);
+typedef void (*RunAtExitFn)(void* env);
+typedef void (*VoidHookFn)(void*);
+typedef void (*VoidIntHookFn)(void*, int);
+typedef int (*UVRunFn)(void*, int);
+typedef void (*SetUVRunFn)(UVRunFn);
+typedef int (*NodeStartFn)(int argc, char *argv[]);
+typedef void (*SetBlobPathFn)(const char *path);
+typedef void* (*GetPointerFn)();
+typedef void (*VoidPtr2Fn)(void*, void*);
+typedef void (*VoidPtr3Fn)(void*, void*, void*);
+typedef void (*VoidPtr4Fn)(void*, void*, void*, void*);
+typedef void (*VoidPtr4IntFn)(void*, void*, void*, void*, int);
+typedef void (*VoidVoidFn)();
+typedef int (*IntVoidFn)();
+typedef void (*VoidIntFn)(int);
+typedef bool (*BoolPtrFn)(void*);
+
+#endif
diff --git a/src/stream_wrap.cc b/src/stream_wrap.cc
index ac656505503..54b385ebad5 100644
--- a/src/stream_wrap.cc
+++ b/src/stream_wrap.cc
@@ -148,7 +148,9 @@ void StreamWrap::OnAlloc(uv_handle_t* handle,

 void StreamWrap::OnAllocImpl(size_t size, uv_buf_t* buf, void* ctx) {
-  buf->base = node::Malloc(size);
+  StreamWrap* wrap = static_cast<StreamWrap*>(ctx);
+  Environment* env = wrap->env();
+  buf->base = static_cast<char*>(env->isolate()->array_buffer_allocator()->Allocate(size));
   buf->len = size;
 }

@@ -187,19 +189,23 @@ void StreamWrap::OnReadImpl(ssize_t nread,

   if (nread < 0)  {
     if (buf->base != nullptr)
-      free(buf->base);
+      env->isolate()->array_buffer_allocator()->Free(buf->base, buf->len);
     wrap->EmitData(nread, Local<Object>(), pending_obj);
     return;
   }

   if (nread == 0) {
     if (buf->base != nullptr)
-      free(buf->base);
+      env->isolate()->array_buffer_allocator()->Free(buf->base, buf->len);
     return;
   }

+  // NOTE: the realloc is dropped here because nread can never exceed
+  // buf->len; see the CHECK_LE on the next line.
+  char* base = static_cast<char*>(buf->base);
   CHECK_LE(static_cast<size_t>(nread), buf->len);
-  char* base = node::Realloc(buf->base, nread);

   if (pending == UV_TCP) {
     pending_obj = AcceptHandle(env, wrap);
diff --git a/src/string_bytes.cc b/src/string_bytes.cc
index 882ca6e3e89..5bdcdde4bac 100644
--- a/src/string_bytes.cc
+++ b/src/string_bytes.cc
@@ -320,7 +320,7 @@ size_t StringBytes::Write(Isolate* isolate,
       }

     case BASE64:
-      if (is_extern) {
+      if (is_extern && str->IsExternalOneByte()) {
        nbytes = base64_decode(buf, buflen, data, external_nbytes);
      } else {
        String::Value value(str);
@@ -332,7 +332,7 @@ size_t StringBytes::Write(Isolate* isolate,
       break;

     case HEX:
-      if (is_extern) {
+      if (is_extern && str->IsExternalOneByte()) {
        nbytes = hex_decode(buf, buflen, data, external_nbytes);
      } else {
        String::Value value(str);
diff --git a/src/tls_wrap.cc b/src/tls_wrap.cc
index d56128fec6c..77e054a16b2 100644
--- a/src/tls_wrap.cc
+++ b/src/tls_wrap.cc
@@ -661,7 +661,10 @@ void TLSWrap::OnReadImpl(ssize_t nread,

 void TLSWrap::OnAllocSelf(size_t suggested_size, uv_buf_t* buf, void* ctx) {
-  buf->base = node::Malloc(suggested_size);
+  TLSWrap* wrap = static_cast<TLSWrap*>(ctx);
+  Environment* env = wrap->env();
+
+  buf->base = static_cast<char*>(env->isolate()->array_buffer_allocator()->Allocate(suggested_size));
   buf->len = suggested_size;
 }

diff --git a/src/udp_wrap.cc b/src/udp_wrap.cc
index d14eefd64d6..ea0e6feeff4 100644
--- a/src/udp_wrap.cc
+++ b/src/udp_wrap.cc
@@ -373,7 +373,10 @@ void UDPWrap::OnSend(uv_udp_send_t* req, int status) {
 void UDPWrap::OnAlloc(uv_handle_t* handle,
                       size_t suggested_size,
                       uv_buf_t* buf) {
-  buf->base = node::Malloc(suggested_size);
+  UDPWrap* wrap = static_cast<UDPWrap*>(((uv_udp_t*)handle)->data);
+  Environment* env = wrap->env();
+
+  buf->base = static_cast<char*>(env->isolate()->array_buffer_allocator()->Allocate(suggested_size));
   buf->len = suggested_size;
 }

@@ -383,14 +386,14 @@ void UDPWrap::OnRecv(uv_udp_t* handle,
                      const uv_buf_t* buf,
                      const struct sockaddr* addr,
                      unsigned int flags) {
+  UDPWrap* wrap = static_cast<UDPWrap*>(handle->data);
+  Environment* env = wrap->env();
   if (nread == 0 && addr == nullptr) {
     if (buf->base != nullptr)
-      free(buf->base);
+      env->isolate()->array_buffer_allocator()->Free(buf->base, buf->len);
     return;
   }

-  UDPWrap* wrap = static_cast<UDPWrap*>(handle->data);
-  Environment* env = wrap->env();
   HandleScope handle_scope(env->isolate());
   Context::Scope context_scope(env->context());

@@ -405,12 +408,12 @@ void UDPWrap::OnRecv(uv_udp_t* handle,

   if (nread < 0) {
     if (buf->base != nullptr)
-      free(buf->base);
+      env->isolate()->array_buffer_allocator()->Free(buf->base, buf->len);
     wrap->MakeCallback(env->onmessage_string(), arraysize(argv), argv);
     return;
   }

-  char* base = node::UncheckedRealloc(buf->base, nread);
+  char* base = static_cast<char*>(buf->base);  // NOTE: we don't realloc here
   argv[2] = Buffer::New(env, base, nread).ToLocalChecked();
   argv[3] = AddressToJS(env, addr);
   wrap->MakeCallback(env->onmessage_string(), arraysize(argv), argv);