author     Andras Becsi <andras.becsi@digia.com>   2013-12-11 21:33:03 +0100
committer  Andras Becsi <andras.becsi@digia.com>   2013-12-13 12:34:07 +0100
commit     f2a33ff9cbc6d19943f1c7fbddd1f23d23975577 (patch)
tree       0586a32aa390ade8557dfd6b4897f43a07449578 /chromium/build
parent     5362912cdb5eea702b68ebe23702468d17c3017a (diff)
download   qtwebengine-chromium-f2a33ff9cbc6d19943f1c7fbddd1f23d23975577.tar.gz

Update Chromium to branch 1650 (31.0.1650.63)

Change-Id: I57d8c832eaec1eb2364e0a8e7352a6dd354db99f
Reviewed-by: Jocelyn Turcotte <jocelyn.turcotte@digia.com>
Diffstat (limited to 'chromium/build')
-rw-r--r--  chromium/build/all.gyp | 38
-rw-r--r--  chromium/build/all_android.gyp | 22
-rw-r--r--  chromium/build/android/dex_action.gypi | 2
-rw-r--r--  chromium/build/android/instr_action.gypi | 52
-rw-r--r--  chromium/build/android/push_libraries.gypi | 2
-rw-r--r--  chromium/build/common.gypi | 170
-rwxr-xr-x  chromium/build/env_dump.py | 56
-rw-r--r--  chromium/build/filename_rules.gypi | 2
-rwxr-xr-x  chromium/build/gdb-add-index | 124
-rwxr-xr-x  chromium/build/get_landmines.py | 63
-rwxr-xr-x  chromium/build/install-build-deps-android.sh | 4
-rwxr-xr-x  chromium/build/install-build-deps.sh | 54
-rw-r--r--  chromium/build/ios/grit_whitelist.txt | 9
-rw-r--r--  chromium/build/isolate.gypi | 3
-rw-r--r--  chromium/build/java.gypi | 94
-rw-r--r--  chromium/build/java_apk.gypi | 63
-rw-r--r--  chromium/build/java_prebuilt.gypi | 55
-rw-r--r--  chromium/build/landmine_utils.py | 114
-rwxr-xr-x  chromium/build/landmines.py | 173
-rw-r--r--  chromium/build/linux/system.gyp | 90
-rw-r--r--  chromium/build/linux/unbundle/README | 28
-rw-r--r--  chromium/build/linux/unbundle/openssl.gyp | 25
-rwxr-xr-x  chromium/build/linux/unbundle/remove_bundled_libraries.py | 87
-rwxr-xr-x  chromium/build/linux/unbundle/replace_gyp_files.py | 1
-rwxr-xr-x  chromium/build/mac/edit_xibs.sh | 2
-rwxr-xr-x  chromium/build/sanitize-png-files.sh | 445
-rw-r--r--  chromium/build/slave/OWNERS | 24
-rw-r--r--  chromium/build/slave/README | 8
-rwxr-xr-x  chromium/build/tree_truth.sh | 101
-rw-r--r--  chromium/build/util/LASTCHANGE | 2
-rw-r--r--  chromium/build/util/LASTCHANGE.blink | 2
-rw-r--r--  chromium/build/util/lib/common/perf_result_data_type.py | 20
-rw-r--r--  chromium/build/util/lib/common/perf_tests_results_helper.py | 151
-rw-r--r--  chromium/build/whitespace_file.txt | 10
34 files changed, 1330 insertions, 766 deletions
diff --git a/chromium/build/all.gyp b/chromium/build/all.gyp
index d90b5a5795c..af20d05d051 100644
--- a/chromium/build/all.gyp
+++ b/chromium/build/all.gyp
@@ -14,7 +14,7 @@
'../chrome/chrome.gyp:*',
'../content/content.gyp:*',
'../crypto/crypto.gyp:*',
- '../media/media.gyp:*',
+ '../mojo/mojo.gyp:*',
'../net/net.gyp:*',
'../sdch/sdch.gyp:*',
'../sql/sql.gyp:*',
@@ -41,12 +41,12 @@
'../gpu/tools/tools.gyp:*',
'../ipc/ipc.gyp:*',
'../jingle/jingle.gyp:*',
+ '../media/media.gyp:*',
'../ppapi/ppapi.gyp:*',
'../ppapi/ppapi_internal.gyp:*',
'../printing/printing.gyp:*',
'../skia/skia.gyp:*',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:*',
- '../third_party/cld/cld.gyp:*',
'../third_party/codesighs/codesighs.gyp:*',
'../third_party/ffmpeg/ffmpeg.gyp:*',
'../third_party/iccjpeg/iccjpeg.gyp:*',
@@ -207,7 +207,6 @@
'../base/base.gyp:base_unittests',
'../chrome/chrome.gyp:unit_tests',
'../crypto/crypto.gyp:crypto_unittests',
- '../media/media.gyp:media_unittests',
'../net/net.gyp:net_unittests',
'../sql/sql.gyp:sql_unittests',
'../ui/ui.gyp:ui_unittests',
@@ -228,18 +227,20 @@
'../content/content.gyp:content_shell',
'../content/content.gyp:content_unittests',
'../device/device_tests.gyp:device_unittests',
- '../gpu/gpu.gyp:gpu_unittests',
+ '../google_apis/google_apis.gyp:google_apis_unittests',
'../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',
+ '../gpu/gpu.gyp:gpu_unittests',
'../ipc/ipc.gyp:ipc_tests',
'../jingle/jingle.gyp:jingle_unittests',
+ '../media/media.gyp:media_unittests',
'../ppapi/ppapi_internal.gyp:ppapi_unittests',
'../printing/printing.gyp:printing_unittests',
'../remoting/remoting.gyp:remoting_unittests',
'../sync/sync.gyp:sync_unit_tests',
+ '../third_party/WebKit/public/all.gyp:all_blink',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
'../webkit/renderer/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
- '../third_party/WebKit/public/all.gyp:all_blink',
],
}],
['OS=="win"', {
@@ -342,6 +343,19 @@
'../chrome/chrome.gyp:performance_browser_tests',
'../chrome/chrome.gyp:performance_ui_tests',
'../chrome/chrome.gyp:sync_performance_tests',
+ '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+ ],
+ 'conditions': [
+ ['OS!="ios" and OS!="win"', {
+ 'dependencies': [
+ '../breakpad/breakpad.gyp:minidump_stackwalk',
+ ],
+ }],
+ ['OS=="linux"', {
+ 'dependencies': [
+ '../chrome/chrome.gyp:linux_symbols'
+ ],
+ }],
],
}, # target_name: chromium_builder_perf
{
@@ -466,8 +480,8 @@
# We refer to content_shell directly rather than all_webkit
# because we don't want the _unittests binaries.
- '../content/content.gyp:content_browsertests',
- '../content/content.gyp:content_shell',
+ '../content/content.gyp:content_browsertests',
+ '../content/content.gyp:content_shell',
'../net/net.gyp:dns_fuzz_stub',
],
@@ -514,6 +528,7 @@
'../sync/sync.gyp:sync_unit_tests',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
'../ui/ui.gyp:ui_unittests',
'../url/url.gyp:url_unittests',
'../webkit/renderer/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
@@ -545,6 +560,7 @@
'../sync/sync.gyp:sync_unit_tests',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
'../ui/ui.gyp:ui_unittests',
'../url/url.gyp:url_unittests',
'../webkit/renderer/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
@@ -613,6 +629,7 @@
'dependencies': [
'../cc/cc_tests.gyp:cc_unittests',
'../chrome/chrome.gyp:browser_tests',
+ '../chrome/chrome.gyp:gcapi_test',
'../chrome/chrome.gyp:installer_util_unittests',
'../chrome/chrome.gyp:interactive_ui_tests',
'../chrome/chrome.gyp:mini_installer_test',
@@ -641,6 +658,7 @@
'../sync/sync.gyp:sync_unit_tests',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
'../ui/ui.gyp:ui_unittests',
'../ui/views/views.gyp:views_unittests',
'../url/url.gyp:url_unittests',
@@ -743,7 +761,9 @@
'type': 'none',
'dependencies': [
'../chrome/chrome.gyp:crash_service',
+ '../chrome/chrome.gyp:gcapi_dll',
'../chrome/chrome.gyp:policy_templates',
+ '../courgette/courgette.gyp:courgette64',
'../chrome/installer/mini_installer.gyp:mini_installer',
'../courgette/courgette.gyp:courgette',
'../cloud_print/cloud_print.gyp:cloud_print',
@@ -767,12 +787,8 @@
'dependencies': [
'../chrome/chrome.gyp:crash_service_win64',
'../chrome_frame/chrome_frame.gyp:npchrome_frame',
- '../courgette/courgette.gyp:courgette64',
# Omitting tests from Win64 to speed up cycle times.
- '../chrome/chrome.gyp:automated_ui_tests',
- '../chrome/chrome.gyp:chromedriver',
'../chrome/chrome.gyp:interactive_ui_tests',
- '../chrome/chrome.gyp:reliability_tests',
],
}],
['component != "shared_library" and wix_exists == "True" and \
diff --git a/chromium/build/all_android.gyp b/chromium/build/all_android.gyp
index 02965807a97..de6341fa2e9 100644
--- a/chromium/build/all_android.gyp
+++ b/chromium/build/all_android.gyp
@@ -40,8 +40,8 @@
{
# The current list of tests for android. This is temporary
# until the full set supported. If adding a new test here,
- # please also add it to build/android/run_tests.py, else the
- # test is not run.
+ # please also add it to build/android/pylib/gtest/gtest_config.py,
+ # else the test is not run.
#
# WARNING:
# Do not add targets here without communicating the implications
@@ -65,6 +65,7 @@
'../chrome/chrome.gyp:unit_tests',
'../components/components.gyp:components_unittests',
'../content/content.gyp:content_browsertests',
+ '../content/content.gyp:content_gl_tests',
'../content/content.gyp:content_shell_test_apk',
'../content/content.gyp:content_unittests',
'../gpu/gpu.gyp:gl_tests',
@@ -96,6 +97,7 @@
'../chrome/chrome.gyp:unit_tests_apk',
'../components/components.gyp:components_unittests_apk',
'../content/content.gyp:content_browsertests_apk',
+ '../content/content.gyp:content_gl_tests_apk',
'../content/content.gyp:content_unittests_apk',
'../content/content.gyp:video_decode_accelerator_unittest_apk',
'../gpu/gpu.gyp:gl_tests_apk',
@@ -116,6 +118,22 @@
],
},
{
+ # WebRTC Android APK tests.
+ 'target_name': 'android_builder_webrtc',
+ 'type': 'none',
+ 'variables': {
+ # WebRTC tests are normally not built by Chromium bots.
+ 'include_tests%': 0,
+ },
+ 'conditions': [
+ ['"<(gtest_target_type)"=="shared_library" and include_tests==1', {
+ 'dependencies': [
+ '../third_party/webrtc/build/apk_tests.gyp:*',
+ ],
+ }],
+ ],
+ }, # target_name: android_builder_webrtc
+ {
# Experimental / in-progress targets that are expected to fail
# but we still try to compile them on bots (turning the stage
# orange, not red).
diff --git a/chromium/build/android/dex_action.gypi b/chromium/build/android/dex_action.gypi
index ac956b6e343..9b640d6c43d 100644
--- a/chromium/build/android/dex_action.gypi
+++ b/chromium/build/android/dex_action.gypi
@@ -33,6 +33,7 @@
'input_paths': [],
'proguard_enabled%': 'false',
'proguard_enabled_input_path%': '',
+ 'dex_no_locals%': 0,
},
'inputs': [
'<(DEPTH)/build/android/gyp/util/build_utils.py',
@@ -51,6 +52,7 @@
'--configuration-name=<(CONFIGURATION_NAME)',
'--proguard-enabled=<(proguard_enabled)',
'--proguard-enabled-input-path=<(proguard_enabled_input_path)',
+ '--no-locals=<(dex_no_locals)',
# TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
'--ignore=>!(echo \'>(_inputs)\' | md5sum)',
diff --git a/chromium/build/android/instr_action.gypi b/chromium/build/android/instr_action.gypi
new file mode 100644
index 00000000000..7b159989f3d
--- /dev/null
+++ b/chromium/build/android/instr_action.gypi
@@ -0,0 +1,52 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# instruments either java class files, or jars.
+
+{
+ 'variables': {
+ 'instr_type%': 'jar',
+ 'input_path%': '',
+ 'output_path%': '',
+ 'stamp_path%': '',
+ 'extra_instr_args': [
+ '--coverage-file=<(_target_name).em',
+ '--sources-file=<(_target_name)_sources.txt',
+ ],
+ 'emma_jar': '<(android_sdk_root)/tools/lib/emma.jar',
+ 'conditions': [
+ ['emma_instrument != 0', {
+ 'extra_instr_args': [
+ '--sources=<(java_in_dir)/src >(additional_src_dirs) >(generated_src_dirs)',
+ '--src-root=<(DEPTH)',
+ '--emma-jar=<(emma_jar)',
+ ],
+ 'conditions': [
+ ['instr_type == "jar"', {
+ 'instr_action': 'instrument_jar',
+ }, {
+ 'instr_action': 'instrument_classes',
+ }]
+ ],
+ }, {
+ 'instr_action': 'copy',
+ 'extra_instr_args': [],
+ }]
+ ]
+ },
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/emma_instr.py',
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/pylib/utils/command_option_parser.py',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/emma_instr.py',
+ '<(instr_action)',
+ '--input-path=<(input_path)',
+ '--output-path=<(output_path)',
+ '--stamp=<(stamp_path)',
+ '<@(extra_instr_args)',
+ ]
+}
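
Note on the new instr_action.gypi above: its variables block picks one of 'instrument_jar', 'instrument_classes', or 'copy' for <(instr_action) and passes it to emma_instr.py. As an illustration only (all paths below are hypothetical placeholders, not values from this diff), the expanded action for the non-instrumented case (emma_instrument == 0, so instr_action == 'copy' and extra_instr_args is empty) is roughly the following, sketched in Python:

import subprocess

# Hypothetical paths; the real values come from <(input_path), <(output_path)
# and <(stamp_path) supplied by the including target.
subprocess.check_call([
    'python', 'build/android/gyp/emma_instr.py', 'copy',
    '--input-path=out/Debug/gen/foo/foo.jar',
    '--output-path=out/Debug/lib.java/foo.jar',
    '--stamp=out/Debug/gen/foo/instr.stamp',
])

The java.gypi and java_apk.gypi hunks later in this diff include this .gypi with their own input_path/output_path/stamp_path/instr_type values.
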
diff --git a/chromium/build/android/push_libraries.gypi b/chromium/build/android/push_libraries.gypi
index 1f17660c448..ea89e924404 100644
--- a/chromium/build/android/push_libraries.gypi
+++ b/chromium/build/android/push_libraries.gypi
@@ -15,6 +15,7 @@
# 'libraries_source_dir': 'location where stripped libraries are stored'
# 'device_library_dir': 'location on the device where to put pushed libraries',
# 'push_stamp': 'file to touch when the action is complete'
+# 'configuration_name': 'The build CONFIGURATION_NAME'
# },
# 'includes': [ '../../build/android/push_libraries.gypi' ],
# ],
@@ -41,5 +42,6 @@
'--device-dir=<(device_library_dir)',
'--libraries-json=<(ordered_libraries_file)',
'--stamp=<(push_stamp)',
+ '--configuration-name=<(configuration_name)',
],
}
diff --git a/chromium/build/common.gypi b/chromium/build/common.gypi
index 16e8a729e5e..cf5b1962922 100644
--- a/chromium/build/common.gypi
+++ b/chromium/build/common.gypi
@@ -143,6 +143,13 @@
'toolkit_uses_gtk%': 0,
}],
+ # Whether we're a traditional desktop unix.
+ ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and chromeos==0', {
+ 'desktop_linux%': 1,
+ }, {
+ 'desktop_linux%': 0,
+ }],
+
# Enable HiDPI on Mac OS and Chrome OS.
['OS=="mac" or chromeos==1', {
'enable_hidpi%': 1,
@@ -165,14 +172,6 @@
}, {
'use_default_render_theme%': 0,
}],
-
- # TODO(thestig) Remove the linux_lsb_release check after all the
- # official Ubuntu Lucid builder are gone.
- ['OS=="linux" and branding=="Chrome" and buildtype=="Official" and chromeos==0', {
- 'linux_lsb_release%': '<!(lsb_release -r -s)',
- }, {
- 'linux_lsb_release%': '',
- }], # OS=="linux" and branding=="Chrome" and buildtype=="Official" and chromeos==0
],
},
@@ -182,6 +181,7 @@
'target_arch%': '<(target_arch)',
'toolkit_views%': '<(toolkit_views)',
'toolkit_uses_gtk%': '<(toolkit_uses_gtk)',
+ 'desktop_linux%': '<(desktop_linux)',
'use_aura%': '<(use_aura)',
'use_ash%': '<(use_ash)',
'use_cras%': '<(use_cras)',
@@ -197,7 +197,6 @@
'buildtype%': '<(buildtype)',
'branding%': '<(branding)',
'arm_version%': '<(arm_version)',
- 'linux_lsb_release%': '<(linux_lsb_release)',
# Set to 1 to enable fast builds. Set to 2 for even faster builds
# (it disables debug info for fastest compilation - only for use
@@ -350,6 +349,12 @@
# print, UI, etc.
'enable_printing%': 1,
+ # Set the version of CLD.
+ # 0: Don't specify the version. This option is for the Finch testing.
+ # 1: Use only CLD1.
+ # 2: Use only CLD2.
+ 'cld_version%': 1,
+
# Enable spell checker.
'enable_spellcheck%': 1,
@@ -376,11 +381,11 @@
# Enable FTP support by default.
'disable_ftp_support%': 0,
- # XInput2 multitouch support is disabled by default (use_xi2_mt=0).
- # Setting to non-zero value enables XI2 MT. When XI2 MT is enabled,
+ # XInput2 multitouch support is enabled by default (use_xi2_mt=2).
+ # Setting to zero value disables XI2 MT. When XI2 MT is enabled,
# the input value also defines the required XI2 minor minimum version.
# For example, use_xi2_mt=2 means XI2.2 or above version is required.
- 'use_xi2_mt%': 0,
+ 'use_xi2_mt%': 2,
# Use of precompiled headers on Windows.
#
@@ -523,9 +528,9 @@
'enable_automation%': 0,
'enable_extensions%': 0,
'enable_google_now%': 0,
+ 'cld_version%': 1,
'enable_spellcheck%': 0,
'enable_themes%': 0,
- 'proprietary_codecs%': 1,
'remoting%': 0,
'arm_neon%': 0,
'arm_neon_optional%': 1,
@@ -533,8 +538,8 @@
'native_memory_pressure_signals%': 1,
}],
- # Enable basic printing for Chrome for Android but disable printing
- # completely for WebView.
+ # Enable basic printing for Chrome for Android but disable printing
+ # completely for WebView.
['OS=="android" and android_webview_build==0', {
'enable_printing%': 2,
}],
@@ -542,6 +547,15 @@
'enable_printing%': 0,
}],
+ # Android OS includes support for proprietary codecs regardless of
+ # building Chromium or Google Chrome. We also ship Google Chrome with
+ # proprietary codecs.
+ ['OS=="android" or branding=="Chrome"', {
+ 'proprietary_codecs%': 1,
+ }, {
+ 'proprietary_codecs%': 0,
+ }],
+
# Enable autofill dialog for Android, Mac and Views-enabled platforms.
['toolkit_views==1 or (OS=="android" and android_webview_build==0) or OS=="mac"', {
'enable_autofill_dialog%': 1
@@ -564,6 +578,7 @@
'enable_automation%': 0,
'enable_extensions%': 0,
'enable_google_now%': 0,
+ 'cld_version%': 1,
'enable_printing%': 0,
'enable_session_service%': 0,
'enable_themes%': 0,
@@ -636,7 +651,7 @@
#
# On Aura, this allows per-tile painting to be used in the browser
# compositor.
- ['OS!="mac" and OS!="android"', {
+ ['OS!="android"', {
'use_canvas_skia%': 1,
}],
@@ -666,7 +681,8 @@
'sysroot%': '<!(cd <(DEPTH) && pwd -P)/arm-sysroot',
}], # OS=="linux" and target_arch=="arm" and chromeos==0
- ['linux_lsb_release=="12.04"', {
+
+ ['OS=="linux" and branding=="Chrome" and buildtype=="Official" and chromeos==0', {
'conditions': [
['target_arch=="x64"', {
'sysroot%': '<!(cd <(DEPTH) && pwd -P)/chrome/installer/linux/debian_wheezy_amd64-sysroot',
@@ -675,7 +691,7 @@
'sysroot%': '<!(cd <(DEPTH) && pwd -P)/chrome/installer/linux/debian_wheezy_i386-sysroot',
}],
],
- }], # linux_lsb_release=="12.04"
+ }], # OS=="linux" and branding=="Chrome" and buildtype=="Official" and chromeos==0
['OS=="linux" and target_arch=="mipsel"', {
'sysroot%': '<!(cd <(DEPTH) && pwd -P)/mipsel-sysroot/sysroot',
@@ -791,6 +807,7 @@
'use_pango%': '<(use_pango)',
'use_ozone%': '<(use_ozone)',
'toolkit_uses_gtk%': '<(toolkit_uses_gtk)',
+ 'desktop_linux%': '<(desktop_linux)',
'use_x11%': '<(use_x11)',
'use_gnome_keyring%': '<(use_gnome_keyring)',
'linux_fpic%': '<(linux_fpic)',
@@ -851,6 +868,7 @@
'enable_printing%': '<(enable_printing)',
'enable_spellcheck%': '<(enable_spellcheck)',
'enable_google_now%': '<(enable_google_now)',
+ 'cld_version%': '<(cld_version)',
'enable_captive_portal_detection%': '<(enable_captive_portal_detection)',
'disable_ftp_support%': '<(disable_ftp_support)',
'enable_task_manager%': '<(enable_task_manager)',
@@ -876,6 +894,7 @@
'spdy_proxy_auth_value%': '<(spdy_proxy_auth_value)',
'enable_mdns%' : '<(enable_mdns)',
'v8_optimized_debug': '<(v8_optimized_debug)',
+ 'proprietary_codecs%': '<(proprietary_codecs)',
# Use system nspr instead of the bundled one.
'use_system_nspr%': 0,
@@ -918,6 +937,10 @@
# Currently ignored on Windows.
'coverage%': 0,
+ # Set to 1 to enable java code coverage. Instruments classes during build
+ # to produce .ec files during runtime.
+ 'emma_coverage%': 0,
+
# Set to 1 to force Visual C++ to use legacy debug information format /Z7.
# This is useful for parallel compilation tools which can't support /Zi.
# Only used on Windows.
@@ -931,13 +954,6 @@
# to ~/.gyp/include.gypi, gclient runhooks --force, and do a release build.
'win_use_allocator_shim%': 1, # 1 = shim allocator via libcmt; 0 = msvcrt
- # Whether usage of OpenMAX is enabled.
- 'enable_openmax%': 0,
-
- # Whether proprietary audio/video codecs are assumed to be included with
- # this build (only meaningful if branding!=Chrome).
- 'proprietary_codecs%': 0,
-
# TODO(bradnelson): eliminate this when possible.
# To allow local gyp files to prevent release.vsprops from being included.
# Yes(1) means include release.vsprops.
@@ -1137,6 +1153,12 @@
# Use the chromium skia by default.
'use_system_skia%': '0',
+ # Use brlapi from brltty for braille display support.
+ 'use_brlapi%': 0,
+
+ # Relative path to icu.gyp from this file.
+ 'icu_gyp_path': '../third_party/icu/icu.gyp',
+
'conditions': [
# The version of GCC in use, set later in platforms that use GCC and have
# not explicitly chosen to build with clang. Currently, this means all
@@ -1194,10 +1216,10 @@
'use_system_libxml%': 1,
'use_system_sqlite%': 1,
'locales==': [
- 'ar', 'ca', 'cs', 'da', 'de', 'el', 'en-GB', 'en-US', 'es', 'fi',
- 'fr', 'he', 'hr', 'hu', 'id', 'it', 'ja', 'ko', 'ms', 'nb', 'nl',
- 'pl', 'pt', 'pt-PT', 'ro', 'ru', 'sk', 'sv', 'th', 'tr', 'uk', 'vi',
- 'zh-CN', 'zh-TW',
+ 'ar', 'ca', 'cs', 'da', 'de', 'el', 'en-GB', 'en-US', 'es', 'es-MX',
+ 'fi', 'fr', 'he', 'hr', 'hu', 'id', 'it', 'ja', 'ko', 'ms', 'nb',
+ 'nl', 'pl', 'pt', 'pt-PT', 'ro', 'ru', 'sk', 'sv', 'th', 'tr', 'uk',
+ 'vi', 'zh-CN', 'zh-TW',
],
# The Mac SDK is set for iOS builds and passed through to Mac
@@ -1447,6 +1469,9 @@
}],
['component=="shared_library"', {
'win_use_allocator_shim%': 0,
+ },{
+ # Turn on multiple dll by default on Windows when in static_library.
+ 'chrome_multiple_dll%': 1,
}],
['component=="shared_library" and "<(GENERATOR)"=="ninja"', {
# Only enabled by default for ninja because it's buggy in VS.
@@ -1535,6 +1560,9 @@
['chromeos==1', {
'grit_defines': ['-D', 'chromeos', '-D', 'scale_factors=2x'],
}],
+ ['desktop_linux==1', {
+ 'grit_defines': ['-D', 'desktop_linux'],
+ }],
['toolkit_views==1', {
'grit_defines': ['-D', 'toolkit_views'],
}],
@@ -1583,8 +1611,7 @@
}],
['OS == "ios"', {
'grit_defines': [
- # define for iOS specific resources.
- '-D', 'ios',
+ '-t', 'ios',
# iOS uses a whitelist to filter resources.
'-w', '<(DEPTH)/build/ios/grit_whitelist.txt'
],
@@ -1621,6 +1648,9 @@
['enable_webrtc==1', {
'grit_defines': ['-D', 'enable_webrtc'],
}],
+ ['enable_mdns==1', {
+ 'grit_defines': ['-D', 'enable_mdns'],
+ }],
['clang_use_chrome_plugins==1 and OS!="win"', {
'clang_chrome_plugins_flags': [
'<!@(<(DEPTH)/tools/clang/scripts/plugin_flags.sh)'
@@ -1911,9 +1941,6 @@
'<(DEPTH)/base/allocator/allocator.gyp:type_profiler',
],
}],
- ['chrome_multiple_dll', {
- 'defines': ['CHROME_MULTIPLE_DLL'],
- }],
['OS=="linux" and clang==1 and host_arch=="ia32"', {
# TODO(dmikurube): Remove -Wno-sentinel when Clang/LLVM is fixed.
# See http://crbug.com/162818.
@@ -1998,7 +2025,7 @@
['google_tv==1', {
'defines': ['GOOGLE_TV=1'],
}],
- ['use_xi2_mt!=0', {
+ ['use_xi2_mt!=0 and use_x11==1', {
'defines': ['USE_XI2_MT=<(use_xi2_mt)'],
}],
['file_manager_extension==1', {
@@ -2286,6 +2313,9 @@
['enable_google_now==1', {
'defines': ['ENABLE_GOOGLE_NOW=1'],
}],
+ ['cld_version!=0', {
+ 'defines': ['CLD_VERSION=<(cld_version)'],
+ }],
['enable_printing==1', {
'defines': ['ENABLE_FULL_PRINTING=1', 'ENABLE_PRINTING=1'],
}],
@@ -2531,7 +2561,6 @@
'WTF_USE_DYNAMIC_ANNOTATIONS=1',
],
'xcode_settings': {
- 'COPY_PHASE_STRIP': 'NO',
'GCC_OPTIMIZATION_LEVEL': '<(mac_debug_optimization)',
'OTHER_CFLAGS': [
'<@(debug_extra_cflags)',
@@ -3285,13 +3314,6 @@
],
}],
],
- 'conditions': [
- ['OS=="mac"', {
- 'cflags': [
- '-mllvm -asan-globals=0', # http://crbug.com/196561
- ],
- }],
- ],
}],
['lsan==1', {
'target_conditions': [
@@ -3304,6 +3326,7 @@
],
'defines': [
'LEAK_SANITIZER',
+ 'WTF_USE_LEAK_SANITIZER=1',
],
}],
],
@@ -3831,6 +3854,7 @@
'ALWAYS_SEARCH_USER_PATHS': 'NO',
# Don't link in libarclite_macosx.a, see http://crbug.com/156530.
'CLANG_LINK_OBJC_RUNTIME': 'NO', # -fno-objc-link-runtime
+ 'COPY_PHASE_STRIP': 'NO',
'GCC_C_LANGUAGE_STANDARD': 'c99', # -std=c99
'GCC_CW_ASM_SYNTAX': 'NO', # No -fasm-blocks
'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', # -fno-exceptions
@@ -3865,11 +3889,12 @@
'CC': '$(SOURCE_ROOT)/<(clang_dir)/clang',
'LDPLUSPLUS': '$(SOURCE_ROOT)/<(clang_dir)/clang++',
- # Don't use -Wc++0x-extensions, which Xcode 4 enables by default
- # when building with clang. This warning is triggered when the
- # override keyword is used via the OVERRIDE macro from
- # base/compiler_specific.h.
- 'CLANG_WARN_CXX0X_EXTENSIONS': 'NO',
+ # gnu++11 instead of c++11 is needed because some code uses
+ # typeof() (a GNU extension).
+ # TODO(thakis): Eventually switch this to c++11 instead of
+ # gnu++11 (once typeof can be removed, which is blocked on c++11
+ # being available everywhere).
+ 'CLANG_CXX_LANGUAGE_STANDARD': 'gnu++11', # -std=gnu++11
# Warn if automatic synthesis is triggered with
# the -Wobjc-missing-property-synthesis flag.
'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
@@ -3902,16 +3927,6 @@
# http://crbug.com/255186
'-Wno-deprecated',
],
- 'OTHER_CPLUSPLUSFLAGS': [
- # gnu++11 instead of c++11 is needed because some code uses
- # typeof() (a GNU extension).
- # TODO(thakis): Eventually switch this to c++11 instead of
- # gnu++11 (once typeof can be removed, which is blocked on c++11
- # being available everywhere).
- # TODO(thakis): Use CLANG_CXX_LANGUAGE_STANDARD instead once all
- # bots use xcode 4 -- http://crbug.com/147515).
- '$(inherited)', '-std=gnu++11',
- ],
}],
['use_libcpp==1', {
'OTHER_CPLUSPLUSFLAGS': ['-stdlib=libc++'],
@@ -3957,7 +3972,6 @@
'xcode_settings': {
'OTHER_CFLAGS': [
'-fsanitize=address',
- '-mllvm -asan-globals=0', # http://crbug.com/196561
'-w', # http://crbug.com/162783
],
},
@@ -4182,16 +4196,11 @@
# This next block is mostly common with the 'mac' section above,
# but keying off (or setting) 'clang' isn't valid for iOS as it
- # also seems to mean using the custom build of clang.
+ # also means using Chromium's build of clang.
# TODO(stuartmorgan): switch to c++0x (see TODOs in the clang
# section above).
'CLANG_CXX_LANGUAGE_STANDARD': 'gnu++0x',
- # Don't use -Wc++0x-extensions, which Xcode 4 enables by default
- # when building with clang. This warning is triggered when the
- # override keyword is used via the OVERRIDE macro from
- # base/compiler_specific.h.
- 'CLANG_WARN_CXX0X_EXTENSIONS': 'NO',
# Warn if automatic synthesis is triggered with
# the -Wobjc-missing-property-synthesis flag.
'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
@@ -4673,6 +4682,35 @@
],
}],
],
+ 'configurations': {
+ # DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT!
+ # This block adds *project-wide* configuration settings to each project
+ # file. It's almost always wrong to put things here. Specify your
+ # custom |configurations| in target_defaults to add them to targets instead.
+ 'conditions': [
+ ['OS=="ios"', {
+ 'Debug': {
+ 'xcode_settings': {
+ # Enable 'Build Active Architecture Only' for Debug. This
+ # avoids a project-level warning in Xcode.
+ # Note that this configuration uses the default VALID_ARCHS value
+ # because if there is a device connected Xcode sets the active arch
+ # to the arch of the device. In cases where the device's arch is not
+ # in VALID_ARCHS (e.g. iPhone5 is armv7s) Xcode complains because it
+ # can't determine what arch to compile for.
+ 'ONLY_ACTIVE_ARCH': 'YES',
+ },
+ },
+ 'Release': {
+ 'xcode_settings': {
+ # Override VALID_ARCHS and omit armv7s. Otherwise Xcode compiles for
+ # both armv7 and armv7s, doubling the binary size.
+ 'VALID_ARCHS': 'armv7 i386',
+ },
+ },
+ }],
+ ],
+ },
'xcode_settings': {
# DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT!
# This block adds *project-wide* configuration settings to each project
@@ -4710,8 +4748,6 @@
],
}],
['OS=="ios"', {
- # Just build armv7, until armv7s is correctly tested.
- 'VALID_ARCHS': 'armv7 i386',
# Target both iPhone and iPad.
'TARGETED_DEVICE_FAMILY': '1,2',
}],
diff --git a/chromium/build/env_dump.py b/chromium/build/env_dump.py
new file mode 100755
index 00000000000..21edfe633c7
--- /dev/null
+++ b/chromium/build/env_dump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can either source a file and dump the environment changes done by
+# it, or simply dump the current environment as JSON into a file.
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('-f', '--output-json',
+ help='File to dump the environment as JSON into.')
+ parser.add_option(
+ '-d', '--dump-mode', action='store_true',
+ help='Dump the environment to sys.stdout and exit immediately.')
+
+ parser.disable_interspersed_args()
+ options, args = parser.parse_args()
+ if options.dump_mode:
+ if args or options.output_json:
+ parser.error('Cannot specify args or --output-json with --dump-mode.')
+ json.dump(dict(os.environ), sys.stdout)
+ else:
+ if not options.output_json:
+ parser.error('Requires --output-json option.')
+
+ envsetup_cmd = ' '.join(map(pipes.quote, args))
+ full_cmd = [
+ 'bash', '-c',
+ '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
+ ]
+ try:
+ output = subprocess.check_output(full_cmd)
+ except Exception as e:
+ sys.exit('Error running %s and dumping environment.' % envsetup_cmd)
+
+ env_diff = {}
+ new_env = json.loads(output)
+ for k, val in new_env.items():
+ if k == '_' or (k in os.environ and os.environ[k] == val):
+ continue
+ env_diff[k] = val
+ with open(options.output_json, 'w') as f:
+ json.dump(env_diff, f)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
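
A short usage sketch for the new env_dump.py: it re-executes itself with -d inside a child bash that has sourced the given script, then writes only the variables that changed to the JSON file. The setup-script path below is a placeholder, not a file from this diff.

import json
import subprocess

# Example invocation; './some_envsetup.sh' is a placeholder setup script.
subprocess.check_call(['python', 'chromium/build/env_dump.py',
                       '--output-json', 'env_diff.json',
                       './some_envsetup.sh'])
with open('env_diff.json') as f:
    print json.load(f)   # only variables the sourced script added or changed
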
diff --git a/chromium/build/filename_rules.gypi b/chromium/build/filename_rules.gypi
index a8d429b7aa4..cf8c3572e50 100644
--- a/chromium/build/filename_rules.gypi
+++ b/chromium/build/filename_rules.gypi
@@ -86,7 +86,7 @@
]
}],
['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', {
- 'sources/': [ ['exclude', '_aurax11\\.(h|cc)$'] ]
+ 'sources/': [ ['exclude', '_aurax11(_browsertest|_unittest)?\\.(h|cc)$'] ]
}],
['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
diff --git a/chromium/build/gdb-add-index b/chromium/build/gdb-add-index
index 4975532213b..0d66d8dac83 100755
--- a/chromium/build/gdb-add-index
+++ b/chromium/build/gdb-add-index
@@ -4,8 +4,84 @@
# found in the LICENSE file.
#
# Saves the gdb index for a given binary and its shared library dependencies.
+#
+# This will run gdb index in parallel on a number of binaries using SIGUSR1
+# as the communication mechanism to simulate a semaphore. Because of the
+# nature of this technique, using "set -e" is very difficult. The SIGUSR1
+# terminates a "wait" with an error which we need to interpret.
+#
+# When modifying this code, most of the real logic is in the index_one_file
+# function. The rest is cleanup + semaphore plumbing.
+
+# Cleanup temp directory and ensure all child jobs are dead-dead.
+function on_exit {
+ trap "" EXIT USR1 # Avoid reentrancy.
+
+ local jobs=$(jobs -p)
+ if [ -n "$jobs" ]; then
+ echo -n "Killing outstanding index jobs..."
+ kill -KILL $(jobs -p)
+ wait
+ echo "done"
+ fi
+
+ if [ -f "$DIRECTORY" ]; then
+ echo -n "Removing temp directory $DIRECTORY..."
+ rm -rf $DIRECTORY
+ echo done
+ fi
+}
+
+# Add index to one binary.
+function index_one_file {
+ local file=$1
+ local basename=$(basename "$file")
+
+ local readelf_out=$(readelf -S "$file")
+ if [[ $readelf_out =~ "gdb_index" ]]; then
+ echo "Skipped $basename -- already contains index."
+ else
+ local start=$(date +"%s%N")
+ echo "Adding index to $basename..."
+
+ gdb -batch "$file" -ex "save gdb-index $DIRECTORY" -ex "quit"
+ local index_file="$DIRECTORY/$basename.gdb-index"
+ if [ -f "$index_file" ]; then
+ objcopy --add-section .gdb_index="$index_file" \
+ --set-section-flags .gdb_index=readonly "$file" "$file"
+ local finish=$(date +"%s%N")
+ local elappsed=$(((finish - start)/1000000))
+ echo " ...$basename indexed. [${elappsed}ms]"
+ else
+ echo " ...$basename unindexable."
+ fi
+ fi
+}
-set -e
+# Functions that, when combined, concurrently index all files in the
+# FILES_TO_INDEX array (a global declared in the main body of the script).
+function async_index {
+ # Start a background subshell to run the index command.
+ {
+ index_one_file $1
+ kill -SIGUSR1 $$ # $$ resolves to the parent script.
+ exit 129 # See comment above wait loop at bottom.
+ } &
+}
+
+CUR_FILE_NUM=0
+function index_next {
+ if (( CUR_FILE_NUM >= ${#FILES_TO_INDEX[@]} )); then
+ return
+ fi
+
+ async_index "${FILES_TO_INDEX[CUR_FILE_NUM]}"
+ ((CUR_FILE_NUM += 1)) || true
+}
+
+
+########
+### Main body of the script.
if [[ ! $# == 1 ]]; then
echo "Usage: $0 path-to-binary"
@@ -18,30 +94,38 @@ if [[ ! -f "$FILENAME" ]]; then
exit 1
fi
+# Ensure we clean up on exit.
+trap on_exit EXIT
+
# We're good to go! Create temp directory for index files.
DIRECTORY=$(mktemp -d)
echo "Made temp directory $DIRECTORY."
-# Always remove directory on exit.
-trap "{ echo -n Removing temp directory $DIRECTORY...;
- rm -rf $DIRECTORY; echo done; }" EXIT
-
-# Grab all the chromium shared library files.
-so_files=$(ldd "$FILENAME" 2>/dev/null \
+# Create array with the filename and all shared libraries that
+# have the same dirname. The dirname is a signal that these
+# shared libraries were part of the same build as the binary.
+declare -a FILES_TO_INDEX=($FILENAME
+ $(ldd "$FILENAME" 2>/dev/null \
| grep $(dirname "$FILENAME") \
| sed "s/.*[ \t]\(.*\) (.*/\1/")
+)
-# Add index to binary and the shared library dependencies.
-for file in "$FILENAME" $so_files; do
- basename=$(basename "$file")
- echo -n "Adding index to $basename..."
- readelf_out=$(readelf -S "$file")
- if [[ $readelf_out =~ "gdb_index" ]]; then
- echo "already contains index. Skipped."
- else
- gdb -batch "$file" -ex "save gdb-index $DIRECTORY" -ex "quit"
- objcopy --add-section .gdb_index="$DIRECTORY"/$basename.gdb-index \
- --set-section-flags .gdb_index=readonly "$file" "$file"
- echo "done."
- fi
+# Start concurrent indexing.
+trap index_next USR1
+
+# 4 is an arbitrary default. When changing, remember we are likely IO bound
+# so basing this off the number of cores is not sensible.
+INDEX_TASKS=${INDEX_TASKS:-4}
+for ((i=0;i<${INDEX_TASKS};i++)); do
+ index_next
+done
+
+# Do a wait loop. Bash waits that terminate due to a trap have an exit
+# code > 128. We also ensure that our subshell's "normal" exit occurs with
+# an exit code > 128. This allows us to consider a > 128 exit code as
+# an indication that the loop should continue. Unfortunately, it also means
+# we cannot use set -e since technically the "wait" is failing.
+wait
+while (( $? > 128 )); do
+ wait
done
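
For comparison only: the bounded parallelism that the rewritten gdb-add-index builds out of bash job control and the SIGUSR1 "semaphore" can also be expressed with a plain thread pool. This is not how the script works; it is an equivalent sketch of index_one_file plus INDEX_TASKS workers, using the same readelf/gdb/objcopy steps shown above.

import os
import subprocess
from multiprocessing.pool import ThreadPool

def index_one_file(path, tmp_dir):
    # Skip binaries that already carry a .gdb_index section.
    if 'gdb_index' in subprocess.check_output(['readelf', '-S', path]).decode():
        return
    subprocess.check_call(['gdb', '-batch', path,
                           '-ex', 'save gdb-index %s' % tmp_dir,
                           '-ex', 'quit'])
    index_file = os.path.join(tmp_dir, os.path.basename(path) + '.gdb-index')
    if os.path.exists(index_file):
        subprocess.check_call(['objcopy',
                               '--add-section', '.gdb_index=' + index_file,
                               '--set-section-flags', '.gdb_index=readonly',
                               path, path])

def index_all(files, tmp_dir):
    # The work is largely IO bound, so size the pool from INDEX_TASKS
    # (default 4) rather than from the core count, as the script does.
    pool = ThreadPool(int(os.environ.get('INDEX_TASKS', 4)))
    pool.map(lambda f: index_one_file(f, tmp_dir), files)
    pool.close()
    pool.join()
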
diff --git a/chromium/build/get_landmines.py b/chromium/build/get_landmines.py
new file mode 100755
index 00000000000..05c9de69628
--- /dev/null
+++ b/chromium/build/get_landmines.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+"""
+
+import optparse
+import sys
+
+import landmine_utils
+
+
+builder = landmine_utils.platform
+distributor = landmine_utils.distributor
+gyp_defines = landmine_utils.gyp_defines
+gyp_msvs_version = landmine_utils.gyp_msvs_version
+platform = landmine_utils.platform
+
+
+def print_landmines(target):
+ """
+ ALL LANDMINES ARE EMITTED FROM HERE.
+ target can be one of {'Release', 'Debug', 'Debug_x64', 'Release_x64'}.
+ """
+ if (distributor() == 'goma' and platform() == 'win32' and
+ builder() == 'ninja'):
+ print 'Need to clobber winja goma due to backend cwd cache fix.'
+ if platform() == 'android':
+ print 'Clobber: Resources removed in r195014 require clobber.'
+ if platform() == 'win' and builder() == 'ninja':
+ print 'Compile on cc_unittests fails due to symbols removed in r185063.'
+ if platform() == 'linux' and builder() == 'ninja':
+ print 'Builders switching from make to ninja will clobber on this.'
+ if platform() == 'mac':
+ print 'Switching from bundle to unbundled dylib (issue 14743002).'
+ if (platform() == 'win' and builder() == 'ninja' and
+ gyp_msvs_version() == '2012' and
+ gyp_defines().get('target_arch') == 'x64' and
+ gyp_defines().get('dcheck_always_on') == '1'):
+ print "Switched win x64 trybots from VS2010 to VS2012."
+ print 'Need to clobber everything due to an IDL change in r154579 (blink)'
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('-t', '--target',
+ help='Target for which the landmines have to be emitted')
+
+ options, args = parser.parse_args()
+
+ if args:
+ parser.error('Unknown arguments %s' % args)
+
+ print_landmines(options.target)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
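
The list printed by print_landmines() above is consumed by landmines.py (diffed further below), which touches <build_dir>/.landmine_triggered when the build needs a clobber. A rough, assumed sketch of that consumer side follows; the '.landmines' cache file name and the exact comparison are assumptions, only the trigger file name comes from the landmines.py docstring.

import os

def check_landmines(build_dir, current_lines):
    # Compare the freshly emitted landmine list against a cached copy and
    # signal the build scripts to clobber when it changed.
    cache = os.path.join(build_dir, '.landmines')
    old = []
    if os.path.exists(cache):
        with open(cache) as f:
            old = f.read().splitlines()
    if old and old != current_lines:
        open(os.path.join(build_dir, '.landmine_triggered'), 'w').close()
    with open(cache, 'w') as f:
        f.write('\n'.join(current_lines))
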
diff --git a/chromium/build/install-build-deps-android.sh b/chromium/build/install-build-deps-android.sh
index f6b445bae18..9417b1a76ab 100755
--- a/chromium/build/install-build-deps-android.sh
+++ b/chromium/build/install-build-deps-android.sh
@@ -49,8 +49,8 @@ sudo apt-get -y install lighttpd python-pexpect xvfb x11-utils
# Few binaries in the Android SDK require 32-bit libraries on the host.
sudo apt-get -y install lib32z1 g++-multilib
-if /usr/bin/lsb_release -r -s | grep -q "12."; then
- # Ubuntu 12.x
+if [ $(/usr/bin/lsb_release -r -s | cut -d"." -f1) -ge 12 ]; then
+ # Ubuntu >= 12.x
sudo apt-get -y install ant
# Java can not be installed via ppa on Ubuntu 12.04+ so we'll
diff --git a/chromium/build/install-build-deps.sh b/chromium/build/install-build-deps.sh
index c858fa018ee..8296cb8928c 100755
--- a/chromium/build/install-build-deps.sh
+++ b/chromium/build/install-build-deps.sh
@@ -75,12 +75,12 @@ if [ "x$(id -u)" != x0 ]; then
fi
# Packages needed for chromeos only
-chromeos_dev_list="libbluetooth-dev"
+chromeos_dev_list="libbluetooth-dev libbrlapi-dev"
# Packages need for development
dev_list="apache2.2-bin bison curl elfutils fakeroot flex g++ gperf
- language-pack-fr libapache2-mod-php5 libasound2-dev libbz2-dev
- libcairo2-dev libcups2-dev libcurl4-gnutls-dev libelf-dev
+ language-pack-fr libapache2-mod-php5 libasound2-dev libbrlapi-dev
+ libbz2-dev libcairo2-dev libcups2-dev libcurl4-gnutls-dev libelf-dev
libgconf2-dev libgl1-mesa-dev libglib2.0-dev libglu1-mesa-dev
libgnome-keyring-dev libgtk2.0-dev libkrb5-dev libnspr4-dev
libnss3-dev libpam0g-dev libpci-dev libpulse-dev libsctp-dev
@@ -151,6 +151,11 @@ if package_exists libudev1; then
else
dev_list="${dev_list} libudev0"
fi
+if package_exists libbrlapi0.6; then
+ dev_list="${dev_list} libbrlapi0.6"
+else
+ dev_list="${dev_list} libbrlapi0.5"
+fi
# Some packages are only needed, if the distribution actually supports
@@ -216,27 +221,6 @@ else
dbg_list=
fi
-# Install the Chrome OS default fonts.
-if test "$do_inst_chromeos_fonts" != "0"; then
- echo
- echo "Installing Chrome OS fonts."
- dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
- if ! sudo $dir/linux/install-chromeos-fonts.py; then
- echo "ERROR: The installation of the Chrome OS default fonts failed."
- if [ `stat -f -c %T $dir` == "nfs" ]; then
- echo "The reason is that your repo is installed on a remote file system."
- else
- echo "This is expected if your repo is installed on a remote file system."
- fi
- echo "It is recommended to install your repo on a local file system."
- echo "You can skip the installation of the Chrome OS default founts with"
- echo "the command line option: --no-chromeos-fonts."
- exit 1
- fi
-else
- echo "Skipping installation of Chrome OS fonts."
-fi
-
# When cross building for arm on 64-bit systems the host binaries
# that are part of v8 need to be compiled with -m32 which means
# that basic multilib support is needed.
@@ -300,6 +284,28 @@ else
exit 100
fi
+# Install the Chrome OS default fonts. This must go after running
+# apt-get, since install-chromeos-fonts depends on curl.
+if test "$do_inst_chromeos_fonts" != "0"; then
+ echo
+ echo "Installing Chrome OS fonts."
+ dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
+ if ! sudo $dir/linux/install-chromeos-fonts.py; then
+ echo "ERROR: The installation of the Chrome OS default fonts failed."
+ if [ `stat -f -c %T $dir` == "nfs" ]; then
+ echo "The reason is that your repo is installed on a remote file system."
+ else
+ echo "This is expected if your repo is installed on a remote file system."
+ fi
+ echo "It is recommended to install your repo on a local file system."
+ echo "You can skip the installation of the Chrome OS default founts with"
+ echo "the command line option: --no-chromeos-fonts."
+ exit 1
+ fi
+else
+ echo "Skipping installation of Chrome OS fonts."
+fi
+
# Install 32bit backwards compatibility support for 64bit systems
if [ "$(uname -m)" = "x86_64" ]; then
if test "$do_inst_lib32" != "1"
diff --git a/chromium/build/ios/grit_whitelist.txt b/chromium/build/ios/grit_whitelist.txt
index c72022c6d22..bcdb3f36462 100644
--- a/chromium/build/ios/grit_whitelist.txt
+++ b/chromium/build/ios/grit_whitelist.txt
@@ -204,7 +204,6 @@ IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_EXTRA_INFO_2
IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_TITLE
IDS_CHROME_TO_DEVICE_PRINT_TO_PHONE
IDS_CHROME_TO_DEVICE_SNAPSHOTS
-IDS_CONTENT_CONTEXT_SEARCHWEBFORIMAGE
IDS_COPY_URL_MAC
IDS_COULDNT_OPEN_PROFILE_ERROR
IDS_CRASHES_BUG_LINK_LABEL
@@ -484,6 +483,8 @@ IDS_FLAGS_DISABLE_NATIVE_AUTOFILL_UI_DESCRIPTION
IDS_FLAGS_DISABLE_NATIVE_AUTOFILL_UI_NAME
IDS_FLAGS_DISABLE_OVERSCROLL_HISTORY_NAVIGATION_DESCRIPTION
IDS_FLAGS_DISABLE_OVERSCROLL_HISTORY_NAVIGATION_NAME
+IDS_FLAGS_DISABLE_PNACL_DESCRIPTION
+IDS_FLAGS_DISABLE_PNACL_NAME
IDS_FLAGS_DISABLE_RESTORE_SESSION_STATE_DESCRIPTION
IDS_FLAGS_DISABLE_RESTORE_SESSION_STATE_NAME
IDS_FLAGS_DISABLE_SOFTWARE_RASTERIZER_DESCRIPTION
@@ -565,8 +566,6 @@ IDS_FLAGS_ENABLE_PASSWORD_GENERATION_DESCRIPTION
IDS_FLAGS_ENABLE_PASSWORD_GENERATION_NAME
IDS_FLAGS_ENABLE_PINCH_SCALE_DESCRIPTION
IDS_FLAGS_ENABLE_PINCH_SCALE_NAME
-IDS_FLAGS_ENABLE_PNACL_DESCRIPTION
-IDS_FLAGS_ENABLE_PNACL_NAME
IDS_FLAGS_ENABLE_REQUEST_TABLET_SITE_DESCRIPTION
IDS_FLAGS_ENABLE_REQUEST_TABLET_SITE_NAME
IDS_FLAGS_ENABLE_RICH_NOTIFICATIONS_DESCRIPTION
@@ -603,8 +602,6 @@ IDS_FLAGS_ENABLE_TOUCH_SIDE_BEZELS_DESCRIPTION
IDS_FLAGS_ENABLE_TOUCH_SIDE_BEZELS_NAME
IDS_FLAGS_ENABLE_VP9_PLAYBACK_DESCRIPTION
IDS_FLAGS_ENABLE_VP9_PLAYBACK_NAME
-IDS_FLAGS_ENABLE_WALLET_PRODUCTION_SERVICE_DESCRIPTION
-IDS_FLAGS_ENABLE_WALLET_PRODUCTION_SERVICE_NAME
IDS_FLAGS_ENABLE_WEBGL_DESCRIPTION
IDS_FLAGS_ENABLE_WEBGL_NAME
IDS_FLAGS_ENABLE_WEBP_IN_ACCEPT_HEADER_DESCRIPTION
@@ -724,6 +721,8 @@ IDS_FLAGS_USE_CLIENT_LOGIN_SIGNIN_FLOW_DESCRIPTION
IDS_FLAGS_USE_CLIENT_LOGIN_SIGNIN_FLOW_NAME
IDS_FLAGS_VIEWS_TEXTFIELD_DESCRIPTION
IDS_FLAGS_VIEWS_TEXTFIELD_NAME
+IDS_FLAGS_WALLET_SERVICE_USE_SANDBOX_DESCRIPTION
+IDS_FLAGS_WALLET_SERVICE_USE_SANDBOX_NAME
IDS_FLAGS_WARNING_HEADER
IDS_FLAGS_WARNING_TEXT
IDS_FULLSCREEN
diff --git a/chromium/build/isolate.gypi b/chromium/build/isolate.gypi
index 15fcf4c7a48..76882c786ea 100644
--- a/chromium/build/isolate.gypi
+++ b/chromium/build/isolate.gypi
@@ -42,11 +42,8 @@
'inputs': [
# Files that are known to be involved in this step.
'<(DEPTH)/tools/swarm_client/isolate.py',
- '<(DEPTH)/tools/swarm_client/isolateserver_archive.py',
'<(DEPTH)/tools/swarm_client/run_isolated.py',
'<(DEPTH)/tools/swarm_client/googletest/run_test_cases.py',
- '<(DEPTH)/tools/swarm_client/short_expression_finder.py',
- '<(DEPTH)/tools/swarm_client/trace_inputs.py',
# Disable file tracking by the build driver for now. This means the
# project must have the proper build-time dependency for their runtime
diff --git a/chromium/build/java.gypi b/chromium/build/java.gypi
index 1635c71da40..2e03bab7cce 100644
--- a/chromium/build/java.gypi
+++ b/chromium/build/java.gypi
@@ -55,8 +55,11 @@
'additional_src_dirs': [],
'javac_includes': [],
'jar_name': '<(_target_name).jar',
- 'jar_path': '<(PRODUCT_DIR)/lib.java/<(jar_name)',
+ 'jar_dir': '<(PRODUCT_DIR)/lib.java',
+ 'jar_path': '<(intermediate_dir)/<(jar_name)',
+ 'jar_final_path': '<(jar_dir)/<(jar_name)',
'jar_excluded_classes': [ '*/R.class', '*/R##*.class' ],
+ 'instr_stamp': '<(intermediate_dir)/instr.stamp',
'additional_input_paths': [],
'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
'generated_src_dirs': ['>@(generated_R_dirs)'],
@@ -70,12 +73,34 @@
'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
'classes_dir': '<(intermediate_dir)/classes',
'compile_stamp': '<(intermediate_dir)/compile.stamp',
+ 'proguard_config%': '',
+ 'proguard_preprocess%': '0',
+ 'variables': {
+ 'variables': {
+ 'proguard_preprocess%': 0,
+ 'emma_never_instrument%': 0,
+ },
+ 'conditions': [
+ ['proguard_preprocess == 1', {
+ 'javac_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+ }, {
+ 'javac_jar_path': '<(jar_path)'
+ }],
+ ['chromium_code != 0 and emma_coverage != 0 and emma_never_instrument == 0', {
+ 'emma_instrument': 1,
+ }, {
+ 'emma_instrument': 0,
+ }],
+ ],
+ },
+ 'emma_instrument': '<(emma_instrument)',
+ 'javac_jar_path': '<(javac_jar_path)',
},
# This all_dependent_settings is used for java targets only. This will add the
# jar path to the classpath of dependent java targets.
'all_dependent_settings': {
'variables': {
- 'input_jars_paths': ['<(jar_path)'],
+ 'input_jars_paths': ['<(jar_final_path)'],
'library_dexed_jars_paths': ['<(dex_path)'],
},
},
@@ -218,6 +243,35 @@
},
],
}],
+ ['proguard_preprocess == 1', {
+ 'actions': [
+ {
+ 'action_name': 'proguard_<(_target_name)',
+ 'message': 'Proguard preprocessing <(_target_name) jar',
+ 'inputs': [
+ '<(android_sdk_root)/tools/proguard/bin/proguard.sh',
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/proguard.py',
+ '<(javac_jar_path)',
+ '<(proguard_config)',
+ ],
+ 'outputs': [
+ '<(jar_path)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/proguard.py',
+ '--proguard-path=<(android_sdk_root)/tools/proguard/bin/proguard.sh',
+ '--input-path=<(javac_jar_path)',
+ '--output-path=<(jar_path)',
+ '--proguard-config=<(proguard_config)',
+ '--classpath=<(android_sdk_jar) >(input_jars_paths)',
+
+ # TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
+ '--ignore=>!(echo \'>(_inputs)\' | md5sum)',
+ ]
+ },
+ ],
+ }],
],
'actions': [
{
@@ -263,12 +317,12 @@
'<(compile_stamp)',
],
'outputs': [
- '<(jar_path)',
+ '<(javac_jar_path)',
],
'action': [
'python', '<(DEPTH)/build/android/gyp/jar.py',
'--classes-dir=<(classes_dir)',
- '--jar-path=<(jar_path)',
+ '--jar-path=<(javac_jar_path)',
'--excluded-classes=<(jar_excluded_classes)',
# TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
@@ -276,21 +330,38 @@
]
},
{
+ 'action_name': 'instr_jar_<(_target_name)',
+ 'message': 'Instrumenting <(_target_name) jar',
+ 'variables': {
+ 'input_path': '<(jar_path)',
+ 'output_path': '<(jar_final_path)',
+ 'stamp_path': '<(instr_stamp)',
+ 'instr_type': 'jar',
+ },
+ 'outputs': [
+ '<(jar_final_path)',
+ ],
+ 'inputs': [
+ '<(jar_path)',
+ ],
+ 'includes': [ 'android/instr_action.gypi' ],
+ },
+ {
'action_name': 'jar_toc_<(_target_name)',
'message': 'Creating <(_target_name) jar.TOC',
'inputs': [
'<(DEPTH)/build/android/gyp/util/build_utils.py',
'<(DEPTH)/build/android/gyp/util/md5_check.py',
'<(DEPTH)/build/android/gyp/jar_toc.py',
- '<(jar_path)',
+ '<(jar_final_path)',
],
'outputs': [
- '<(jar_path).TOC',
+ '<(jar_final_path).TOC',
],
'action': [
'python', '<(DEPTH)/build/android/gyp/jar_toc.py',
- '--jar-path=<(jar_path)',
- '--toc-path=<(jar_path).TOC',
+ '--jar-path=<(jar_final_path)',
+ '--toc-path=<(jar_final_path).TOC',
# TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
'--ignore=>!(echo \'>(_inputs)\' | md5sum)',
@@ -299,7 +370,12 @@
{
'action_name': 'dex_<(_target_name)',
'variables': {
- 'dex_input_paths': [ '<(jar_path)' ],
+ 'conditions': [
+ ['emma_instrument != 0', {
+ 'dex_no_locals': 1,
+ }],
+ ],
+ 'dex_input_paths': [ '<(jar_final_path)' ],
'output_path': '<(dex_path)',
},
'includes': [ 'android/dex_action.gypi' ],
diff --git a/chromium/build/java_apk.gypi b/chromium/build/java_apk.gypi
index 2457d3a1939..81984472af7 100644
--- a/chromium/build/java_apk.gypi
+++ b/chromium/build/java_apk.gypi
@@ -87,15 +87,18 @@
'native_libraries_template_data_file': '<(native_libraries_template_data_dir)/native_libraries_array.h',
'native_libraries_template_data_stamp': '<(intermediate_dir)/native_libraries_template_data.stamp',
'compile_stamp': '<(intermediate_dir)/compile.stamp',
+ 'instr_stamp': '<(intermediate_dir)/instr.stamp',
'jar_stamp': '<(intermediate_dir)/jar.stamp',
'obfuscate_stamp': '<(intermediate_dir)/obfuscate.stamp',
'strip_stamp': '<(intermediate_dir)/strip.stamp',
'classes_dir': '<(intermediate_dir)/classes',
+ 'classes_final_dir': '<(intermediate_dir)/classes_instr',
'javac_includes': [],
'jar_excluded_classes': [],
'jar_path': '<(PRODUCT_DIR)/lib.java/<(jar_name)',
'obfuscated_jar_path': '<(intermediate_dir)/obfuscated.jar',
'dex_path': '<(intermediate_dir)/classes.dex',
+ 'emma_device_jar': '<(android_sdk_root)/tools/lib/emma_device.jar',
'android_manifest_path%': '<(java_in_dir)/AndroidManifest.xml',
'push_stamp': '<(intermediate_dir)/push.stamp',
'link_stamp': '<(intermediate_dir)/link.stamp',
@@ -128,8 +131,10 @@
],
},
'native_lib_target%': '',
+ 'emma_instrument': '<(emma_coverage)',
'apk_package_native_libs_dir': '<(apk_package_native_libs_dir)',
'unsigned_standalone_apk_path': '<(unsigned_standalone_apk_path)',
+ 'extra_native_libs': [],
},
# Pass the jar path to the apk's "fake" jar target. This would be better as
# direct_dependent_settings, but a variable set by a direct_dependent_settings
@@ -178,13 +183,17 @@
'destination': '<(apk_package_native_libs_dir)/<(android_app_abi)',
'files': [
'<(android_gdbserver)',
+ '<@(extra_native_libs)',
],
},
],
'actions': [
{
'variables': {
- 'input_libraries': ['<@(native_libs_paths)'],
+ 'input_libraries': [
+ '<@(native_libs_paths)',
+ '<@(extra_native_libs)',
+ ],
},
'includes': ['../build/android/write_ordered_libraries.gypi'],
},
@@ -231,7 +240,10 @@
'variables': {
'ordered_libraries_file%': '<(ordered_libraries_file)',
'stripped_libraries_dir': '<(libraries_source_dir)',
- 'input_paths': ['<@(native_libs_paths)'],
+ 'input_paths': [
+ '<@(native_libs_paths)',
+ '<@(extra_native_libs)',
+ ],
'stamp': '<(strip_stamp)'
},
'includes': ['../build/android/strip_native_libraries.gypi'],
@@ -243,9 +255,9 @@
'libraries_top_dir': '<(intermediate_dir)/lib.stripped',
'libraries_source_dir': '<(libraries_top_dir)/lib/<(android_app_abi)',
'device_library_dir': '<(device_intermediate_dir)/lib.stripped',
+ 'configuration_name': '<(CONFIGURATION_NAME)',
},
'dependencies': [
- '<(DEPTH)/tools/android/md5sum/md5sum.gyp:md5sum',
'<(DEPTH)/build/android/setup.gyp:get_build_device_configurations',
],
'actions': [
@@ -274,6 +286,7 @@
'--target-dir=<(device_library_dir)',
'--apk=<(incomplete_apk_path)',
'--stamp=<(link_stamp)',
+ '--configuration-name=<(CONFIGURATION_NAME)',
],
},
],
@@ -367,10 +380,19 @@
'--apk-path=<(incomplete_apk_path)',
'--build-device-configuration=<(build_device_config_path)',
'--install-record=<(apk_install_record)',
+ '--configuration-name=<(CONFIGURATION_NAME)',
],
},
],
}],
+ ['is_test_apk == 1', {
+ 'dependencies': [
+ '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+ ]
+ }],
+ ],
+ 'dependencies': [
+ '<(DEPTH)/tools/android/md5sum/md5sum.gyp:md5sum',
],
'actions': [
{
@@ -461,20 +483,37 @@
],
},
{
+ 'action_name': 'instr_classes_<(_target_name)',
+ 'message': 'Instrumenting <(_target_name) classes',
+ 'variables': {
+ 'input_path': '<(classes_dir)',
+ 'output_path': '<(classes_final_dir)',
+ 'stamp_path': '<(instr_stamp)',
+ 'instr_type': 'classes',
+ },
+ 'outputs': [
+ '<(instr_stamp)',
+ ],
+ 'inputs': [
+ '<(compile_stamp)',
+ ],
+ 'includes': [ 'android/instr_action.gypi' ],
+ },
+ {
'action_name': 'jar_<(_target_name)',
'message': 'Creating <(_target_name) jar',
'inputs': [
+ '<(instr_stamp)',
'<(DEPTH)/build/android/gyp/util/build_utils.py',
'<(DEPTH)/build/android/gyp/util/md5_check.py',
'<(DEPTH)/build/android/gyp/jar.py',
- '<(compile_stamp)',
],
'outputs': [
'<(jar_stamp)',
],
'action': [
'python', '<(DEPTH)/build/android/gyp/jar.py',
- '--classes-dir=<(classes_dir)',
+ '--classes-dir=<(classes_final_dir)',
'--jar-path=<(jar_path)',
'--excluded-classes=<(jar_excluded_classes)',
'--stamp=<(jar_stamp)',
@@ -491,7 +530,7 @@
'<(DEPTH)/build/android/ant/create-test-jar.js',
'<(DEPTH)/build/android/gyp/util/build_utils.py',
'<(DEPTH)/build/android/gyp/ant.py',
- '<(compile_stamp)',
+ '<(instr_stamp)',
'>@(proguard_flags_paths)',
],
'outputs': [
@@ -537,10 +576,16 @@
'input_paths': [ '<(obfuscate_stamp)' ],
'proguard_enabled_input_path': '<(obfuscated_jar_path)',
}],
+ ['emma_instrument != 0', {
+ 'dex_no_locals': 1,
+ }],
+ ['emma_instrument != 0 and is_test_apk == 0', {
+ 'dex_input_paths': [ '<(emma_device_jar)' ],
+ }],
],
- 'input_paths': [ '<(compile_stamp)' ],
+ 'input_paths': [ '<(instr_stamp)' ],
'dex_input_paths': [ '>@(library_dexed_jars_paths)' ],
- 'dex_generated_input_dirs': [ '<(classes_dir)' ],
+ 'dex_generated_input_dirs': [ '<(classes_final_dir)' ],
'output_path': '<(dex_path)',
},
'includes': [ 'android/dex_action.gypi' ],
@@ -625,6 +670,8 @@
'-DOUT_DIR=<(intermediate_dir)',
'-DSOURCE_DIR=<(source_dir)',
'-DUNSIGNED_APK_PATH=<(unsigned_apk_path)',
+ '-DEMMA_INSTRUMENT=<(emma_instrument)',
+ '-DEMMA_DEVICE_JAR=<(emma_device_jar)',
'-Dbasedir=.',
'-buildfile',
diff --git a/chromium/build/java_prebuilt.gypi b/chromium/build/java_prebuilt.gypi
index 80003749fc7..cec881dfd11 100644
--- a/chromium/build/java_prebuilt.gypi
+++ b/chromium/build/java_prebuilt.gypi
@@ -24,13 +24,62 @@
],
'variables': {
'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+ 'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+ 'android_jar': '<(android_sdk)/android.jar',
+ 'input_jars_paths': [ '<(android_jar)' ],
+ 'proguard_config%': '',
+ 'proguard_preprocess%': '0',
+ 'variables': {
+ 'variables': {
+ 'proguard_preprocess%': 0,
+ },
+ 'conditions': [
+ ['proguard_preprocess == 1', {
+ 'dex_input_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+ }, {
+ 'dex_input_jar_path': '<(jar_path)'
+ }],
+ ],
+ },
+ 'dex_input_jar_path': '<(dex_input_jar_path)',
},
'all_dependent_settings': {
'variables': {
- 'input_jars_paths': ['<(jar_path)'],
+ 'input_jars_paths': ['<(dex_input_jar_path)'],
'library_dexed_jars_paths': ['<(dex_path)'],
},
},
+ 'conditions' : [
+ ['proguard_preprocess == 1', {
+ 'actions': [
+ {
+ 'action_name': 'proguard_<(_target_name)',
+ 'message': 'Proguard preprocessing <(_target_name) jar',
+ 'inputs': [
+ '<(android_sdk_root)/tools/proguard/bin/proguard.sh',
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/proguard.py',
+ '<(jar_path)',
+ '<(proguard_config)',
+ ],
+ 'outputs': [
+ '<(dex_input_jar_path)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/proguard.py',
+ '--proguard-path=<(android_sdk_root)/tools/proguard/bin/proguard.sh',
+ '--input-path=<(jar_path)',
+ '--output-path=<(dex_input_jar_path)',
+ '--proguard-config=<(proguard_config)',
+ '--classpath=>(input_jars_paths)',
+
+ # TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
+ '--ignore=>!(echo \'>(_inputs)\' | md5sum)',
+ ]
+ },
+ ],
+ }],
+ ],
'actions': [
{
'action_name': 'dex_<(_target_name)',
@@ -38,7 +87,7 @@
'inputs': [
'<(DEPTH)/build/android/gyp/util/build_utils.py',
'<(DEPTH)/build/android/gyp/dex.py',
- '<(jar_path)',
+ '<(dex_input_jar_path)',
],
'outputs': [
'<(dex_path)',
@@ -51,7 +100,7 @@
# TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
'--ignore=>!(echo \'>(_inputs)\' | md5sum)',
- '<(jar_path)',
+ '<(dex_input_jar_path)',
]
},
diff --git a/chromium/build/landmine_utils.py b/chromium/build/landmine_utils.py
new file mode 100644
index 00000000000..021fc9b7113
--- /dev/null
+++ b/chromium/build/landmine_utils.py
@@ -0,0 +1,114 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import functools
+import logging
+import os
+import shlex
+import sys
+
+
+def memoize(default=None):
+ """This decorator caches the return value of a parameterless pure function"""
+ def memoizer(func):
+ val = []
+ @functools.wraps(func)
+ def inner():
+ if not val:
+ ret = func()
+ val.append(ret if ret is not None else default)
+ if logging.getLogger().isEnabledFor(logging.INFO):
+ print '%s -> %r' % (func.__name__, val[0])
+ return val[0]
+ return inner
+ return memoizer
+
+
+@memoize()
+def IsWindows():
+ return sys.platform in ['win32', 'cygwin']
+
+
+@memoize()
+def IsLinux():
+ return sys.platform.startswith('linux')
+
+
+@memoize()
+def IsMac():
+ return sys.platform == 'darwin'
+
+
+@memoize()
+def gyp_defines():
+ """Parses and returns GYP_DEFINES env var as a dictionary."""
+ return dict(arg.split('=', 1)
+ for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+@memoize()
+def gyp_msvs_version():
+ return os.environ.get('GYP_MSVS_VERSION', '')
+
+@memoize()
+def distributor():
+ """
+ Returns a string which is the distributed build engine in use (if any).
+ Possible values: 'goma', 'ib', ''
+ """
+ if 'goma' in gyp_defines():
+ return 'goma'
+ elif IsWindows():
+ if 'CHROME_HEADLESS' in os.environ:
+ return 'ib' # use (win and !goma and headless) as approximation of ib
+
+
+@memoize()
+def platform():
+ """
+ Returns a string representing the platform this build is targeted for.
+ Possible values: 'win', 'mac', 'linux', 'ios', 'android'
+ """
+ if 'OS' in gyp_defines():
+ if 'android' in gyp_defines()['OS']:
+ return 'android'
+ else:
+ return gyp_defines()['OS']
+ elif IsWindows():
+ return 'win'
+ elif IsLinux():
+ return 'linux'
+ else:
+ return 'mac'
+
+
+@memoize()
+def builder():
+ """
+ Returns a string representing the build engine (not compiler) to use.
+ Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons'
+ """
+ if 'GYP_GENERATORS' in os.environ:
+ # for simplicity, only support the first explicit generator
+ generator = os.environ['GYP_GENERATORS'].split(',')[0]
+ if generator.endswith('-android'):
+ return generator.split('-')[0]
+ elif generator.endswith('-ninja'):
+ return 'ninja'
+ else:
+ return generator
+ else:
+ if platform() == 'android':
+ # Good enough for now? Do any android bots use make?
+ return 'ninja'
+ elif platform() == 'ios':
+ return 'xcode'
+ elif IsWindows():
+ return 'msvs'
+ elif IsLinux():
+ return 'ninja'
+ elif IsMac():
+ return 'xcode'
+ else:
+ assert False, 'Don\'t know what builder we\'re using!'
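
landmine_utils.py (new above) simply moves the environment-sniffing helpers out of landmines.py so other scripts can reuse them. A minimal sketch of how the memoized helpers behave, assuming the module is importable and GYP_DEFINES is set before the first call; the define values below are purely illustrative.

  import os
  import landmine_utils

  # gyp_defines() shlex-splits GYP_DEFINES and maps each 'key=value' pair to a
  # dict entry; set the variable before the first call, because @memoize()
  # caches the first result for the life of the process.
  os.environ['GYP_DEFINES'] = 'OS=android goma=1'
  assert landmine_utils.gyp_defines() == {'OS': 'android', 'goma': '1'}

  # platform() prefers the OS define over sys.platform, so this reports
  # 'android' even on a Linux host; distributor() keys off the 'goma' define.
  assert landmine_utils.platform() == 'android'
  assert landmine_utils.distributor() == 'goma'
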
diff --git a/chromium/build/landmines.py b/chromium/build/landmines.py
index c09ffb887d8..91b4fa9e60d 100755
--- a/chromium/build/landmines.py
+++ b/chromium/build/landmines.py
@@ -4,9 +4,6 @@
# found in the LICENSE file.
"""
-This file holds a list of reasons why a particular build needs to be clobbered
-(or a list of 'landmines').
-
This script runs every build as a hook. If it detects that the build should
be clobbered, it will touch the file <build_dir>/.landmine_triggered. The
various build scripts will then check for the presence of this file and clobber
@@ -18,148 +15,18 @@ build is clobbered.
"""
import difflib
-import functools
import gyp_helper
import logging
import optparse
import os
-import shlex
import sys
+import subprocess
import time
-SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-
-def memoize(default=None):
- """This decorator caches the return value of a parameterless pure function"""
- def memoizer(func):
- val = []
- @functools.wraps(func)
- def inner():
- if not val:
- ret = func()
- val.append(ret if ret is not None else default)
- if logging.getLogger().isEnabledFor(logging.INFO):
- print '%s -> %r' % (func.__name__, val[0])
- return val[0]
- return inner
- return memoizer
-
-
-@memoize()
-def IsWindows():
- return sys.platform in ['win32', 'cygwin']
-
-
-@memoize()
-def IsLinux():
- return sys.platform.startswith('linux')
-
-
-@memoize()
-def IsMac():
- return sys.platform == 'darwin'
+import landmine_utils
-@memoize()
-def gyp_defines():
- """Parses and returns GYP_DEFINES env var as a dictionary."""
- return dict(arg.split('=', 1)
- for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
-
-@memoize()
-def gyp_msvs_version():
- return os.environ.get('GYP_MSVS_VERSION', '')
-
-@memoize()
-def distributor():
- """
- Returns a string which is the distributed build engine in use (if any).
- Possible values: 'goma', 'ib', ''
- """
- if 'goma' in gyp_defines():
- return 'goma'
- elif IsWindows():
- if 'CHROME_HEADLESS' in os.environ:
- return 'ib' # use (win and !goma and headless) as approximation of ib
-
-
-@memoize()
-def platform():
- """
- Returns a string representing the platform this build is targetted for.
- Possible values: 'win', 'mac', 'linux', 'ios', 'android'
- """
- if 'OS' in gyp_defines():
- if 'android' in gyp_defines()['OS']:
- return 'android'
- else:
- return gyp_defines()['OS']
- elif IsWindows():
- return 'win'
- elif IsLinux():
- return 'linux'
- else:
- return 'mac'
-
-
-@memoize()
-def builder():
- """
- Returns a string representing the build engine (not compiler) to use.
- Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons'
- """
- if 'GYP_GENERATORS' in os.environ:
- # for simplicity, only support the first explicit generator
- generator = os.environ['GYP_GENERATORS'].split(',')[0]
- if generator.endswith('-android'):
- return generator.split('-')[0]
- elif generator.endswith('-ninja'):
- return 'ninja'
- else:
- return generator
- else:
- if platform() == 'android':
- # Good enough for now? Do any android bots use make?
- return 'ninja'
- elif platform() == 'ios':
- return 'xcode'
- elif IsWindows():
- return 'msvs'
- elif IsLinux():
- return 'ninja'
- elif IsMac():
- return 'xcode'
- else:
- assert False, 'Don\'t know what builder we\'re using!'
-
-
-def get_landmines(target):
- """
- ALL LANDMINES ARE DEFINED HERE.
- target is 'Release' or 'Debug'
- """
- landmines = []
- add = lambda item: landmines.append(item + '\n')
-
- if (distributor() == 'goma' and platform() == 'win32' and
- builder() == 'ninja'):
- add('Need to clobber winja goma due to backend cwd cache fix.')
- if platform() == 'android':
- add('Clobber: Resources removed in r195014 require clobber.')
- if platform() == 'win' and builder() == 'ninja':
- add('Compile on cc_unittests fails due to symbols removed in r185063.')
- if platform() == 'linux' and builder() == 'ninja':
- add('Builders switching from make to ninja will clobber on this.')
- if platform() == 'mac':
- add('Switching from bundle to unbundled dylib (issue 14743002).')
- if (platform() == 'win' and builder() == 'ninja' and
- gyp_msvs_version() == '2012' and
- gyp_defines().get('target_arch') == 'x64' and
- gyp_defines().get('dcheck_always_on') == '1'):
- add("Switched win x64 trybots from VS2010 to VS2012.")
- add('Need to clobber everything due to an IDL change in r154579 (blink)')
-
- return landmines
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
def get_target_build_dir(build_tool, target, is_iphone=False):
@@ -187,16 +54,15 @@ def get_target_build_dir(build_tool, target, is_iphone=False):
return os.path.abspath(ret)
-def set_up_landmines(target):
+def set_up_landmines(target, new_landmines):
"""Does the work of setting, planting, and triggering landmines."""
- out_dir = get_target_build_dir(builder(), target, platform() == 'ios')
+ out_dir = get_target_build_dir(landmine_utils.builder(), target,
+ landmine_utils.platform() == 'ios')
landmines_path = os.path.join(out_dir, '.landmines')
if not os.path.exists(out_dir):
os.makedirs(out_dir)
- new_landmines = get_landmines(target)
-
if not os.path.exists(landmines_path):
with open(landmines_path, 'w') as f:
f.writelines(new_landmines)
@@ -217,13 +83,21 @@ def set_up_landmines(target):
os.remove(triggered)
-def main():
+def process_options():
+ """Returns a list of landmine emitting scripts."""
parser = optparse.OptionParser()
+ parser.add_option(
+ '-s', '--landmine-scripts', action='append',
+ default=[os.path.join(SRC_DIR, 'build', 'get_landmines.py')],
+ help='Path to the script which emits landmines to stdout. The target '
+ 'is passed to this script via the -t option. Note that an extra '
+ 'script can be specified via the EXTRA_LANDMINES_SCRIPT env var.')
parser.add_option('-v', '--verbose', action='store_true',
default=('LANDMINES_VERBOSE' in os.environ),
help=('Emit some extra debugging information (default off). This option '
'is also enabled by the presence of a LANDMINES_VERBOSE environment '
'variable.'))
+
options, args = parser.parse_args()
if args:
@@ -232,10 +106,25 @@ def main():
logging.basicConfig(
level=logging.DEBUG if options.verbose else logging.ERROR)
+ extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+ if extra_script:
+ return options.landmine_scripts + [extra_script]
+ else:
+ return options.landmine_scripts
+
+
+def main():
+ landmine_scripts = process_options()
gyp_helper.apply_chromium_gyp_env()
for target in ('Debug', 'Release', 'Debug_x64', 'Release_x64'):
- set_up_landmines(target)
+ landmines = []
+ for s in landmine_scripts:
+ proc = subprocess.Popen([sys.executable, s, '-t', target],
+ stdout=subprocess.PIPE)
+ output, _ = proc.communicate()
+ landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+ set_up_landmines(target, landmines)
return 0
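
With this change landmines.py no longer hard-codes get_landmines(): it runs every script given via --landmine-scripts (defaulting to build/get_landmines.py, plus an optional EXTRA_LANDMINES_SCRIPT from the environment) with '-t <target>' and turns each stdout line into one landmine entry. A minimal sketch of a conforming extra script; the file name and clobber reason are made up for illustration.

  #!/usr/bin/env python
  # my_extra_landmines.py -- hypothetical script for EXTRA_LANDMINES_SCRIPT.
  # landmines.py runs it as: python my_extra_landmines.py -t Debug
  import optparse
  import sys

  def main():
    parser = optparse.OptionParser()
    parser.add_option('-t', '--target', help="Build type, e.g. 'Debug'.")
    options, _ = parser.parse_args()
    if options.target == 'Debug':
      # Every line written to stdout becomes one entry in <out_dir>/.landmines.
      print('Clobber Debug after the (hypothetical) local toolchain change.')
    return 0

  if __name__ == '__main__':
    sys.exit(main())
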
diff --git a/chromium/build/linux/system.gyp b/chromium/build/linux/system.gyp
index 1db278f2841..68e4d36e28a 100644
--- a/chromium/build/linux/system.gyp
+++ b/chromium/build/linux/system.gyp
@@ -15,6 +15,7 @@
'linux_link_libgps%': 0,
'linux_link_libpci%': 0,
'linux_link_libspeechd%': 0,
+ 'linux_link_libbrlapi%': 0,
},
'conditions': [
[ 'os_posix==1 and OS!="mac"', {
@@ -122,9 +123,6 @@
{
'target_name': 'libgps',
'type': 'static_library',
- 'dependencies': [
- '../../base/base.gyp:base',
- ],
'all_dependent_settings': {
'defines': [
'USE_LIBGPS',
@@ -148,6 +146,9 @@
}],
],
},
+ 'include_dirs': [
+ '../..',
+ ],
'hard_dependency': 1,
'actions': [
{
@@ -202,7 +203,6 @@
['use_openssl==0 and use_system_ssl==0', {
'dependencies': [
'../../net/third_party/nss/ssl.gyp:libssl',
- '../../third_party/zlib/zlib.gyp:zlib',
],
'direct_dependent_settings': {
'include_dirs+': [
@@ -325,9 +325,6 @@
'type': 'static_library',
'conditions': [
['use_gio==1 and _toolset=="target"', {
- 'dependencies': [
- '../../base/base.gyp:base',
- ],
'cflags': [
'<!@(<(pkg-config) --cflags gio-2.0)',
],
@@ -342,6 +339,9 @@
'<(SHARED_INTERMEDIATE_DIR)',
],
},
+ 'include_dirs': [
+ '../..',
+ ],
'link_settings': {
'ldflags': [
'<!@(<(pkg-config) --libs-only-L --libs-only-other gio-2.0)',
@@ -401,9 +401,6 @@
'cflags': [
'<!@(<(pkg-config) --cflags libpci)',
],
- 'dependencies': [
- '../../base/base.gyp:base',
- ],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)',
@@ -421,6 +418,9 @@
}],
],
},
+ 'include_dirs': [
+ '../..',
+ ],
'hard_dependency': 1,
'actions': [
{
@@ -462,9 +462,6 @@
{
'target_name': 'libspeechd',
'type': 'static_library',
- 'dependencies': [
- '../../base/base.gyp:base',
- ],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)',
@@ -479,6 +476,9 @@
}],
],
},
+ 'include_dirs': [
+ '../..',
+ ],
'hard_dependency': 1,
'actions': [
{
@@ -539,6 +539,70 @@
],
},
{
+ 'target_name': 'libbrlapi',
+ 'type': 'static_library',
+ 'dependencies': [
+ '../../base/base.gyp:base',
+ ],
+ 'all_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ 'defines': [
+ 'USE_BRLAPI',
+ ],
+ 'conditions': [
+ ['linux_link_libbrlapi==1', {
+ 'link_settings': {
+ 'libraries': [
+ '-lbrlapi',
+ ],
+ }
+ }],
+ ],
+ },
+ 'hard_dependency': 1,
+ 'actions': [
+ {
+ 'variables': {
+ 'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libbrlapi.h',
+ 'output_cc': '<(INTERMEDIATE_DIR)/libbrlapi_loader.cc',
+ 'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+ },
+ 'action_name': 'generate_brlapi_loader',
+ 'inputs': [
+ '<(generator)',
+ ],
+ 'outputs': [
+ '<(output_h)',
+ '<(output_cc)',
+ ],
+ 'action': ['python',
+ '<(generator)',
+ '--name', 'LibBrlapiLoader',
+ '--output-h', '<(output_h)',
+ '--output-cc', '<(output_cc)',
+ '--header', '<brlapi.h>',
+ '--link-directly=<(linux_link_libbrlapi)',
+ 'brlapi_getHandleSize',
+ 'brlapi_error_location',
+ 'brlapi_expandKeyCode',
+ 'brlapi_strerror',
+ 'brlapi__acceptKeys',
+ 'brlapi__openConnection',
+ 'brlapi__closeConnection',
+ 'brlapi__getDisplaySize',
+ 'brlapi__enterTtyModeWithPath',
+ 'brlapi__leaveTtyMode',
+ 'brlapi__writeDots',
+ 'brlapi__readKey',
+ ],
+ 'message': 'Generating libbrlapi library loader.',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ },
+ {
'target_name': 'x11',
'type': 'none',
'toolsets': ['host', 'target'],
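
The new libbrlapi target mirrors the other optional-library targets in this file: generate_library_loader.py produces a LibBrlapiLoader for the listed brlapi entry points, and linux_link_libbrlapi decides whether dependents also link -lbrlapi directly. Roughly, the action invokes the generator as in the sketch below; the output paths stand in for the gyp intermediate directories.

  import subprocess
  subprocess.check_call([
      'python', 'tools/generate_library_loader/generate_library_loader.py',
      '--name', 'LibBrlapiLoader',
      '--output-h', 'gen/library_loaders/libbrlapi.h',   # <(output_h)
      '--output-cc', 'gen/libbrlapi_loader.cc',          # <(output_cc)
      '--header', '<brlapi.h>',
      '--link-directly=0',                               # <(linux_link_libbrlapi)
      'brlapi_getHandleSize',
      'brlapi_error_location',
      'brlapi__openConnection',
      'brlapi__closeConnection',
      # ...plus the remaining brlapi_*/brlapi__* symbols listed in the action.
  ])
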
diff --git a/chromium/build/linux/unbundle/README b/chromium/build/linux/unbundle/README
index 7027b9ad2c1..d1b2a966eff 100644
--- a/chromium/build/linux/unbundle/README
+++ b/chromium/build/linux/unbundle/README
@@ -17,12 +17,28 @@ libraries is the norm.
Usage:
-replace_gyp_files.py <gyp-flags>
+1. remove_bundled_libraries.py <preserved-directories>
-For example: replace_gyp_files.py -Duse_system_harfbuzz=1
+ For example: remove_bundled_libraries.py third_party/mesa
-The script ignores flags other than -D for convenience. This makes it possible
-to have a variable e.g. ${myconf} with all the options, and execute:
+ The script scans sources looking for third_party directories.
+ Everything that is not explicitly preserved is removed (except for
+ gyp files), and the script fails if any directory passed on the command
+ line does not exist (to ensure the list is kept up to date).
-build/linux/unbundle/replace_gyp_files.py ${myconf}
-build/gyp_chromium ${myconf}
+ This is intended to be used on sources extracted from a tarball,
+ not a repository.
+
+ NOTE: by default this will not remove anything (for safety). Pass the
+ --do-remove flag to actually remove files.
+
+2. replace_gyp_files.py <gyp-flags>
+
+ For example: replace_gyp_files.py -Duse_system_harfbuzz=1
+
+ The script ignores flags other than -D for convenience. This makes it
+ possible to have a variable e.g. ${myconf} with all the options, and
+ execute:
+
+ build/linux/unbundle/replace_gyp_files.py ${myconf}
+ build/gyp_chromium ${myconf}
diff --git a/chromium/build/linux/unbundle/openssl.gyp b/chromium/build/linux/unbundle/openssl.gyp
new file mode 100644
index 00000000000..d832ba7be4d
--- /dev/null
+++ b/chromium/build/linux/unbundle/openssl.gyp
@@ -0,0 +1,25 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'openssl',
+ 'type': 'none',
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(pkg-config --cflags openssl)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(pkg-config --libs-only-L --libs-only-other openssl)',
+ ],
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l openssl)',
+ ],
+ },
+ }
+ ],
+}
diff --git a/chromium/build/linux/unbundle/remove_bundled_libraries.py b/chromium/build/linux/unbundle/remove_bundled_libraries.py
new file mode 100755
index 00000000000..09a9c629fd9
--- /dev/null
+++ b/chromium/build/linux/unbundle/remove_bundled_libraries.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Removes bundled libraries to make sure they are not used.
+
+See README for more details.
+"""
+
+
+import optparse
+import os.path
+import sys
+
+
+def DoMain(argv):
+ my_dirname = os.path.abspath(os.path.dirname(__file__))
+ source_tree_root = os.path.abspath(
+ os.path.join(my_dirname, '..', '..', '..'))
+
+ if os.path.join(source_tree_root, 'build', 'linux', 'unbundle') != my_dirname:
+ print ('Sanity check failed: please run this script from ' +
+ 'the build/linux/unbundle directory.')
+ return 1
+
+ parser = optparse.OptionParser()
+ parser.add_option('--do-remove', action='store_true')
+
+ options, args = parser.parse_args(argv)
+
+ exclusion_used = {}
+ for exclusion in args:
+ exclusion_used[exclusion] = False
+
+ for root, dirs, files in os.walk(source_tree_root, topdown=False):
+ # Only look at paths which contain a "third_party" component
+ # (note that e.g. third_party.png doesn't count).
+ root_relpath = os.path.relpath(root, source_tree_root)
+ if 'third_party' not in root_relpath.split(os.sep):
+ continue
+
+ for f in files:
+ path = os.path.join(root, f)
+ relpath = os.path.relpath(path, source_tree_root)
+
+ excluded = False
+ for exclusion in args:
+ if relpath.startswith(exclusion):
+ # Multiple exclusions can match the same path. Go through all of them
+ # and mark each one as used.
+ exclusion_used[exclusion] = True
+ excluded = True
+ if excluded:
+ continue
+
+ # Deleting gyp files almost always leads to gyp failures.
+ # These files come from the Chromium project and can be replaced if needed.
+ if f.endswith('.gyp') or f.endswith('.gypi'):
+ continue
+
+ if options.do_remove:
+ # Delete the file - best way to ensure it's not used during build.
+ os.remove(path)
+ else:
+ # By default just print paths that would be removed.
+ print path
+
+ exit_code = 0
+
+ # Fail if exclusion list contains stale entries - this helps keep it
+ # up to date.
+ for exclusion, used in exclusion_used.iteritems():
+ if not used:
+ print '%s does not exist' % exclusion
+ exit_code = 1
+
+ if not options.do_remove:
+ print ('To actually remove files printed above, please pass ' +
+ 'the --do-remove flag.')
+
+ return exit_code
+
+
+if __name__ == '__main__':
+ sys.exit(DoMain(sys.argv[1:]))
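
remove_bundled_libraries.py only ever touches paths with a third_party component, always keeps .gyp/.gypi files, and deletes nothing unless --do-remove is given; it also fails if a preserved directory named on the command line matches no files, which keeps the preserve list current. Besides running it directly as in the README, it can be driven from Python, as in this sketch run from a source tree root.

  # Dry run: print what would be removed while preserving third_party/mesa.
  import sys
  sys.path.insert(0, 'build/linux/unbundle')
  import remove_bundled_libraries

  exit_code = remove_bundled_libraries.DoMain(['third_party/mesa'])

  # To actually delete the bundled copies, add the --do-remove flag:
  #   remove_bundled_libraries.DoMain(['third_party/mesa', '--do-remove'])
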
diff --git a/chromium/build/linux/unbundle/replace_gyp_files.py b/chromium/build/linux/unbundle/replace_gyp_files.py
index 1436711a8b3..c0fcc49f553 100755
--- a/chromium/build/linux/unbundle/replace_gyp_files.py
+++ b/chromium/build/linux/unbundle/replace_gyp_files.py
@@ -30,6 +30,7 @@ REPLACEMENTS = {
'use_system_libwebp': 'third_party/libwebp/libwebp.gyp',
'use_system_libxml': 'third_party/libxml/libxml.gyp',
'use_system_libxslt': 'third_party/libxslt/libxslt.gyp',
+ 'use_system_openssl': 'third_party/openssl/openssl.gyp',
'use_system_opus': 'third_party/opus/opus.gyp',
'use_system_re2': 'third_party/re2/re2.gyp',
'use_system_snappy': 'third_party/snappy/snappy.gyp',
diff --git a/chromium/build/mac/edit_xibs.sh b/chromium/build/mac/edit_xibs.sh
index a3054557b37..82045910f2e 100755
--- a/chromium/build/mac/edit_xibs.sh
+++ b/chromium/build/mac/edit_xibs.sh
@@ -12,6 +12,6 @@ set -e
RELSRC=$(dirname "$0")/../..
SRC=$(cd "$RELSRC" && pwd)
-GYP_GENERATORS=xcode python "$SRC/tools/gyp/gyp" "$SRC/chrome/chrome_nibs.gyp"
+GYP_GENERATORS=xcode "$SRC/tools/gyp/gyp" "$SRC/chrome/chrome_nibs.gyp"
echo "You can now edit XIB files in Xcode using:"
echo " $SRC/chrome/chrome_nibs.xcodeproj"
diff --git a/chromium/build/sanitize-png-files.sh b/chromium/build/sanitize-png-files.sh
deleted file mode 100755
index e47508e470b..00000000000
--- a/chromium/build/sanitize-png-files.sh
+++ /dev/null
@@ -1,445 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# The optimization code is based on pngslim (http://goo.gl/a0XHg)
-# and executes a similar pipleline to optimize the png file size.
-# The steps that require pngoptimizercl/pngrewrite/deflopt are omitted,
-# but this runs all other processes, including:
-# 1) various color-dependent optimizations using optipng.
-# 2) optimize the number of huffman blocks.
-# 3) randomize the huffman table.
-# 4) Further optimize using optipng and advdef (zlib stream).
-# Due to the step 3), each run may produce slightly different results.
-#
-# Note(oshima): In my experiment, advdef didn't reduce much. I'm keeping it
-# for now as it does not take much time to run.
-
-readonly ALL_DIRS="
-ash/resources
-ui/resources
-chrome/app/theme
-chrome/browser/resources
-chrome/renderer/resources
-webkit/glue/resources
-remoting/resources
-remoting/webapp
-"
-
-# Files larger than this file size (in bytes) will
-# use the optimization parameters tailored for large files.
-LARGE_FILE_THRESHOLD=3000
-
-# Constants used for optimization
-readonly DEFAULT_MIN_BLOCK_SIZE=128
-readonly DEFAULT_LIMIT_BLOCKS=256
-readonly DEFAULT_RANDOM_TRIALS=100
-# Taken from the recommendation in the pngslim's readme.txt.
-readonly LARGE_MIN_BLOCK_SIZE=1
-readonly LARGE_LIMIT_BLOCKS=2
-readonly LARGE_RANDOM_TRIALS=1
-
-# Global variables for stats
-TOTAL_OLD_BYTES=0
-TOTAL_NEW_BYTES=0
-TOTAL_FILE=0
-PROCESSED_FILE=0
-
-declare -a THROBBER_STR=('-' '\\' '|' '/')
-THROBBER_COUNT=0
-
-# Show throbber character at current cursor position.
-function throbber {
- echo -ne "${THROBBER_STR[$THROBBER_COUNT]}\b"
- let THROBBER_COUNT=($THROBBER_COUNT+1)%4
-}
-
-# Usage: pngout_loop <file> <png_out_options> ...
-# Optimize the png file using pngout with the given options
-# using various block split thresholds and filter types.
-function pngout_loop {
- local file=$1
- shift
- local opts=$*
- if [ $OPTIMIZE_LEVEL == 1 ]; then
- for j in $(seq 0 5); do
- throbber
- pngout -q -k1 -s1 -f$j $opts $file
- done
- else
- for i in 0 128 256 512; do
- for j in $(seq 0 5); do
- throbber
- pngout -q -k1 -s1 -b$i -f$j $opts $file
- done
- done
- fi
-}
-
-# Usage: get_color_depth_list
-# Returns the list of color depth options for current optimization level.
-function get_color_depth_list {
- if [ $OPTIMIZE_LEVEL == 1 ]; then
- echo "-d0"
- else
- echo "-d1 -d2 -d4 -d8"
- fi
-}
-
-# Usage: process_grayscale <file>
-# Optimize grayscale images for all color bit depths.
-#
-# TODO(oshima): Experiment with -d0 w/o -c0.
-function process_grayscale {
- echo -n "|gray"
- for opt in $(get_color_depth_list); do
- pngout_loop $file -c0 $opt
- done
-}
-
-# Usage: process_grayscale_alpha <file>
-# Optimize grayscale images with alpha for all color bit depths.
-function process_grayscale_alpha {
- echo -n "|gray-a"
- pngout_loop $file -c4
- for opt in $(get_color_depth_list); do
- pngout_loop $file -c3 $opt
- done
-}
-
-# Usage: process_rgb <file>
-# Optimize rgb images with or without alpha for all color bit depths.
-function process_rgb {
- echo -n "|rgb"
- for opt in $(get_color_depth_list); do
- pngout_loop $file -c3 $opt
- done
- pngout_loop $file -c2
- pngout_loop $file -c6
-}
-
-# Usage: huffman_blocks <file>
-# Optimize the huffman blocks.
-function huffman_blocks {
- local file=$1
- echo -n "|huffman"
- local size=$(stat -c%s $file)
- local min_block_size=$DEFAULT_MIN_BLOCK_SIZE
- local limit_blocks=$DEFAULT_LIMIT_BLOCKS
-
- if [ $size -gt $LARGE_FILE_THRESHOLD ]; then
- min_block_size=$LARGE_MIN_BLOCK_SIZE
- limit_blocks=$LARGE_LIMIT_BLOCKS
- fi
- let max_blocks=$size/$min_block_size
- if [ $max_blocks -gt $limit_blocks ]; then
- max_blocks=$limit_blocks
- fi
-
- for i in $(seq 2 $max_blocks); do
- throbber
- pngout -q -k1 -ks -s1 -n$i $file
- done
-}
-
-# Usage: random_huffman_table_trial <file>
-# Try compressing by randomizing the initial huffman table.
-#
-# TODO(oshima): Try adjusting different parameters for large files to
-# reduce runtime.
-function random_huffman_table_trial {
- echo -n "|random"
- local file=$1
- local old_size=$(stat -c%s $file)
- local trials_count=$DEFAULT_RANDOM_TRIALS
-
- if [ $old_size -gt $LARGE_FILE_THRESHOLD ]; then
- trials_count=$LARGE_RANDOM_TRIALS
- fi
- for i in $(seq 1 $trials_count); do
- throbber
- pngout -q -k1 -ks -s0 -r $file
- done
- local new_size=$(stat -c%s $file)
- if [ $new_size -lt $old_size ]; then
- random_huffman_table_trial $file
- fi
-}
-
-# Usage: final_comprssion <file>
-# Further compress using optipng and advdef.
-# TODO(oshima): Experiment with 256.
-function final_compression {
- echo -n "|final"
- local file=$1
- if [ $OPTIMIZE_LEVEL == 2 ]; then
- for i in 32k 16k 8k 4k 2k 1k 512; do
- throbber
- optipng -q -nb -nc -zw$i -zc1-9 -zm1-9 -zs0-3 -f0-5 $file
- done
- fi
- for i in $(seq 1 4); do
- throbber
- advdef -q -z -$i $file
- done
- echo -ne "\r"
-}
-
-# Usage: get_color_type <file>
-# Returns the color type name of the png file. Here is the list of names
-# for each color type codes.
-# 0 : grayscale
-# 2 : RGB
-# 3 : colormap
-# 4 : gray+alpha
-# 6 : RGBA
-# See http://en.wikipedia.org/wiki/Portable_Network_Graphics#Color_depth
-# for details about the color type code.
-function get_color_type {
- local file=$1
- echo $(file $file | awk -F, '{print $3}' | awk '{print $2}')
-}
-
-# Usage: optimize_size <file>
-# Performs png file optimization.
-function optimize_size {
- tput el
- local file=$1
- echo -n "$file "
-
- advdef -q -z -4 $file
-
- pngout -q -s4 -c0 -force $file $file.tmp.png
- if [ -f $file.tmp.png ]; then
- rm $file.tmp.png
- process_grayscale $file
- process_grayscale_alpha $file
- else
- pngout -q -s4 -c4 -force $file $file.tmp.png
- if [ -f $file.tmp.png ]; then
- rm $file.tmp.png
- process_grayscale_alpha $file
- else
- process_rgb $file
- fi
- fi
-
- echo -n "|filter"
- local old_color_type=$(get_color_type $file)
- optipng -q -zc9 -zm8 -zs0-3 -f0-5 $file -out $file.tmp.png
- local new_color_type=$(get_color_type $file.tmp.png)
- # optipng may corrupt a png file when reducing the color type
- # to grayscale/grayscale+alpha. Just skip such cases until
- # the bug is fixed. See crbug.com/174505, crbug.com/174084.
- # The issue is reported in
- # https://sourceforge.net/tracker/?func=detail&aid=3603630&group_id=151404&atid=780913
- if [[ $old_color_type == "RGBA" && $new_color_type =~ gray.* ]] ; then
- rm $file.tmp.png
- echo -n "[skip opting]"
- else
- mv $file.tmp.png $file
- fi
- pngout -q -k1 -s1 $file
-
- huffman_blocks $file
-
- # TODO(oshima): Experiment with strategy 1.
- echo -n "|strategy"
- if [ $OPTIMIZE_LEVEL == 2 ]; then
- for i in 3 2 0; do
- pngout -q -k1 -ks -s$i $file
- done
- else
- pngout -q -k1 -ks -s1 $file
- fi
-
- if [ $OPTIMIZE_LEVEL == 2 ]; then
- random_huffman_table_trial $file
- fi
-
- final_compression $file
-}
-
-# Usage: process_file <file>
-function process_file {
- local file=$1
- local name=$(basename $file)
- # -rem alla removes all ancillary chunks except for tRNS
- pngcrush -d $TMP_DIR -brute -reduce -rem alla $file > /dev/null
-
- if [ $OPTIMIZE_LEVEL != 0 ]; then
- optimize_size $TMP_DIR/$name
- fi
-}
-
-# Usage: sanitize_file <file>
-function sanitize_file {
- local file=$1
- local name=$(basename $file)
- local old=$(stat -c%s $file)
- local tmp_file=$TMP_DIR/$name
-
- process_file $file
-
- local new=$(stat -c%s $tmp_file)
- let diff=$old-$new
- let percent=($diff*100)/$old
- let TOTAL_FILE+=1
-
- tput el
- if [ $new -lt $old ]; then
- echo -ne "$file : $old => $new ($diff bytes : $percent %)\n"
- mv "$tmp_file" "$file"
- let TOTAL_OLD_BYTES+=$old
- let TOTAL_NEW_BYTES+=$new
- let PROCESSED_FILE+=1
- else
- if [ $OPTIMIZE_LEVEL == 0 ]; then
- echo -ne "$file : skipped\r"
- fi
- rm $tmp_file
- fi
-}
-
-function sanitize_dir {
- local dir=$1
- for f in $(find $dir -name "*.png"); do
- if $using_cygwin ; then
- sanitize_file $(cygpath -w $f)
- else
- sanitize_file $f
- fi
- done
-}
-
-function install_if_not_installed {
- local program=$1
- local package=$2
- which $program > /dev/null 2>&1
- if [ "$?" != "0" ]; then
- if $using_cygwin ; then
- echo "Couldn't find $program. Please run setup.exe and install the $package package."
- exit 1
- else
- read -p "Couldn't find $program. Do you want to install? (y/n)"
- [ "$REPLY" == "y" ] && sudo apt-get install $package
- [ "$REPLY" == "y" ] || exit
- fi
- fi
-}
-
-function fail_if_not_installed {
- local program=$1
- local url=$2
- which $program > /dev/null 2>&1
- if [ $? != 0 ]; then
- echo "Couldn't find $program. Please download and install it from $url ."
- exit 1
- fi
-}
-
-function show_help {
- local program=$(basename $0)
- echo \
-"Usage: $program [options] dir ...
-
-$program is a utility to reduce the size of png files by removing
-unnecessary chunks and compressing the image.
-
-Options:
- -o<optimize_level> Specify optimization level: (default is 1)
- 0 Just run pngcrush. It removes unnecessary chunks and perform basic
- optimization on the encoded data.
- 1 Optimize png files using pngout/optipng and advdef. This can further
- reduce addtional 5~30%. This is the default level.
- 2 Aggressively optimize the size of png files. This may produce
- addtional 1%~5% reduction. Warning: this is *VERY*
- slow and can take hours to process all files.
- -h Print this help text."
- exit 1
-}
-
-if [ ! -e ../.gclient ]; then
- echo "$0 must be run in src directory"
- exit 1
-fi
-
-if [ "$(expr substr $(uname -s) 1 6)" == "CYGWIN" ]; then
- using_cygwin=true
-else
- using_cygwin=false
-fi
-
-OPTIMIZE_LEVEL=1
-# Parse options
-while getopts o:h opts
-do
- case $opts in
- o)
- if [[ ! "$OPTARG" =~ [012] ]]; then
- show_help
- fi
- OPTIMIZE_LEVEL=$OPTARG
- [ "$1" == "-o" ] && shift
- shift;;
- [h?])
- show_help;;
- esac
-done
-
-# Make sure we have all necessary commands installed.
-install_if_not_installed pngcrush pngcrush
-if [ $OPTIMIZE_LEVEL == 2 ]; then
- install_if_not_installed optipng optipng
-
- if $using_cygwin ; then
- fail_if_not_installed advdef "http://advancemame.sourceforge.net/comp-readme.html"
- else
- install_if_not_installed advdef advancecomp
- fi
-
- if $using_cygwin ; then
- pngout_url="http://www.advsys.net/ken/utils.htm"
- else
- pngout_url="http://www.jonof.id.au/kenutils"
- fi
- fail_if_not_installed pngout $pngout_url
-fi
-
-# Create tmp directory for crushed png file.
-TMP_DIR=$(mktemp -d)
-if $using_cygwin ; then
- TMP_DIR=$(cygpath -w $TMP_DIR)
-fi
-
-# Make sure we cleanup temp dir
-trap "rm -rf $TMP_DIR" EXIT
-
-# If no directories are specified, sanitize all directories.
-DIRS=$@
-set ${DIRS:=$ALL_DIRS}
-
-echo "Optimize level=$OPTIMIZE_LEVEL"
-for d in $DIRS; do
- if $using_cygwin ; then
- d=$(cygpath -w $d)
- fi
- echo "Sanitizing png files in $d"
- sanitize_dir $d
- echo
-done
-
-# Print the results.
-if [ $PROCESSED_FILE == 0 ]; then
- echo "Did not find any files (out of $TOTAL_FILE files)" \
- "that could be optimized" \
- "in $(date -u -d @$SECONDS +%T)s"
-else
- let diff=$TOTAL_OLD_BYTES-$TOTAL_NEW_BYTES
- let percent=$diff*100/$TOTAL_OLD_BYTES
- echo "Processed $PROCESSED_FILE files (out of $TOTAL_FILE files)" \
- "in $(date -u -d @$SECONDS +%T)s"
- echo "Result : $TOTAL_OLD_BYTES => $TOTAL_NEW_BYTES bytes" \
- "($diff bytes : $percent %)"
-fi
diff --git a/chromium/build/slave/OWNERS b/chromium/build/slave/OWNERS
new file mode 100644
index 00000000000..c367f574974
--- /dev/null
+++ b/chromium/build/slave/OWNERS
@@ -0,0 +1,24 @@
+set noparent
+agable@chromium.org
+agable@google.com
+bevc@chromium.org
+bevc@google.com
+cmp@chromium.org
+cmp@google.com
+dpranke@chromium.org
+iannucci@chromium.org
+iannucci@google.com
+ilevy@chromium.org
+ilevy@google.com
+johnw@chromium.org
+johnw@google.com
+maruel@chromium.org
+maruel@google.com
+mmoss@chromium.org
+mmoss@google.com
+pschmidt@chromium.org
+pschmidt@google.com
+szager@chromium.org
+szager@google.com
+xusydoc@chromium.org
+xusydoc@google.com
diff --git a/chromium/build/slave/README b/chromium/build/slave/README
new file mode 100644
index 00000000000..e3718b2c281
--- /dev/null
+++ b/chromium/build/slave/README
@@ -0,0 +1,8 @@
+This directory contains configuration information for the
+buildsystem.
+
+* Under recipes, the buildsystem should use only this directory as an
+ entry point into src/.
+
+* Scripts in this directory must not import from outside this directory or shell
+ out to scripts outside this directory.
diff --git a/chromium/build/tree_truth.sh b/chromium/build/tree_truth.sh
new file mode 100755
index 00000000000..03d0523bd2f
--- /dev/null
+++ b/chromium/build/tree_truth.sh
@@ -0,0 +1,101 @@
+#!/bin/bash
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Script for printing recent commits in a buildbot run.
+
+# Return the sha1 of the given tag. If not present, return "".
+# $1: path to repo
+# $2: tag name
+tt_sha1_for_tag() {
+ oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null)
+ if [ $? -eq 0 ] ; then
+ echo $oneline
+ fi
+}
+
+# Return the sha1 of HEAD, or ""
+# $1: path to repo
+tt_sha1_for_head() {
+ ( cd $1 && git log HEAD -n1 --format='%H' | cat )
+}
+
+# For the given repo, set tag to HEAD.
+# $1: path to repo
+# $2: tag name
+tt_tag_head() {
+ ( cd $1 && git tag -f $2 )
+}
+
+# For the given repo, delete the tag.
+# $1: path to repo
+# $2: tag name
+tt_delete_tag() {
+ ( cd $1 && git tag -d $2 )
+}
+
+# For the given repo, set tag to "three commits ago" (for testing).
+# $1: path to repo
+# $2: tag name
+tt_tag_three_ago() {
+ local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}')
+ ( cd $1 && git tag -f $2 $sh )
+}
+
+# List the commits between the given tag and HEAD.
+# If the tag does not exist, only list the last few.
+# If the tag is at HEAD, list nothing.
+# Output format has distinct build steps for repos with changes.
+# $1: path to repo
+# $2: tag name
+# $3: simple/short repo name to use for display
+tt_list_commits() {
+ local tag_sha1=$(tt_sha1_for_tag $1 $2)
+ local head_sha1=$(tt_sha1_for_head $1)
+ local display_name=$(echo $3 | sed 's#/#_#g')
+ if [ "${tag_sha1}" = "${head_sha1}" ] ; then
+ return
+ fi
+ if [ "${tag_sha1}" = "" ] ; then
+ echo "@@@BUILD_STEP Recent commits in repo $display_name@@@"
+ echo "NOTE: git tag was not found so we have no baseline."
+ echo "Here are some recent commits, but they may not be new for this build."
+ ( cd $1 && git log -n 10 --stat | cat)
+ else
+ echo "@@@BUILD_STEP New commits in repo $display_name@@@"
+ ( cd $1 && git log -n 500 $2..HEAD --stat | cat)
+ fi
+}
+
+# Clean out the tree truth tags in all repos. For testing.
+tt_clean_all() {
+ for project in $@; do
+ tt_delete_tag $CHROME_SRC/../$project tree_truth
+ done
+}
+
+# Print tree truth for all clank repos.
+tt_print_all() {
+ for project in $@; do
+ local full_path=$CHROME_SRC/../$project
+ tt_list_commits $full_path tree_truth $project
+ tt_tag_head $full_path tree_truth
+ done
+}
+
+# Print a summary of the last 10 commits for each repo.
+tt_brief_summary() {
+ echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
+ for p in $@; do
+ echo $p
+ (cd $CHROME_SRC/../$p && git log -n 10 --format=" %H %s %an, %ad" | cat)
+ echo "================================================================="
+ done
+}
+
+CHROME_SRC=$1
+shift
+PROJECT_LIST=$@
+tt_brief_summary $PROJECT_LIST
+tt_print_all $PROJECT_LIST
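
tree_truth.sh answers "what landed since the last build" per repo by keeping a tree_truth tag: it lists tag..HEAD (or just the last 10 commits when the tag is missing), then moves the tag to HEAD. For illustration only, the same tag-to-HEAD listing sketched in Python rather than shell:

  import os
  import subprocess

  def commits_since_tag(repo, tag='tree_truth'):
    # Rough Python equivalent of tt_list_commits above.
    with open(os.devnull, 'w') as devnull:
      has_tag = subprocess.call(['git', 'log', '-1', tag, '--format=%H'],
                                cwd=repo, stdout=devnull, stderr=devnull) == 0
    if not has_tag:
      # No baseline tag yet, so only the most recent commits can be shown.
      return subprocess.check_output(['git', 'log', '-n', '10', '--stat'],
                                     cwd=repo)
    return subprocess.check_output(
        ['git', 'log', '-n', '500', '%s..HEAD' % tag, '--stat'], cwd=repo)
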
diff --git a/chromium/build/util/LASTCHANGE b/chromium/build/util/LASTCHANGE
index 6e98c846aae..e97726fa4a4 100644
--- a/chromium/build/util/LASTCHANGE
+++ b/chromium/build/util/LASTCHANGE
@@ -1 +1 @@
-LASTCHANGE=235169
+LASTCHANGE=238485
diff --git a/chromium/build/util/LASTCHANGE.blink b/chromium/build/util/LASTCHANGE.blink
index 2b4cc5ae4d6..ce3faad86f4 100644
--- a/chromium/build/util/LASTCHANGE.blink
+++ b/chromium/build/util/LASTCHANGE.blink
@@ -1 +1 @@
-LASTCHANGE=155942
+LASTCHANGE=163124
diff --git a/chromium/build/util/lib/common/perf_result_data_type.py b/chromium/build/util/lib/common/perf_result_data_type.py
new file mode 100644
index 00000000000..67b550a46c0
--- /dev/null
+++ b/chromium/build/util/lib/common/perf_result_data_type.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEFAULT = 'default'
+UNIMPORTANT = 'unimportant'
+HISTOGRAM = 'histogram'
+UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
+INFORMATIONAL = 'informational'
+
+ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
+ INFORMATIONAL]
+
+
+def IsValidType(datatype):
+ return datatype in ALL_TYPES
+
+
+def IsHistogram(datatype):
+ return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
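
perf_result_data_type.py is just the shared vocabulary of result types: IsValidType() guards PrintPerfResult() in the helper below, and IsHistogram() picks the output branch. For example, assuming the module is on the import path:

  import perf_result_data_type

  assert perf_result_data_type.IsValidType('unimportant-histogram')
  assert perf_result_data_type.IsHistogram(perf_result_data_type.HISTOGRAM)
  assert not perf_result_data_type.IsHistogram(perf_result_data_type.DEFAULT)
  assert not perf_result_data_type.IsValidType('bogus')
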
diff --git a/chromium/build/util/lib/common/perf_tests_results_helper.py b/chromium/build/util/lib/common/perf_tests_results_helper.py
new file mode 100644
index 00000000000..733cbf91acb
--- /dev/null
+++ b/chromium/build/util/lib/common/perf_tests_results_helper.py
@@ -0,0 +1,151 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+import sys
+
+import json
+import logging
+import math
+
+import perf_result_data_type
+
+
+# Mapping from result type to test output
+RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ',
+ perf_result_data_type.DEFAULT: '*RESULT ',
+ perf_result_data_type.INFORMATIONAL: '',
+ perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ',
+ perf_result_data_type.HISTOGRAM: '*HISTOGRAM '}
+
+
+def _EscapePerfResult(s):
+ """Escapes |s| for use in a perf result."""
+ return re.sub('[\:|=/#&,]', '_', s)
+
+
+def _Flatten(values):
+ """Returns a simple list without sub-lists."""
+ ret = []
+ for entry in values:
+ if isinstance(entry, list):
+ ret.extend(_Flatten(entry))
+ else:
+ ret.append(entry)
+ return ret
+
+
+def GeomMeanAndStdDevFromHistogram(histogram_json):
+ histogram = json.loads(histogram_json)
+ # Handle empty histograms gracefully.
+ if not 'buckets' in histogram:
+ return 0.0, 0.0
+ count = 0
+ sum_of_logs = 0
+ for bucket in histogram['buckets']:
+ if 'high' in bucket:
+ bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
+ else:
+ bucket['mean'] = bucket['low']
+ if bucket['mean'] > 0:
+ sum_of_logs += math.log(bucket['mean']) * bucket['count']
+ count += bucket['count']
+
+ if count == 0:
+ return 0.0, 0.0
+
+ sum_of_squares = 0
+ geom_mean = math.exp(sum_of_logs / count)
+ for bucket in histogram['buckets']:
+ if bucket['mean'] > 0:
+ sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
+ return geom_mean, math.sqrt(sum_of_squares / count)
+
+
+def _MeanAndStdDevFromList(values):
+ avg = None
+ sd = None
+ if len(values) > 1:
+ try:
+ value = '[%s]' % ','.join([str(v) for v in values])
+ avg = sum([float(v) for v in values]) / len(values)
+ sqdiffs = [(float(v) - avg) ** 2 for v in values]
+ variance = sum(sqdiffs) / (len(values) - 1)
+ sd = math.sqrt(variance)
+ except ValueError:
+ value = ", ".join(values)
+ else:
+ value = values[0]
+ return value, avg, sd
+
+
+def PrintPages(page_list):
+ """Prints list of pages to stdout in the format required by perf tests."""
+ print 'Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])
+
+
+def PrintPerfResult(measurement, trace, values, units,
+ result_type=perf_result_data_type.DEFAULT,
+ print_to_stdout=True):
+ """Prints numerical data to stdout in the format required by perf tests.
+
+ The string args may be empty but they must not contain any colons (:) or
+ equals signs (=).
+
+ Args:
+ measurement: A description of the quantity being measured, e.g. "vm_peak".
+ trace: A description of the particular data point, e.g. "reference".
+ values: A list of numeric measured values. An N-dimensional list will be
+ flattened and treated as a simple list.
+ units: A description of the units of measure, e.g. "bytes".
+ result_type: Accepts values of perf_result_data_type.ALL_TYPES.
+ print_to_stdout: If True, prints the output in stdout instead of returning
+ the output to caller.
+
+ Returns:
+ String of the formatted perf result.
+ """
+ assert perf_result_data_type.IsValidType(result_type), \
+ 'result type: %s is invalid' % result_type
+
+ trace_name = _EscapePerfResult(trace)
+
+ if (result_type == perf_result_data_type.UNIMPORTANT or
+ result_type == perf_result_data_type.DEFAULT or
+ result_type == perf_result_data_type.INFORMATIONAL):
+ assert isinstance(values, list)
+ assert len(values)
+ assert '/' not in measurement
+ value, avg, sd = _MeanAndStdDevFromList(_Flatten(values))
+ output = '%s%s: %s%s%s %s' % (
+ RESULT_TYPES[result_type],
+ _EscapePerfResult(measurement),
+ trace_name,
+ # Do not show the equal sign if the trace is empty. Usually this happens
+ # when the measurement is clear enough to describe the result.
+ '= ' if trace_name else '',
+ value,
+ units)
+ else:
+ assert perf_result_data_type.IsHistogram(result_type)
+ assert isinstance(values, list)
+ # The histograms can only be printed individually, there's no computation
+ # across different histograms.
+ assert len(values) == 1
+ value = values[0]
+ output = '%s%s: %s= %s' % (
+ RESULT_TYPES[result_type],
+ _EscapePerfResult(measurement),
+ trace_name,
+ value)
+ avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+ if avg:
+ output += '\nAvg %s: %f%s' % (measurement, avg, units)
+ if sd:
+ output += '\nSd %s: %f%s' % (measurement, sd, units)
+ if print_to_stdout:
+ print output
+ sys.stdout.flush()
+ return output
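
PrintPerfResult() flattens the value list and reports the mean and sample standard deviation for the scalar result types; for the histogram types it expects exactly one JSON histogram and reports the geometric mean and standard deviation of the bucket midpoints, weighted by count. A small sketch of both paths; the measurement and trace names are made up.

  import perf_tests_results_helper

  # Scalar path: emits '*RESULT vm_peak: reference= [10,12,14] kb'
  # followed by 'Avg vm_peak: ...' and 'Sd vm_peak: ...' lines.
  perf_tests_results_helper.PrintPerfResult('vm_peak', 'reference',
                                            [10, 12, 14], 'kb')

  # Histogram path: exactly one JSON histogram; buckets carry low/high/count.
  histogram = ('{"buckets": [{"low": 10, "high": 20, "count": 2},'
               ' {"low": 20, "high": 40, "count": 1}]}')
  perf_tests_results_helper.PrintPerfResult('timings', 'load', [histogram],
                                            'ms', result_type='histogram')
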
diff --git a/chromium/build/whitespace_file.txt b/chromium/build/whitespace_file.txt
index 405a489430a..83f538de0c3 100644
--- a/chromium/build/whitespace_file.txt
+++ b/chromium/build/whitespace_file.txt
@@ -57,8 +57,9 @@ wondered where the sushi came from as he attempted to wash the taste away with
a bottle of 3000¥ sake. He tries to recall the cook's face. Purple?
CHAPTER 5:
-Many years later, Mr. Usagi would laugh at the memory of the earnest,
-well-intentioned Domo-Kun. Another day in the life.
+Many tears later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun. Another day in the life. That is when he realized that
+life goes on.
TRUISMS (1978-1983)
JENNY HOLZER
@@ -71,3 +72,8 @@ A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
NO ONE SHOULD EVER USE SVN
AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
IT IS MANS FATE TO OUTSMART HIMSELF
+BEING SURE OF YOURSELF MEANS YOU'RE A FOOL
+AM NOT
+IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY
+
+31