Bug 1393119 - Add webrtc.org trunk/build/ files to support gn build; r=jesup

MozReview-Commit-ID: KaupPBeJucK

--HG--
extra : rebase_source : 304c0a281d409c1e33321fbef555196e288451f1
Dan Minor 2017-07-27 12:42:30 -04:00
Parent 73ad6401d3
Commit 662432279a
280 changed files: 34935 additions and 1936 deletions

media/webrtc/trunk/.gn  (new file, 54 lines added)
View file

@ -0,0 +1,54 @@
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("//build/dotfile_settings.gni")
# The location of the build configuration file.
buildconfig = "//build/config/BUILDCONFIG.gn"
# The secondary source root is a parallel directory tree where
# GN build files are placed when they can not be placed directly
# in the source tree, e.g. for third party source trees.
secondary_source = "//build/secondary/"
# These are the targets to check headers for by default. The files in targets
# matching these patterns (see "gn help label_pattern" for format) will have
# their includes checked for proper dependencies when you run either
# "gn check" or "gn gen --check".
# TODO(kjellander): Keep adding paths to this list as work in webrtc:5589 is done.
check_targets = [
"//webrtc/api/*",
"//webrtc/audio/*",
"//webrtc/modules/audio_coding/*",
"//webrtc/modules/audio_conference_mixer/*",
"//webrtc/modules/audio_device/*",
"//webrtc/modules/audio_mixer/*",
"//webrtc/modules/audio_processing/*",
"//webrtc/modules/bitrate_controller/*",
"//webrtc/modules/congestion_controller/*",
"//webrtc/modules/desktop_capture/*",
"//webrtc/modules/media_file/*",
"//webrtc/modules/pacing/*",
"//webrtc/modules/rtp_rtcp/*",
"//webrtc/modules/utility/*",
"//webrtc/modules/video_capture/*",
"//webrtc/modules/video_coding/*",
"//webrtc/modules/video_processing/*",
"//webrtc/modules/remote_bitrate_estimator/*",
"//webrtc/stats:rtc_stats",
"//webrtc/voice_engine",
"//webrtc/voice_engine:audio_coder",
"//webrtc/voice_engine:file_player",
"//webrtc/voice_engine:file_recorder",
"//webrtc/voice_engine:level_indicator",
]
# This is the list of GN files that run exec_script. This whitelist exists
# to force additional review for new uses of exec_script, which is strongly
# discouraged except for gypi_to_gn calls.
exec_script_whitelist = build_dotfile_settings.exec_script_whitelist
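
The whitelist above is imported verbatim from //build/dotfile_settings.gni. As a
hedged, non-authoritative sketch (the extra path is made up for illustration), a
project-local .gn file that genuinely needed one more exec_script caller could
append to the imported list rather than replacing it:

import("//build/dotfile_settings.gni")
buildconfig = "//build/config/BUILDCONFIG.gn"
exec_script_whitelist =
    build_dotfile_settings.exec_script_whitelist + [ "//webrtc/build/extra.gni" ]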

View file

@ -0,0 +1,21 @@
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("webrtc/build/webrtc.gni")
group("default") {
testonly = true
deps = [
"//webrtc",
"//webrtc/examples",
"//webrtc/tools",
]
if (rtc_include_tests) {
deps += [ "//webrtc:webrtc_tests" ]
}
}
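
For reference, rtc_include_tests above is presumably declared in the imported
webrtc/build/webrtc.gni. A hedged sketch of an args.gn for a build directory
that uses this root target without the test deps; is_debug is declared in
//build/config/BUILDCONFIG.gn later in this commit, and target_os/target_cpu
are GN built-ins:

is_debug = false
target_os = "linux"
target_cpu = "x64"
rtc_include_tests = false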

View file

@ -1 +1,22 @@
*
agrieve@chromium.org
dpranke@chromium.org
jbudorick@chromium.org
jochen@chromium.org
scottmg@chromium.org
thakis@chromium.org
brucedawson@chromium.org
per-file .gitignore=*
per-file mac_toolchain.py=erikchen@chromium.org
per-file mac_toolchain.py=justincohen@chromium.org
per-file package_mac_toolchain.py=erikchen@chromium.org
per-file package_mac_toolchain.py=justincohen@chromium.org
per-file whitespace_file.txt=*
per-file OWNERS.status=*
# gn-dev is probably a better team here, but the tooling won't let us
# have more than one team per component, and infra-dev is a catch-all
# for other build-related lists.
#
# TEAM: infra-dev@chromium.org
# COMPONENT: Build

View file

@ -0,0 +1,12 @@
# Use this file to set a global status message that should be shown whenever
# git cl owners proposes to add you as a reviewer.
#
# The status messages should be somewhat stable, so please don't use this for
# short term, or frequently changing updates.
#
# The format of the file is
#
# you@chromium.org: Single line status message.
#
jochen@chromium.org: EMEA based reviewer.

View file

@ -0,0 +1 @@
per-file headless.gn=file://headless/OWNERS

View file

@ -0,0 +1,31 @@
This directory is here to hold .gni files that contain sets of GN build
arguments for given configurations.
(Currently this directory is empty because we removed the only thing here, but
this has come up several times so I'm confident we'll need this again. If this
directory is still empty by 2017, feel free to delete it. --Brett)
Some projects or bots may have build configurations with specific combinations
of flags. Rather than making a new global flag for your specific project and
adding it all over the build to each arg it should affect, you can add a .gni
file here with the variables.
For example, for project foo you may put in build/args/foo.gni:
target_os = "android"
use_pulseaudio = false
use_ozone = true
system_libdir = "foo"
Users wanting to build this configuration would run:
$ gn args out/mybuild
And add the following line to their args for that build directory:
import("//build/args/foo.gni")
# You can set any other args here like normal.
is_component_build = false
This way everybody can agree on a set of flags for a project, and their builds
stay in sync as the flags in foo.gni are modified.

View file

@ -0,0 +1,44 @@
# GN args template for the Headless Chrome library
#
# Add import to arg.gn in out directory and run gn gen on the directory to use.
# E.g. for out directory out/foo:
# echo 'import("//build/args/headless.gn")' > out/foo/args.gn
# gn gen out/foo
#
# Use gn args to add your own build preference args.
use_ozone = true
ozone_auto_platforms = false
ozone_platform = "headless"
ozone_platform_headless = true
# Embed resource.pak into binary to simplify deployment.
headless_use_embedded_resources = true
# Expose headless bindings for freetype library bundled with Chromium.
headless_fontconfig_utils = true
# Remove a dependency on a system fontconfig library.
use_bundled_fontconfig = true
# In order to simplify deployment we build ICU data file
# into binary.
icu_use_data_file = false
# Use embedded data instead external files for headless in order
# to simplify deployment.
v8_use_external_startup_data = false
enable_nacl = false
enable_print_preview = false
enable_remoting = false
use_alsa = false
use_ash = false
use_cups = false
use_dbus = false
use_gconf = false
use_gio = false
use_kerberos = false
use_libpci = false
use_pulseaudio = false
use_udev = false

View file

@ -0,0 +1,49 @@
#!/bin/bash
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
cat <<EOF
You must be using Exuberant Ctags, not just standard GNU ctags. If you are on
Debian or a related flavor of Linux, you may want to try running
apt-get install exuberant-ctags.
EOF
exit
fi
CHROME_SRC_DIR="$PWD"
fail() {
echo "Failed to create ctags for $1"
exit 1
}
ctags_cmd() {
echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
}
build_dir() {
local extraexcludes=""
if [[ a"$1" == "a--extra-excludes" ]]; then
extraexcludes="--exclude=third_party --exclude=build --exclude=out"
shift
fi
cd "$CHROME_SRC_DIR/$1" || fail $1
# Redirect error messages so they aren't seen because they are almost always
# errors about components that you just happen to have not built (NaCl, for
# example).
$(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
mv -f .tmp_tags tags
}
# We always build the top level but leave all submodules as optional.
build_dir --extra-excludes "" "top level"
# Build any other directories that are listed on the command line.
for dir in $@; do
build_dir "$1"
shift
done

View file

@ -0,0 +1,47 @@
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BUILD_BUILDFLAG_H_
#define BUILD_BUILDFLAG_H_
// These macros un-mangle the names of the build flags in a way that looks
// natural, and gives errors if the flag is not defined. Normally in the
// preprocessor it's easy to make mistakes that interpret "you haven't done
// the setup to know what the flag is" as "flag is off". Normally you would
// include the generated header rather than include this file directly.
//
// This is for use with generated headers. See build/buildflag_header.gni.
// This dance of two macros does a concatenation of two preprocessor args using
// ## doubly indirectly because using ## directly prevents macros in that
// parameter from being expanded.
#define BUILDFLAG_CAT_INDIRECT(a, b) a ## b
#define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b)
// Accessor for build flags.
//
// To test for a value, if the build file specifies:
//
// ENABLE_FOO=true
//
// Then you would check at build-time in source code with:
//
// #include "foo_flags.h" // The header the build file specified.
//
// #if BUILDFLAG(ENABLE_FOO)
// ...
// #endif
//
// There will be no #define called ENABLE_FOO, so if you accidentally test for
// whether that is defined, it will always be negative. You can also use
// the value in expressions:
//
// const char kSpamServerName[] = BUILDFLAG(SPAM_SERVER_NAME);
//
// Because the flag is accessed as a preprocessor macro with (), an error
// will be thrown if the proper header defining the internal flag value has
// not been included.
#define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)())
#endif // BUILD_BUILDFLAG_H_
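
BUILDFLAG(ENABLE_FOO) only resolves if a generated header has defined the
matching BUILDFLAG_INTERNAL_ENABLE_FOO() macro. A minimal, hedged GN sketch of
the producing side (target and flag names are hypothetical; the template itself
is build/buildflag_header.gni, shown next):

import("//build/buildflag_header.gni")

buildflag_header("foo_buildflags") {
  header = "foo_buildflags.h"
  # With enable_foo = true this generates, roughly,
  #   #define BUILDFLAG_INTERNAL_ENABLE_FOO() (1)
  # which is what BUILDFLAG(ENABLE_FOO) expands to via BUILDFLAG_CAT.
  flags = [ "ENABLE_FOO=$enable_foo" ]
}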

View file

@ -0,0 +1,137 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Generates a header with preprocessor defines specified by the build file.
#
# The flags are converted to function-style defines with mangled names and
# code uses an accessor macro to access the values. This is to try to
# minimize bugs where code checks whether something is defined or not, and
# the proper header isn't included, meaning the answer will always be silently
# false or might vary across the code base.
#
# In the GN template, specify build flags in the template as a list
# of strings that encode key/value pairs like this:
#
# flags = [ "ENABLE_FOO=1", "ENABLE_BAR=$enable_bar" ]
#
# The GN values "true" and "false" will be mapped to 1 and 0 respectively, so
# boolean #if flags can be expressed naturally. This means you can't directly
# make a define that generates a C++ value of true or false for use in code. If
# you REALLY need this, you can also use the strings "(true)" and "(false)" to
# prevent the rewriting.
# To check the value of the flag in C code:
#
# #include "path/to/here/header_file.h"
#
# #if BUILDFLAG(ENABLE_FOO)
# ...
# #endif
#
# const char kSpamServerUrl[] = BUILDFLAG(SPAM_SERVER_URL);
#
# There will be no #define called ENABLE_FOO, so if you accidentally test for
# that in an #ifdef it will always be negative.
#
#
# Template parameters
#
# flags [required, list of strings]
# Flag values as described above.
#
# header [required, string]
# File name for generated header. By default, this will go in the
# generated file directory for this target, and you would include it
# with:
# #include "<path_to_this_BUILD_file>/<header>"
#
# header_dir [optional, string]
# Override the default location of the generated header. The string will
# be treated as a subdirectory of the root_gen_dir. For example:
# header_dir = "foo/bar"
# Then you can include the header as:
# #include "foo/bar/baz.h"
#
# deps, public_deps, testonly, visibility
# Normal meaning.
#
#
# Grit defines
#
# If one .grd file uses a flag, just add to the grit target:
#
# defines = [
# "enable_doom_melon=$enable_doom_melon",
# ]
#
# If multiple .grd files use it, you'll want to put the defines in a .gni file
# so it can be shared. Generally this .gni file should include all grit defines
# for a given module (for some definition of "module"). Then do:
#
# defines = ui_grit_defines
#
# If you forget to do this, the flag will be implicitly false in the .grd file
# and those resources won't be compiled. You'll know because the resource
# #define won't be generated and any code that uses it won't compile. If you
# see a missing IDS_* string, this is probably the reason.
#
#
# Example
#
# buildflag_header("foo_features") {
# header = "foo_features.h"
#
# flags = [
# # This uses the GN build flag enable_doom_melon as the definition.
# "ENABLE_DOOM_MELON=$enable_doom_melon",
#
# # This force-enables the flag.
# "ENABLE_SPACE_LASER=true",
#
# # This will expand to the quoted C string when used in source code.
# "SPAM_SERVER_URL=\"http://www.example.com/\"",
# ]
# }
template("buildflag_header") {
action(target_name) {
script = "//build/write_buildflag_header.py"
if (defined(invoker.header_dir)) {
header_file = "${invoker.header_dir}/${invoker.header}"
} else {
# Compute the path from the root to this file.
header_file = rebase_path(".", "//") + "/${invoker.header}"
}
outputs = [
"$root_gen_dir/$header_file",
]
# Always write --flags to the file so it's not empty. Empty will confuse GN
# into thinking the response file isn't used.
response_file_contents = [ "--flags" ]
if (defined(invoker.flags)) {
response_file_contents += invoker.flags
}
args = [
"--output",
header_file, # Not rebased, Python script puts it inside gen-dir.
"--rulename",
get_label_info(":$target_name", "label_no_toolchain"),
"--gen-dir",
rebase_path(root_gen_dir, root_build_dir),
"--definitions",
"{{response_file_name}}",
]
forward_variables_from(invoker,
[
"deps",
"public_deps",
"testonly",
"visibility",
])
}
}
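
The "(true)"/"(false)" escape described above has no example in the file, so
here is a hedged sketch (reusing the doc's hypothetical enable_doom_melon flag)
contrasting the default 1/0 rewriting with a value kept as a C++ bool:

import("//build/buildflag_header.gni")

declare_args() {
  enable_doom_melon = true
}

buildflag_header("melon_buildflags") {
  header = "melon_buildflags.h"
  flags = [
    # Rewritten to 1/0, for use in "#if BUILDFLAG(ENABLE_DOOM_MELON)".
    "ENABLE_DOOM_MELON=$enable_doom_melon",

    # Parenthesized form is passed through unchanged, so
    # BUILDFLAG(DOOM_MELON_AS_BOOL) yields a C++ bool expression.
    "DOOM_MELON_AS_BOOL=(true)",
  ]
}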

View file

@ -0,0 +1,234 @@
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Find header files missing in GN.
This script gets all the header files from ninja_deps, which come from the true
dependencies generated by the compiler, and reports any that do not exist in GN.
"""
import argparse
import json
import os
import re
import shutil
import subprocess
import sys
import tempfile
from multiprocessing import Process, Queue
def GetHeadersFromNinja(out_dir, q):
"""Return all the header files from ninja_deps"""
def NinjaSource():
cmd = ['ninja', '-C', out_dir, '-t', 'deps']
# A negative bufsize means to use the system default, which usually
# means fully buffered.
popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1)
for line in iter(popen.stdout.readline, ''):
yield line.rstrip()
popen.stdout.close()
return_code = popen.wait()
if return_code:
raise subprocess.CalledProcessError(return_code, cmd)
ans, err = set(), None
try:
ans = ParseNinjaDepsOutput(NinjaSource())
except Exception as e:
err = str(e)
q.put((ans, err))
def ParseNinjaDepsOutput(ninja_out):
"""Parse ninja output and get the header files"""
all_headers = set()
prefix = '..' + os.sep + '..' + os.sep
is_valid = False
for line in ninja_out:
if line.startswith(' '):
if not is_valid:
continue
if line.endswith('.h') or line.endswith('.hh'):
f = line.strip()
if f.startswith(prefix):
f = f[6:] # Remove the '../../' prefix
# build/ only contains build-specific files like build_config.h
# and buildflag.h, and system header files, so they should be
# skipped.
if not f.startswith('build'):
all_headers.add(f)
else:
is_valid = line.endswith('(VALID)')
return all_headers
def GetHeadersFromGN(out_dir, q):
"""Return all the header files from GN"""
tmp = None
ans, err = set(), None
try:
tmp = tempfile.mkdtemp()
shutil.copy2(os.path.join(out_dir, 'args.gn'),
os.path.join(tmp, 'args.gn'))
# Do "gn gen" in a temp dir to prevent dirtying |out_dir|.
subprocess.check_call(['gn', 'gen', tmp, '--ide=json', '-q'])
gn_json = json.load(open(os.path.join(tmp, 'project.json')))
ans = ParseGNProjectJSON(gn_json, out_dir, tmp)
except Exception as e:
err = str(e)
finally:
if tmp:
shutil.rmtree(tmp)
q.put((ans, err))
def ParseGNProjectJSON(gn, out_dir, tmp_out):
"""Parse GN output and get the header files"""
all_headers = set()
for _target, properties in gn['targets'].iteritems():
sources = properties.get('sources', [])
public = properties.get('public', [])
# Exclude '"public": "*"'.
if type(public) is list:
sources += public
for f in sources:
if f.endswith('.h') or f.endswith('.hh'):
if f.startswith('//'):
f = f[2:] # Strip the '//' prefix.
if f.startswith(tmp_out):
f = out_dir + f[len(tmp_out):]
all_headers.add(f)
return all_headers
def GetDepsPrefixes(q):
"""Return all the folders controlled by DEPS file"""
prefixes, err = set(), None
try:
gclient_out = subprocess.check_output(
['gclient', 'recurse', '--no-progress', '-j1',
'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]'])
for i in gclient_out.split('\n'):
if i.startswith('src/'):
i = i[4:]
prefixes.add(i)
except Exception as e:
err = str(e)
q.put((prefixes, err))
def ParseWhiteList(whitelist):
out = set()
for line in whitelist.split('\n'):
line = re.sub(r'#.*', '', line).strip()
if line:
out.add(line)
return out
def FilterOutDepsedRepo(files, deps):
return {f for f in files if not any(f.startswith(d) for d in deps)}
def GetNonExistingFiles(lst):
out = set()
for f in lst:
if not os.path.isfile(f):
out.add(f)
return out
def main():
parser = argparse.ArgumentParser(description='''
NOTE: Use ninja to build all targets in OUT_DIR before running
this script.''')
parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release',
help='output directory of the build')
parser.add_argument('--json',
help='JSON output filename for missing headers')
parser.add_argument('--whitelist', help='file containing whitelist')
args, _extras = parser.parse_known_args()
if not os.path.isdir(args.out_dir):
parser.error('OUT_DIR "%s" does not exist.' % args.out_dir)
d_q = Queue()
d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,))
d_p.start()
gn_q = Queue()
gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,))
gn_p.start()
deps_q = Queue()
deps_p = Process(target=GetDepsPrefixes, args=(deps_q,))
deps_p.start()
d, d_err = d_q.get()
gn, gn_err = gn_q.get()
missing = d - gn
nonexisting = GetNonExistingFiles(gn)
deps, deps_err = deps_q.get()
missing = FilterOutDepsedRepo(missing, deps)
nonexisting = FilterOutDepsedRepo(nonexisting, deps)
d_p.join()
gn_p.join()
deps_p.join()
if d_err:
parser.error(d_err)
if gn_err:
parser.error(gn_err)
if deps_err:
parser.error(deps_err)
if len(GetNonExistingFiles(d)) > 0:
parser.error('''Found non-existing files in ninja deps. You should
build all in OUT_DIR.''')
if len(d) == 0:
parser.error('OUT_DIR looks empty. You should build all there.')
if any((('/gen/' in i) for i in nonexisting)):
parser.error('OUT_DIR looks wrong. You should build all there.')
if args.whitelist:
whitelist = ParseWhiteList(open(args.whitelist).read())
missing -= whitelist
missing = sorted(missing)
nonexisting = sorted(nonexisting)
if args.json:
with open(args.json, 'w') as f:
json.dump(missing, f)
if len(missing) == 0 and len(nonexisting) == 0:
return 0
if len(missing) > 0:
print '\nThe following files should be included in gn files:'
for i in missing:
print i
if len(nonexisting) > 0:
print '\nThe following non-existing files should be removed from gn files:'
for i in nonexisting:
print i
return 1
if __name__ == '__main__':
sys.exit(main())

View file

@ -0,0 +1,117 @@
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import json
import os
import unittest
import check_gn_headers
ninja_input = r'''
obj/a.o: #deps 1, deps mtime 123 (VALID)
../../a.cc
../../dir/path/b.h
../../c.hh
obj/b.o: #deps 1, deps mtime 123 (STALE)
../../b.cc
../../dir2/path/b.h
../../c2.hh
obj/c.o: #deps 1, deps mtime 123 (VALID)
../../c.cc
../../build/a.h
gen/b.h
../../dir3/path/b.h
../../c3.hh
'''
ninja_input_win = ninja_input.replace('/', '\\')
gn_input = json.loads(r'''
{
"others": [],
"targets": {
"//:All": {
},
"//:base": {
"public": [ "//base/p.h" ],
"sources": [ "//base/a.cc", "//base/a.h", "//base/b.hh" ],
"visibility": [ "*" ]
},
"//:star_public": {
"public": "*",
"sources": [ "//base/c.h", "//tmp/gen/a.h" ],
"visibility": [ "*" ]
}
}
}
''')
whitelist = r'''
white-front.c
a/b/c/white-end.c # comment
dir/white-both.c #more comment
# empty line above
a/b/c
'''
class CheckGnHeadersTest(unittest.TestCase):
def testNinja(self):
headers = check_gn_headers.ParseNinjaDepsOutput(ninja_input.split('\n'))
expected = set([
'dir/path/b.h',
'c.hh',
'dir3/path/b.h',
'c3.hh',
])
self.assertEquals(headers, expected)
def testNinjaWin(self):
old_sep = os.sep
os.sep = '\\'
headers = check_gn_headers.ParseNinjaDepsOutput(
ninja_input_win.split('\n'))
expected = set([
'dir\\path\\b.h',
'c.hh',
'dir3\\path\\b.h',
'c3.hh',
])
self.assertEquals(headers, expected)
os.sep = old_sep
def testGn(self):
headers = check_gn_headers.ParseGNProjectJSON(gn_input,
'out/Release', 'tmp')
expected = set([
'base/a.h',
'base/b.hh',
'base/c.h',
'base/p.h',
'out/Release/gen/a.h',
])
self.assertEquals(headers, expected)
def testWhitelist(self):
output = check_gn_headers.ParseWhiteList(whitelist)
expected = set([
'white-front.c',
'a/b/c/white-end.c',
'dir/white-both.c',
'a/b/c',
])
self.assertEquals(output, expected)
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
unittest.main(verbosity=2)

View file

@ -0,0 +1,17 @@
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This program wraps an arbitrary command and prints "1" if the command ran
successfully."""
import os
import subprocess
import sys
devnull = open(os.devnull, 'wb')
if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
  print 1
else:
  print 0

View file

@ -0,0 +1,132 @@
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script provides methods for clobbering build directories."""
import argparse
import os
import shutil
import subprocess
import sys
def extract_gn_build_commands(build_ninja_file):
"""Extracts from a build.ninja the commands to run GN.
The commands to run GN are the gn rule and build.ninja build step at the
top of the build.ninja file. We want to keep these when deleting GN builds
since we want to preserve the command-line flags to GN.
On error, returns the empty string."""
result = ""
with open(build_ninja_file, 'r') as f:
# Read until the second blank line. The first thing GN writes to the file
# is the "rule gn" and the second is the section for "build build.ninja",
# separated by blank lines.
num_blank_lines = 0
while num_blank_lines < 2:
line = f.readline()
if len(line) == 0:
return '' # Unexpected EOF.
result += line
if line[0] == '\n':
num_blank_lines = num_blank_lines + 1
return result
def delete_dir(build_dir):
if os.path.islink(build_dir):
return
# For unknown reasons (anti-virus?) rmtree of Chromium build directories
# often fails on Windows.
if sys.platform.startswith('win'):
subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True)
else:
shutil.rmtree(build_dir)
def delete_build_dir(build_dir):
# GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
if not os.path.exists(build_ninja_d_file):
delete_dir(build_dir)
return
# GN builds aren't automatically regenerated when you sync. To avoid
# messing with the GN workflow, erase everything but the args file, and
# write a dummy build.ninja file that will automatically rerun GN the next
# time Ninja is run.
build_ninja_file = os.path.join(build_dir, 'build.ninja')
build_commands = extract_gn_build_commands(build_ninja_file)
try:
gn_args_file = os.path.join(build_dir, 'args.gn')
with open(gn_args_file, 'r') as f:
args_contents = f.read()
except IOError:
args_contents = ''
e = None
try:
# delete_dir and os.mkdir() may fail, such as when chrome.exe is running,
# and we still want to restore args.gn/build.ninja/build.ninja.d, so catch
# the exception and rethrow it later.
delete_dir(build_dir)
os.mkdir(build_dir)
except Exception as e:
pass
# Put back the args file (if any).
if args_contents != '':
with open(gn_args_file, 'w') as f:
f.write(args_contents)
# Write the build.ninja file sufficiently to regenerate itself.
with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
if build_commands != '':
f.write(build_commands)
else:
# Couldn't parse the build.ninja file, write a default thing.
f.write('''rule gn
command = gn -q gen //out/%s/
description = Regenerating ninja files
build build.ninja: gn
generator = 1
depfile = build.ninja.d
''' % (os.path.split(build_dir)[1]))
# Write a .d file for the build which references a nonexistent file. This
# will make Ninja always mark the build as dirty.
with open(build_ninja_d_file, 'w') as f:
f.write('build.ninja: nonexistant_file.gn\n')
if e:
# Rethrow the exception we caught earlier.
raise e
def clobber(out_dir):
"""Clobber contents of build directory.
Don't delete the directory itself: some checkouts have the build directory
mounted."""
for f in os.listdir(out_dir):
path = os.path.join(out_dir, f)
if os.path.isfile(path):
os.unlink(path)
elif os.path.isdir(path):
delete_build_dir(path)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('out_dir', help='The output directory to clobber')
args = parser.parse_args()
clobber(args.out_dir)
return 0
if __name__ == '__main__':
sys.exit(main())

View file

@ -57,7 +57,7 @@
},
# Don't include output dirs
{
'regexp' : '.*/(Debug|Release|sconsbuild|out|xcodebuild)/',
'regexp' : '.*/(Debug|Release|out|xcodebuild)/',
'include' : 0,
},
# Don't include third-party source
@ -89,7 +89,7 @@
},
{
'stat' : '100.0 * files_instrumented / files_executable',
'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g',
'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent',
},
{
'stat' : 'lines_executable',
@ -111,16 +111,16 @@
},
{
'stat' : '100.0 * lines_covered / lines_executable',
'format' : '*RESULT PercentCovered: percent_covered= %g',
'format' : '*RESULT PercentCovered: percent_covered= %g percent',
},
{
'stat' : '100.0 * lines_covered / lines_executable',
'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g',
'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent',
'group' : 'source',
},
{
'stat' : '100.0 * lines_covered / lines_executable',
'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g',
'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent',
'group' : 'test',
},
],

View file

@ -0,0 +1,170 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file introduces two related templates that act like action and
# action_foreach but instead of running a Python script, they compile a
# given tool in the host toolchain and run it (either once or over the list
# of inputs, depending on the variant).
#
# Parameters
#
# tool (required)
# [label] Label of the tool to run. This should be an executable, and
# this label should not include a toolchain (anything in parens). The
# host compile of this tool will be used.
#
# outputs (required)
# [list of files] Like the outputs of action (if using "compiled_action",
# this would be just the list of outputs), or action_foreach (if using
# "compiled_action_foreach", this would contain source expansions mapping
# input to output files).
#
# args (required)
# [list of strings] Same meaning as action/action_foreach.
#
# inputs (optional)
# Files the binary takes as input. The step will be re-run whenever any
# of these change. If inputs is empty, the step will run only when the
# binary itself changes.
#
# visibility
# deps
# args (all optional)
# Same meaning as action/action_foreach.
#
#
# Example of usage:
#
# compiled_action("run_my_tool") {
# tool = "//tools/something:mytool"
# outputs = [
# "$target_gen_dir/mysource.cc",
# "$target_gen_dir/mysource.h",
# ]
#
# # The tool takes this input.
# inputs = [ "my_input_file.idl" ]
#
# # In this case, the tool takes as arguments the input file and the output
# # build dir (both relative to the "cd" that the script will be run in)
# # and will produce the output files listed above.
# args = [
# rebase_path("my_input_file.idl", root_build_dir),
# "--output-dir", rebase_path(target_gen_dir, root_build_dir),
# ]
# }
#
# You would typically declare your tool like this:
# if (host_toolchain == current_toolchain) {
# executable("mytool") {
# ...
# }
# }
# The if statement around the executable is optional. That says "I only care
# about this target in the host toolchain". Usually this is what you want, and
# saves unnecessarily compiling your tool for the target platform. But if you
# need a target build of your tool as well, just leave off the if statement.
if (host_os == "win") {
_host_executable_suffix = ".exe"
} else {
_host_executable_suffix = ""
}
template("compiled_action") {
assert(defined(invoker.tool), "tool must be defined for $target_name")
assert(defined(invoker.outputs), "outputs must be defined for $target_name")
assert(defined(invoker.args), "args must be defined for $target_name")
assert(!defined(invoker.sources),
"compiled_action doesn't take a sources arg. Use inputs instead.")
action(target_name) {
forward_variables_from(invoker,
[
"deps",
"inputs",
"outputs",
"testonly",
"visibility",
])
if (!defined(deps)) {
deps = []
}
if (!defined(inputs)) {
inputs = []
}
script = "//build/gn_run_binary.py"
# Construct the host toolchain version of the tool.
host_tool = invoker.tool + "($host_toolchain)"
# Get the path to the executable. Currently, this assumes that the tool
# does not specify output_name so that the target name is the name to use.
# If that's not the case, we'll need another argument to the script to
# specify this, since we can't know what the output name is (it might be in
# another file not processed yet).
host_executable =
get_label_info(host_tool, "root_out_dir") + "/" +
get_label_info(host_tool, "name") + _host_executable_suffix
# Add the executable itself as an input.
inputs += [ host_executable ]
deps += [ host_tool ]
# The script takes as arguments the binary to run, and then the arguments
# to pass it.
args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
}
}
template("compiled_action_foreach") {
assert(defined(invoker.sources), "sources must be defined for $target_name")
assert(defined(invoker.tool), "tool must be defined for $target_name")
assert(defined(invoker.outputs), "outputs must be defined for $target_name")
assert(defined(invoker.args), "args must be defined for $target_name")
action_foreach(target_name) {
forward_variables_from(invoker,
[
"deps",
"inputs",
"outputs",
"sources",
"testonly",
"visibility",
])
if (!defined(deps)) {
deps = []
}
if (!defined(inputs)) {
inputs = []
}
script = "//build/gn_run_binary.py"
# Construct the host toolchain version of the tool.
host_tool = invoker.tool + "($host_toolchain)"
# Get the path to the executable. Currently, this assumes that the tool
# does not specify output_name so that the target name is the name to use.
# If that's not the case, we'll need another argument to the script to
# specify this, since we can't know what the output name is (it might be in
# another file not processed yet).
host_executable =
get_label_info(host_tool, "root_out_dir") + "/" +
get_label_info(host_tool, "name") + _host_executable_suffix
# Add the executable itself as an input.
inputs += [ host_executable ]
deps += [ host_tool ]
# The script takes as arguments the binary to run, and then the arguments
# to pass it.
args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
}
}
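
The usage example at the top of this file only covers compiled_action. A hedged
sketch of compiled_action_foreach (tool label and file names are made up),
showing the GN source-expansion placeholders that the "outputs" comment above
refers to:

import("//build/compiled_action.gni")

compiled_action_foreach("generate_sources") {
  tool = "//tools/something:mytool"
  sources = [
    "a.idl",
    "b.idl",
  ]

  # {{source}} and {{source_name_part}} are expanded per input file, so each
  # .idl produces its own .cc/.h pair in the target gen dir.
  outputs = [
    "$target_gen_dir/{{source_name_part}}.cc",
    "$target_gen_dir/{{source_name_part}}.h",
  ]
  args = [
    "{{source}}",
    "--output-dir",
    rebase_path(target_gen_dir, root_build_dir),
  ]
}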

View file

@ -14,63 +14,115 @@ import re
import subprocess
import sys
def GetVersion(compiler):
compiler_version_cache = {} # Map from (compiler, tool) -> version.
def Usage(program_name):
print '%s MODE TOOL' % os.path.basename(program_name)
print 'MODE: host or target.'
print 'TOOL: assembler or compiler or linker.'
return 1
def ParseArgs(args):
if len(args) != 2:
raise Exception('Invalid number of arguments')
mode = args[0]
tool = args[1]
if mode not in ('host', 'target'):
raise Exception('Invalid mode: %s' % mode)
if tool not in ('assembler',):
raise Exception('Invalid tool: %s' % tool)
return mode, tool
def GetEnvironFallback(var_list, default):
"""Look up an environment variable from a possible list of variable names."""
for var in var_list:
if var in os.environ:
return os.environ[var]
return default
def GetVersion(compiler, tool):
tool_output = tool_error = None
cache_key = (compiler, tool)
cached_version = compiler_version_cache.get(cache_key)
if cached_version:
return cached_version
try:
# Note that compiler could be something tricky like "distcc g++".
compiler = compiler + " -dumpversion"
pipe = subprocess.Popen(compiler, shell=True,
if tool == "assembler":
compiler = compiler + " -Xassembler --version -x assembler -c /dev/null"
# Unmodified: GNU assembler (GNU Binutils) 2.24
# Ubuntu: GNU assembler (GNU Binutils for Ubuntu) 2.22
# Fedora: GNU assembler version 2.23.2
version_re = re.compile(r"^GNU [^ ]+ .* (\d+)\.(\d+).*?$", re.M)
else:
raise Exception("Unknown tool %s" % tool)
# Force the locale to C otherwise the version string could be localized
# making regex matching fail.
env = os.environ.copy()
env["LC_ALL"] = "C"
pipe = subprocess.Popen(compiler, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
gcc_output, gcc_error = pipe.communicate()
tool_output, tool_error = pipe.communicate()
if pipe.returncode:
raise subprocess.CalledProcessError(pipe.returncode, compiler)
result = re.match(r"(\d+)\.(\d+)", gcc_output)
return result.group(1) + result.group(2)
parsed_output = version_re.match(tool_output)
result = parsed_output.group(1) + parsed_output.group(2)
compiler_version_cache[cache_key] = result
return result
except Exception, e:
if gcc_error:
sys.stderr.write(gcc_error)
if tool_error:
sys.stderr.write(tool_error)
print >> sys.stderr, "compiler_version.py failed to execute:", compiler
print >> sys.stderr, e
return ""
def GetVersionFromEnvironment(compiler_env):
""" Returns the version of compiler
If the compiler was set by the given environment variable and exists,
return its version, otherwise None is returned.
"""
cxx = os.getenv(compiler_env, None)
if cxx:
cxx_version = GetVersion(cxx)
if cxx_version != "":
return cxx_version
return None
def main(args):
try:
(mode, tool) = ParseArgs(args[1:])
except Exception, e:
sys.stderr.write(e.message + '\n\n')
return Usage(args[0])
def main():
# Check if the CXX_target or CXX environment variable exists and if it does use
# that compiler.
# TODO: Fix ninja (see http://crbug.com/140900) instead and remove this code
# In ninja's cross-compile mode, CXX_target is the target compiler, while
# CXX is the host compiler. CXX_target needs to be checked first; even though
# the target and host compilers may have different versions, there seems to be
# no issue using the target compiler's version number as gcc_version on Android.
cxx_version = GetVersionFromEnvironment("CXX_target")
if cxx_version:
print cxx_version
return 0
ret_code, result = ExtractVersion(mode, tool)
if ret_code == 0:
print result
return ret_code
cxx_version = GetVersionFromEnvironment("CXX")
if cxx_version:
print cxx_version
return 0
# Otherwise we check the g++ version.
gccversion = GetVersion("g++")
if gccversion != "":
print gccversion
return 0
def DoMain(args):
"""Hook to be called from gyp without starting a separate python
interpreter."""
(mode, tool) = ParseArgs(args)
ret_code, result = ExtractVersion(mode, tool)
if ret_code == 0:
return result
raise Exception("Failed to extract compiler version for args: %s" % args)
def ExtractVersion(mode, tool):
# Check if various CXX environment variables exist and use them if they
# exist. The preferences and fallback order is a close approximation of
# GenerateOutputForConfig() in GYP's ninja generator.
# The main difference being not supporting GYP's make_global_settings.
environments = ['CXX_target', 'CXX']
if mode == 'host':
environments = ['CXX_host'] + environments
compiler = GetEnvironFallback(environments, 'c++')
if compiler:
compiler_version = GetVersion(compiler, tool)
if compiler_version != "":
return (0, compiler_version)
return (1, None)
return 1
if __name__ == "__main__":
sys.exit(main())
sys.exit(main(sys.argv))

View file

@ -0,0 +1,383 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/allocator.gni")
import("//build/config/chrome_build.gni")
import("//build/config/chromecast_build.gni")
import("//build/config/crypto.gni")
import("//build/config/dcheck_always_on.gni")
import("//build/config/features.gni")
import("//build/config/pch.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/ui.gni")
import("//build/toolchain/goma.gni")
declare_args() {
# When set (the default) enables C++ iterator debugging in debug builds.
# Iterator debugging is always off in release builds (technically, this flag
# affects the "debug" config, which is always available but applied by
# default only in debug builds).
#
# Iterator debugging is generally useful for catching bugs. But it can
# introduce extra locking to check the state of an iterator against the state
# of the current object. For iterator- and thread-heavy code, this can
# significantly slow execution.
enable_iterator_debugging = true
}
# ==============================================
# PLEASE DO NOT ADD MORE THINGS TO THIS LIST
# ==============================================
#
# Legacy feature defines applied to all targets.
#
# These are applied to every single compile in the build and most of them are
# only relevant to a few files. This bloats command lines and causes
# unnecessary recompiles when flags are flipped.
#
# To pass defines to source code from the build, use the buildflag system which
# will write headers containing the defines you need. This isolates the define
# and means its definition can participate in the build graph, only recompiling
# things when it actually changes.
#
# See //build/buildflag_header.gni for instructions on generating headers.
#
# This will also allow you to scope your build flag to a BUILD.gn file (or a
# .gni file if you need it from more than one place) rather than making global
# flags. See //build/config/BUILDCONFIG.gn for advice on where to define
# build flags.
config("feature_flags") {
# Don't use deprecated V8 APIs anywhere.
defines = [ "V8_DEPRECATION_WARNINGS" ]
if (dcheck_always_on) {
defines += [ "DCHECK_ALWAYS_ON=1" ]
}
if (use_udev) {
# TODO(brettw) should probably be "=1".
defines += [ "USE_UDEV" ]
}
if (use_ash) {
defines += [ "USE_ASH=1" ]
}
if (use_aura) {
defines += [ "USE_AURA=1" ]
}
if (use_pango) {
defines += [ "USE_PANGO=1" ]
}
if (use_cairo) {
defines += [ "USE_CAIRO=1" ]
}
if (use_glib) {
defines += [ "USE_GLIB=1" ]
}
if (use_openssl_certs) {
defines += [ "USE_OPENSSL_CERTS=1" ]
}
if (use_nss_certs) {
defines += [ "USE_NSS_CERTS=1" ]
}
if (use_ozone) {
defines += [ "USE_OZONE=1" ]
}
if (use_x11) {
defines += [ "USE_X11=1" ]
}
if (use_allocator != "tcmalloc") {
defines += [ "NO_TCMALLOC" ]
}
if (is_asan || is_lsan || is_tsan || is_msan) {
defines += [
"MEMORY_TOOL_REPLACES_ALLOCATOR",
"MEMORY_SANITIZER_INITIAL_SIZE",
]
}
if (is_asan) {
defines += [ "ADDRESS_SANITIZER" ]
}
if (is_lsan) {
defines += [ "LEAK_SANITIZER" ]
}
if (is_tsan) {
defines += [
"THREAD_SANITIZER",
"DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
"WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1",
]
}
if (is_msan) {
defines += [ "MEMORY_SANITIZER" ]
}
if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) {
defines += [ "UNDEFINED_SANITIZER" ]
}
if (!enable_nacl) {
defines += [ "DISABLE_NACL" ]
}
if (safe_browsing_mode == 1) {
defines += [ "FULL_SAFE_BROWSING" ]
defines += [ "SAFE_BROWSING_CSD" ]
defines += [ "SAFE_BROWSING_DB_LOCAL" ]
} else if (safe_browsing_mode == 2) {
defines += [ "SAFE_BROWSING_DB_REMOTE" ]
}
if (is_official_build) {
defines += [ "OFFICIAL_BUILD" ]
}
if (is_chrome_branded) {
defines += [ "GOOGLE_CHROME_BUILD" ]
} else {
defines += [ "CHROMIUM_BUILD" ]
}
if (is_syzyasan) {
defines += [
"SYZYASAN",
"MEMORY_SANITIZER_INITIAL_SIZE",
]
}
if (!fieldtrial_testing_like_official_build && !is_chrome_branded) {
defines += [ "FIELDTRIAL_TESTING_ENABLED" ]
}
# ==============================================
# PLEASE DO NOT ADD MORE THINGS TO THIS LIST
# ==============================================
#
# See the comment at the top.
}
# Debug/release ----------------------------------------------------------------
config("debug") {
defines = [
"_DEBUG",
"DYNAMIC_ANNOTATIONS_ENABLED=1",
"WTF_USE_DYNAMIC_ANNOTATIONS=1",
]
if (is_nacl) {
defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
}
if (is_win) {
if (!enable_iterator_debugging) {
# Iterator debugging is enabled by default by the compiler on debug
# builds, and we have to tell it to turn it off.
defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
}
} else if (is_linux && current_cpu == "x64" && enable_iterator_debugging) {
# Enable libstdc++ debugging facilities to help catch problems early, see
# http://crbug.com/65151 .
# TODO(phajdan.jr): Should we enable this for all of POSIX?
defines += [ "_GLIBCXX_DEBUG=1" ]
}
}
config("release") {
defines = [ "NDEBUG" ]
# Sanitizers.
if (is_tsan) {
defines += [
"DYNAMIC_ANNOTATIONS_ENABLED=1",
"WTF_USE_DYNAMIC_ANNOTATIONS=1",
]
} else {
defines += [ "NVALGRIND" ]
if (!is_nacl) {
# NaCl always enables dynamic annotations. Currently this value is set to
# 1 for all .nexes.
defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
}
}
if (is_ios) {
# Disable NSAssert and GTMDevAssert (from Google Toolbox for Mac). This
# follows XCode's default behavior for Release builds.
defines += [ "NS_BLOCK_ASSERTIONS=1" ]
}
}
# Default libraries ------------------------------------------------------------
# This config defines the default libraries applied to all targets.
config("default_libs") {
if (is_win) {
# TODO(brettw) this list of defaults should probably be smaller, and
# instead the targets that use the less common ones (e.g. wininet or
# winspool) should include those explicitly.
libs = [
"advapi32.lib",
"comdlg32.lib",
"dbghelp.lib",
"delayimp.lib",
"dnsapi.lib",
"gdi32.lib",
"kernel32.lib",
"msimg32.lib",
"odbc32.lib",
"odbccp32.lib",
"ole32.lib",
"oleaut32.lib",
"psapi.lib",
"shell32.lib",
"shlwapi.lib",
"user32.lib",
"usp10.lib",
"uuid.lib",
"version.lib",
"wininet.lib",
"winmm.lib",
"winspool.lib",
"ws2_32.lib",
# Please don't add more stuff here. We should actually be making this
# list smaller, since all common things should be covered. If you need
# some extra libraries, please just add a libs = [ "foo.lib" ] to your
# target that needs it.
]
} else if (is_android) {
libs = [
"dl",
"m",
]
} else if (is_mac) {
# Targets should choose to explicitly link frameworks they require. Since
# linking can have run-time side effects, nothing should be listed here.
libs = []
} else if (is_ios) {
# The libraries listed here will be specified for both the target and the
# host. Only the common ones should be listed here.
libs = [
"CoreFoundation.framework",
"CoreGraphics.framework",
"CoreText.framework",
"Foundation.framework",
]
} else if (is_linux) {
libs = [
"dl",
"rt",
]
}
}
# Dependencies that all executables and shared libraries should have.
group("exe_and_shlib_deps") {
public_deps = []
if (using_sanitizer) {
public_deps += [ "//build/config/sanitizers:deps" ]
}
if (use_custom_libcxx) {
public_deps += [ "//buildtools/third_party/libc++:libcxx_proxy" ]
}
if (use_afl) {
public_deps += [ "//third_party/afl" ]
}
}
# Executable configs -----------------------------------------------------------
# Windows linker setup for EXEs and DLLs.
if (is_win) {
_windows_linker_configs = [
"//build/config/win:sdk_link",
"//build/config/win:common_linker_setup",
]
}
# This config defines the configs applied to all executables.
config("executable_config") {
configs = []
if (is_win) {
configs += _windows_linker_configs
# Currently only turn on linker CFI for executables.
configs += [ "//build/config/win:cfi_linker" ]
} else if (is_mac) {
configs += [
"//build/config/mac:mac_dynamic_flags",
"//build/config/mac:mac_executable_flags",
]
} else if (is_ios) {
configs += [
"//build/config/ios:ios_dynamic_flags",
"//build/config/ios:ios_executable_flags",
]
} else if (is_linux || is_android || current_os == "aix") {
configs += [ "//build/config/gcc:executable_ldconfig" ]
if (is_android) {
configs += [ "//build/config/android:executable_config" ]
} else if (is_chromecast) {
configs += [ "//build/config/chromecast:executable_config" ]
}
}
# If we're using the prebuilt instrumented libraries with the sanitizers, we
# need to add ldflags to every binary to make sure they are picked up.
if (prebuilt_instrumented_libraries_available) {
configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
}
if (use_locally_built_instrumented_libraries) {
configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
}
configs += [ "//build/config/sanitizers:link_executable" ]
}
# Shared library configs -------------------------------------------------------
# This config defines the configs applied to all shared libraries.
config("shared_library_config") {
configs = []
if (is_win) {
configs += _windows_linker_configs
} else if (is_mac) {
configs += [ "//build/config/mac:mac_dynamic_flags" ]
} else if (is_ios) {
configs += [ "//build/config/ios:ios_dynamic_flags" ]
} else if (is_chromecast) {
configs += [ "//build/config/chromecast:shared_library_config" ]
}
# If we're using the prebuilt instrumented libraries with the sanitizers, we
# need to add ldflags to every binary to make sure they are picked up.
if (prebuilt_instrumented_libraries_available) {
configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
}
if (use_locally_built_instrumented_libraries) {
configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
}
configs += [ "//build/config/sanitizers:link_shared_library" ]
}
# Add this config to your target to enable precompiled headers.
#
# Precompiled headers are done on a per-target basis. If you have just a couple
# of files, the time it takes to precompile (~2 seconds) can actually be longer
# than the time saved. On a Z620, a 100 file target compiles about 2 seconds
# faster with precompiled headers, with greater savings for larger targets.
#
# Recommend precompiled headers for targets with more than 50 .cc files.
config("precompiled_headers") {
if (enable_precompiled_headers) {
if (is_win) {
# This is a string rather than a file GN knows about. It has to match
# exactly what's in the /FI flag below, and what might appear in the
# source code in quotes for an #include directive.
precompiled_header = "build/precompile.h"
# This is a file that GN will compile with the above header. It will be
# implicitly added to the sources (potentially multiple times, with one
# variant for each language used in the target).
precompiled_source = "//build/precompile.cc"
# Force include the header.
cflags = [ "/FI$precompiled_header" ]
} else if (is_mac) {
precompiled_source = "//build/precompile.h"
}
}
}
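
The "PLEASE DO NOT ADD MORE THINGS TO THIS LIST" comments above point at
per-component buildflag headers instead of global defines. A hedged sketch of
that pattern for a hypothetical component (all names invented), matching the
advice in this file and in BUILDCONFIG.gn below:

# //components/foo/BUILD.gn (hypothetical)
import("//build/buildflag_header.gni")

declare_args() {
  # Comment directly above the declaration; shows up in "gn args --list".
  enable_foo_feature = false
}

buildflag_header("foo_buildflags") {
  header = "foo_buildflags.h"
  flags = [ "ENABLE_FOO_FEATURE=$enable_foo_feature" ]
}

# C++ in the component then checks the flag with:
#   #include "components/foo/foo_buildflags.h"
#   #if BUILDFLAG(ENABLE_FOO_FEATURE)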

View file

@ -0,0 +1,699 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# =============================================================================
# WHAT IS THIS FILE?
# =============================================================================
#
# This is the master GN build configuration. This file is loaded after the
# build args (args.gn) for the build directory and after the toplevel ".gn"
# file (which points to this file as the build configuration).
#
# This file will be executed and the resulting context will be used to execute
# every other file in the build. So variables declared here (that don't start
# with an underscore) will be implicitly global.
# =============================================================================
# PLATFORM SELECTION
# =============================================================================
#
# There are two main things to set: "os" and "cpu". The "toolchain" is the name
# of the GN thing that encodes combinations of these things.
#
# Users typically only set the variables "target_os" and "target_cpu" in "gn
# args", the rest are set up by our build and internal to GN.
#
# There are three different types of each of these things: The "host"
# represents the computer doing the compile and never changes. The "target"
# represents the main thing we're trying to build. The "current" represents
# which configuration is currently being defined, which can be either the
# host, the target, or something completely different (like nacl). GN will
# run the same build file multiple times for the different required
# configuration in the same build.
#
# This gives the following variables:
# - host_os, host_cpu, host_toolchain
# - target_os, target_cpu, default_toolchain
# - current_os, current_cpu, current_toolchain.
#
# Note the default_toolchain isn't symmetrical (you would expect
# target_toolchain). This is because the "default" toolchain is a GN built-in
# concept, and "target" is something our build sets up that's symmetrical with
# its GYP counterpart. Potentially the built-in default_toolchain variable
# could be renamed in the future.
#
# When writing build files, to do something only for the host:
# if (current_toolchain == host_toolchain) { ...
if (target_os == "") {
target_os = host_os
}
if (target_cpu == "") {
if (target_os == "android") {
# If we're building for Android, we should assume that we want to
# build for ARM by default, not the host_cpu (which is likely x64).
# This allows us to not have to specify both target_os and target_cpu
# on the command line.
target_cpu = "arm"
} else {
target_cpu = host_cpu
}
}
if (current_cpu == "") {
current_cpu = target_cpu
}
if (current_os == "") {
current_os = target_os
}
# =============================================================================
# BUILD FLAGS
# =============================================================================
#
# This block lists input arguments to the build, along with their default
# values.
#
# If a value is specified on the command line, it will overwrite the defaults
# given in a declare_args block, otherwise the default will be used.
#
# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
# the build to declare build flags. If you need a flag for a single component,
# you can just declare it in the corresponding BUILD.gn file.
#
# - If your feature is a single target, say //components/foo, you can put
# a declare_args() block in //components/foo/BUILD.gn and use it there.
# Nobody else in the build needs to see the flag.
#
# - Defines based on build variables should be implemented via the generated
# build flag header system. See //build/buildflag_header.gni. You can put
# the buildflag_header target in the same file as the build flag itself. You
# should almost never set "defines" directly.
#
# - If your flag toggles a target on and off or toggles between different
# versions of similar things, write a "group" target that forwards to the
# right target (or no target) depending on the value of the build flag. This
# group can be in the same BUILD.gn file as the build flag, and targets can
# depend unconditionally on the group rather than duplicating flag checks
# across many targets.
#
# - If a semi-random set of build files REALLY needs to know about a define and
# the above pattern for isolating the build logic in a forwarding group
# doesn't work, you can put the argument in a .gni file. This should be put
# in the lowest level of the build that knows about this feature (which should
# almost always be outside of the //build directory!).
#
# Other flag advice:
#
# - Use boolean values when possible. If you need a default value that expands
# to some complex thing in the default case (like the location of the
# compiler which would be computed by a script), use a default value of -1 or
# the empty string. Outside of the declare_args block, conditionally expand
# the default value as necessary.
#
# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
# your feature) rather than just "foo".
#
# - Write good comments directly above the declaration with no blank line.
# These comments will appear as documentation in "gn args --list".
#
# - Don't call exec_script inside declare_args. This will execute the script
# even if the value is overridden, which is wasteful. See first bullet.
declare_args() {
# Set to enable the official build level of optimization. This has nothing
# to do with branding, but enables an additional level of optimization above
# release (!is_debug). This might be better expressed as a tri-state
# (debug, release, official) but for historical reasons there are two
# separate flags.
is_official_build = false
# Whether we're a traditional desktop unix.
is_desktop_linux = current_os == "linux"
# Set to true when compiling with the Clang compiler. Typically this is used
# to configure warnings.
is_clang =
current_os == "mac" || current_os == "ios" || current_os == "chromeos" ||
current_os == "fuchsia" ||
(current_os == "linux" && current_cpu != "s390x" &&
current_cpu != "s390" && current_cpu != "ppc64" && current_cpu != "ppc")
# Allows the path to a custom target toolchain to be injected as a single
# argument, and set as the default toolchain.
custom_toolchain = ""
# This should not normally be set as a build argument. It's here so that
# every toolchain can pass through the "global" value via toolchain_args().
host_toolchain = ""
# DON'T ADD MORE FLAGS HERE. Read the comment above.
}
declare_args() {
# Debug build. Enabling official builds automatically sets is_debug to false.
is_debug = !is_official_build
}
declare_args() {
# Component build. Setting to true compiles targets declared as "components"
# as shared libraries loaded dynamically. This speeds up development time.
# When false, components will be linked statically.
#
# For more information see
# https://chromium.googlesource.com/chromium/src/+/master/docs/component_build.md
is_component_build =
is_debug && current_os != "ios" && current_os != "fuchsia"
}
assert(!(is_debug && is_official_build), "Can't do official debug builds")
# ==============================================================================
# TOOLCHAIN SETUP
# ==============================================================================
#
# Here we set the default toolchain, as well as the variable host_toolchain
# which will identify the toolchain corresponding to the local system when
# doing cross-compiles. When not cross-compiling, this will be the same as the
# default toolchain.
#
# We do this before anything else to make sure we complain about any
# unsupported os/cpu combinations as early as possible.
if (host_toolchain == "") {
# This should only happen in the top-level context.
# In a specific toolchain context, the toolchain_args()
# block should have propagated a value down.
# TODO(dpranke): Add some sort of assert here that verifies that
# no toolchain omitted host_toolchain from its toolchain_args().
if (host_os == "linux") {
if (target_os != "linux") {
# TODO(dpranke) - is_clang normally applies only to the target
# build, and there is no way to indicate that you want to override
# it for both the target build *and* the host build. Do we need to
# support this?
host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
} else if (is_clang) {
host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
} else {
host_toolchain = "//build/toolchain/linux:$host_cpu"
}
} else if (host_os == "mac") {
host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
} else if (host_os == "win") {
# On Windows always use the target CPU for host builds. On the
# configurations we support this will always work and it saves build steps.
if (is_clang) {
host_toolchain = "//build/toolchain/win:clang_$target_cpu"
} else {
host_toolchain = "//build/toolchain/win:$target_cpu"
}
} else if (host_os == "aix") {
host_toolchain = "//build/toolchain/aix:$host_cpu"
} else {
assert(false, "Unsupported host_os: $host_os")
}
}
_default_toolchain = ""
if (target_os == "android") {
assert(host_os == "linux" || host_os == "mac",
"Android builds are only supported on Linux and Mac hosts.")
if (is_clang) {
_default_toolchain = "//build/toolchain/android:android_clang_$target_cpu"
} else {
_default_toolchain = "//build/toolchain/android:android_$target_cpu"
}
} else if (target_os == "chromeos" || target_os == "linux") {
# See comments in build/toolchain/cros/BUILD.gn about board compiles.
if (is_clang) {
_default_toolchain = "//build/toolchain/linux:clang_$target_cpu"
} else {
_default_toolchain = "//build/toolchain/linux:$target_cpu"
}
} else if (target_os == "fuchsia") {
_default_toolchain = "//build/toolchain/fuchsia:$target_cpu"
} else if (target_os == "ios") {
_default_toolchain = "//build/toolchain/mac:ios_clang_$target_cpu"
} else if (target_os == "mac") {
assert(host_os == "mac", "Mac cross-compiles are unsupported.")
_default_toolchain = host_toolchain
} else if (target_os == "win") {
# On Windows we use the same toolchain for host and target by default.
assert(target_os == host_os, "Win cross-compiles only work on win hosts.")
if (is_clang) {
_default_toolchain = "//build/toolchain/win:clang_$target_cpu"
} else {
_default_toolchain = "//build/toolchain/win:$target_cpu"
}
} else if (target_os == "aix") {
_default_toolchain = "//build/toolchain/aix:$target_cpu"
} else if (target_os == "winrt_81" || target_os == "winrt_81_phone" ||
target_os == "winrt_10") {
_default_toolchain = "//build/toolchain/win:winrt_$target_cpu"
} else {
assert(false, "Unsupported target_os: $target_os")
}
# If a custom toolchain has been set in the args, set it as default. Otherwise,
# set the default toolchain for the platform (if any).
if (custom_toolchain != "") {
set_default_toolchain(custom_toolchain)
} else if (_default_toolchain != "") {
set_default_toolchain(_default_toolchain)
}
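# As a sketch, a custom target toolchain can be injected at gen time like so
# (the toolchain label is illustrative and must refer to an existing
# toolchain() definition):
#
#   gn gen out/Custom --args='custom_toolchain="//build/toolchain/linux:clang_arm"'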
# =============================================================================
# OS DEFINITIONS
# =============================================================================
#
# We set these various is_FOO booleans for convenience in writing OS-based
# conditions.
#
# - is_android, is_chromeos, is_ios, and is_win should be obvious.
# - is_mac is set only for desktop Mac. It is not set on iOS.
# - is_posix is true for mac and any Unix-like system (basically everything
# except Windows).
# - is_linux is true for desktop Linux and ChromeOS, but not Android (which is
# generally too different despite being based on the Linux kernel).
#
# Do not add more is_* variants here for random lesser-used Unix systems like
# aix or one of the BSDs. If you need to check these, just check the
# current_os value directly.
if (current_os == "win" || current_os == "winrt_81" ||
current_os == "winrt_81_phone" || current_os == "winrt_10") {
is_android = false
is_chromeos = false
is_fuchsia = false
is_ios = false
is_linux = false
is_mac = false
is_nacl = false
is_posix = false
is_win = true
} else if (current_os == "mac") {
is_android = false
is_chromeos = false
is_fuchsia = false
is_ios = false
is_linux = false
is_mac = true
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "android") {
is_android = true
is_chromeos = false
is_fuchsia = false
is_ios = false
is_linux = false
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "chromeos") {
is_android = false
is_chromeos = true
is_fuchsia = false
is_ios = false
is_linux = true
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "nacl") {
# current_os == "nacl" will be passed by the nacl toolchain definition.
  # It is not set by default or on the command line. We treat it as a
# Posix variant.
is_android = false
is_chromeos = false
is_fuchsia = false
is_ios = false
is_linux = false
is_mac = false
is_nacl = true
is_posix = true
is_win = false
} else if (current_os == "fuchsia") {
is_android = false
is_chromeos = false
is_fuchsia = true
is_ios = false
is_linux = false
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "ios") {
is_android = false
is_chromeos = false
is_fuchsia = false
is_ios = true
is_linux = false
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "linux") {
is_android = false
is_chromeos = false
is_fuchsia = false
is_ios = false
is_linux = true
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "aix") {
is_android = false
  is_chromeos = false
  is_fuchsia = false
is_ios = false
is_linux = false
is_mac = false
is_nacl = false
is_posix = true
is_win = false
}
# =============================================================================
# SOURCES FILTERS
# =============================================================================
#
# These patterns filter out platform-specific files when assigning to the
# sources variable. The magic variable |sources_assignment_filter| is applied
# to each assignment or appending to the sources variable and matches are
# automatically removed.
#
# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
# boundary = end of string or slash) are supported, and the entire string
# must match the pattern (so you need "*.cc" to match all .cc files, for
# example).
# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
# below.
sources_assignment_filter = []
if (!is_posix) {
sources_assignment_filter += [
"*_posix.h",
"*_posix.cc",
"*_posix_unittest.h",
"*_posix_unittest.cc",
"*\bposix/*",
]
}
if (!is_win) {
sources_assignment_filter += [
"*_win.cc",
"*_win.h",
"*_win_unittest.cc",
"*\bwin/*",
"*.def",
"*.rc",
]
}
if (!is_mac) {
sources_assignment_filter += [
"*_mac.h",
"*_mac.cc",
"*_mac.mm",
"*_mac_unittest.h",
"*_mac_unittest.cc",
"*_mac_unittest.mm",
"*\bmac/*",
"*_cocoa.h",
"*_cocoa.cc",
"*_cocoa.mm",
"*_cocoa_unittest.h",
"*_cocoa_unittest.cc",
"*_cocoa_unittest.mm",
"*\bcocoa/*",
]
}
if (!is_ios) {
sources_assignment_filter += [
"*_ios.h",
"*_ios.cc",
"*_ios.mm",
"*_ios_unittest.h",
"*_ios_unittest.cc",
"*_ios_unittest.mm",
"*\bios/*",
]
}
if (!is_mac && !is_ios) {
sources_assignment_filter += [ "*.mm" ]
}
if (!is_linux) {
sources_assignment_filter += [
"*_linux.h",
"*_linux.cc",
"*_linux_unittest.h",
"*_linux_unittest.cc",
"*\blinux/*",
]
}
if (!is_android) {
sources_assignment_filter += [
"*_android.h",
"*_android.cc",
"*_android_unittest.h",
"*_android_unittest.cc",
"*\bandroid/*",
]
}
if (!is_chromeos) {
sources_assignment_filter += [
"*_chromeos.h",
"*_chromeos.cc",
"*_chromeos_unittest.h",
"*_chromeos_unittest.cc",
"*\bchromeos/*",
]
}
# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
# below.
# Actually save this list.
#
# These patterns are executed for every file in the source tree of every run.
# Therefore, adding more patterns slows down the build for everybody. We should
# only add automatic patterns for configurations affecting hundreds of files
# across many projects in the tree.
#
# Therefore, we only add rules to this list corresponding to platforms on the
# Chromium waterfall. This is not for non-officially-supported platforms
# (FreeBSD, etc.), toolkits (X11, GTK, etc.), or features. For these cases,
# write a conditional in the target to remove the file(s) from the list when
# your platform/toolkit/feature doesn't apply.
set_sources_assignment_filter(sources_assignment_filter)
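# A minimal sketch of that pattern (the target and file names here are
# hypothetical):
#
#   source_set("foo") {
#     sources = [
#       "foo.cc",
#       "foo_freebsd.cc",
#     ]
#     if (current_os != "freebsd") {
#       sources -= [ "foo_freebsd.cc" ]
#     }
#   }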
# =============================================================================
# TARGET DEFAULTS
# =============================================================================
#
# Set up the default configuration for every build target of the given type.
# The values configured here will be automatically set on the scope of the
# corresponding target. Target definitions can add or remove to the settings
# here as needed.
# Holds all configs used for running the compiler.
default_compiler_configs = [
"//build/config:feature_flags",
"//build/config/compiler:afdo",
"//build/config/compiler:compiler",
"//build/config/compiler:pthread",
"//build/config/compiler:clang_stackrealign",
"//build/config/compiler:compiler_arm_fpu",
"//build/config/compiler:compiler_arm_thumb",
"//build/config/compiler:chromium_code",
"//build/config/compiler:default_include_dirs",
"//build/config/compiler:default_optimization",
"//build/config/compiler:default_stack_frames",
"//build/config/compiler:default_symbols",
"//build/config/compiler:no_rtti",
"//build/config/compiler:runtime_library",
"//build/config/sanitizers:default_sanitizer_flags",
]
if (is_win) {
default_compiler_configs += [
"//build/config/win:default_crt",
"//build/config/win:lean_and_mean",
"//build/config/win:nominmax",
"//build/config/win:unicode",
"//build/config/win:winver",
"//build/config/win:vs_code_analysis",
]
}
if (current_os == "winrt_81" || current_os == "winrt_81_phone" ||
current_os == "winrt_10") {
default_compiler_configs += [ "//build/config/win:target_winrt" ]
}
if (is_posix) {
default_compiler_configs += [ "//build/config/gcc:no_exceptions" ]
if (current_os != "aix") {
default_compiler_configs +=
[ "//build/config/gcc:symbol_visibility_hidden" ]
}
}
if (is_android) {
default_compiler_configs +=
[ "//build/config/android:default_cygprofile_instrumentation" ]
}
if (is_clang && !is_nacl) {
default_compiler_configs += [
"//build/config/clang:find_bad_constructs",
"//build/config/clang:extra_warnings",
]
}
# Debug/release-related defines.
if (is_debug) {
default_compiler_configs += [ "//build/config:debug" ]
} else {
default_compiler_configs += [ "//build/config:release" ]
}
# Static libraries and source sets use only the compiler ones.
set_defaults("static_library") {
configs = default_compiler_configs
}
set_defaults("source_set") {
configs = default_compiler_configs
}
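# Individual targets can adjust these defaults as needed. For example, a
# common (illustrative) pattern for third-party code that does not build with
# the Chromium warning settings is:
#
#   static_library("third_party_lib") {
#     sources = [ "lib.cc" ]
#     configs -= [ "//build/config/compiler:chromium_code" ]
#     configs += [ "//build/config/compiler:no_chromium_code" ]
#   }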
# Compute the set of configs common to all linked targets (shared libraries,
# loadable modules, executables) to avoid duplication below.
if (is_win) {
# Many targets remove these configs, so they are not contained within
# //build/config:executable_config for easy removal.
_linker_configs = [
"//build/config/win:default_incremental_linking",
# Default to console-mode apps. Most of our targets are tests and such
# that shouldn't use the windows subsystem.
"//build/config/win:console",
]
} else if (is_mac) {
_linker_configs = [ "//build/config/mac:strip_all" ]
} else {
_linker_configs = []
}
# Executable defaults.
default_executable_configs = default_compiler_configs + [
"//build/config:default_libs",
"//build/config:executable_config",
] + _linker_configs
set_defaults("executable") {
configs = default_executable_configs
}
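# For example, a Windows GUI executable would normally drop the console
# default; a rough sketch, assuming a "windowed" config that sets
# /SUBSYSTEM:WINDOWS:
#
#   executable("gui_app") {
#     sources = [ "winmain.cc" ]
#     if (is_win) {
#       configs -= [ "//build/config/win:console" ]
#       configs += [ "//build/config/win:windowed" ]
#     }
#   }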
# Shared library and loadable module defaults (also for components in component
# mode).
default_shared_library_configs = default_compiler_configs + [
"//build/config:default_libs",
"//build/config:shared_library_config",
] + _linker_configs
if (is_android) {
# Strip native JNI exports from shared libraries by default. Binaries that
# want this can remove this config.
default_shared_library_configs +=
[ "//build/config/android:hide_all_but_jni_onload" ]
}
set_defaults("shared_library") {
configs = default_shared_library_configs
}
set_defaults("loadable_module") {
configs = default_shared_library_configs
# loadable_modules are generally used by other libs, not just via JNI.
if (is_android) {
configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
}
}
# ==============================================================================
# COMPONENT SETUP
# ==============================================================================
# Defines a component, which equates to a shared_library when
# is_component_build == true and a static_library otherwise.
#
# Use static libraries for the static build rather than source sets because
# many of our test binaries link many large dependencies but often don't
# use large portions of them. The static libraries are much more efficient to
# link in this situation since only the necessary object files are linked.
#
# The invoker can override the type of the target in the non-component-build
# case by setting static_component_type to either "source_set" or
# "static_library". If unset, the default will be used.
template("component") {
if (is_component_build) {
_component_mode = "shared_library"
} else if (defined(invoker.static_component_type)) {
assert(invoker.static_component_type == "static_library" ||
invoker.static_component_type == "source_set")
_component_mode = invoker.static_component_type
} else if (is_android || !defined(invoker.sources)) {
# When there are no sources defined, use a source set to avoid creating
    # an empty static library (which generally doesn't work).
#
# When we changed components to default from source sets to static
# libraries, an Android benchmark regressed slightly
# (https://crbug.com/619593). We don't have a good theory on why this might
# be since theoretically it should be the same. It could be something as
# silly as random code locality luck.
#
# There seems to be no build-time performance hit to using source sets on
# Android (the normal reason for defaulting to static libraries), so we
# make the default on Android to be source set.
#
# If it's been a long time since this was added and you're skeptical,
# please feel free to remove the Android exception and see if any
# benchmarks obviously regress. If not, it would be great to standardize
# with the rest of the platforms.
_component_mode = "source_set"
} else {
_component_mode = "static_library"
}
target(_component_mode, target_name) {
# Explicitly forward visibility, implicitly forward everything else.
# Forwarding "*" doesn't recurse into nested scopes (to avoid copying all
# globals into each template invocation), so won't pick up file-scoped
# variables. Normally this isn't too bad, but visibility is commonly
# defined at the file scope. Explicitly forwarding visibility and then
# excluding it from the "*" set works around this problem.
# See http://crbug.com/594610
forward_variables_from(invoker, [ "visibility" ])
forward_variables_from(invoker, "*", [ "visibility" ])
# All shared libraries must have the sanitizer deps to properly link in
# asan mode (this target will be empty in other cases).
if (!defined(deps)) {
deps = []
}
deps += [ "//build/config:exe_and_shlib_deps" ]
}
}
# Component defaults
set_defaults("component") {
if (is_component_build) {
configs = default_shared_library_configs
if (is_android) {
configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
}
} else {
configs = default_compiler_configs
}
}
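# Example usage of the component() template above (names are illustrative):
#
#   component("my_component") {
#     sources = [ "my_component.cc" ]
#     defines = [ "MY_COMPONENT_IMPLEMENTATION" ]
#     deps = [ "//some/other:target" ]
#   }
#
# In a component build this produces a shared library; otherwise it becomes a
# static library (or a source set on Android, when no sources are given, or
# when the invoker sets static_component_type to "source_set").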


@ -0,0 +1,6 @@
brettw@chromium.org
dpranke@chromium.org
scottmg@chromium.org
per-file BUILDCONFIG.gn=brettw@chromium.org
per-file BUILDCONFIG.gn=set noparent


@ -0,0 +1,50 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/sanitizers/sanitizers.gni")
import("//build/toolchain/toolchain.gni")
# This is included by reference in the //build/config/compiler config that
# is applied to all targets. It is here to separate out the logic.
config("compiler") {
# These flags are shared between the C compiler and linker.
defines = [
"_LINUX_SOURCE_COMPAT=1",
"__STDC_FORMAT_MACROS",
"_ALL_SOURCE=1",
]
cflags = [
"-Wall",
"-Wno-unused-parameter",
"-pthread",
"-Wmissing-field-initializers",
"-Wno-uninitialized",
"-mcpu=power5+",
"-mfprnd",
"-mno-popcntb",
"-maix64",
"-fdata-sections",
"-ffunction-sections",
"-O3",
# "-Werror"
# We need to find a way to fix the TOC warnings if we want to enable this.
]
cflags_cc = [
"-std=gnu++11",
"-fno-rtti",
"-fno-exceptions",
"-Wno-narrowing",
"-Wnon-virtual-dtor",
]
ldflags = [
"-pthread",
"-maix64",
"-Wl,-bbigtoc",
]
}


@ -0,0 +1,53 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/sanitizers/sanitizers.gni")
# Temporarily disable tcmalloc on arm64 linux to get rid of compilation errors.
if (is_android || current_cpu == "mipsel" || is_mac || is_ios || is_asan ||
is_lsan || is_tsan || is_msan || is_win || is_syzyasan || is_fuchsia ||
(is_linux && target_cpu == "arm64")) {
_default_allocator = "none"
} else {
_default_allocator = "tcmalloc"
}
# The debug CRT on Windows has some debug features that are incompatible with
# the shim. NaCl in particular does seem to link some binaries statically
# against the debug CRT with "is_nacl=false".
if ((is_linux || is_android || is_mac ||
(is_win && !is_component_build && !is_debug)) && !is_asan && !is_lsan &&
!is_tsan && !is_msan) {
_default_use_allocator_shim = true
} else {
_default_use_allocator_shim = false
}
declare_args() {
# Memory allocator to use. Set to "none" to use default allocator.
use_allocator = _default_allocator
# Causes all the allocations to be routed via allocator_shim.cc.
use_allocator_shim = _default_use_allocator_shim
}
if (is_nacl) {
# Turn off the build flag for NaCL builds to minimize confusion, as NaCL
# doesn't support the heap shim.
use_allocator_shim = false
}
assert(use_allocator == "none" || use_allocator == "tcmalloc")
assert(!is_win || use_allocator == "none", "Tcmalloc doesn't work on Windows.")
assert(!is_mac || use_allocator == "none", "Tcmalloc doesn't work on macOS.")
assert(
!use_allocator_shim || is_linux || is_android || is_win || is_mac,
"use_allocator_shim is supported only on Linux, Android, Windows and macOS targets")
if (is_win && use_allocator_shim) {
assert(!is_component_build,
"The allocator shim doesn't work for the component build on Windows.")
}


@ -0,0 +1,237 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/android/config.gni")
import("//build/config/sanitizers/sanitizers.gni")
assert(is_android)
# This is included by reference in the //build/config/compiler config that
# is applied to all targets. It is here to separate out the logic that is
# Android-only.
config("compiler") {
cflags = [
"-ffunction-sections",
"-fno-short-enums",
]
defines = [
"ANDROID",
# The NDK has these things, but doesn't define the constants to say that it
# does. Define them here instead.
"HAVE_SYS_UIO_H",
# Forces full rebuilds on NDK rolls.
"ANDROID_NDK_VERSION=${android_ndk_version}",
]
if (is_clang) {
if (current_cpu == "mips64el") {
cflags += [
# Have to force IAS for mips64.
"-fintegrated-as",
]
}
} else {
# Clang doesn't support these flags.
cflags += [ "-finline-limit=64" ]
}
ldflags = [
"-Wl,--no-undefined",
# Don't allow visible symbols from libgcc or libc++ to be
# re-exported.
"-Wl,--exclude-libs=libgcc.a",
"-Wl,--exclude-libs=libc++_static.a",
# Don't allow visible symbols from libraries that contain
# assembly code with symbols that aren't hidden properly.
# http://crbug.com/448386
"-Wl,--exclude-libs=libvpx_assembly_arm.a",
]
if (is_clang) {
if (current_cpu == "arm") {
abi_target = "arm-linux-androideabi"
} else if (current_cpu == "x86") {
abi_target = "i686-linux-androideabi"
} else if (current_cpu == "arm64") {
abi_target = "aarch64-linux-android"
} else if (current_cpu == "x64") {
      # Placeholder for x64 support, not tested.
# TODO: Enable clang support for Android x64. http://crbug.com/539781
abi_target = "x86_64-linux-androideabi"
} else if (current_cpu == "mipsel") {
abi_target = "mipsel-linux-android"
} else if (current_cpu == "mips64el") {
      # Placeholder for mips64 support, not tested.
abi_target = "mips64el-linux-androideabi"
} else {
assert(false, "Architecture not supported")
}
cflags += [ "--target=$abi_target" ]
ldflags += [ "--target=$abi_target" ]
}
# Assign any flags set for the C compiler to asmflags so that they are sent
# to the assembler.
asmflags = cflags
}
# This is included by reference in the //build/config/compiler:runtime_library
# config that is applied to all targets. It is here to separate out the logic
# that is Android-only. Please see that target for advice on what should go in
# :runtime_library vs. :compiler.
config("runtime_library") {
# NOTE: The libc++ header include paths below are specified in cflags_cc
# rather than include_dirs because they need to come after include_dirs.
# Think of them like system headers, but don't use '-isystem' because the
# arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
# strange errors. The include ordering here is important; change with
# caution.
cflags_cc = []
if (android_ndk_major_version >= 13) {
libcxx_include_path =
rebase_path("$android_libcpp_root/include", root_build_dir)
libcxxabi_include_path =
rebase_path("$android_ndk_root/sources/cxx-stl/llvm-libc++abi/include",
root_build_dir)
if (!is_clang) {
# Per the release notes, GCC is not supported in the NDK starting with
# r13. It's still present, though, and has conflicting declarations of
# float abs(float).
cflags_cc += [ "-Wno-attributes" ]
}
} else {
libcxx_include_path =
rebase_path("$android_libcpp_root/libcxx/include", root_build_dir)
libcxxabi_include_path = rebase_path(
"$android_ndk_root/sources/cxx-stl/llvm-libc++abi/libcxxabi/include",
root_build_dir)
}
cflags_cc += [
"-isystem" + libcxx_include_path,
"-isystem" + libcxxabi_include_path,
"-isystem" +
rebase_path("$android_ndk_root/sources/android/support/include",
root_build_dir),
]
defines = [ "__GNU_SOURCE=1" ] # Necessary for clone().
ldflags = [ "-nostdlib" ]
lib_dirs = [ android_libcpp_lib_dir ]
# The libc++ runtime library (must come first).
# ASan needs to dynamically link to libc++ even in static builds so
# that it can interpose operator new.
if (is_component_build || is_asan) {
libs = [ "c++_shared" ]
} else {
libs = [ "c++_static" ]
}
libs += [
"c++abi",
"android_support",
]
# arm builds of libc++ starting in NDK r12 depend on unwind.
if (current_cpu == "arm") {
libs += [ "unwind" ]
}
# Manually link the libgcc.a that the cross compiler uses. This is
# absolute because the linker will look inside the sysroot if it's not.
libs += [
rebase_path(android_libgcc_file),
"c",
]
# Clang with libc++ does not require an explicit atomic library reference.
if (!is_clang) {
libs += [ "atomic" ]
}
if (is_clang) {
# Work around incompatibilities between bionic and clang headers.
defines += [
"__compiler_offsetof=__builtin_offsetof",
"nan=__builtin_nan",
]
if (current_cpu == "x64" || current_cpu == "arm64" ||
current_cpu == "mips64el") {
# 64-bit targets build with NDK 21, 32-bit targets with NDK 16
# (see ./config.gni). When using clang, NDK 21 defines snprintf to
      # something for a kind of _FORTIFY_SOURCE support, see
# third_party/android_tools/ndk/platforms/android-21/arch-x86_64/usr/include/stdio.h
# Making snprintf a macro breaks base/strings/string_utils.h which
# defines base::snprintf(). So define snprintf to itself to force the
# NDK to not redefine it. This disables _chk for snprintf, but since
# 32-bit versions use NDK 16 which doesn't have any fortify support, that
# seems ok. b/32067310 tracks better fortify support with clang.
# TODO(thakis): Remove this once b/32067310 is fixed.
defines += [ "snprintf=snprintf" ]
}
}
# TODO(jdduke) Re-enable on mips after resolving linking
# issues with libc++ (crbug.com/456380).
if (current_cpu != "mipsel" && current_cpu != "mips64el") {
ldflags += [ "-Wl,--warn-shared-textrel" ]
}
}
config("executable_config") {
cflags = [ "-fPIE" ]
asmflags = [ "-fPIE" ]
ldflags = [ "-pie" ]
}
config("hide_all_but_jni_onload") {
ldflags = [ "-Wl,--version-script=" + rebase_path(
"//build/android/android_only_explicit_jni_exports.lst") ]
}
config("hide_all_but_jni") {
ldflags = [ "-Wl,--version-script=" +
rebase_path("//build/android/android_only_jni_exports.lst") ]
}
# Instrumentation -------------------------------------------------------------
#
# The BUILDCONFIG file sets the "default_cygprofile_instrumentation" config on
# targets by default. You can override whether the cygprofile instrumentation is
# used on a per-target basis:
#
# configs -= [ "//build/config/android:default_cygprofile_instrumentation" ]
# configs += [ "//build/config/android:no_cygprofile_instrumentation" ]
config("default_cygprofile_instrumentation") {
if (use_order_profiling) {
configs = [ ":cygprofile_instrumentation" ]
} else {
configs = [ ":no_cygprofile_instrumentation" ]
}
}
config("cygprofile_instrumentation") {
defines = [ "CYGPROFILE_INSTRUMENTATION=1" ]
cflags = [ "-finstrument-functions" ]
if (!is_clang) {
cflags += [
# Allow mmx intrinsics to inline, so that the compiler can expand the intrinsics.
"-finstrument-functions-exclude-file-list=mmintrin.h",
# Avoid errors with current NDK:
# "third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/include/arm_neon.h:3426:3: error: argument must be a constant"
"-finstrument-functions-exclude-file-list=arm_neon.h",
]
}
}
config("no_cygprofile_instrumentation") {
}


@ -0,0 +1,3 @@
agrieve@chromium.org
# COMPONENT: Build


@ -0,0 +1,374 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file contains common system config stuff for the Android build.
if (is_android) {
import("//build_overrides/build.gni")
has_chrome_android_internal =
exec_script("//build/dir_exists.py",
[ rebase_path("//clank", root_build_dir) ],
"string") == "True"
# We are using a separate declare_args block for only this argument so that
# we can decide if we have to pull in definitions from the internal config
# early.
declare_args() {
# Enables using the internal Chrome for Android repository. The default
# value depends on whether the repository is available, and if it's not but
# this argument is manually set to True, the generation will fail.
# The main purpose of this argument is to avoid having to maintain 2
# repositories to support both public only and internal builds.
enable_chrome_android_internal = has_chrome_android_internal
}
if (enable_chrome_android_internal) {
import("//clank/config.gni")
}
if (!defined(extra_chrome_shared_library_configs)) {
extra_chrome_shared_library_configs = []
}
if (!defined(default_android_ndk_root)) {
default_android_ndk_root = "//third_party/android_tools/ndk"
default_android_ndk_version = "r12b"
default_android_ndk_major_version = 12
} else {
assert(defined(default_android_ndk_version))
assert(defined(default_android_ndk_major_version))
}
if (!defined(default_android_sdk_root)) {
default_android_sdk_root = "//third_party/android_tools/sdk"
default_android_sdk_version = "25"
default_android_sdk_build_tools_version = "25.0.2"
}
if (!defined(default_lint_android_sdk_root)) {
# Purposefully repeated so that downstream can change
# default_android_sdk_root without changing lint version.
default_lint_android_sdk_root = "//third_party/android_tools/sdk"
default_lint_android_sdk_version = "25"
}
if (!defined(default_extras_android_sdk_root)) {
# Purposefully repeated so that downstream can change
# default_android_sdk_root without changing where we load the SDK extras
# from. (Google Play services, etc.)
default_extras_android_sdk_root = "//third_party/android_tools/sdk"
}
if (!defined(default_android_keystore_path)) {
default_android_keystore_path =
"//build/android/ant/chromium-debug.keystore"
default_android_keystore_name = "chromiumdebugkey"
default_android_keystore_password = "chromium"
}
# TODO(paulmiller): Remove; superseded by google_play_services_package.
if (!defined(google_play_services_library)) {
google_play_services_library =
"//third_party/android_tools:google_play_services_default_java"
}
# TODO(paulmiller): Remove; superseded by google_play_services_package.
if (!defined(google_play_services_resources)) {
google_play_services_resources =
"//third_party/android_tools:google_play_services_default_resources"
}
# google_play_services_package contains the path where individual client
# targets (e.g. google_play_services_base_java) are located.
if (!defined(google_play_services_package)) {
google_play_services_package = "//third_party/android_tools"
}
webview_public_framework_jar =
"//third_party/android_platform/webview/frameworks_7.1.1_r28.jar"
if (!defined(webview_framework_jar)) {
webview_framework_jar = webview_public_framework_jar
}
declare_args() {
android_ndk_root = default_android_ndk_root
android_ndk_version = default_android_ndk_version
android_ndk_major_version = default_android_ndk_major_version
android_sdk_root = default_android_sdk_root
android_sdk_version = default_android_sdk_version
android_sdk_build_tools_version = default_android_sdk_build_tools_version
lint_android_sdk_root = default_lint_android_sdk_root
lint_android_sdk_version = default_lint_android_sdk_version
# Libc++ library directory. Override to use a custom libc++ binary.
android_libcpp_lib_dir = ""
    # Android versionCode for android_apk()s that don't explicitly set one.
    android_default_version_code = "1"
    # Android versionName for android_apk()s that don't explicitly set one.
android_default_version_name = "Developer Build"
# The path to the keystore to use for signing builds.
android_keystore_path = default_android_keystore_path
# The name of the keystore to use for signing builds.
android_keystore_name = default_android_keystore_name
# The password for the keystore to use for signing builds.
android_keystore_password = default_android_keystore_password
# Set to true to run findbugs on JAR targets.
run_findbugs = false
# Set to true to enable verbose findbugs logging. This does nothing if
# run_findbugs is false.
findbugs_verbose = false
# Enables verbose proguard output (summaries and unfiltered output).
proguard_verbose = false
# Java debug on Android. Having this on enables multidexing, and turning it
# off will enable proguard.
is_java_debug = is_debug
# Set to true to enable the Errorprone compiler
use_errorprone_java_compiler = false
# Enables EMMA Java code coverage. Instruments classes during build to
# produce .ec files during runtime
emma_coverage = false
# EMMA filter string consisting of a list of inclusion/exclusion patterns
# separated with whitespace and/or comma. Only has effect if
# emma_coverage==true
emma_filter = ""
# Disables process isolation when building _incremental targets.
# Required for Android M+ due to SELinux policies (stronger sandboxing).
disable_incremental_isolated_processes = false
# Speeds up incremental compiles by compiling only changed files.
enable_incremental_javac = false
    # Adds instrumentation to each function. Writes a file with the order that
# functions are called at startup.
use_order_profiling = false
    # Builds a secondary abi for APKs, supporting a 32-bit arch as the
    # secondary abi in 64-bit Monochrome and WebView.
build_apk_secondary_abi = true
# Enables java8 language features (via retrolambda).
# work-in-progress (http://crbug.com/642600)
use_java8 = false
# Build incremental targets whenever possible.
    # Ex. with this arg set to true, the chrome_public_apk target results in
# chrome_public_apk_incremental being built.
incremental_apk_by_default = false
}
# We need a second declare_args block to make sure we are using the overridden
# value of the arguments set above.
declare_args() {
# Speed up dexing using dx --incremental.
enable_incremental_dx = is_java_debug
}
# Neither of these should ever be used for release builds since they are
# somewhat experimental and dx --incremental is known to not produce
# byte-for-byte identical output.
assert(!(enable_incremental_dx && !is_java_debug))
assert(!(enable_incremental_javac && !is_java_debug))
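  # For reference, these arguments are normally overridden on the gn gen
  # command line; the output directory and values here are placeholders:
  #
  #   gn gen out/Android --args='target_os="android" target_cpu="arm64" run_findbugs=true'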
# Host stuff -----------------------------------------------------------------
# Defines the name the Android build gives to the current host CPU
# architecture, which is different than the names GN uses.
if (host_cpu == "x64") {
android_host_arch = "x86_64"
} else if (host_cpu == "x86") {
android_host_arch = "x86"
} else {
assert(false, "Need Android toolchain support for your build CPU arch.")
}
  # Defines the name the Android build gives to the current host OS, which is
  # different than the names GN uses.
if (host_os == "linux") {
android_host_os = "linux"
} else if (host_os == "mac") {
android_host_os = "darwin"
} else {
assert(false, "Need Android toolchain support for your build OS.")
}
# Directories and files ------------------------------------------------------
#
  # We define many of the dir strings here for each output architecture (rather
# than just the current one) since these are needed by the Android toolchain
# file to define toolchains for all possible targets in one pass.
android_sdk = "${android_sdk_root}/platforms/android-${android_sdk_version}"
# Path to the Android NDK and SDK.
android_ndk_include_dir = "$android_ndk_root/usr/include"
android_sdk_tools = "${android_sdk_root}/tools"
android_sdk_build_tools =
"${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
# Path to the SDK's android.jar
android_sdk_jar = "$android_sdk/android.jar"
zipalign_path = "$android_sdk_build_tools/zipalign"
# Subdirectories inside android_ndk_root that contain the sysroot for the
# associated platform.
# If you raise this, reevaluate the snprintf=snprintf in ./BUILD.gn.
_android_api_level = 16
x86_android_sysroot_subdir =
"platforms/android-${_android_api_level}/arch-x86"
arm_android_sysroot_subdir =
"platforms/android-${_android_api_level}/arch-arm"
mips_android_sysroot_subdir =
"platforms/android-${_android_api_level}/arch-mips"
# If you raise this, reevaluate the snprintf=snprintf in ./BUILD.gn.
_android64_api_level = 21
x86_64_android_sysroot_subdir =
"platforms/android-${_android64_api_level}/arch-x86_64"
arm64_android_sysroot_subdir =
"platforms/android-${_android64_api_level}/arch-arm64"
mips64_android_sysroot_subdir =
"platforms/android-${_android64_api_level}/arch-mips64"
# Toolchain root directory for each build. The actual binaries are inside
# a "bin" directory inside of these.
_android_toolchain_version = "4.9"
_android_toolchain_detailed_version = "4.9.x"
x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
x86_64_android_toolchain_root = "$android_ndk_root/toolchains/x86_64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
arm64_android_toolchain_root = "$android_ndk_root/toolchains/aarch64-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
mips64_android_toolchain_root = "$android_ndk_root/toolchains/mips64el-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
# Location of libgcc. This is only needed for the current GN toolchain, so we
# only need to define the current one, rather than one for every platform
# like the toolchain roots.
if (current_cpu == "x86") {
android_prebuilt_arch = "android-x86"
_binary_prefix = "i686-linux-android"
android_toolchain_root = "$x86_android_toolchain_root"
android_libgcc_file = "$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
} else if (current_cpu == "arm") {
android_prebuilt_arch = "android-arm"
_binary_prefix = "arm-linux-androideabi"
android_toolchain_root = "$arm_android_toolchain_root"
android_libgcc_file = "$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_detailed_version}/libgcc.a"
} else if (current_cpu == "mipsel") {
android_prebuilt_arch = "android-mips"
_binary_prefix = "mipsel-linux-android"
android_toolchain_root = "$mips_android_toolchain_root"
android_libgcc_file = "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
} else if (current_cpu == "x64") {
android_prebuilt_arch = "android-x86_64"
_binary_prefix = "x86_64-linux-android"
android_toolchain_root = "$x86_64_android_toolchain_root"
android_libgcc_file = "$android_toolchain_root/lib/gcc/x86_64-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
} else if (current_cpu == "arm64") {
android_prebuilt_arch = "android-arm64"
_binary_prefix = "aarch64-linux-android"
android_toolchain_root = "$arm64_android_toolchain_root"
android_libgcc_file = "$android_toolchain_root/lib/gcc/aarch64-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
} else if (current_cpu == "mips64el") {
android_prebuilt_arch = "android-mips64"
_binary_prefix = "mips64el-linux-android"
android_toolchain_root = "$mips64_android_toolchain_root"
android_libgcc_file = "$android_toolchain_root/lib/gcc/mips64el-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
} else {
assert(false, "Need android libgcc support for your target arch.")
}
android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-"
android_readelf = "${android_tool_prefix}readelf"
android_objcopy = "${android_tool_prefix}objcopy"
android_gdbserver =
"$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
# Toolchain stuff ------------------------------------------------------------
android_libcpp_root = "$android_ndk_root/sources/cxx-stl/llvm-libc++"
# ABI ------------------------------------------------------------------------
if (current_cpu == "x86") {
android_app_abi = "x86"
} else if (current_cpu == "arm") {
import("//build/config/arm.gni")
if (arm_version < 7) {
android_app_abi = "armeabi"
} else {
android_app_abi = "armeabi-v7a"
}
} else if (current_cpu == "mipsel") {
android_app_abi = "mips"
} else if (current_cpu == "x64") {
android_app_abi = "x86_64"
} else if (current_cpu == "arm64") {
android_app_abi = "arm64-v8a"
} else if (current_cpu == "mips64el") {
android_app_abi = "mips64"
} else {
assert(false, "Unknown Android ABI: " + current_cpu)
}
if (android_libcpp_lib_dir == "") {
android_libcpp_lib_dir = "${android_libcpp_root}/libs/${android_app_abi}"
}
# Secondary ABI -------------------------------------------------------------
if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el") {
android_64bit_target_cpu = true
} else if (target_cpu == "arm" || target_cpu == "x86" ||
target_cpu == "mipsel") {
android_64bit_target_cpu = false
} else {
assert(false, "Unknown target CPU: $target_cpu")
}
# Intentionally do not define android_app_secondary_abi_cpu and
# android_app_secondary_abi for 32-bit target_cpu, since they are not used.
if (target_cpu == "arm64") {
android_secondary_abi_cpu = "arm"
android_app_secondary_abi = "armeabi-v7a"
} else if (target_cpu == "x64") {
android_secondary_abi_cpu = "x86"
android_app_secondary_abi = "x86"
} else if (target_cpu == "mips64el") {
android_secondary_abi_cpu = "mipsel"
android_app_secondary_abi = "mips"
}
if (defined(android_secondary_abi_cpu)) {
if (is_clang) {
android_secondary_abi_toolchain =
"//build/toolchain/android:android_clang_${android_secondary_abi_cpu}"
} else {
android_secondary_abi_toolchain =
"//build/toolchain/android:android_${android_secondary_abi_cpu}"
}
}
}
declare_args() {
# Enables used resource whitelist generation. Set for official builds only
# as a large amount of build output is generated.
enable_resource_whitelist_generation = is_android && is_official_build
}

(Diffs for two files are not shown here because of their size.)

@ -0,0 +1,126 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/v8_target_cpu.gni")
# These are primarily relevant in current_cpu == "arm" contexts, where
# ARM code is being compiled. But they can also be relevant in the
# other contexts when the code will change its behavior based on the
# cpu it wants to generate code for.
if (current_cpu == "arm" || v8_current_cpu == "arm") {
declare_args() {
# Version of the ARM processor when compiling on ARM. Ignored on non-ARM
# platforms.
arm_version = 7
# The ARM architecture. This will be a string like "armv6" or "armv7-a".
# An empty string means to use the default for the arm_version.
arm_arch = ""
# The ARM floating point hardware. This will be a string like "neon" or
# "vfpv3". An empty string means to use the default for the arm_version.
arm_fpu = ""
# The ARM floating point mode. This is either the string "hard", "soft", or
# "softfp". An empty string means to use the default one for the
# arm_version.
arm_float_abi = ""
# The ARM variant-specific tuning mode. This will be a string like "armv6"
# or "cortex-a15". An empty string means to use the default for the
# arm_version.
arm_tune = ""
# Whether to use the neon FPU instruction set or not.
arm_use_neon = ""
# Whether to enable optional NEON code paths.
arm_optionally_use_neon = false
# Thumb is a reduced instruction set available on some ARM processors that
# has increased code density.
arm_use_thumb = true
}
assert(arm_float_abi == "" || arm_float_abi == "hard" ||
arm_float_abi == "soft" || arm_float_abi == "softfp")
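  # As an illustration, a 32-bit ARM build could override these defaults at
  # gen time (the values shown are only examples):
  #
  #   gn gen out/arm --args='target_cpu="arm" arm_version=7 arm_float_abi="hard"'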
if (arm_use_neon == "") {
if (current_os == "linux" && target_cpu != v8_target_cpu) {
# Don't use neon on V8 simulator builds as a default.
arm_use_neon = false
} else {
arm_use_neon = true
}
}
if (arm_version == 6) {
if (arm_arch == "") {
arm_arch = "armv6"
}
if (arm_tune != "") {
arm_tune = ""
}
if (arm_float_abi == "") {
arm_float_abi = "softfp"
}
if (arm_fpu == "") {
arm_fpu = "vfp"
}
arm_use_thumb = false
} else if (arm_version == 7) {
if (arm_arch == "") {
arm_arch = "armv7-a"
}
if (arm_tune == "") {
arm_tune = "generic-armv7-a"
}
if (arm_float_abi == "") {
if (current_os == "android" || target_os == "android") {
arm_float_abi = "softfp"
} else if (current_os == "linux" && target_cpu != v8_target_cpu) {
# Default to the same as Android for V8 simulator builds.
arm_float_abi = "softfp"
} else {
arm_float_abi = "hard"
}
}
if (arm_fpu == "") {
if (arm_use_neon) {
arm_fpu = "neon"
} else {
arm_fpu = "vfpv3-d16"
}
}
} else if (arm_version == 8) {
if (arm_arch == "") {
arm_arch = "armv8-a"
}
if (arm_tune == "") {
arm_tune = "generic-armv8-a"
}
if (arm_float_abi == "") {
if (current_os == "android" || target_os == "android") {
arm_float_abi = "softfp"
} else {
arm_float_abi = "hard"
}
}
if (arm_fpu == "") {
if (arm_use_neon) {
arm_fpu = "neon"
} else {
arm_fpu = "vfpv3-d16"
}
}
}
} else if (current_cpu == "arm64" || v8_current_cpu == "arm64") {
# arm64 supports only "hard".
arm_float_abi = "hard"
arm_use_neon = true
}


@ -0,0 +1,22 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
declare_args() {
# Select the desired branding flavor. False means normal Chromium branding,
# true means official Google Chrome branding (requires extra Google-internal
# resources).
is_chrome_branded = false
  # Break chrome.dll into multiple pieces based on process type. Only available
# on Windows.
is_multi_dll_chrome = is_win && !is_component_build
}
# Refers to the subdirectory for branding in various places including
# chrome/app/theme.
if (is_chrome_branded) {
branding_path_component = "google_chrome"
} else {
branding_path_component = "chromium"
}


@ -0,0 +1,85 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/chromecast_build.gni")
assert(is_chromecast)
config("static_config") {
if (!is_clang) {
ldflags = [
# Don't allow visible symbols from libraries that contain
# assembly code with symbols that aren't hidden properly.
# http://b/26390825
"-Wl,--exclude-libs=libffmpeg.a",
]
if (!is_android) {
ldflags += [
# We want to statically link libstdc++/libgcc on Linux.
# (On Android, libstdc++ and libgcc aren't used.)
"-static-libstdc++",
"-static-libgcc",
]
}
}
}
config("ldconfig") {
visibility = [ ":*" ]
# Chromecast executables depend on several shared libraries in
# /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary.
# This is explicitly disabled in Chrome for security reasons (see comments in
  # //build/config/gcc/BUILD.gn), but necessary on Chromecast so that OEMs may
# override the default libraries shipped in the Cast receiver package.
ldflags = [
"-Wl,-rpath=/oem_cast_shlib",
"-Wl,-rpath=\$ORIGIN/lib",
"-Wl,-rpath=\$ORIGIN",
]
# Binaries which don't live in the same directory as Chrome component
# libraries may still depend on them. Explicitly add the component library
# directory to the rpath for the component build.
if (is_component_build) {
ldflags += [ "-Wl,-rpath=/system/chrome" ]
}
}
config("executable_config") {
configs = [ ":ldconfig" ]
if (!is_clang && current_cpu == "arm") {
ldflags = [
# Export stdlibc++ and libgcc symbols to force shlibs to refer to these
# symbols from the executable.
"-Wl,--export-dynamic",
"-lm", # stdlibc++ requires math.h
# In case we redefined stdlibc++ symbols (e.g. tc_malloc)
"-Wl,--allow-multiple-definition",
"-Wl,--whole-archive",
"-l:libstdc++.a",
"-l:libgcc.a",
"-Wl,--no-whole-archive",
]
# Despite including libstdc++/libgcc archives, we still need to specify
# static linking for them in order to prevent the executable from having a
# dynamic dependency on them.
configs += [ ":static_config" ]
}
}
# Shared libraries should not have RPATH or RUNPATH set. This allows the
# shared libs to inherit RPATH from the parent executable that is loading
# the shared library. (See internal b/37514052 for more details.)
config("shared_library_config") {
if (current_cpu == "arm") {
configs = [ ":static_config" ]
}
}


@ -0,0 +1,36 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# The args declared in this file should be referenced by components outside of
# //chromecast. Args needed only in //chromecast should be declared in
# //chromecast/chromecast.gni.
declare_args() {
# Set this true for a Chromecast build. Chromecast builds are supported on
# Linux and Android.
is_chromecast = false
# Set this true for an audio-only Chromecast build.
is_cast_audio_only = false
}
# Note(slan): This arg depends on the value of is_chromecast, and thus must be
# declared in a separate block. These blocks can be combined when/if
# crbug.com/542846 is resolved.
declare_args() {
# True if Chromecast build is targeted for linux desktop. This type of build
# is useful for testing and development, but currently supports only a subset
# of Cast functionality. Though this defaults to true for x86 Linux devices,
  # this should be overridden manually for an embedded x86 build.
# TODO(slan): Remove instances of this when x86 is a fully supported platform.
is_cast_desktop_build = is_chromecast && target_os == "linux" &&
(target_cpu == "x86" || target_cpu == "x64")
}
# Assert that Chromecast is being built for a supported platform.
assert(is_linux || is_android || !is_chromecast,
"Chromecast builds are not supported on $target_os")
# Assert that is_cast_audio_only and is_cast_desktop_build are both false on a
# non-Chromecast build.
assert(is_chromecast || (!is_cast_audio_only && !is_cast_desktop_build))
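# For reference, a desktop Linux Cast build could be generated with something
# like (the output directory name is a placeholder):
#
#   gn gen out/cast --args='is_chromecast=true target_os="linux" target_cpu="x64"'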


@ -0,0 +1,65 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("clang.gni")
config("find_bad_constructs") {
if (clang_use_chrome_plugins) {
cflags = []
# On Windows, the plugin is built directly into clang, so there's
# no need to load it dynamically.
if (is_mac || is_ios) {
cflags += [
"-Xclang",
"-load",
"-Xclang",
rebase_path("${clang_base_path}/lib/libFindBadConstructs.dylib",
root_build_dir),
]
} else if (is_linux || is_android) {
cflags += [
"-Xclang",
"-load",
"-Xclang",
rebase_path("${clang_base_path}/lib/libFindBadConstructs.so",
root_build_dir),
]
}
cflags += [
"-Xclang",
"-add-plugin",
"-Xclang",
"find-bad-constructs",
"-Xclang",
"-plugin-arg-find-bad-constructs",
"-Xclang",
"check-auto-raw-pointer",
]
if (is_linux || is_android) {
cflags += [
"-Xclang",
"-plugin-arg-find-bad-constructs",
"-Xclang",
"check-ipc",
]
}
}
}
# Enables some extra Clang-specific warnings. Some third-party code won't
# compile with these, so such targets may want to remove this config.
config("extra_warnings") {
cflags = [
"-Wheader-hygiene",
# Warns when a const char[] is converted to bool.
"-Wstring-conversion",
"-Wtautological-overlap-compare",
]
}
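# Targets that cannot build cleanly with these warnings can opt back out; a
# minimal sketch (guarded the same way the config is added by default):
#
#   if (is_clang && !is_nacl) {
#     configs -= [ "//build/config/clang:extra_warnings" ]
#   }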


@ -0,0 +1,13 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/toolchain/toolchain.gni")
declare_args() {
# Indicates if the build should use the Chrome-specific plugins for enforcing
# coding guidelines, etc. Only used when compiling with Clang.
clang_use_chrome_plugins = is_clang && !is_nacl && !use_xcode_clang
clang_base_path = "//third_party/llvm-build/Release+Asserts"
}

(Diff for one file is not shown here because of its size.)

@ -0,0 +1,182 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/android/config.gni")
import("//build/config/arm.gni")
import("//build/config/chrome_build.gni")
import("//build/config/chromecast_build.gni")
import("//build/config/compiler/pgo/pgo.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/toolchain/goma.gni")
import("//build/toolchain/toolchain.gni")
import("//build_overrides/build.gni")
declare_args() {
# How many symbols to include in the build. This affects the performance of
# the build since the symbols are large and dealing with them is slow.
# 2 means regular build with symbols.
# 1 means minimal symbols, usually enough for backtraces only. Symbols with
# internal linkage (static functions or those in anonymous namespaces) may not
# appear when using this level.
# 0 means no symbols.
# -1 means auto-set according to debug/release and platform.
symbol_level = -1
# Compile in such a way as to enable profiling of the generated code. For
# example, don't omit the frame pointer and leave in symbols.
enable_profiling = false
# use_debug_fission: whether to use split DWARF debug info
# files. This can reduce link time significantly, but is incompatible
# with some utilities such as icecc and ccache. Requires gold and
# gcc >= 4.8 or clang.
# http://gcc.gnu.org/wiki/DebugFission
#
# This is a placeholder value indicating that the code below should set
# the default. This is necessary to delay the evaluation of the default
# value expression until after its input values such as use_gold have
# been set, e.g. by a toolchain_args() block.
use_debug_fission = "default"
# Tell VS to create a PDB that references information in .obj files rather
# than copying it all. This should improve linker performance. mspdbcmf.exe
# can be used to convert a fastlink pdb to a normal one.
is_win_fastlink = false
# Whether or not we should turn on incremental WPO. Only affects the VS
# Windows build.
use_incremental_wpo = false
# Root directory that will store the MSVC link repro. This should only be
  # used for debugging purposes on the builders where MSVC linker flakiness
  # has been observed. The targets for which a link repro should be generated
  # should add something like this to their configuration:
# if (linkrepro_root_dir != "") {
# ldflags = ["/LINKREPRO:" + linkrepro_root_dir + "/" + target_name]
# }
#
# Note that doing a link repro uses a lot of disk space and slows down the
# build, so this shouldn't be enabled on too many targets.
#
# See crbug.com/669854.
linkrepro_root_dir = ""
# Whether or not we should use position independent code.
use_pic = true
}
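# As an example, a profiling-friendly release build might be generated with
# something like (the directory name and values are illustrative):
#
#   gn gen out/Profile --args="is_debug=false symbol_level=1 enable_profiling=true"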
# Determine whether to enable or disable frame pointers, based on the platform
# and build arguments.
if (is_mac || is_ios) {
enable_frame_pointers = true
} else if (is_win) {
# 64-bit Windows ABI doesn't support frame pointers.
if (target_cpu == "x64") {
enable_frame_pointers = false
} else {
enable_frame_pointers = true
}
} else if (is_chromeos) {
# ChromeOS requires frame pointers in x64 builds, to support CWP.
# TODO(711784): Building ARM Thumb without frame pointers can lead to code
# in ChromeOS which triggers some ARM A12/A17 errata. They can be disabled
# on non-x64 once that is resolved.
enable_frame_pointers = true
} else if (current_cpu == "arm64") {
# Ensure that stacks from arm64 crash dumps are usable (crbug.com/391706).
enable_frame_pointers = true
} else {
# Explicitly ask for frame pointers, otherwise:
# * Stacks may be missing for sanitizer and profiling builds.
# * Debug tcmalloc can crash (crbug.com/636489).
enable_frame_pointers = using_sanitizer || enable_profiling || is_debug
}
# In general assume that if we have frame pointers then we can use them to
# unwind the stack. However, this requires that they are enabled by default for
# most translation units, that they are emitted correctly, and that the
# compiler or platform provides a way to access them.
can_unwind_with_frame_pointers = enable_frame_pointers
if (current_cpu == "arm" && arm_use_thumb) {
# We cannot currently unwind ARM Thumb frame pointers correctly.
can_unwind_with_frame_pointers = false
} else if (is_win) {
# Windows 32-bit does provide frame pointers, but the compiler does not
# provide intrinsics to access them, so we don't use them.
can_unwind_with_frame_pointers = false
}
assert(!can_unwind_with_frame_pointers || enable_frame_pointers)
declare_args() {
# Whether or not the official builds should be built with full WPO. Enabled by
# default for the PGO and the x64 builds.
if (chrome_pgo_phase > 0) {
full_wpo_on_official = true
} else {
full_wpo_on_official = false
}
}
declare_args() {
# Whether to use the gold linker from binutils instead of lld or bfd.
use_gold =
!use_lld && !(is_chromecast && is_linux &&
(current_cpu == "arm" || current_cpu == "mipsel")) &&
((is_linux && (current_cpu == "x64" || current_cpu == "x86" ||
current_cpu == "arm" || current_cpu == "mipsel")) ||
(is_android && (current_cpu == "x86" || current_cpu == "x64" ||
current_cpu == "arm" || current_cpu == "arm64")) ||
is_fuchsia)
}
# If it wasn't manually set, set to an appropriate default.
assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level")
if (symbol_level == -1) {
if (is_android && use_order_profiling) {
# With instrumentation enabled, debug info puts libchrome.so over 4gb, which
# causes the linker to produce an invalid ELF. http://crbug.com/574476
symbol_level = 0
} else if (is_android && !is_component_build &&
!(android_64bit_target_cpu && !build_apk_secondary_abi)) {
# Reduce symbol level when it will cause invalid elf files to be created
# (due to file size). https://crbug.com/648948.
symbol_level = 1
} else if (is_win && use_goma && !is_clang) {
# goma doesn't support PDB files, so we disable symbols during goma
# compilation because otherwise the redundant debug information generated
# by visual studio (repeated in every .obj file) makes linker
# memory consumption and link times unsustainable (crbug.com/630074).
# Clang on windows does not have this issue.
# If you use is_win_fastlink = true then you can set symbol_level = 2 when
# using goma.
symbol_level = 1
} else if ((!is_nacl && !is_linux) || is_debug || is_official_build ||
is_chromecast) {
    # Linux is slower to build when symbols are part of the target binary,
    # whereas Mac and Windows keep them separate, so default symbols off for
    # release Linux builds, but keep them on for Official and Chromecast builds.
symbol_level = 2
} else if (using_sanitizer) {
# Sanitizers require symbols for filename suppressions to work.
symbol_level = 1
} else {
symbol_level = 0
}
} else if (symbol_level == 2) {
if (is_win) {
# See crbug.com/630074
assert(is_win_fastlink || !use_goma,
"Goma builds that use symbol_level 2 must use is_win_fastlink.")
}
}
# Assert that the configuration isn't going to hit https://crbug.com/648948.
assert(ignore_elf32_limitations || !is_android ||
(android_64bit_target_cpu && !build_apk_secondary_abi) ||
is_component_build || symbol_level < 2,
"Android 32-bit non-component builds cannot have symbol_level=2 " +
"due to 4GiB file size limit, see https://crbug.com/648948. " +
"If you really want to try this out, " +
"set ignore_elf32_limitations=true.")


@ -0,0 +1,101 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/clang/clang.gni")
import("//build/config/compiler/compiler.gni")
import("//build/config/compiler/pgo/pgo.gni")
# Configuration that enables PGO instrumentation.
config("pgo_instrumentation_flags") {
visibility = [ ":default_pgo_flags" ]
cflags = []
ldflags = []
# Only add flags when chrome_pgo_phase == 1, so that variables we would use
# are not required to be defined when we're not actually using PGO.
if (chrome_pgo_phase == 1) {
if (is_clang) {
cflags = [ "-fprofile-instr-generate" ]
if (is_win) {
# Normally, we pass -fprofile-instr-generate to the compiler and it
# automatically passes the right flags to the linker.
# However, on Windows, we call the linker directly, without going
# through the compiler driver. This means we need to pass the right
# flags ourselves.
_clang_rt_base_path =
"$clang_base_path/lib/clang/$clang_version/lib/windows"
if (target_cpu == "x86") {
_clang_rt_suffix = "-i386.lib"
} else if (target_cpu == "x64") {
_clang_rt_suffix = "-x86_64.lib"
}
assert(_clang_rt_suffix != "", "target CPU $target_cpu not supported")
ldflags += [ "$_clang_rt_base_path/clang_rt.profile$_clang_rt_suffix" ]
} else {
ldflags += [ "-fprofile-instr-generate" ]
}
} else if (is_win) {
ldflags = [
# In MSVC, we must use /LTCG when using PGO.
"/LTCG",
# Make sure that enough memory gets allocated for the PGO profiling
# buffers and also cap this memory. Usually a PGI instrumented build
# of chrome_child.dll requires ~55MB of memory for storing its counters,
# etc. Normally the linker should automatically choose an appropriate
# amount of memory, but it doesn't always make a good estimate and
# sometimes allocates too little or too much (and so the instrumented
# image fails to start). Making sure that the buffer has a size in the
# [128 MB, 512 MB] range should prevent this from happening.
"/GENPROFILE:MEMMIN=134217728",
"/GENPROFILE:MEMMAX=536870912",
"/PogoSafeMode",
]
}
}
}
# Configuration that enables optimization using profile data.
config("pgo_optimization_flags") {
visibility = [ ":default_pgo_flags" ]
cflags = []
ldflags = []
# Only add flags when chrome_pgo_phase == 2, so that variables we would use
# are not required to be defined when we're not actually using PGO.
if (chrome_pgo_phase == 2) {
if (is_clang) {
assert(pgo_data_path != "",
"Please set pgo_data_path to point at the profile data")
cflags += [
"-fprofile-instr-use=$pgo_data_path",
# It's possible to have some profile data legitimately missing,
# and at least some profile data always ends up being considered
# out of date, so make sure we don't error for those cases.
"-Wno-profile-instr-unprofiled",
"-Wno-error=profile-instr-out-of-date",
]
} else if (is_win) {
ldflags += [
# In MSVC, we must use /LTCG when using PGO.
"/LTCG",
"/USEPROFILE",
]
}
}
}
# Applies flags necessary when profile-guided optimization is used.
# Flags are only added if PGO is enabled, so that this config is safe to
# include by default.
config("default_pgo_flags") {
if (chrome_pgo_phase == 0) {
# Nothing. This config should be a no-op when chrome_pgo_phase == 0.
} else if (chrome_pgo_phase == 1) {
configs = [ ":pgo_instrumentation_flags" ]
} else if (chrome_pgo_phase == 2) {
configs = [ ":pgo_optimization_flags" ]
}
}


@ -0,0 +1,17 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
declare_args() {
# Specify the current PGO phase.
# Here are the different values that can be used:
# 0 : Means that PGO is turned off.
# 1 : Used during the PGI (instrumentation) phase.
# 2 : Used during the PGO (optimization) phase.
#
# TODO(sebmarchand): Add support for the PGU (update) phase.
chrome_pgo_phase = 0
# When using chrome_pgo_phase = 2, read profile data from this path.
pgo_data_path = ""
}
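# A hypothetical args.gn sketch for the optimization phase described above;
# the profile path below is a placeholder used only for illustration:
#
#   chrome_pgo_phase = 2
#   pgo_data_path = "//path/to/chrome.profdata"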


@ -0,0 +1,23 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file declares build flags for the SSL library configuration.
#
# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
# the global build dependency on it should be removed.
#
# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
# required. See the declare_args block of BUILDCONFIG.gn for advice on how
# to set up feature flags.
# True when we're using OpenSSL for representing certificates. When targeting
# Android, the platform certificate library is used for certificate
# verification. On NaCl, verification isn't supported. On other targets, this
# flag also enables OpenSSL for certificate verification, but this configuration
# is unsupported.
use_openssl_certs = is_android || is_nacl
# True if NSS is used for certificate handling. It is possible to use OpenSSL
# for the crypto library, but NSS for the platform certificate library.
use_nss_certs = is_linux
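# Hypothetical sketch of how a target might branch on these flags; the source
# file names are made up for illustration:
#
#   if (use_openssl_certs) {
#     sources += [ "cert_verify_openssl.cc" ]
#   } else if (use_nss_certs) {
#     sources += [ "cert_verify_nss.cc" ]
#   }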


@ -0,0 +1,8 @@
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
declare_args() {
# Set to true to enable dcheck in Release builds.
dcheck_always_on = false
}
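# A minimal args.gn sketch enabling DCHECKs in a release build:
#
#   is_debug = false
#   dcheck_always_on = true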


@ -0,0 +1,73 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# =============================================
# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
# =============================================
#
# These flags are effectively global. Your feature flag should go near the
# code it controls. Most of these items are here now because they control
# legacy global #defines passed to the compiler (now replaced with generated
# buildflag headers -- see //build/buildflag_header.gni).
#
# There is more advice on where to put build flags in the "Build flag" section
# of //build/config/BUILDCONFIG.gn.
import("//build/config/chrome_build.gni")
import("//build/config/chromecast_build.gni")
if (is_android) {
import("//build/config/android/config.gni")
}
declare_args() {
# Enables Native Client support.
# Temporarily disable nacl on arm64 linux to get rid of compilation errors.
# TODO(mcgrathr): When mipsel-nacl-clang is available, drop the exclusion.
enable_nacl = !is_ios && !is_android && !is_chromecast &&
current_cpu != "mipsel" && !(is_linux && target_cpu == "arm64")
# Non-SFI is not yet supported on mipsel
enable_nacl_nonsfi = current_cpu != "mipsel"
# Enables proprietary codecs and demuxers; e.g. H264, AAC, MP3, and MP4.
# We always build Google Chrome and Chromecast with proprietary codecs.
#
# Note: this flag is used by WebRTC which is DEPSed into Chrome. Moving it
# out of //build will require using the build_overrides directory.
proprietary_codecs = is_chrome_branded || is_chromecast
# The safe_browsing_mode variable controls the build-time configuration of
# the Safe Browsing feature. Safe Browsing can be compiled at 3 different
# levels: 0 disables it, 1 enables it fully, and 2 enables mobile protection
# via an external API.
if (is_ios || is_chromecast) {
safe_browsing_mode = 0
} else if (is_android) {
safe_browsing_mode = 2
} else {
safe_browsing_mode = 1
}
# Set to true to make a build that disables activation of field trial tests
# specified in testing/variations/fieldtrial_testing_config_*.json.
# Note: this setting is ignored if is_chrome_branded.
fieldtrial_testing_like_official_build = is_chrome_branded
# libudev usage. This currently only affects the content layer.
use_udev = is_linux && !is_chromecast
use_dbus = is_linux && !is_chromecast
# Option controlling the use of GConf (the classic GNOME configuration
# system).
use_gconf = is_linux && !is_chromeos && !is_chromecast
use_gio = is_linux && !is_chromeos && !is_chromecast
}
#
# =============================================
# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
# =============================================
#
# See comment at the top.
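# Purely as an illustration, an args.gn sketch overriding a few of the flags
# declared above (the values are arbitrary examples, not recommendations):
#
#   proprietary_codecs = true
#   enable_nacl = false
#   fieldtrial_testing_like_official_build = true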


@ -0,0 +1,16 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/features.gni")
import("//build/config/freetype/freetype.gni")
group("freetype") {
if (use_system_freetype) {
public_configs = [ "//build/linux:freetype_from_pkgconfig" ]
} else {
public_deps = [
"//third_party/freetype",
]
}
}
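# Hypothetical sketch of how a target elsewhere in the tree might consume this
# abstraction (the target and source names are made up for illustration):
#
#   source_set("font_probe") {
#     sources = [ "font_probe.cc" ]
#     deps = [ "//build/config/freetype" ]
#   }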


@ -0,0 +1,2 @@
bungeman@chromium.org
drott@chromium.org


@ -0,0 +1,14 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
declare_args() {
# Blink needs a recent and properly build-configured FreeType version to
# support OpenType variations, color emoji and avoid security bugs. By default
# we ship and link such a version as part of Chrome. For distributions that
# prefer to keep linking to the system version, FreeType must be newer
# than version 2.7.1 and have color bitmap support compiled in. WARNING:
# System FreeType configurations other than as described WILL INTRODUCE TEXT
# RENDERING AND SECURITY REGRESSIONS.
use_system_freetype = false
}


@ -0,0 +1,40 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/fuchsia/config.gni")
import("//build/config/sysroot.gni")
assert(is_fuchsia)
assert(is_posix)
config("compiler") {
defines = [ "SYSROOT_VERSION=$sysroot_version" ]
cflags = []
ldflags = []
if (current_cpu == "arm64") {
cflags += [ "--target=aarch64-fuchsia" ]
ldflags += [ "--target=aarch64-fuchsia" ]
} else if (current_cpu == "x64") {
cflags += [ "--target=x86_64-fuchsia" ]
ldflags += [ "--target=x86_64-fuchsia" ]
} else {
assert(false, "Unsupported architecture")
}
asmflags = cflags
# TODO(thakis): Once Fuchsia's libclang_rt.builtin no longer has upstream
# patches, we might want to make tools/clang/scripts/update.py build it
# and bundle it with the clang package instead of using the library from
# the SDK, https://crbug.com/724204
ldflags += [
"-resource-dir",
rebase_path(fuchsia_sdk, root_build_dir) + "/toolchain_libs/clang/5.0.0",
]
libs = [
"mxio",
"magenta",
"unwind",
]
}


@ -0,0 +1 @@
scottmg@chromium.org


@ -0,0 +1,10 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
assert(current_os == "fuchsia")
declare_args() {
# Path to Fuchsia SDK.
fuchsia_sdk = "//third_party/fuchsia-sdk"
}


@ -0,0 +1,151 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/compiler/compiler.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/sysroot.gni")
import("//build/toolchain/toolchain.gni")
declare_args() {
# When non-empty, overrides the target rpath value. This allows a user to
# make a Chromium build where binaries and shared libraries are meant to be
# installed into separate directories, like /usr/bin/chromium and
# /usr/lib/chromium for instance. It is useful when a build system that
# generates a whole target root filesystem (like Yocto) is used on top of gn,
# especially when cross-compiling.
# Note: this gn arg is similar to gyp target_rpath generator flag.
gcc_target_rpath = ""
}
# This config causes functions not to be automatically exported from shared
# libraries. By default, all symbols are exported but this means there are
# lots of exports that slow everything down. In general we explicitly mark
# which functions we want to export from components.
#
# Some third_party code assumes all functions are exported so this is separated
# into its own config so such libraries can remove this config to make symbols
# public again.
#
# See http://gcc.gnu.org/wiki/Visibility
config("symbol_visibility_hidden") {
# Note that -fvisibility-inlines-hidden is set globally in the compiler
# config since that can almost always be applied.
cflags = [ "-fvisibility=hidden" ]
# Visibility attribute is not supported on AIX.
if (current_os != "aix") {
cflags_cc = [
# Not exporting C++ inline functions can generally be applied anywhere
# so we do so here. Normal function visibility is controlled by
# //build/config/gcc:symbol_visibility_hidden.
"-fvisibility-inlines-hidden",
]
}
}
# This config is usually set when :symbol_visibility_hidden is removed.
# It's often a good idea to set visibility explicitly, as there are flags
# that would otherwise error out (e.g. -fsanitize=cfi-unrelated-cast).
config("symbol_visibility_default") {
cflags = [ "-fvisibility=default" ]
}
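# As noted above, third_party code that relies on default symbol visibility
# can swap the configs; a hypothetical sketch of what such a target would do:
#
#   configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
#   configs += [ "//build/config/gcc:symbol_visibility_default" ]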
# The rpath is the dynamic library search path. Setting this config on a link
# step will put the directory where the build generates shared libraries into
# the rpath.
#
# It's important that this *not* be used for release builds we push out.
# Chrome uses some setuid binaries, and hard links preserve setuid bits. An
# unprivileged user could gain root privileges by hardlinking a setuid
# executable and then adding in whatever binaries they want to run into the lib
# directory.
#
# Example bug: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=520126
#
# This is required for component builds since the build generates many shared
# libraries in the build directory that we expect to be automatically loaded.
# It will be automatically applied in this case by :executable_ldconfig.
#
# In non-component builds, certain test binaries may expect to load dynamic
# libraries from the current directory. As long as these aren't distributed,
# this is OK. For these cases use something like this:
#
# if (is_linux && !is_component_build) {
# configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
# }
config("rpath_for_built_shared_libraries") {
if (!is_android) {
# Note: Android doesn't support rpath.
if (shlib_subdir != ".") {
rpath_link = "${shlib_subdir}/"
} else {
rpath_link = "."
}
if (current_toolchain != default_toolchain || gcc_target_rpath == "") {
ldflags = [
# Want to pass "\$". GN will re-escape as required for ninja.
"-Wl,-rpath=\$ORIGIN/${rpath_link}",
"-Wl,-rpath-link=${rpath_link}",
]
} else {
ldflags = [
"-Wl,-rpath=${gcc_target_rpath}",
"-Wl,-rpath-link=${rpath_link}",
]
}
}
}
# Settings for executables.
config("executable_ldconfig") {
# WARNING! //sandbox/linux:chrome_sandbox will not pick up this
# config, because it is a setuid binary that needs special flags.
# If you add things to this config, make sure you check to see
# if they should be added to that target as well.
ldflags = []
if (is_android) {
ldflags += [
"-Bdynamic",
"-Wl,-z,nocopyreloc",
]
} else {
# See the rpath_for... config above for why this is necessary for component
# builds. Sanitizers use a custom libc++ where this is also necessary.
if (is_component_build || using_sanitizer) {
configs = [ ":rpath_for_built_shared_libraries" ]
}
if (current_cpu == "mipsel") {
ldflags += [ "-pie" ]
}
}
if ((!is_android || !use_gold) && current_os != "aix") {
# Find the path containing shared libraries for this toolchain
# relative to the build directory. ${root_out_dir} will be a
# subdirectory of ${root_build_dir} when cross compiling.
_rpath_link = rebase_path(root_out_dir, root_build_dir)
if (shlib_subdir != ".") {
_rpath_link += "/$shlib_subdir"
}
if (is_android) {
_rebased_sysroot = rebase_path(sysroot, root_build_dir)
_rpath_link += ":$_rebased_sysroot/usr/lib"
}
ldflags += [
"-Wl,-rpath-link=$_rpath_link",
# TODO(GYP): Do we need a check on the binutils version here?
#
# Newer binutils don't set DT_RPATH unless you disable "new" dtags
# and the new DT_RUNPATH doesn't work without --no-as-needed flag.
"-Wl,--disable-new-dtags",
]
}
}
config("no_exceptions") {
cflags_cc = [ "-fno-exceptions" ]
cflags_objcc = cflags_cc
}


@ -0,0 +1,11 @@
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Get Byteorder of host architecture"""
import sys
print sys.byteorder


@ -0,0 +1,27 @@
# Copyright (c) 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This header file defines the "host_byteorder" variable.
# Note that this is currently used only for building v8.
# The chromium code generally assumes little-endianness.
declare_args() {
host_byteorder = "undefined"
}
# Detect host byteorder
# ppc64 can be either BE or LE
if (host_cpu == "ppc64") {
if (current_os == "aix") {
host_byteorder = "big"
} else {
# Only use the script when absolutely necessary
host_byteorder =
exec_script("//build/config/get_host_byteorder.py", [], "trim string")
}
} else if (host_cpu == "ppc" || host_cpu == "s390" || host_cpu == "s390x" ||
host_cpu == "mips" || host_cpu == "mips64") {
host_byteorder = "big"
} else {
host_byteorder = "little"
}
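# Hypothetical sketch of branching on the detected byte order from a BUILD.gn
# file; the import path and define name are assumptions for illustration only:
#
#   import("//build/config/host_byteorder.gni")
#   config("endian_defines") {
#     if (host_byteorder == "big") {
#       defines = [ "HOST_BIG_ENDIAN" ]
#     }
#   }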


@ -0,0 +1,155 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/ios/ios_sdk.gni")
import("//build/config/sysroot.gni")
import("//build/toolchain/toolchain.gni")
declare_args() {
# Enabling this option makes clang compile to an intermediate
# representation ("bitcode"), and not to native code. This is preferred
# when including WebRTC in the apps that will be sent to Apple's App Store
# and mandatory for the apps that run on watchOS or tvOS.
# The option only works when building with Xcode (use_xcode_clang = true).
# Mimicking how Xcode handles it, the production builds (is_debug = false)
# get real bitcode sections added, while the debug builds (is_debug = true)
# only get bitcode-section "markers" added in them.
# NOTE: This option is ignored when building versions for the iOS simulator,
# where a part of libvpx is compiled from the assembly code written using
# Intel assembly syntax; Yasm / Nasm do not support emitting bitcode parts.
# That is not a limitation for now as Xcode mandates the presence of bitcode
# only when building bitcode-enabled projects for real devices (ARM CPUs).
enable_ios_bitcode = false
}
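# A hypothetical args.gn sketch for a bitcode-enabled device build, following
# the constraints described above (Xcode clang, non-simulator target):
#
#   target_os = "ios"
#   target_cpu = "arm64"
#   is_debug = false
#   use_xcode_clang = true
#   enable_ios_bitcode = true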
# This is included by reference in the //build/config/compiler config that
# is applied to all targets. It is here to separate out the logic.
config("compiler") {
# These flags are shared between the C compiler and linker.
common_ios_flags = []
# CPU architecture.
if (current_cpu == "x64") {
common_ios_flags += [
"-arch",
"x86_64",
]
} else if (current_cpu == "x86") {
common_ios_flags += [
"-arch",
"i386",
]
} else if (current_cpu == "armv7" || current_cpu == "arm") {
common_ios_flags += [
"-arch",
"armv7",
]
} else if (current_cpu == "arm64") {
common_ios_flags += [
"-arch",
"arm64",
]
}
# This is here so that all files get recompiled after an Xcode update.
# (defines are passed via the command line, and build systems rebuild things
# when their command line changes). Nothing should ever read this define.
defines = [ "CR_XCODE_VERSION=$xcode_version" ]
asmflags = common_ios_flags
cflags = common_ios_flags
# Without this, the constructors and destructors of a C++ object inside
# an Objective C struct won't be called, which is very bad.
cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
cflags_c = [ "-std=c99" ]
cflags_objc = cflags_c
ldflags = common_ios_flags
}
# This is included by reference in the //build/config/compiler:runtime_library
# config that is applied to all targets. It is here to separate out the logic
# that is iOS-only. Please see that target for advice on what should go in
# :runtime_library vs. :compiler.
config("runtime_library") {
common_flags = [
"-isysroot",
sysroot,
"-stdlib=libc++",
]
if (use_ios_simulator) {
common_flags += [ "-mios-simulator-version-min=$ios_deployment_target" ]
} else {
common_flags += [ "-miphoneos-version-min=$ios_deployment_target" ]
}
if (use_xcode_clang && enable_ios_bitcode && !use_ios_simulator) {
if (is_debug) {
common_flags += [ "-fembed-bitcode-marker" ]
} else {
common_flags += [ "-fembed-bitcode" ]
}
}
asmflags = common_flags
cflags = common_flags
ldflags = common_flags
# TODO(crbug.com/634373): Remove once Xcode's libc++ has LLVM r256325. Most
# likely this means once Xcode 8 is released and required.
if (use_xcode_clang && get_path_info(ios_sdk_version, "name") != "10") {
common_cc_flags = [
"-isystem",
rebase_path("//third_party/llvm-build/Release+Asserts/include/c++/v1",
root_build_dir),
]
cflags_cc = common_cc_flags
cflags_objcc = common_cc_flags
}
if (ios_enable_coverage) {
configs = [ ":enable_coverage" ]
}
}
config("ios_executable_flags") {
}
config("ios_dynamic_flags") {
ldflags = [ "-Wl,-ObjC" ] # Always load Objective-C categories and class.
}
config("xctest_config") {
common_flags = [
"-F",
"$ios_sdk_platform_path/Developer/Library/Frameworks",
]
cflags = common_flags
ldflags = common_flags
libs = [
"Foundation.framework",
"XCTest.framework",
]
}
# This enables support for LLVM code coverage. See
# http://llvm.org/docs/CoverageMappingFormat.html.
config("enable_coverage") {
cflags = [
"-fprofile-instr-generate",
"-fcoverage-mapping",
]
ldflags = [ "-fprofile-instr-generate" ]
}
group("xctest") {
public_configs = [ ":xctest_config" ]
}


@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>BuildMachineOSBuild</key>
<string>${BUILD_MACHINE_OS_BUILD}</string>
<key>CFBundleSupportedPlatforms</key>
<array>
<string>${IOS_SUPPORTED_PLATFORM}</string>
</array>
<key>DTCompiler</key>
<string>${GCC_VERSION}</string>
<key>DTPlatformName</key>
<string>${IOS_PLATFORM_NAME}</string>
<key>DTPlatformVersion</key>
<string>${IOS_PLATFORM_VERSION}</string>
<key>DTPlatformBuild</key>
<string>${IOS_PLATFORM_BUILD}</string>
<key>DTSDKBuild</key>
<string>${IOS_SDK_BUILD}</string>
<key>DTSDKName</key>
<string>${IOS_SDK_NAME}</string>
<key>MinimumOSVersion</key>
<string>${IOS_DEPLOYMENT_TARGET}</string>
<key>DTXcode</key>
<string>${XCODE_VERSION}</string>
<key>DTXcodeBuild</key>
<string>${XCODE_BUILD}</string>
<key>UIDeviceFamily</key>
<array>
<integer>1</integer>
<integer>2</integer>
</array>
</dict>
</plist>


@ -0,0 +1,126 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleDisplayName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UILaunchImages</key>
<array>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>7.0</string>
<key>UILaunchImageName</key>
<string>Default</string>
<key>UILaunchImageOrientation</key>
<string>Portrait</string>
<key>UILaunchImageSize</key>
<string>{320, 480}</string>
</dict>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>7.0</string>
<key>UILaunchImageName</key>
<string>Default</string>
<key>UILaunchImageOrientation</key>
<string>Portrait</string>
<key>UILaunchImageSize</key>
<string>{320, 568}</string>
</dict>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>8.0</string>
<key>UILaunchImageName</key>
<string>Default</string>
<key>UILaunchImageOrientation</key>
<string>Portrait</string>
<key>UILaunchImageSize</key>
<string>{375, 667}</string>
</dict>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>8.0</string>
<key>UILaunchImageName</key>
<string>Default</string>
<key>UILaunchImageOrientation</key>
<string>Portrait</string>
<key>UILaunchImageSize</key>
<string>{414, 736}</string>
</dict>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>8.0</string>
<key>UILaunchImageName</key>
<string>Default</string>
<key>UILaunchImageOrientation</key>
<string>Landscape</string>
<key>UILaunchImageSize</key>
<string>{414, 736}</string>
</dict>
</array>
<key>UILaunchImages~ipad</key>
<array>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>7.0</string>
<key>UILaunchImageName</key>
<string>Default</string>
<key>UILaunchImageOrientation</key>
<string>Portrait</string>
<key>UILaunchImageSize</key>
<string>{768, 1024}</string>
</dict>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>7.0</string>
<key>UILaunchImageName</key>
<string>Default</string>
<key>UILaunchImageOrientation</key>
<string>Landscape</string>
<key>UILaunchImageSize</key>
<string>{768, 1024}</string>
</dict>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
</dict>
</plist>


@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>${MODULE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier}.${MODULE_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>BNDL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
</dict>
</plist>


@ -0,0 +1 @@
file://build/config/mac/OWNERS


@ -0,0 +1,437 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import datetime
import fnmatch
import glob
import os
import plistlib
import shutil
import subprocess
import sys
import tempfile
def GetProvisioningProfilesDir():
"""Returns the location of the installed mobile provisioning profiles.
Returns:
The path to the directory containing the installed mobile provisioning
profiles as a string.
"""
return os.path.join(
os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
def LoadPlistFile(plist_path):
"""Loads property list file at |plist_path|.
Args:
plist_path: path to the property list file to load.
Returns:
The content of the property list file as a python object.
"""
return plistlib.readPlistFromString(subprocess.check_output([
'xcrun', 'plutil', '-convert', 'xml1', '-o', '-', plist_path]))
class Bundle(object):
"""Wraps a bundle."""
def __init__(self, bundle_path):
"""Initializes the Bundle object with data from bundle Info.plist file."""
self._path = bundle_path
self._data = LoadPlistFile(os.path.join(self._path, 'Info.plist'))
@property
def path(self):
return self._path
@property
def identifier(self):
return self._data['CFBundleIdentifier']
@property
def binary_path(self):
return os.path.join(self._path, self._data['CFBundleExecutable'])
class ProvisioningProfile(object):
"""Wraps a mobile provisioning profile file."""
def __init__(self, provisioning_profile_path):
"""Initializes the ProvisioningProfile with data from profile file."""
self._path = provisioning_profile_path
self._data = plistlib.readPlistFromString(subprocess.check_output([
'xcrun', 'security', 'cms', '-D', '-u', 'certUsageAnyCA',
'-i', provisioning_profile_path]))
@property
def path(self):
return self._path
@property
def application_identifier_pattern(self):
return self._data.get('Entitlements', {}).get('application-identifier', '')
@property
def team_identifier(self):
return self._data.get('TeamIdentifier', [''])[0]
@property
def entitlements(self):
return self._data.get('Entitlements', {})
@property
def expiration_date(self):
return self._data.get('ExpirationDate', datetime.datetime.now())
def ValidToSignBundle(self, bundle_identifier):
"""Checks whether the provisioning profile can sign bundle_identifier.
Args:
bundle_identifier: the identifier of the bundle that needs to be signed.
Returns:
True if the mobile provisioning profile can be used to sign a bundle
with the corresponding bundle_identifier, False otherwise.
"""
return fnmatch.fnmatch(
'%s.%s' % (self.team_identifier, bundle_identifier),
self.application_identifier_pattern)
def Install(self, installation_path):
"""Copies mobile provisioning profile info to |installation_path|."""
shutil.copy2(self.path, installation_path)
class Entitlements(object):
"""Wraps an Entitlement plist file."""
def __init__(self, entitlements_path):
"""Initializes Entitlements object from entitlement file."""
self._path = entitlements_path
self._data = LoadPlistFile(self._path)
@property
def path(self):
return self._path
def ExpandVariables(self, substitutions):
self._data = self._ExpandVariables(self._data, substitutions)
def _ExpandVariables(self, data, substitutions):
if isinstance(data, str):
for key, substitution in substitutions.iteritems():
data = data.replace('$(%s)' % (key,), substitution)
return data
if isinstance(data, dict):
for key, value in data.iteritems():
data[key] = self._ExpandVariables(value, substitutions)
return data
if isinstance(data, list):
for i, value in enumerate(data):
data[i] = self._ExpandVariables(value, substitutions)
return data
def LoadDefaults(self, defaults):
for key, value in defaults.iteritems():
if key not in self._data:
self._data[key] = value
def WriteTo(self, target_path):
plistlib.writePlist(self._data, target_path)
def FindProvisioningProfile(bundle_identifier, required):
"""Finds mobile provisioning profile to use to sign bundle.
Args:
bundle_identifier: the identifier of the bundle to sign.
Returns:
The ProvisioningProfile object that can be used to sign the Bundle
object or None if no matching provisioning profile was found.
"""
provisioning_profile_paths = glob.glob(
os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision'))
# Iterate over all installed mobile provisioning profiles and filter those
# that can be used to sign the bundle, ignoring expired ones.
now = datetime.datetime.now()
valid_provisioning_profiles = []
one_hour = datetime.timedelta(0, 3600)
for provisioning_profile_path in provisioning_profile_paths:
provisioning_profile = ProvisioningProfile(provisioning_profile_path)
if provisioning_profile.expiration_date - now < one_hour:
sys.stderr.write(
'Warning: ignoring expired provisioning profile: %s.\n' %
provisioning_profile_path)
continue
if provisioning_profile.ValidToSignBundle(bundle_identifier):
valid_provisioning_profiles.append(provisioning_profile)
if not valid_provisioning_profiles:
if required:
sys.stderr.write(
'Error: no mobile provisioning profile found for "%s".\n' %
bundle_identifier)
sys.exit(1)
return None
# Select the most specific mobile provisioning profile, i.e. the one with
# the longest application identifier pattern (prefer the one with the latest
# expiration date as a secondary criteria).
selected_provisioning_profile = max(
valid_provisioning_profiles,
key=lambda p: (len(p.application_identifier_pattern), p.expiration_date))
one_week = datetime.timedelta(7)
if selected_provisioning_profile.expiration_date - now < 2 * one_week:
sys.stderr.write(
'Warning: selected provisioning profile will expire soon: %s' %
selected_provisioning_profile.path)
return selected_provisioning_profile
def CodeSignBundle(bundle_path, identity, extra_args):
process = subprocess.Popen(['xcrun', 'codesign', '--force', '--sign',
identity, '--timestamp=none'] + list(extra_args) + [bundle_path],
stderr=subprocess.PIPE)
_, stderr = process.communicate()
if process.returncode:
sys.stderr.write(stderr)
sys.exit(process.returncode)
for line in stderr.splitlines():
if line.endswith(': replacing existing signature'):
# Ignore warning about replacing existing signature as this should only
# happen when re-signing system frameworks (and then it is expected).
continue
sys.stderr.write(line)
sys.stderr.write('\n')
def InstallSystemFramework(framework_path, bundle_path, args):
"""Install framework from |framework_path| to |bundle| and code-re-sign it."""
installed_framework_path = os.path.join(
bundle_path, 'Frameworks', os.path.basename(framework_path))
if os.path.exists(installed_framework_path):
shutil.rmtree(installed_framework_path)
shutil.copytree(framework_path, installed_framework_path)
CodeSignBundle(installed_framework_path, args.identity,
['--deep', '--preserve-metadata=identifier,entitlements'])
def GenerateEntitlements(path, provisioning_profile, bundle_identifier):
"""Generates an entitlements file.
Args:
path: path to the entitlements template file
provisioning_profile: ProvisioningProfile object to use, may be None
bundle_identifier: identifier of the bundle to sign.
"""
entitlements = Entitlements(path)
if provisioning_profile:
entitlements.LoadDefaults(provisioning_profile.entitlements)
app_identifier_prefix = provisioning_profile.team_identifier + '.'
else:
app_identifier_prefix = '*.'
entitlements.ExpandVariables({
'CFBundleIdentifier': bundle_identifier,
'AppIdentifierPrefix': app_identifier_prefix,
})
return entitlements
class Action(object):
"""Class implementing one action supported by the script."""
@classmethod
def Register(cls, subparsers):
parser = subparsers.add_parser(cls.name, help=cls.help)
parser.set_defaults(func=cls._Execute)
cls._Register(parser)
class CodeSignBundleAction(Action):
"""Class implementing the code-sign-bundle action."""
name = 'code-sign-bundle'
help = 'perform code signature for a bundle'
@staticmethod
def _Register(parser):
parser.add_argument(
'--entitlements', '-e', dest='entitlements_path',
help='path to the entitlements file to use')
parser.add_argument(
'path', help='path to the iOS bundle to codesign')
parser.add_argument(
'--identity', '-i', required=True,
help='identity to use to codesign')
parser.add_argument(
'--binary', '-b', required=True,
help='path to the iOS bundle binary')
parser.add_argument(
'--framework', '-F', action='append', default=[], dest='frameworks',
help='install and resign system framework')
parser.add_argument(
'--disable-code-signature', action='store_true', dest='no_signature',
help='disable code signature')
parser.add_argument(
'--platform', '-t', required=True,
help='platform the signed bundle is targeting')
parser.set_defaults(no_signature=False)
@staticmethod
def _Execute(args):
if not args.identity:
args.identity = '-'
bundle = Bundle(args.path)
# Delete existing embedded mobile provisioning.
embedded_provisioning_profile = os.path.join(
bundle.path, 'embedded.mobileprovision')
if os.path.isfile(embedded_provisioning_profile):
os.unlink(embedded_provisioning_profile)
# Delete existing code signature.
signature_file = os.path.join(args.path, '_CodeSignature', 'CodeResources')
if os.path.isfile(signature_file):
shutil.rmtree(os.path.dirname(signature_file))
# Install system frameworks if requested.
for framework_path in args.frameworks:
InstallSystemFramework(framework_path, args.path, args)
# Copy main binary into bundle.
if os.path.isfile(bundle.binary_path):
os.unlink(bundle.binary_path)
shutil.copy(args.binary, bundle.binary_path)
if args.no_signature:
return
codesign_extra_args = []
# Find the mobile provisioning profile and embed it into the bundle (if a
# code signing identity has been provided, fail if no valid mobile
# provisioning profile is found).
provisioning_profile_required = args.identity != '-'
provisioning_profile = FindProvisioningProfile(
bundle.identifier, provisioning_profile_required)
if provisioning_profile and args.platform != 'iphonesimulator':
provisioning_profile.Install(embedded_provisioning_profile)
temporary_entitlements_file = tempfile.NamedTemporaryFile(suffix='.xcent')
codesign_extra_args.extend(
['--entitlements', temporary_entitlements_file.name])
entitlements = GenerateEntitlements(
args.entitlements_path, provisioning_profile, bundle.identifier)
entitlements.WriteTo(temporary_entitlements_file.name)
CodeSignBundle(bundle.path, args.identity, codesign_extra_args)
class CodeSignFileAction(Action):
"""Class implementing code signature for a single file."""
name = 'code-sign-file'
help = 'code-sign a single file'
@staticmethod
def _Register(parser):
parser.add_argument(
'path', help='path to the file to codesign')
parser.add_argument(
'--identity', '-i', required=True,
help='identity to use to codesign')
parser.add_argument(
'--output', '-o',
help='if specified copy the file to that location before signing it')
parser.set_defaults(sign=True)
@staticmethod
def _Execute(args):
if not args.identity:
args.identity = '-'
install_path = args.path
if args.output:
if os.path.isfile(args.output):
os.unlink(args.output)
elif os.path.isdir(args.output):
shutil.rmtree(args.output)
if os.path.isfile(args.path):
shutil.copy(args.path, args.output)
elif os.path.isdir(args.path):
shutil.copytree(args.path, args.output)
install_path = args.output
CodeSignBundle(install_path, args.identity,
['--deep', '--preserve-metadata=identifier,entitlements'])
class GenerateEntitlementsAction(Action):
"""Class implementing the generate-entitlements action."""
name = 'generate-entitlements'
help = 'generate entitlements file'
@staticmethod
def _Register(parser):
parser.add_argument(
'--entitlements', '-e', dest='entitlements_path',
help='path to the entitlements file to use')
parser.add_argument(
'path', help='path to the entitlements file to generate')
parser.add_argument(
'--info-plist', '-p', required=True,
help='path to the bundle Info.plist')
@staticmethod
def _Execute(args):
info_plist = LoadPlistFile(args.info_plist)
bundle_identifier = info_plist['CFBundleIdentifier']
provisioning_profile = FindProvisioningProfile(bundle_identifier, False)
entitlements = GenerateEntitlements(
args.entitlements_path, provisioning_profile, bundle_identifier)
entitlements.WriteTo(args.path)
def Main():
parser = argparse.ArgumentParser('codesign iOS bundles')
parser.add_argument('--developer_dir', required=False,
help='Path to Xcode.')
subparsers = parser.add_subparsers()
actions = [
CodeSignBundleAction,
CodeSignFileAction,
GenerateEntitlementsAction,
]
for action in actions:
action.Register(subparsers)
args = parser.parse_args()
if args.developer_dir:
os.environ['DEVELOPER_DIR'] = args.developer_dir
args.func(args)
if __name__ == '__main__':
sys.exit(Main())


@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>application-identifier</key>
<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
<key>keychain-access-groups</key>
<array>
<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
</array>
</dict>
</plist>


@ -0,0 +1,47 @@
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import subprocess
import sys
import re
def ListIdentities():
return subprocess.check_output([
'xcrun',
'security',
'find-identity',
'-v',
'-p',
'codesigning',
])
def FindValidIdentity(identity_description):
lines = list(map(str.strip, ListIdentities().splitlines()))
# Look for something like "2) XYZ "iPhone Developer: Name (ABC)""
exp = re.compile(r'[0-9]+\) ([A-F0-9]+) "([^"]*)"')
for line in lines:
res = exp.match(line)
if res is None:
continue
if identity_description in res.group(2):
yield res.group(1)
if __name__ == '__main__':
parser = argparse.ArgumentParser('codesign iOS bundles')
parser.add_argument(
'--developer_dir', required=False,
help='Path to Xcode.')
parser.add_argument(
'--identity-description', required=True,
help='Text description used to select the code signing identity.')
args = parser.parse_args()
if args.developer_dir:
os.environ['DEVELOPER_DIR'] = args.developer_dir
for identity in FindValidIdentity(args.identity_description):
print identity


@ -0,0 +1,168 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/toolchain/toolchain.gni")
declare_args() {
# SDK path to use. When empty this will use the default SDK based on the
# value of use_ios_simulator.
ios_sdk_path = ""
ios_sdk_name = ""
ios_sdk_version = ""
ios_sdk_platform = ""
ios_sdk_platform_path = ""
xcode_version = ""
xcode_build = ""
machine_os_build = ""
# Version of iOS that we're targeting.
ios_deployment_target = "9.0"
# The iOS Code signing identity to use
# TODO(GYP), TODO(sdfresne): Consider having a separate
# ios_enable_code_signing_flag=<bool> flag to make the invocation clearer.
ios_enable_code_signing = true
ios_code_signing_identity = ""
ios_code_signing_identity_description = "iPhone Developer"
# Prefix for CFBundleIdentifier property of iOS bundles (correspond to the
# "Organization Identifier" in Xcode). Code signing will fail if no mobile
# provisioning profile for the selected code signing identity supports that prefix.
ios_app_bundle_id_prefix = "org.chromium"
# If true, then allow using Xcode to automatically manage certificates. This
# requires loading a separate Xcode project and enabling automatically managed
# certificates. When true, all test applications will use the same bundle id
# to avoid running out of certificates if using a free account.
ios_automatically_manage_certs = true
# Enabling this option makes clang compile for profiling to gather code
# coverage metrics.
ios_enable_coverage = false
# If non-empty, this list must contain valid cpu architectures, and the final
# build will be a multi-architecture build (aka fat build) supporting the
# main $target_cpu architecture and all of $additional_target_cpus.
#
# For example to build an application that will run on both arm64 and armv7
# devices, you would use the following in args.gn file when running "gn args":
#
# target_os = "ios"
# target_cpu = "arm64"
# additional_target_cpus = [ "arm" ]
#
# You can also pass the value via "--args" parameter for "gn gen" command by
# using the syntax --args='additional_target_cpus=["arm"] target_cpu="arm64"'.
additional_target_cpus = []
}
assert(custom_toolchain == "" || additional_target_cpus == [],
"cannot define both custom_toolchain and additional_target_cpus")
use_ios_simulator = current_cpu == "x86" || current_cpu == "x64"
ios_generic_test_bundle_id_suffix = "generic-unit-test"
# Initialize additional_toolchains from additional_target_cpus. Assert here
# that the list does not contain $target_cpu nor duplicates, as this would
# cause weird errors during the build.
additional_toolchains = []
if (additional_target_cpus != []) {
foreach(_additional_target_cpu, additional_target_cpus) {
assert(_additional_target_cpu != target_cpu,
"target_cpu must not be listed in additional_target_cpus")
_toolchain = "//build/toolchain/mac:ios_clang_$_additional_target_cpu"
foreach(_additional_toolchain, additional_toolchains) {
assert(_toolchain != _additional_toolchain,
"additional_target_cpus must not contains duplicate values")
}
additional_toolchains += [ _toolchain ]
}
}
if (ios_sdk_path == "") {
# Compute default target.
if (use_ios_simulator) {
ios_sdk_name = "iphonesimulator"
ios_sdk_platform = "iPhoneSimulator"
} else {
ios_sdk_name = "iphoneos"
ios_sdk_platform = "iPhoneOS"
}
ios_sdk_info_args = []
if (!use_system_xcode) {
ios_sdk_info_args += [
"--developer_dir",
hermetic_xcode_path,
]
}
ios_sdk_info_args += [ ios_sdk_name ]
script_name = "//build/config/mac/sdk_info.py"
_ios_sdk_result = exec_script(script_name, ios_sdk_info_args, "scope")
ios_sdk_path = _ios_sdk_result.sdk_path
ios_sdk_version = _ios_sdk_result.sdk_version
ios_sdk_platform_path = _ios_sdk_result.sdk_platform_path
ios_sdk_build = _ios_sdk_result.sdk_build
xcode_version = _ios_sdk_result.xcode_version
xcode_build = _ios_sdk_result.xcode_build
machine_os_build = _ios_sdk_result.machine_os_build
if (use_ios_simulator) {
# This is weird, but Xcode sets DTPlatformBuild to an empty field for
# simulator builds.
ios_platform_build = ""
} else {
ios_platform_build = ios_sdk_build
}
}
if (ios_enable_code_signing && !use_ios_simulator) {
find_signing_identity_args = [
"--identity-description",
ios_code_signing_identity_description,
]
if (!use_system_xcode) {
find_signing_identity_args += [
"--developer_dir",
hermetic_xcode_path,
]
}
# If an identity is not provided, look for one on the host
if (ios_code_signing_identity == "") {
_ios_identities = exec_script("find_signing_identity.py",
find_signing_identity_args,
"list lines")
if (_ios_identities == []) {
print("Tried to prepare a device build without specifying a code signing")
print("identity and could not detect one automatically either.")
print("TIP: Simulator builds don't require code signing...")
assert(false)
} else {
_ios_identities_len = 0
foreach(_, _ios_identities) {
_ios_identities_len += 1
}
ios_code_signing_identity = _ios_identities[0]
if (_ios_identities_len != 1) {
print("Warning: Multiple codesigning identities match " +
"\"$ios_code_signing_identity_description\"")
foreach(_ios_identity, _ios_identities) {
_selected = ""
if (ios_code_signing_identity == _ios_identity) {
_selected = " (selected)"
}
print("Warning: - $_ios_identity$_selected")
}
print("Warning: Please use either ios_code_signing_identity or ")
print("Warning: ios_code_signing_identity_description variable to ")
print("Warning: control which identity is selected.")
print()
}
}
}
}
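# A hypothetical args.gn sketch for a signed device build using the arguments
# declared above (the bundle id prefix is a placeholder):
#
#   target_os = "ios"
#   target_cpu = "arm64"
#   ios_enable_code_signing = true
#   ios_code_signing_identity_description = "iPhone Developer"
#   ios_app_bundle_id_prefix = "org.example"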

(The diff for this file is not shown because of its large size.)


@ -0,0 +1,97 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import struct
import sys
def Main(args):
if len(args) < 4:
print >> sys.stderr, "Usage: %s output.hmap Foo.framework header1.h..." %\
(args[0])
return 1
(out, framework, all_headers) = args[1], args[2], args[3:]
framework_name = os.path.basename(framework).split('.')[0]
all_headers = map(os.path.abspath, all_headers)
filelist = {}
for header in all_headers:
filename = os.path.basename(header)
filelist[filename] = header
filelist[os.path.join(framework_name, filename)] = header
WriteHmap(out, filelist)
return 0
def NextGreaterPowerOf2(x):
return 2**(x).bit_length()
def WriteHmap(output_name, filelist):
"""Generates a header map based on |filelist|.
Per Mark Mentovai:
A header map is structured essentially as a hash table, keyed by names used
in #includes, and providing pathnames to the actual files.
The implementation below and the comment above come from inspecting:
http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
while also looking at the implementation in clang in:
https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
"""
magic = 1751998832
version = 1
_reserved = 0
count = len(filelist)
capacity = NextGreaterPowerOf2(count)
strings_offset = 24 + (12 * capacity)
max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
out = open(output_name, 'wb')
out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
count, capacity, max_value_length))
# Create empty hashmap buckets.
buckets = [None] * capacity
for file, path in filelist.items():
key = 0
for c in file:
key += ord(c.lower()) * 13
# Fill next empty bucket.
while buckets[key & capacity - 1] is not None:
key = key + 1
buckets[key & capacity - 1] = (file, path)
next_offset = 1
for bucket in buckets:
if bucket is None:
out.write(struct.pack('<LLL', 0, 0, 0))
else:
(file, path) = bucket
key_offset = next_offset
prefix_offset = key_offset + len(file) + 1
suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
next_offset = suffix_offset + len(os.path.basename(path)) + 1
out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
# Pad byte since next offset starts at 1.
out.write(struct.pack('<x'))
for bucket in buckets:
if bucket is not None:
(file, path) = bucket
out.write(struct.pack('<%ds' % len(file), file))
out.write(struct.pack('<s', '\0'))
base = os.path.dirname(path) + os.sep
out.write(struct.pack('<%ds' % len(base), base))
out.write(struct.pack('<s', '\0'))
path = os.path.basename(path)
out.write(struct.pack('<%ds' % len(path), path))
out.write(struct.pack('<s', '\0'))
if __name__ == '__main__':
sys.exit(Main(sys.argv))


@ -0,0 +1,26 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
def Main(framework):
# Find the name of the binary based on the part before the ".framework".
binary = os.path.basename(framework).split('.')[0]
module_path = os.path.join(framework, 'Modules')
if not os.path.exists(module_path):
os.mkdir(module_path)
module_template = 'framework module %s {\n' \
' umbrella header "%s.h"\n' \
'\n' \
' export *\n' \
' module * { export * }\n' \
'}\n' % (binary, binary)
module_file = open(os.path.join(module_path, 'module.modulemap'), 'w')
module_file.write(module_template)
module_file.close()
if __name__ == '__main__':
Main(sys.argv[1])


@ -0,0 +1,19 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import <UIKit/UIKit.h>
#import <XCTest/XCTest.h>
// For Chrome on iOS we want to run EarlGrey tests (that are XCTests) for all
// our build configurations (Debug, Release, ...). In addition, the symbols
// visibility is configured to private by default. To simplify testing with
// those constraints, our tests are compiled in the TEST_HOST target instead
// of the .xctest bundles, which all link against this single test (it is
// just there to ensure that the bundle is not empty).
@interface XCTestShellEmptyClass : NSObject
@end
@implementation XCTestShellEmptyClass
@end


@ -0,0 +1,95 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/linux/pkg_config.gni")
import("//build/config/ui.gni")
group("linux") {
visibility = [ "//:optimize_gn_gen" ]
}
# This is included by reference in the //build/config/compiler config that
# is applied to all targets. It is here to separate out the logic that is
# Linux-only. This is not applied to Android, but is applied to ChromeOS.
config("compiler") {
}
# This is included by reference in the //build/config/compiler:runtime_library
# config that is applied to all targets. It is here to separate out the logic
# that is Linux-only. Please see that target for advice on what should go in
# :runtime_library vs. :compiler.
config("runtime_library") {
# Set here because OS_CHROMEOS cannot be autodetected in build_config.h like
# OS_LINUX and the like.
if (is_chromeos) {
defines = [ "OS_CHROMEOS" ]
}
}
config("x11") {
libs = [
"X11",
"X11-xcb",
"xcb",
"Xcomposite",
"Xcursor",
"Xdamage",
"Xext",
"Xfixes",
"Xi",
"Xrender",
"Xtst",
]
}
config("xcomposite") {
libs = [ "Xcomposite" ]
}
config("xext") {
libs = [ "Xext" ]
}
config("xrandr") {
libs = [ "Xrandr" ]
}
config("xscrnsaver") {
libs = [ "Xss" ]
}
config("xfixes") {
libs = [ "Xfixes" ]
}
config("libcap") {
libs = [ "cap" ]
}
config("xi") {
libs = [ "Xi" ]
}
config("xtst") {
libs = [ "Xtst" ]
}
config("libresolv") {
libs = [ "resolv" ]
}
if (use_glib) {
pkg_config("glib") {
packages = [
"glib-2.0",
"gmodule-2.0",
"gobject-2.0",
"gthread-2.0",
]
defines = [
"GLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_32",
"GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_26",
]
}
}
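# Hypothetical sketch of a target opting into the X11 configuration defined
# above (the target and source names are made up for illustration):
#
#   source_set("x11_probe") {
#     sources = [ "x11_probe.cc" ]
#     configs += [ "//build/config/linux:x11" ]
#   }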


@ -0,0 +1,47 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/features.gni")
import("//build/config/linux/pkg_config.gni")
import("//build/config/ui.gni")
# CrOS doesn't install GTK, gconf or any gnome packages.
assert(!is_chromeos)
# These packages should _only_ be expected when building for a target.
# If these extra checks are not run, gconf is required when building host
# tools for a CrOS build.
assert(current_toolchain == default_toolchain)
if (use_atk) {
assert(use_glib, "use_atk=true requires that use_glib=true")
}
pkg_config("atk_base") {
packages = [ "atk" ]
atk_lib_dir = exec_script(pkg_config_script,
pkg_config_args + [
"--libdir",
"atk",
],
"string")
defines = [ "ATK_LIB_DIR=\"$atk_lib_dir\"" ]
}
# gn orders flags on a target before flags from configs. The default config
# adds -Wall, and these flags have to be after -Wall -- so they need to
# come from a config and can't be on the target directly.
config("atk") {
configs = [ ":atk_base" ]
cflags = [
# glib uses the pre-c++11 typedef-as-static_assert hack.
"-Wno-unused-local-typedef",
# G_DEFINE_TYPE automatically generates a *get_instance_private
# inline function after glib 2.37. That function is unused; keep the
# compiler from complaining about it.
"-Wno-unused-function",
]
}


@ -0,0 +1,14 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/features.gni")
import("//build/config/linux/pkg_config.gni")
assert(use_dbus)
# Note: if your target also depends on //dbus, you don't need to add this
# config (it will get added automatically if you depend on //dbus).
pkg_config("dbus") {
packages = [ "dbus-1" ]
}


@ -0,0 +1,19 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/features.gni")
import("//build/config/linux/pkg_config.gni")
# CrOS doesn't install GTK, gconf or any gnome packages.
assert(!is_chromeos && use_gconf)
# These packages should _only_ be expected when building for a target.
# If these extra checks are not run, gconf is required when building host
# tools for a CrOS build.
assert(current_toolchain == default_toolchain)
pkg_config("gconf") {
packages = [ "gconf-2.0" ]
defines = [ "USE_GCONF" ]
}


@ -0,0 +1,39 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/linux/gtk/gtk.gni")
import("//build/config/linux/pkg_config.gni")
assert(is_linux, "This file should only be referenced on Linux")
# The target in this file will automatically reference GTK2 or GTK3 depending
# on the state of the build flag. Some builds reference both 2 and 3, and some
# builds reference neither, so both need to be available but in different
# directories so pkg-config is only run when necessary.
# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
# parts that explicitly need GTK are whitelisted on this target.
group("gtk") {
visibility = [
"//chrome/test:interactive_ui_tests",
"//gpu/gles2_conform_support:gles2_conform_test_windowless",
"//remoting/host",
"//remoting/host/linux",
"//remoting/host/it2me:common",
"//remoting/host/it2me:remote_assistance_host",
"//remoting/host:remoting_me2me_host_static",
"//remoting/test:it2me_standalone_host_main",
"//webrtc/examples:peerconnection_client",
]
if (use_gtk3) {
public_deps = [
"//build/config/linux/gtk3",
]
} else {
public_deps = [
"//build/config/linux/gtk2",
]
}
}
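# Editor's sketch of how a whitelisted target consumes GTK through this group
# (the executable name is hypothetical; "//remoting/host" above shows the kind
# of target that is whitelisted):
#
#   executable("remote_assistance_host") {
#     deps = [ "//build/config/linux/gtk" ]
#     ...
#   }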

Просмотреть файл

@ -0,0 +1,12 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Include this file if you need to know at build time whether we're compiling
# against GTK 2 or 3. But in most cases you can just depend on
# //build/config/linux/gtk and it will switch for you.
declare_args() {
# Whether to compile against GTKv3 instead of GTKv2.
use_gtk3 = true
}

Просмотреть файл

@ -0,0 +1,44 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/linux/pkg_config.gni")
assert(is_linux, "This file should only be referenced on Linux")
# Depend on //build/config/linux/gtk2 to use GTKv2. Depend on
# //build/config/linux/gtk to get GTK 2 or 3 depending on the build flags.
#
# GN doesn't check visibility for configs so we give this an obviously internal
# name to discourage random targets from accidentally depending on this and
# bypassing the GTK target's visibility.
pkg_config("gtk2_internal_config") {
# Gtk requires gmodule, but it does not list it as a dependency on some
# misconfigured systems.
packages = [
"gmodule-2.0",
"gtk+-2.0",
"gthread-2.0",
]
}
# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
# parts that explicitly need GTK2 are whitelisted on this target.
group("gtk2") {
visibility = [
"//gpu/gles2_conform_support:gles2_conform_test_windowless",
"//build/config/linux/gtk",
"//chrome/browser/ui/libgtkui:*",
]
public_configs = [ ":gtk2_internal_config" ]
}
# Depend on "gtkprint" to get this.
pkg_config("gtkprint2_internal_config") {
packages = [ "gtk+-unix-print-2.0" ]
}
group("gtkprint2") {
visibility = [ "//chrome/browser/ui/libgtkui:libgtk2ui" ]
public_configs = [ ":gtkprint2_internal_config" ]
}

Просмотреть файл

@ -0,0 +1,43 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/linux/pkg_config.gni")
assert(is_linux, "This file should only be referenced on Linux")
# Depend on //build/config/linux/gtk3 to use GTKv3. Depend on
# //build/config/linux/gtk to get GTK 2 or 3 depending on the build flags.
#
# GN doesn't check visibility for configs so we give this an obviously internal
# name to discourage random targets from accidentally depending on this and
# bypassing the GTK target's visibility.
pkg_config("gtk3_internal_config") {
# Gtk requires gmodule, but it does not list it as a dependency on some
# misconfigured systems.
packages = [
"gmodule-2.0",
"gtk+-3.0",
"gthread-2.0",
]
}
# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
# parts that explicitly need GTK3 are whitelisted on this target.
group("gtk3") {
visibility = [
"//build/config/linux/gtk",
"//chrome/browser/ui/libgtkui:*",
]
public_configs = [ ":gtk3_internal_config" ]
}
# Depend on "gtkprint3" to get this.
pkg_config("gtkprint3_internal_config") {
packages = [ "gtk+-unix-print-3.0" ]
}
group("gtkprint3") {
visibility = [ "//chrome/browser/ui/libgtkui:libgtk3ui" ]
public_configs = [ ":gtkprint3_internal_config" ]
}

Просмотреть файл

@ -0,0 +1,9 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/linux/pkg_config.gni")
pkg_config("libffi") {
packages = [ "libffi" ]
}

Просмотреть файл

@ -0,0 +1,16 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/linux/pkg_config.gni")
pkg_config("pangocairo") {
packages = [ "pangocairo" ]
# We don't want pkgconfig for pangocairo to explicitly request FreeType to get
# linked, because we control which FreeType to link to.
extra_args = [
"-v",
"freetype",
]
}

Просмотреть файл

@ -0,0 +1,219 @@
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import subprocess
import sys
import re
from optparse import OptionParser
# This script runs pkg-config, optionally filtering out some results, and
# returns the result.
#
# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs>, <ldflags> ]
# where each member is itself a list of strings.
#
# You can filter out matches using "-v <regexp>" where all results from
# pkgconfig matching the given regular expression will be ignored. You can
# specify more than one regular expression by specifying "-v" more than once.
#
# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
# system path to the sysroot used for compiling. This script will attempt to
# generate correct paths for the sysroot.
#
# When using a sysroot, you must also specify the architecture via
# "-a <arch>" where arch is either "x86" or "x64".
#
# CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
# and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
# depending on whether the systemroot is for a 32 or 64 bit architecture. They
# specify the 'lib' or 'lib64' of the pkgconfig path by defining the
# 'system_libdir' variable in the args.gn file. pkg_config.gni communicates this
# variable to this script with the "--system_libdir <system_libdir>" flag. If no
# flag is provided, then pkgconfig files are assumed to come from
# <systemroot>/usr/lib/pkgconfig.
#
# Additionally, you can specify the option --atleast-version. This will skip
# the normal outputting of a dictionary and instead print true or false,
# depending on the return value of pkg-config for the given package.
def SetConfigPath(options):
"""Set the PKG_CONFIG_LIBDIR environment variable.
This takes into account any sysroot and architecture specification from the
options on the given command line.
"""
sysroot = options.sysroot
assert sysroot
# Compute the library path name based on the architecture.
arch = options.arch
if sysroot and not arch:
print "You must specify an architecture via -a if using a sysroot."
sys.exit(1)
libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig'
libdir += ':' + sysroot + '/usr/share/pkgconfig'
os.environ['PKG_CONFIG_LIBDIR'] = libdir
return libdir
def GetPkgConfigPrefixToStrip(args):
"""Returns the prefix from pkg-config where packages are installed.
This returned prefix is the one that should be stripped from the beginning of
directory names to take into account sysroots.
"""
# Some sysroots, like the Chromium OS ones, may generate paths that are not
# relative to the sysroot. For example,
# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
# paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
# instead of relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
# To support this correctly, it's necessary to extract the prefix to strip
# from pkg-config's |prefix| variable.
prefix = subprocess.check_output(["pkg-config", "--variable=prefix"] + args,
env=os.environ)
  # pkg-config prints a trailing newline; strip it before comparing, then drop
  # the trailing '/usr' so that only the sysroot-relative part is stripped.
  prefix = prefix.strip()
  if prefix.endswith('/usr'):
    return prefix[:-4]
  return prefix
def MatchesAnyRegexp(flag, list_of_regexps):
"""Returns true if the first argument matches any regular expression in the
given list."""
for regexp in list_of_regexps:
if regexp.search(flag) != None:
return True
return False
def RewritePath(path, strip_prefix, sysroot):
"""Rewrites a path by stripping the prefix and prepending the sysroot."""
if os.path.isabs(path) and not path.startswith(sysroot):
if path.startswith(strip_prefix):
path = path[len(strip_prefix):]
path = path.lstrip('/')
return os.path.join(sysroot, path)
else:
return path
def main():
# If this is run on non-Linux platforms, just return nothing and indicate
# success. This allows us to "kind of emulate" a Linux build from other
# platforms.
if "linux" not in sys.platform:
print "[[],[],[],[],[]]"
return 0
parser = OptionParser()
parser.add_option('-d', '--debug', action='store_true')
parser.add_option('-p', action='store', dest='pkg_config', type='string',
default='pkg-config')
parser.add_option('-v', action='append', dest='strip_out', type='string')
parser.add_option('-s', action='store', dest='sysroot', type='string')
parser.add_option('-a', action='store', dest='arch', type='string')
parser.add_option('--system_libdir', action='store', dest='system_libdir',
type='string', default='lib')
parser.add_option('--atleast-version', action='store',
dest='atleast_version', type='string')
parser.add_option('--libdir', action='store_true', dest='libdir')
(options, args) = parser.parse_args()
# Make a list of regular expressions to strip out.
strip_out = []
if options.strip_out != None:
for regexp in options.strip_out:
strip_out.append(re.compile(regexp))
if options.sysroot:
libdir = SetConfigPath(options)
if options.debug:
sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir)
prefix = GetPkgConfigPrefixToStrip(args)
else:
prefix = ''
if options.atleast_version:
# When asking for the return value, just run pkg-config and print the return
# value, no need to do other work.
if not subprocess.call([options.pkg_config,
"--atleast-version=" + options.atleast_version] +
args):
print "true"
else:
print "false"
return 0
if options.libdir:
cmd = [options.pkg_config, "--variable=libdir"] + args
if options.debug:
sys.stderr.write('Running: %s\n' % cmd)
try:
libdir = subprocess.check_output(cmd)
except:
print "Error from pkg-config."
return 1
sys.stdout.write(libdir.strip())
return 0
cmd = [options.pkg_config, "--cflags", "--libs"] + args
if options.debug:
sys.stderr.write('Running: %s\n' % ' '.join(cmd))
try:
flag_string = subprocess.check_output(cmd)
except:
sys.stderr.write('Could not run pkg-config.\n')
return 1
# For now just split on spaces to get the args out. This will break if
# pkgconfig returns quoted things with spaces in them, but that doesn't seem
# to happen in practice.
all_flags = flag_string.strip().split(' ')
sysroot = options.sysroot
if not sysroot:
sysroot = ''
includes = []
cflags = []
libs = []
lib_dirs = []
ldflags = []
for flag in all_flags[:]:
if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
      continue
if flag[:2] == '-l':
libs.append(RewritePath(flag[2:], prefix, sysroot))
elif flag[:2] == '-L':
lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
elif flag[:2] == '-I':
includes.append(RewritePath(flag[2:], prefix, sysroot))
elif flag[:3] == '-Wl':
ldflags.append(flag)
elif flag == '-pthread':
# Many libs specify "-pthread" which we don't need since we always include
# this anyway. Removing it here prevents a bunch of duplicate inclusions
# on the command line.
pass
else:
cflags.append(flag)
  # Output a GN-compatible value: a list of five lists, in the order
  # [ includes, cflags, libs, lib_dirs, ldflags ]. The JSON formatter prints
  # GN compatible lists when everything is a list of strings.
print json.dumps([includes, cflags, libs, lib_dirs, ldflags])
return 0
if __name__ == '__main__':
sys.exit(main())

Просмотреть файл

@ -0,0 +1,117 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/sysroot.gni")
# Defines a config specifying the result of running pkg-config for the given
# packages. Put the package names you want to query in the "packages" variable
# inside the template invocation.
#
# You can also add defines via the "defines" variable. This can be useful to
# add this to the config to pass defines that the library expects to get by
# users of its headers.
#
# Example:
# pkg_config("mything") {
# packages = [ "mything1", "mything2" ]
# defines = [ "ENABLE_AWESOME" ]
# }
#
# You can also use "extra_args" to filter out results (see pkg-config.py):
#   extra_args = [ "-v", "foo" ]
# To ignore libs and ldflags (only cflags/defines will be set, which is useful
# when doing manual dynamic linking), set:
# ignore_libs = true
declare_args() {
# A pkg-config wrapper to call instead of trying to find and call the right
# pkg-config directly. Wrappers like this are common in cross-compilation
# environments.
# Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
# the sysroot mechanism to find the right .pc files.
pkg_config = ""
# An optional pkg-config wrapper to use for tools built on the host.
host_pkg_config = ""
# CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
# and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
# depending on whether the systemroot is for a 32 or 64 bit architecture.
#
# When built under GYP, CrOS board builds specify the 'system_libdir' variable
# as part of the GYP_DEFINES provided by the CrOS emerge build or simple
# chrome build scheme. This variable permits controlling this for GN builds
# in a similar fashion by setting the `system_libdir` variable in the build's
# args.gn file to 'lib' or 'lib64' as appropriate for the target architecture.
system_libdir = "lib"
}
pkg_config_script = "//build/config/linux/pkg-config.py"
# Define the args we pass to the pkg-config script for other build files that
# need to invoke it manually.
if (sysroot != "") {
# Pass the sysroot if we're using one (it requires the CPU arch also).
pkg_config_args = [
"-s",
rebase_path(sysroot),
"-a",
current_cpu,
]
} else if (pkg_config != "") {
pkg_config_args = [
"-p",
pkg_config,
]
} else {
pkg_config_args = []
}
# Only use the custom libdir when building with the target sysroot.
if (target_sysroot != "" && sysroot == target_sysroot) {
pkg_config_args += [
"--system_libdir",
system_libdir,
]
}
if (host_pkg_config != "") {
host_pkg_config_args = [
"-p",
host_pkg_config,
]
} else {
host_pkg_config_args = pkg_config_args
}
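# Editor's sketch of invoking the script manually from another build file,
# mirroring the atk --libdir query earlier in this commit; the "glib-2.0"
# package is an arbitrary example:
#
#   some_libdir = exec_script(pkg_config_script,
#                             pkg_config_args + [
#                                   "--libdir",
#                                   "glib-2.0",
#                                 ],
#                             "string")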
template("pkg_config") {
assert(defined(invoker.packages),
"Variable |packages| must be defined to be a list in pkg_config.")
config(target_name) {
if (host_toolchain == current_toolchain) {
args = host_pkg_config_args + invoker.packages
} else {
args = pkg_config_args + invoker.packages
}
if (defined(invoker.extra_args)) {
args += invoker.extra_args
}
pkgresult = exec_script(pkg_config_script, args, "value")
include_dirs = pkgresult[0]
cflags = pkgresult[1]
if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
libs = pkgresult[2]
lib_dirs = pkgresult[3]
ldflags = pkgresult[4]
}
forward_variables_from(invoker,
[
"defines",
"visibility",
])
}
}
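# Editor's sketch of a fuller invocation than the example in the header
# comment; the package name, define, and filtered flag are hypothetical:
#
#   pkg_config("mylib") {
#     packages = [ "mylib-1.0" ]
#     defines = [ "USE_MYLIB" ]
#     ignore_libs = true  # Only cflags/defines; the library is dlopen()ed.
#     extra_args = [
#       "-v",
#       "freetype",  # Drop any pkg-config output matching this regexp.
#     ]
#   }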

Просмотреть файл

@ -0,0 +1,187 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Android doesn't ship all locales in order to save space (but webview does).
# http://crbug.com/369218
if (is_android) {
android_chrome_omitted_locales = [
"bn",
"et",
"gu",
"kn",
"ml",
"mr",
"ms",
"ta",
"te",
]
}
# Chrome on iOS only ships with a subset of the locales supported by other
# versions of Chrome, as the corresponding locales are not supported by the
# operating system (but for simplicity, the corresponding .pak files are
# still generated).
if (is_ios) {
ios_unsupported_locales = [
"am",
"bn",
"et",
"fil",
"gu",
"kn",
"lv",
"ml",
"mr",
"sl",
"sw",
"ta",
"te",
]
}
# Note: keep in sync with below.
locales = [
"am",
"ar",
"bg",
"bn",
"ca",
"cs",
"da",
"de",
"el",
"en-GB",
"en-US",
"es",
"et",
"fa",
"fi",
"fil",
"fr",
"gu",
"he",
"hi",
"hr",
"hu",
"id",
"it",
"ja",
"kn",
"ko",
"lt",
"lv",
"ml",
"mr",
"ms",
"nb",
"nl",
"pl",
"pt-PT",
"ro",
"ru",
"sk",
"sl",
"sr",
"sv",
"sw",
"ta",
"te",
"th",
"tr",
"uk",
"vi",
"zh-CN",
"zh-TW",
]
# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
# respectively "es-MX" and "pt" on iOS).
if (!is_ios) {
locales += [
"es-419",
"pt-BR",
]
} else {
locales += [
"es-MX",
"pt",
]
ios_packed_locales = locales - ios_unsupported_locales
}
locales_with_fake_bidi = locales + [ "fake-bidi" ]
# Same as the locales list but in the format Mac expects for output files:
# it uses underscores instead of hyphens, and "en" instead of "en-US".
locales_as_mac_outputs = [
"am",
"ar",
"bg",
"bn",
"ca",
"cs",
"da",
"de",
"el",
"en_GB",
"en",
"es",
"et",
"fa",
"fi",
"fil",
"fr",
"gu",
"he",
"hi",
"hr",
"hu",
"id",
"it",
"ja",
"kn",
"ko",
"lt",
"lv",
"ml",
"mr",
"ms",
"nb",
"nl",
"pl",
"pt_PT",
"ro",
"ru",
"sk",
"sl",
"sr",
"sv",
"sw",
"ta",
"te",
"th",
"tr",
"uk",
"vi",
"zh_CN",
"zh_TW",
]
# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
# respectively "es-MX" and "pt" on iOS).
if (!is_ios) {
locales_as_mac_outputs += [
"es_419",
"pt_BR",
]
} else {
locales_as_mac_outputs += [
"es_MX",
"pt",
]
ios_packed_locales_as_mac_outputs =
locales_as_mac_outputs - ios_unsupported_locales
}
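# Editor's sketch of how these lists are typically consumed: a foreach over
# |locales| building per-locale output paths (the path below is hypothetical):
#
#   locale_pak_paths = []
#   foreach(locale, locales) {
#     locale_pak_paths += [ "$root_out_dir/locales/$locale.pak" ]
#   }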

Просмотреть файл

@ -0,0 +1,104 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/sysroot.gni")
import("//build/config/mac/mac_sdk.gni")
import("//build/config/mac/symbols.gni")
# This is included by reference in the //build/config/compiler config that
# is applied to all targets. It is here to separate out the logic.
config("compiler") {
# These flags are shared between the C compiler and linker.
common_mac_flags = []
# CPU architecture.
if (current_cpu == "x64") {
common_mac_flags += [
"-arch",
"x86_64",
]
} else if (current_cpu == "x86") {
common_mac_flags += [
"-arch",
"i386",
]
}
# This is here so that all files get recompiled after an Xcode update.
# (defines are passed via the command line, and the build system rebuilds
# things when their command line changes). Nothing should ever read this
# define.
defines = [ "CR_XCODE_VERSION=$xcode_version" ]
asmflags = common_mac_flags
cflags = common_mac_flags
# Without this, the constructors and destructors of a C++ object inside
# an Objective C struct won't be called, which is very bad.
cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
cflags_c = [ "-std=c99" ]
cflags_objc = cflags_c
ldflags = common_mac_flags
if (save_unstripped_output) {
ldflags += [ "-Wcrl,unstripped," + rebase_path(root_out_dir) ]
}
}
# This is included by reference in the //build/config/compiler:runtime_library
# config that is applied to all targets. It is here to separate out the logic
# that is Mac-only. Please see that target for advice on what should go in
# :runtime_library vs. :compiler.
config("runtime_library") {
common_flags = [
"-isysroot",
rebase_path(sysroot, root_build_dir),
"-mmacosx-version-min=$mac_deployment_target",
]
asmflags = common_flags
cflags = common_flags
ldflags = common_flags
# Prevent Mac OS X AssertMacros.h (included by system header) from defining
# macros that collide with common names, like 'check', 'require', and
# 'verify'.
# http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
defines = [ "__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0" ]
}
# On Mac, this is used for everything except static libraries.
config("mac_dynamic_flags") {
ldflags = [ "-Wl,-ObjC" ] # Always load Objective-C categories and classes.
if (is_component_build) {
ldflags += [
# Path for loading shared libraries for unbundled binaries.
"-Wl,-rpath,@loader_path/.",
# Path for loading shared libraries for bundled binaries. Get back from
# Binary.app/Contents/MacOS.
"-Wl,-rpath,@loader_path/../../..",
]
}
}
# On Mac, this is used only for executables.
config("mac_executable_flags") {
# Remove this when targeting >=10.7 since it is the default in that config.
ldflags = [ "-Wl,-pie" ] # Position independent.
}
# The ldflags referenced below are handled by
# //build/toolchain/mac/linker_driver.py.
# Remove this config if a target wishes to change the arguments passed to the
# strip command during linking. This config by default strips all symbols
# from a binary, but some targets may wish to specify a save file to preserve
# specific symbols.
config("strip_all") {
if (enable_stripping) {
ldflags = [ "-Wcrl,strip,-x,-S" ]
}
}

Просмотреть файл

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>BuildMachineOSBuild</key>
<string>${BUILD_MACHINE_OS_BUILD}</string>
<key>DTCompiler</key>
<string>${GCC_VERSION}</string>
<key>DTSDKBuild</key>
<string>${MAC_SDK_BUILD}</string>
<key>DTSDKName</key>
<string>${MAC_SDK_NAME}</string>
<key>DTXcode</key>
<string>${XCODE_VERSION}</string>
<key>DTXcodeBuild</key>
<string>${XCODE_BUILD}</string>
<key>CFBundleShortVersionString</key>
<string>${VERSION}</string>
<key>CFBundleVersion</key>
<string>${VERSION_BUILD}</string>
<key>CFBundleIdentifier</key>
<string>org.chromium.${PRODUCT_NAME:rfc1034identifier}</string>
<key>SCM_REVISION</key>
<string>${COMMIT_HASH}</string>
</dict>
</plist>

Просмотреть файл

@ -0,0 +1,4 @@
rsesek@chromium.org
sdefresne@chromium.org
# COMPONENT: Build

Просмотреть файл

@ -0,0 +1,253 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file contains rules that are shared between Mac and iOS.
import("//build/toolchain/toolchain.gni")
import("//build/config/mac/symbols.gni")
if (is_mac) {
import("//build/config/mac/mac_sdk.gni")
} else if (is_ios) {
import("//build/config/ios/ios_sdk.gni")
}
# Convert plist file to given format.
#
# Arguments
#
# source:
# string, path to the plist file to convert
#
# output:
# string, path to the converted plist, must be under $root_build_dir
#
# format:
# string, the format to `plutil -convert` the plist to.
template("convert_plist") {
assert(defined(invoker.source), "source must be defined for $target_name")
assert(defined(invoker.output), "output must be defined for $target_name")
assert(defined(invoker.format), "format must be defined for $target_name")
action(target_name) {
forward_variables_from(invoker,
[
"visibility",
"testonly",
"deps",
])
script = "//build/config/mac/xcrun.py"
sources = [
invoker.source,
]
outputs = [
invoker.output,
]
args = []
if (!use_system_xcode) {
args += [
"--developer_dir",
hermetic_xcode_path,
]
}
args += [
"plutil",
"-convert",
invoker.format,
"-o",
rebase_path(invoker.output, root_build_dir),
rebase_path(invoker.source, root_build_dir),
]
}
}
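# Editor's sketch of a convert_plist invocation (target name and paths are
# hypothetical):
#
#   convert_plist("foo_plist_binary") {
#     source = "//foo/Info.plist"
#     output = "$target_gen_dir/Info.plist"
#     format = "binary1"
#   }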
# Template to merge multiple plist files and perform variable substitutions.
#
# Arguments
#
# plist_templates:
# string array, paths to plist files which will be used for the bundle.
#
# format:
# string, the format to `plutil -convert` the plist to when
# generating the output.
#
# substitutions:
# string array, 'key=value' pairs used to replace ${key} by value
# when generating the output plist file.
#
# output_name:
# string, name of the generated plist file.
template("compile_plist") {
assert(defined(invoker.plist_templates),
"A list of template plist files must be specified for $target_name")
assert(defined(invoker.format),
"The plist format must be specified for $target_name")
assert(defined(invoker.substitutions),
"A list of key=value pairs must be specified for $target_name")
assert(defined(invoker.output_name),
"The name of the output file must be specified for $target_name")
_output_name = invoker.output_name
_merged_name = get_path_info(_output_name, "dir") + "/" +
get_path_info(_output_name, "name") + "_merged." +
get_path_info(_output_name, "extension")
_merge_target = target_name + "_merge"
action(_merge_target) {
forward_variables_from(invoker,
[
"deps",
"testonly",
])
script = "//build/config/mac/plist_util.py"
sources = invoker.plist_templates
outputs = [
_merged_name,
]
args = [
"merge",
"-f=" + invoker.format,
"-o=" + rebase_path(_merged_name, root_build_dir),
] + rebase_path(invoker.plist_templates, root_build_dir)
}
action(target_name) {
forward_variables_from(invoker,
[
"testonly",
"visibility",
])
script = "//build/config/mac/plist_util.py"
sources = [
_merged_name,
]
outputs = [
_output_name,
]
args = [
"substitute",
"-f=" + invoker.format,
"-o=" + rebase_path(_output_name, root_build_dir),
"-t=" + rebase_path(_merged_name, root_build_dir),
]
foreach(_substitution, invoker.substitutions) {
args += [ "-s=$_substitution" ]
}
deps = [
":$_merge_target",
]
}
}
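# Editor's sketch of a compile_plist invocation (names and substitution values
# are hypothetical):
#
#   compile_plist("foo_plist") {
#     plist_templates = [
#       "//foo/Base.plist",
#       "//foo/Extra.plist",
#     ]
#     format = "xml1"
#     substitutions = [ "PRODUCT_NAME=Foo" ]
#     output_name = "$target_gen_dir/Foo-Info.plist"
#   }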
# The base template used to generate Info.plist files for iOS and Mac apps and
# frameworks.
#
# Arguments
#
# plist_templates:
# string array, paths to plist files which will be used for the bundle.
#
# executable_name:
# string, name of the generated target used for the product
# and executable name as specified in the output Info.plist.
#
# format:
# string, the format to `plutil -convert` the plist to when
# generating the output.
#
# extra_substitutions:
# (optional) string array, 'key=value' pairs for extra fields which are
# specified in a source Info.plist template.
#
# output_name:
# (optional) string, name of the generated plist file, default to
# "$target_gen_dir/$target_name.plist".
template("info_plist") {
assert(defined(invoker.executable_name),
"The executable_name must be specified for $target_name")
executable_name = invoker.executable_name
compile_plist(target_name) {
forward_variables_from(invoker,
[
"plist_templates",
"testonly",
"deps",
"visibility",
"format",
])
if (defined(invoker.output_name)) {
output_name = invoker.output_name
} else {
output_name = "$target_gen_dir/$target_name.plist"
}
substitutions = [
"BUILD_MACHINE_OS_BUILD=$machine_os_build",
"EXECUTABLE_NAME=$executable_name",
"GCC_VERSION=com.apple.compilers.llvm.clang.1_0",
"PRODUCT_NAME=$executable_name",
"XCODE_BUILD=$xcode_build",
"XCODE_VERSION=$xcode_version",
]
if (defined(invoker.extra_substitutions)) {
substitutions += invoker.extra_substitutions
}
}
}
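# Editor's sketch of an info_plist invocation; BUNDLE_IDENTIFIER and the paths
# are hypothetical, and the common substitutions above are filled in
# automatically:
#
#   info_plist("foo_info_plist") {
#     executable_name = "Foo"
#     format = "xml1"
#     plist_templates = [ "//foo/Info.plist" ]
#     extra_substitutions = [ "BUNDLE_IDENTIFIER=org.example.foo" ]
#   }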
# Template to compile .xib and .storyboard files.
#
# Arguments
#
# sources:
# list of string, sources to compile
#
# output_extension:
# string, extension (without a leading dot) of the compiled output files,
# e.g. "nib".
#
# ibtool_flags:
# (optional) list of string, additional flags to pass to the ibtool
template("compile_ib_files") {
action_foreach(target_name) {
forward_variables_from(invoker,
[
"testonly",
"visibility",
])
assert(defined(invoker.sources),
"sources must be specified for $target_name")
assert(defined(invoker.output_extension),
"output_extension must be specified for $target_name")
ibtool_flags = []
if (defined(invoker.ibtool_flags)) {
ibtool_flags = invoker.ibtool_flags
}
_output_extension = invoker.output_extension
script = "//build/config/mac/compile_ib_files.py"
sources = invoker.sources
outputs = [
"$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension",
]
args = [
"--input",
"{{source}}",
"--output",
rebase_path(
"$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension"),
]
if (!use_system_xcode) {
args += [
"--developer_dir",
hermetic_xcode_path,
]
}
args += ibtool_flags
}
}
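# Editor's sketch of a direct compile_ib_files invocation (names are
# hypothetical; this is normally reached through the mac_xib_bundle_data
# template defined later in this commit):
#
#   compile_ib_files("foo_xibs") {
#     sources = [ "//foo/MainMenu.xib" ]
#     output_extension = "nib"
#     ibtool_flags = [
#       "--minimum-deployment-target",
#       "10.9",
#     ]
#   }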

Просмотреть файл

@ -0,0 +1,57 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import logging
import os
import re
import subprocess
import sys
def main():
parser = argparse.ArgumentParser(
description='A script to compile xib and storyboard.',
fromfile_prefix_chars='@')
parser.add_argument('-o', '--output', required=True,
help='Path to output bundle.')
parser.add_argument('-i', '--input', required=True,
help='Path to input xib or storyboard.')
parser.add_argument('--developer_dir', required=False,
help='Path to Xcode.')
args, unknown_args = parser.parse_known_args()
if args.developer_dir:
os.environ['DEVELOPER_DIR'] = args.developer_dir
ibtool_args = [
'xcrun', 'ibtool',
'--errors', '--warnings', '--notices',
'--output-format', 'human-readable-text'
]
ibtool_args += unknown_args
ibtool_args += [
'--compile',
os.path.abspath(args.output),
os.path.abspath(args.input)
]
ibtool_section_re = re.compile(r'/\*.*\*/')
ibtool_re = re.compile(r'.*note:.*is clipping its content')
ibtoolout = subprocess.Popen(ibtool_args, stdout=subprocess.PIPE)
current_section_header = None
for line in ibtoolout.stdout:
if ibtool_section_re.match(line):
current_section_header = line
elif not ibtool_re.match(line):
if current_section_header:
sys.stdout.write(current_section_header)
current_section_header = None
sys.stdout.write(line)
  # stdout has been drained; wait() reaps the process and yields its real exit
  # status (returncode is None until the process has been waited on).
  return ibtoolout.wait()
if __name__ == '__main__':
sys.exit(main())

Просмотреть файл

@ -0,0 +1,105 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/chrome_build.gni")
import("//build/toolchain/toolchain.gni")
# See https://bugs.chromium.org/p/webrtc/issues/detail?id=5453.
# We can drop the rtc_require_mac_10_7_deployment flag when Chromium
# also requires a 10.7 deployment target.
import("//build_overrides/build.gni")
declare_args() {
# Minimum supported version of the Mac SDK.
mac_sdk_min = mac_sdk_min_build_override
# Minimum supported version of OSX.
mac_deployment_target = "10.9"
# Path to a specific version of the Mac SDK, not including a slash at the end.
# If empty, the path to the lowest version greater than or equal to
# mac_sdk_min is used.
mac_sdk_path = ""
# The SDK name as accepted by xcodebuild.
mac_sdk_name = "macosx"
}
# Check that the version of the macOS SDK used is the one requested when
# building a version of Chrome shipped to users. Disable the check when
# building for iOS, as the version of the macOS SDK used is not relevant for
# the tools built for the host (they are not shipped); this is required
# because Chrome on iOS is usually built with the latest version of Xcode,
# which may not ship with the version of the macOS SDK used to build Chrome
# on Mac.
# TODO(crbug.com/635745): the check for target_os should be replaced by a
# check that current_toolchain is default_toolchain, and the file should
# assert that current_os is "mac" once this file is no longer included by
# iOS toolchains.
_verify_sdk = is_chrome_branded && is_official_build && target_os != "ios"
find_sdk_args = [ "--print_sdk_path" ]
if (!use_system_xcode) {
find_sdk_args += [
"--developer_dir",
hermetic_xcode_path,
]
}
if (_verify_sdk) {
find_sdk_args += [
"--verify",
mac_sdk_min,
"--sdk_path=" + mac_sdk_path,
]
} else {
find_sdk_args += [ mac_sdk_min ]
}
# The tool will print the SDK path on the first line, and the version on the
# second line.
find_sdk_lines =
exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
mac_sdk_version = find_sdk_lines[1]
if (mac_sdk_path == "") {
mac_sdk_path = find_sdk_lines[0]
}
script_name = "//build/config/mac/sdk_info.py"
sdk_info_args = []
if (!use_system_xcode) {
sdk_info_args += [
"--developer_dir",
hermetic_xcode_path,
]
}
sdk_info_args += [ mac_sdk_name ]
_mac_sdk_result = exec_script(script_name, sdk_info_args, "scope")
xcode_version = _mac_sdk_result.xcode_version
xcode_build = _mac_sdk_result.xcode_build
machine_os_build = _mac_sdk_result.machine_os_build
if (mac_sdk_version != mac_sdk_min_build_override &&
exec_script("//build/check_return_value.py",
[
"test",
xcode_version,
"-ge",
"0730",
],
"value") != 1) {
print(
"********************************************************************************")
print(
" WARNING: The Mac OS X SDK is incompatible with the version of Xcode. To fix,")
print(
" either upgrade Xcode to the latest version or install the Mac OS X")
print(
" $mac_sdk_min_build_override SDK. For more information, see https://crbug.com/620127.")
print()
print(" Current SDK Version: $mac_sdk_version")
print(" Current Xcode Version: $xcode_version ($xcode_build)")
print(
"********************************************************************************")
assert(false, "SDK is incompatible with Xcode")
}

Просмотреть файл

@ -0,0 +1,60 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import errno
import os
import shutil
import sys
def Main():
parser = argparse.ArgumentParser(description='Create Mac Framework symlinks')
parser.add_argument('--framework', action='store', type=str, required=True)
parser.add_argument('--version', action='store', type=str)
parser.add_argument('--contents', action='store', type=str, nargs='+')
parser.add_argument('--stamp', action='store', type=str, required=True)
args = parser.parse_args()
VERSIONS = 'Versions'
CURRENT = 'Current'
# Ensure the Foo.framework/Versions/A/ directory exists and create the
# Foo.framework/Versions/Current symlink to it.
if args.version:
try:
os.makedirs(os.path.join(args.framework, VERSIONS, args.version), 0744)
except OSError as e:
if e.errno != errno.EEXIST:
raise e
_Relink(os.path.join(args.version),
os.path.join(args.framework, VERSIONS, CURRENT))
# Establish the top-level symlinks in the framework bundle. The dest of
# the symlinks may not exist yet.
if args.contents:
for item in args.contents:
_Relink(os.path.join(VERSIONS, CURRENT, item),
os.path.join(args.framework, item))
# Write out a stamp file.
if args.stamp:
with open(args.stamp, 'w') as f:
f.write(str(args))
return 0
def _Relink(dest, link):
"""Creates a symlink to |dest| named |link|. If |link| already exists,
it is overwritten."""
try:
os.remove(link)
except OSError as e:
if e.errno != errno.ENOENT:
shutil.rmtree(link)
os.symlink(dest, link)
if __name__ == '__main__':
sys.exit(Main())

Просмотреть файл

@ -0,0 +1,254 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import plistlib
import os
import re
import subprocess
import sys
import tempfile
import shlex
# Xcode substitutes variables like ${PRODUCT_NAME} when compiling Info.plist.
# It also supports modifiers like :identifier or :rfc1034identifier.
# SUBST_RE matches a variable substitution pattern with an optional modifier,
# while IDENT_RE matches all characters that are not valid in an "identifier"
# value (used when applying the modifier).
SUBST_RE = re.compile(r'\$\{(?P<id>[^}]*?)(?P<modifier>:[^}]*)?\}')
IDENT_RE = re.compile(r'[_/\s]')
def InterpolateList(values, substitutions):
"""Interpolates variable references into |value| using |substitutions|.
Inputs:
values: a list of values
substitutions: a mapping of variable names to values
Returns:
A new list of values with all variable references ${VARIABLE} replaced by
their value in |substitutions|, or None if any of the variables has no
substitution.
"""
result = []
for value in values:
interpolated = InterpolateValue(value, substitutions)
if interpolated is None:
return None
result.append(interpolated)
return result
def InterpolateString(value, substitutions):
"""Interpolates variable references into |value| using |substitutions|.
Inputs:
value: a string
substitutions: a mapping of variable names to values
Returns:
A new string with all variable references ${VARIABLE} replaced by their
value in |substitutions|, or None if any of the variables has no
substitution.
"""
result = value
for match in reversed(list(SUBST_RE.finditer(value))):
variable = match.group('id')
if variable not in substitutions:
return None
# Some values need to be identifiers, so the variable references may carry
# :modifier attributes indicating how they should be converted to
# identifiers ("identifier" replaces all invalid characters with '_', and
# "rfc1034identifier" replaces them with '-' so the result is also a valid
# URI).
modifier = match.group('modifier')
if modifier == ':identifier':
interpolated = IDENT_RE.sub('_', substitutions[variable])
elif modifier == ':rfc1034identifier':
interpolated = IDENT_RE.sub('-', substitutions[variable])
else:
interpolated = substitutions[variable]
result = result[:match.start()] + interpolated + result[match.end():]
return result
def InterpolateValue(value, substitutions):
"""Interpolates variable references into |value| using |substitutions|.
Inputs:
value: a value, can be a dictionary, list, string or other
substitutions: a mapping of variable names to values
Returns:
A new value with all variable references ${VARIABLE} replaced by their
value in |substitutions|, or None if any of the variables has no
substitution.
"""
if isinstance(value, dict):
return Interpolate(value, substitutions)
if isinstance(value, list):
return InterpolateList(value, substitutions)
if isinstance(value, str):
return InterpolateString(value, substitutions)
return value
def Interpolate(plist, substitutions):
"""Interpolates variable references into |value| using |substitutions|.
Inputs:
plist: a dictionary representing a Property List (.plist) file
substitutions: a mapping of variable names to values
Returns:
A new plist with all variable references ${VARIABLE} replaced by their value
in |substitutions|. All values that contain references with no substitution
are removed, and the corresponding keys are cleared from the plist (not
recursively).
"""
result = {}
for key in plist:
value = InterpolateValue(plist[key], substitutions)
if value is not None:
result[key] = value
return result
def LoadPList(path):
"""Loads Plist at |path| and returns it as a dictionary."""
fd, name = tempfile.mkstemp()
try:
subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path])
with os.fdopen(fd, 'r') as f:
return plistlib.readPlist(f)
finally:
os.unlink(name)
def SavePList(path, format, data):
"""Saves |data| as a Plist to |path| in the specified |format|."""
fd, name = tempfile.mkstemp()
try:
with os.fdopen(fd, 'w') as f:
plistlib.writePlist(data, f)
subprocess.check_call(['plutil', '-convert', format, '-o', path, name])
finally:
os.unlink(name)
def MergePList(plist1, plist2):
"""Merges |plist1| with |plist2| recursively.
Creates a new dictionary representing a Property List (.plist) file by
merging the two dictionaries |plist1| and |plist2| recursively (only for
dictionary values). List values are concatenated.
Args:
plist1: a dictionary representing a Property List (.plist) file
plist2: a dictionary representing a Property List (.plist) file
Returns:
A new dictionary representing a Property List (.plist) file obtained by
merging |plist1| with |plist2|. If any value is a dictionary, the two are
merged recursively; otherwise the value from |plist2| is used. If the values
are lists, they are concatenated.
"""
if not isinstance(plist1, dict) or not isinstance(plist2, dict):
if plist2 is not None:
return plist2
else:
return plist1
result = {}
for key in set(plist1) | set(plist2):
if key in plist2:
value = plist2[key]
else:
value = plist1[key]
if isinstance(value, dict):
value = MergePList(plist1.get(key, None), plist2.get(key, None))
if isinstance(value, list):
value = plist1.get(key, []) + plist2.get(key, [])
result[key] = value
return result
class Action(object):
"""Class implementing one action supported by the script."""
@classmethod
def Register(cls, subparsers):
parser = subparsers.add_parser(cls.name, help=cls.help)
parser.set_defaults(func=cls._Execute)
cls._Register(parser)
class MergeAction(Action):
"""Class to merge multiple plist files."""
name = 'merge'
help = 'merge multiple plist files'
@staticmethod
def _Register(parser):
parser.add_argument(
'-o', '--output', required=True,
help='path to the output plist file')
parser.add_argument(
'-f', '--format', required=True, choices=('xml1', 'binary1', 'json'),
help='format of the plist file to generate')
parser.add_argument(
'path', nargs="+",
help='path to plist files to merge')
@staticmethod
def _Execute(args):
data = {}
for filename in args.path:
data = MergePList(data, LoadPList(filename))
SavePList(args.output, args.format, data)
class SubstituteAction(Action):
"""Class implementing the variable substitution in a plist file."""
name = 'substitute'
help = 'perform pattern substitution in a plist file'
@staticmethod
def _Register(parser):
parser.add_argument(
'-o', '--output', required=True,
help='path to the output plist file')
parser.add_argument(
'-t', '--template', required=True,
help='path to the template file')
parser.add_argument(
'-s', '--substitution', action='append', default=[],
help='substitution rule in the format key=value')
parser.add_argument(
'-f', '--format', required=True, choices=('xml1', 'binary1', 'json'),
help='format of the plist file to generate')
@staticmethod
def _Execute(args):
substitutions = {}
for substitution in args.substitution:
key, value = substitution.split('=', 1)
substitutions[key] = value
data = Interpolate(LoadPList(args.template), substitutions)
SavePList(args.output, args.format, data)
def Main():
parser = argparse.ArgumentParser(description='manipulate plist files')
subparsers = parser.add_subparsers()
for action in [MergeAction, SubstituteAction]:
action.Register(subparsers)
args = parser.parse_args()
args.func(args)
if __name__ == '__main__':
sys.exit(Main())

Просмотреть файл

@ -0,0 +1,42 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
# Ensures that the current version matches the last-produced version, which is
# stored in the version_file. If it does not, then the framework_root_dir is
# obliterated.
# Usage: python prepare_framework_version.py out/obj/version_file \
# out/Framework.framework \
# 'A'
def PrepareFrameworkVersion(version_file, framework_root_dir, version):
# Test what the current framework version is. Stop if it is up-to-date.
try:
with open(version_file, 'r') as f:
current_version = f.read()
if current_version == version:
return
except IOError:
pass
# The framework version has changed, so clobber the framework.
if os.path.exists(framework_root_dir):
shutil.rmtree(framework_root_dir)
# Write out the new framework version file, making sure its containing
# directory exists.
dirname = os.path.dirname(version_file)
if not os.path.isdir(dirname):
os.makedirs(dirname, 0700)
with open(version_file, 'w+') as f:
f.write(version)
if __name__ == '__main__':
PrepareFrameworkVersion(sys.argv[1], sys.argv[2], sys.argv[3])
sys.exit(0)

Просмотреть файл

@ -0,0 +1,692 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/mac/base_rules.gni")
# Generates Info.plist files for Mac apps and frameworks.
#
# Arguments
#
# info_plist:
# (optional) string, path to the Info.plist file that will be used for
# the bundle.
#
# info_plist_target:
# (optional) string, if the info_plist is generated from an action,
# rather than a regular source file, specify the target name in lieu
# of info_plist. The two arguments are mutually exclusive.
#
# executable_name:
# string, name of the generated target used for the product
# and executable name as specified in the output Info.plist.
#
# extra_substitutions:
# (optional) string array, 'key=value' pairs for extra fields which are
# specified in a source Info.plist template.
template("mac_info_plist") {
assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
"Only one of info_plist or info_plist_target may be specified in " +
target_name)
if (defined(invoker.info_plist)) {
_info_plist = invoker.info_plist
} else {
_info_plist_target_output = get_target_outputs(invoker.info_plist_target)
_info_plist = _info_plist_target_output[0]
}
info_plist(target_name) {
format = "xml1"
extra_substitutions = []
if (defined(invoker.extra_substitutions)) {
extra_substitutions = invoker.extra_substitutions
}
extra_substitutions += [
"MAC_SDK_BUILD=$mac_sdk_version",
"MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version",
]
plist_templates = [
"//build/config/mac/BuildInfo.plist",
_info_plist,
]
if (defined(invoker.info_plist_target)) {
deps = [
invoker.info_plist_target,
]
}
forward_variables_from(invoker,
[
"testonly",
"executable_name",
])
}
}
# Template to compile and package Mac XIB files as bundle data.
#
# Arguments
#
# sources:
# list of string, sources to compile
#
# output_path:
# (optional) string, the path to use for the outputs list in the
# bundle_data step. If unspecified, defaults to bundle_resources_dir.
template("mac_xib_bundle_data") {
_target_name = target_name
_compile_target_name = _target_name + "_compile_ibtool"
compile_ib_files(_compile_target_name) {
forward_variables_from(invoker, [ "testonly" ])
visibility = [ ":$_target_name" ]
sources = invoker.sources
output_extension = "nib"
ibtool_flags = [
"--minimum-deployment-target",
mac_deployment_target,
# TODO(rsesek): Enable this once all the bots are on Xcode 7+.
# "--target-device",
# "mac",
]
}
bundle_data(_target_name) {
forward_variables_from(invoker,
[
"testonly",
"visibility",
])
public_deps = [
":$_compile_target_name",
]
sources = get_target_outputs(":$_compile_target_name")
_output_path = "{{bundle_resources_dir}}"
if (defined(invoker.output_path)) {
_output_path = invoker.output_path
}
outputs = [
"$_output_path/{{source_file_part}}",
]
}
}
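# Editor's sketch of a mac_xib_bundle_data invocation (names are hypothetical):
#
#   mac_xib_bundle_data("foo_xibs") {
#     sources = [ "MainMenu.xib" ]
#   }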
# Template to package a shared library into a Mac framework bundle.
#
# By default, the bundle target this template generates does not link the
# resulting framework into anything that depends on it. If a dependency wants
# a link-time (as well as build-time) dependency on the framework bundle,
# depend against "$target_name+link". If only the build-time dependency is
# required (e.g., for copying into another bundle), then use "$target_name".
#
# Arguments
#
# info_plist:
# (optional) string, path to the Info.plist file that will be used for
# the bundle.
#
# info_plist_target:
# (optional) string, if the info_plist is generated from an action,
# rather than a regular source file, specify the target name in lieu
# of info_plist. The two arguments are mutually exclusive.
#
# output_name:
# (optional) string, name of the generated framework without the
# .framework suffix. If omitted, defaults to target_name.
#
# framework_version:
# (optional) string, version of the framework. Typically this is a
# single letter, like "A". If omitted, the Versions/ subdirectory
# structure will not be created, and build output will go directly
# into the framework subdirectory.
#
# framework_contents:
# (optional) list of string, top-level items in the framework. For
# frameworks with a framework_version, this is the list of symlinks to
# create in the .framework directory that link into Versions/Current/.
#
# extra_substitutions:
# (optional) string array, 'key=value' pairs for extra fields which are
# specified in a source Info.plist template.
#
# This template provides two targets for the resulting framework bundle. The
# link-time behavior varies depending on which of the two targets below is
# added as a dependency:
# - $target_name only adds a build-time dependency. Targets that depend on
# it will not link against the framework.
# - $target_name+link adds a build-time and link-time dependency. Targets
# that depend on it will link against the framework.
#
# The build-time-only dependency is used for when a target needs to use the
# framework either only for resources, or because the target loads it at run-
# time, via dlopen() or NSBundle. The link-time dependency will cause the
# dependee to have the framework loaded by dyld at launch.
#
# Example of build-time only dependency:
#
# mac_framework_bundle("CoreTeleportation") {
# sources = [ ... ]
# }
#
# bundle_data("core_teleportation_bundle_data") {
# deps = [ ":CoreTeleportation" ]
# sources = [ "$root_out_dir/CoreTeleportation.framework" ]
# outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ]
# }
#
# app_bundle("GoatTeleporter") {
# sources = [ ... ]
# deps = [
# ":core_teleportation_bundle_data",
# ]
# }
#
# The GoatTeleporter.app will not directly link against
# CoreTeleportation.framework, but it will be included in the bundle's
# Frameworks directory.
#
# Example of link-time dependency:
#
# mac_framework_bundle("CoreTeleportation") {
# sources = [ ... ]
# ldflags = [
# "-install_name",
# "@executable_path/../Frameworks/$target_name.framework"
# ]
# }
#
# bundle_data("core_teleportation_bundle_data") {
# deps = [ ":CoreTeleportation+link" ]
# sources = [ "$root_out_dir/CoreTeleportation.framework" ]
# outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ]
# }
#
# app_bundle("GoatTeleporter") {
# sources = [ ... ]
# deps = [
# ":core_teleportation_bundle_data",
# ]
# }
#
# Note that the framework is still copied to the app's bundle, but dyld will
# load this library when the app is launched because it uses the "+link"
# target as a dependency. This also requires that the framework set its
# install_name so that dyld can locate it.
#
# See "gn help shared_library" for more information on arguments supported
# by shared library target.
template("mac_framework_bundle") {
assert(defined(invoker.deps),
"Dependencies must be specified for $target_name")
assert(!defined(invoker.framework_contents) ||
defined(invoker.framework_version),
"framework_contents requres a versioned framework")
_info_plist_target = target_name + "_info_plist"
mac_info_plist(_info_plist_target) {
executable_name = target_name
if (defined(invoker.output_name)) {
executable_name = invoker.output_name
}
forward_variables_from(invoker,
[
"extra_substitutions",
"info_plist",
"info_plist_target",
"testonly",
])
}
_info_plist_bundle_data = _info_plist_target + "_bundle_data"
bundle_data(_info_plist_bundle_data) {
forward_variables_from(invoker, [ "testonly" ])
sources = get_target_outputs(":$_info_plist_target")
outputs = [
"{{bundle_resources_dir}}/Info.plist",
]
public_deps = [
":$_info_plist_target",
]
}
_target_name = target_name
_output_name = target_name
if (defined(invoker.output_name)) {
_output_name = invoker.output_name
}
# Create a file to track the build dependency on the framework_version and
# framework_contents variables.
_framework_toc = []
if (defined(invoker.framework_version)) {
_framework_toc += [
"Version=" + invoker.framework_version,
_output_name,
]
_framework_contents = [ _output_name ]
}
if (defined(invoker.framework_contents)) {
_framework_toc += invoker.framework_contents
_framework_contents += invoker.framework_contents
}
_framework_toc_file = "$target_out_dir/${target_name}.toc"
write_file(_framework_toc_file, _framework_toc)
# Create local variables for referencing different parts of the bundle.
_framework_target = _target_name
_framework_name = _output_name + ".framework"
_framework_base_dir = "$root_out_dir/$_framework_name"
if (defined(invoker.framework_version) && invoker.framework_version != "") {
_framework_version = invoker.framework_version
_framework_root_dir = _framework_base_dir + "/Versions/$_framework_version"
} else {
_framework_root_dir = _framework_base_dir
}
# Clean the entire framework if the framework_version changes.
_version_arg = "''"
if (defined(invoker.framework_version)) {
_version_arg = _framework_version
}
_version_file = "$target_out_dir/${target_name}_version"
exec_script("//build/config/mac/prepare_framework_version.py",
[
rebase_path(_version_file),
rebase_path(_framework_base_dir),
_version_arg,
])
# Create the symlinks.
_framework_package_target = target_name + "_package"
action(_framework_package_target) {
script = "//build/config/mac/package_framework.py"
# The TOC file never needs to be read, since its contents are the values
# of GN variables. It is only used to trigger this rule when the values
# change.
inputs = [
_framework_toc_file,
]
_stamp_file = "$target_out_dir/run_${_framework_package_target}.stamp"
outputs = [
_stamp_file,
]
visibility = [ ":$_framework_target" ]
args = [
"--framework",
rebase_path(_framework_base_dir, root_build_dir),
"--stamp",
rebase_path(_stamp_file, root_build_dir),
]
if (defined(invoker.framework_version)) {
args += [
"--version",
invoker.framework_version,
"--contents",
] + _framework_contents
# It is not possible to list _framework_contents as outputs, since
# ninja does not properly stat symbolic links.
# https://github.com/ninja-build/ninja/issues/1186
}
}
_link_shared_library_target = target_name + "_shared_library"
_shared_library_bundle_data = target_name + "_shared_library_bundle_data"
shared_library(_link_shared_library_target) {
forward_variables_from(invoker,
"*",
[
"assert_no_deps",
"bundle_deps",
"code_signing_enabled",
"data_deps",
"info_plist",
"info_plist_target",
"output_name",
"visibility",
])
visibility = [ ":$_shared_library_bundle_data" ]
output_name = _output_name
output_prefix_override = true
output_extension = ""
output_dir = "$target_out_dir/$_link_shared_library_target"
}
bundle_data(_shared_library_bundle_data) {
visibility = [ ":$_framework_target" ]
forward_variables_from(invoker, [ "testonly" ])
sources = [
"$target_out_dir/$_link_shared_library_target/$_output_name",
]
outputs = [
"{{bundle_executable_dir}}/$_output_name",
]
public_deps = [
":$_link_shared_library_target",
]
}
_framework_public_config = _target_name + "_public_config"
config(_framework_public_config) {
# TODO(sdefresne): should we have a framework_dirs similar to lib_dirs
# and include_dirs to avoid duplicate values on the command-line.
visibility = [ ":$_framework_target" ]
ldflags = [
"-F",
rebase_path("$root_out_dir/.", root_build_dir),
]
lib_dirs = [ root_out_dir ]
libs = [ _framework_name ]
}
create_bundle(_framework_target) {
forward_variables_from(invoker,
[
"data_deps",
"deps",
"public_deps",
"testonly",
])
if (defined(invoker.visibility)) {
visibility = invoker.visibility
visibility += [ ":$_target_name+link" ]
}
if (!defined(deps)) {
deps = []
}
deps += [ ":$_info_plist_bundle_data" ]
if (defined(invoker.bundle_deps)) {
deps += invoker.bundle_deps
}
if (!defined(public_deps)) {
public_deps = []
}
public_deps += [
":$_framework_package_target",
":$_shared_library_bundle_data",
]
bundle_root_dir = _framework_root_dir
bundle_resources_dir = "$bundle_root_dir/Resources"
bundle_executable_dir = "$bundle_root_dir"
}
group(_target_name + "+link") {
forward_variables_from(invoker,
[
"public_configs",
"testonly",
"visibility",
])
public_deps = [
":$_target_name",
]
if (!defined(public_configs)) {
public_configs = []
}
public_configs += [ ":$_framework_public_config" ]
}
}
set_defaults("mac_framework_bundle") {
configs = default_shared_library_configs
}
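# Editor's sketch of a versioned framework, complementing the examples in the
# template's comment above (all names are hypothetical):
#
#   mac_framework_bundle("FooKit") {
#     info_plist = "Info.plist"
#     framework_version = "A"
#     framework_contents = [ "Resources" ]
#     sources = [ "foo_kit.mm" ]
#     deps = [ ":foo_lib" ]
#   }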
# Template to create a Mac executable application bundle.
#
# Arguments
#
# package_type:
# (optional) string, the product package type to create. Options are:
# "app" to create a .app bundle (default)
# "xpc" to create an .xpc service bundle
#
# info_plist:
# (optional) string, path to the Info.plist file that will be used for
# the bundle.
#
# info_plist_target:
# (optional) string, if the info_plist is generated from an action,
# rather than a regular source file, specify the target name in lieu
# of info_plist. The two arguments are mutually exclusive.
#
# output_name:
# (optional) string, name of the generated app without the
# .app suffix. If omitted, defaults to target_name.
#
# extra_configs:
# (optional) list of label, additional configs to apply to the
# executable target.
#
# remove_configs:
# (optional) list of label, default configs to remove from the target.
#
# extra_substitutions:
# (optional) string array, 'key=value' pairs for extra fields which are
# specified in a source Info.plist template.
template("mac_app_bundle") {
_target_name = target_name
_output_name = target_name
if (defined(invoker.output_name)) {
_output_name = invoker.output_name
}
_package_type = "app"
if (defined(invoker.package_type)) {
_package_type = invoker.package_type
}
if (_package_type == "app") {
_output_extension = "app"
_product_type = "com.apple.product-type.application"
_write_pkg_info = true
} else if (_package_type == "xpc") {
_output_extension = "xpc"
_product_type = "com.apple.product-type.xpc-service"
_write_pkg_info = false
} else {
assert(false, "Unsupported packge_type: " + packge_type)
}
_executable_target = target_name + "_executable"
_executable_bundle_data = _executable_target + "_bundle_data"
_info_plist_target = target_name + "_info_plist"
mac_info_plist(_info_plist_target) {
executable_name = _output_name
forward_variables_from(invoker,
[
"extra_substitutions",
"info_plist",
"info_plist_target",
"testonly",
])
}
if (_write_pkg_info) {
_pkg_info_target = target_name + "_pkg_info"
action(_pkg_info_target) {
forward_variables_from(invoker, [ "testonly" ])
script = "//build/config/mac/write_pkg_info.py"
sources = get_target_outputs(":$_info_plist_target")
outputs = [
"$target_gen_dir/$_pkg_info_target",
]
args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
[ "--output" ] + rebase_path(outputs, root_build_dir)
deps = [
":$_info_plist_target",
]
}
}
executable(_executable_target) {
visibility = [ ":$_executable_bundle_data" ]
forward_variables_from(invoker,
"*",
[
"assert_no_deps",
"data_deps",
"info_plist",
"output_name",
"visibility",
])
if (defined(extra_configs)) {
configs += extra_configs
}
if (defined(remove_configs)) {
configs -= remove_configs
}
output_name = _output_name
output_dir = "$target_out_dir/$_executable_target"
}
bundle_data(_executable_bundle_data) {
visibility = [ ":$_target_name" ]
forward_variables_from(invoker, [ "testonly" ])
sources = [
"$target_out_dir/$_executable_target/$_output_name",
]
outputs = [
"{{bundle_executable_dir}}/$_output_name",
]
public_deps = [
":$_executable_target",
]
}
_info_plist_bundle_data = _info_plist_target + "_bundle_data"
bundle_data(_info_plist_bundle_data) {
forward_variables_from(invoker, [ "testonly" ])
visibility = [ ":$_target_name" ]
sources = get_target_outputs(":$_info_plist_target")
outputs = [
"{{bundle_root_dir}}/Info.plist",
]
public_deps = [
":$_info_plist_target",
]
}
if (_write_pkg_info) {
_pkg_info_bundle_data = _pkg_info_target + "_bundle_data"
bundle_data(_pkg_info_bundle_data) {
forward_variables_from(invoker, [ "testonly" ])
visibility = [ ":$_target_name" ]
sources = get_target_outputs(":$_pkg_info_target")
outputs = [
"{{bundle_root_dir}}/PkgInfo",
]
public_deps = [
":$_pkg_info_target",
]
}
}
create_bundle(_target_name) {
forward_variables_from(invoker,
[
"data_deps",
"deps",
"public_deps",
"testonly",
])
if (!defined(deps)) {
deps = []
}
deps += [
":$_executable_bundle_data",
":$_info_plist_bundle_data",
]
if (_write_pkg_info) {
deps += [ ":$_pkg_info_bundle_data" ]
}
product_type = _product_type
bundle_root_dir =
"$root_out_dir/${_output_name}.${_output_extension}/Contents"
bundle_resources_dir = "$bundle_root_dir/Resources"
bundle_executable_dir = "$bundle_root_dir/MacOS"
}
}
# Template to package a loadable_module into a .plugin bundle.
#
# This takes no extra arguments that differ from a loadable_module.
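#
# Example usage (a minimal sketch; the names below are hypothetical, not part
# of this change):
#
#   mac_plugin_bundle("my_plugin") {
#     output_name = "MyPlugin"
#     sources = [ "plugin_main.cc" ]
#     deps = [ ":my_plugin_lib" ]
#   }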
template("mac_plugin_bundle") {
assert(defined(invoker.deps),
"Dependencies must be specified for $target_name")
_target_name = target_name
_loadable_module_target = _target_name + "_loadable_module"
_loadable_module_bundle_data = _loadable_module_target + "_bundle_data"
_output_name = _target_name
if (defined(invoker.output_name)) {
_output_name = invoker.output_name
}
loadable_module(_loadable_module_target) {
visibility = [ ":$_loadable_module_bundle_data" ]
forward_variables_from(invoker,
"*",
[
"assert_no_deps",
"data_deps",
"output_name",
"visibility",
])
output_dir = "$target_out_dir"
output_name = _output_name
}
bundle_data(_loadable_module_bundle_data) {
forward_variables_from(invoker, [ "testonly" ])
visibility = [ ":$_target_name" ]
sources = [
"$target_out_dir/${_output_name}.so",
]
outputs = [
"{{bundle_executable_dir}}/$_output_name",
]
public_deps = [
":$_loadable_module_target",
]
}
create_bundle(_target_name) {
forward_variables_from(invoker,
[
"data_deps",
"deps",
"public_deps",
"testonly",
"visibility",
])
if (!defined(deps)) {
deps = []
}
deps += [ ":$_loadable_module_bundle_data" ]
bundle_root_dir = "$root_out_dir/$_output_name.plugin/Contents"
bundle_executable_dir = "$bundle_root_dir/MacOS"
}
}


@ -0,0 +1,73 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import subprocess
import sys
# This script prints information about the build system, the operating
# system, and the iOS or Mac SDK for the platform passed as an argument
# (generally "iphonesimulator", "iphoneos" or "macosx").
#
# In the GYP build, this is done inside GYP itself based on the SDKROOT
# variable.
def FormatVersion(version):
"""Converts Xcode version to a format required for Info.plist."""
version = version.replace('.', '')
version = version + '0' * (3 - len(version))
return version.zfill(4)
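# For example (illustrative values): FormatVersion('7.2') returns '0720' and
# FormatVersion('8.2.1') returns '0821'.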
def FillXcodeVersion(settings):
"""Fills the Xcode version and build number into |settings|."""
lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
settings['xcode_version'] = FormatVersion(lines[0].split()[-1])
settings['xcode_build'] = lines[-1].split()[-1]
def FillMachineOSBuild(settings):
"""Fills OS build number into |settings|."""
settings['machine_os_build'] = subprocess.check_output(
['sw_vers', '-buildVersion']).strip()
def FillSDKPathAndVersion(settings, platform, xcode_version):
"""Fills the SDK path and version for |platform| into |settings|."""
settings['sdk_path'] = subprocess.check_output([
'xcrun', '-sdk', platform, '--show-sdk-path']).strip()
settings['sdk_version'] = subprocess.check_output([
'xcrun', '-sdk', platform, '--show-sdk-version']).strip()
settings['sdk_platform_path'] = subprocess.check_output([
'xcrun', '-sdk', platform, '--show-sdk-platform-path']).strip()
# TODO: unconditionally use --show-sdk-build-version once Xcode 7.2 or
# higher is required to build Chrome for iOS or OS X.
if xcode_version >= '0720':
settings['sdk_build'] = subprocess.check_output([
'xcrun', '-sdk', platform, '--show-sdk-build-version']).strip()
else:
settings['sdk_build'] = settings['sdk_version']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--developer_dir", required=False)
args, unknownargs = parser.parse_known_args()
if args.developer_dir:
os.environ['DEVELOPER_DIR'] = args.developer_dir
if len(unknownargs) != 1:
sys.stderr.write(
'usage: %s [iphoneos|iphonesimulator|macosx]\n' %
os.path.basename(sys.argv[0]))
sys.exit(1)
settings = {}
FillMachineOSBuild(settings)
FillXcodeVersion(settings)
FillSDKPathAndVersion(settings, unknownargs[0], settings['xcode_version'])
for key in sorted(settings):
print '%s="%s"' % (key, settings[key])


@ -0,0 +1,30 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/chrome_build.gni")
import("//build/config/sanitizers/sanitizers.gni")
# This file declares arguments and configs that control whether dSYM debug
# info is produced and whether build products are stripped.
declare_args() {
# Produce dSYM files for targets that are configured to do so. dSYM
# generation is controlled globally as it is a linker output (produced via
# //build/toolchain/mac/linker_driver.py). Enabling this will result in
# all shared library, loadable module, and executable targets having a dSYM
# generated.
enable_dsyms = is_official_build || using_sanitizer
# Strip symbols from linked targets by default. If this is enabled, the
# //build/config/mac:strip_all config will be applied to all linked targets.
# If custom stripping parameters are required, remove that config from a
# linked target and apply custom -Wcrl,strip flags. See
# //build/toolchain/mac/linker_driver.py for more information.
enable_stripping = is_official_build
}
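# These arguments can be overridden in args.gn like any other declare_args()
# value; an illustrative (hypothetical) override:
#
#   enable_dsyms = true
#   enable_stripping = true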
# Save unstripped copies of targets with a ".unstripped" suffix. This is
# useful to preserve the original output when enable_stripping=true but
# we're not actually generating real dSYMs.
save_unstripped_output = enable_stripping && !enable_dsyms


@ -0,0 +1,47 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import plist_util
import sys
# This script creates a PkgInfo file for an OS X .app bundle from its Info.plist.
# Usage: python write_pkg_info.py --plist Foo.app/Contents/Info.plist \
# --output Foo.app/Contents/PkgInfo
def Main():
parser = argparse.ArgumentParser(
description='A script to write PkgInfo files for .app bundles.')
parser.add_argument('--plist', required=True,
help='Path to the Info.plist for the .app.')
parser.add_argument('--output', required=True,
help='Path to the desired output file.')
args = parser.parse_args()
# Remove the output if it exists already.
if os.path.exists(args.output):
os.unlink(args.output)
plist = plist_util.LoadPList(args.plist)
package_type = plist['CFBundlePackageType']
if package_type != 'APPL':
raise ValueError('Expected CFBundlePackageType to be %s, got %s' % \
('APPL', package_type))
# The format of PkgInfo is eight characters, representing the bundle type
# and bundle signature, each four characters. If the signature is missing,
# four '?' characters are used instead.
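# For example (illustrative), a plist with CFBundlePackageType 'APPL' and no
# CFBundleSignature yields a PkgInfo containing 'APPL????'.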
signature_code = plist.get('CFBundleSignature', '????')
if len(signature_code) != 4:
raise ValueError('CFBundleSignature should be exactly four characters, ' +
'got %s' % signature_code)
with open(args.output, 'w') as fp:
fp.write('%s%s' % (package_type, signature_code))
return 0
if __name__ == '__main__':
sys.exit(Main())


@ -0,0 +1,28 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='A script to execute a command via xcrun.')
parser.add_argument('--stamp', action='store', type=str,
help='Write a stamp file to this path on success.')
parser.add_argument('--developer_dir', required=False,
help='Path to Xcode.')
args, unknown_args = parser.parse_known_args()
if args.developer_dir:
os.environ['DEVELOPER_DIR'] = args.developer_dir
rv = subprocess.check_call(['xcrun'] + unknown_args)
if rv == 0 and args.stamp:
if os.path.exists(args.stamp):
os.unlink(args.stamp)
open(args.stamp, 'w+').close()
sys.exit(rv)


@ -0,0 +1,58 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/v8_target_cpu.gni")
# These are primarily relevant in current_cpu == "mips*" contexts, where
# MIPS code is being compiled. But they can also be relevant in other
# contexts, where code changes its behavior based on the CPU it targets.
if (current_cpu == "mipsel" || v8_current_cpu == "mipsel") {
declare_args() {
# MIPS arch variant. Possible values are:
# "r1"
# "r2"
# "r6"
mips_arch_variant = "r1"
# MIPS DSP ASE revision. Possible values are:
# 0: unavailable
# 1: revision 1
# 2: revision 2
mips_dsp_rev = 0
# MIPS SIMD Arch compilation flag.
mips_use_msa = false
# MIPS floating-point ABI. Possible values are:
# "hard": sets the GCC -mhard-float option.
# "soft": sets the GCC -msoft-float option.
mips_float_abi = "hard"
# MIPS32 floating-point register width. Possible values are:
# "fp32": sets the GCC -mfp32 option.
# "fp64": sets the GCC -mfp64 option.
# "fpxx": sets the GCC -mfpxx option.
mips_fpu_mode = "fp32"
}
} else if (current_cpu == "mips64el" || v8_current_cpu == "mips64el") {
# MIPS arch variant. Possible values are:
# "r2"
# "r6"
if (current_os == "android" || target_os == "android") {
declare_args() {
mips_arch_variant = "r6"
# MIPS SIMD Arch compilation flag.
mips_use_msa = true
}
} else {
declare_args() {
mips_arch_variant = "r2"
# MIPS SIMD Arch compilation flag.
mips_use_msa = false
}
}
}
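# An illustrative (hypothetical) args.gn combination for a MIPS32r2
# hard-float build:
#
#   target_cpu = "mipsel"
#   mips_arch_variant = "r2"
#   mips_fpu_mode = "fp64"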


@ -0,0 +1,143 @@
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/nacl/config.gni")
# Native Client Definitions
config("nacl_defines") {
if (is_linux || is_android || is_nacl) {
defines = [
"_POSIX_C_SOURCE=199506",
"_XOPEN_SOURCE=600",
"_GNU_SOURCE=1",
"__STDC_LIMIT_MACROS=1",
]
} else if (is_win) {
defines = [ "__STDC_LIMIT_MACROS=1" ]
}
if (current_cpu == "pnacl" && !is_nacl_nonsfi) {
# TODO: Remove the following definition once NACL_BUILD_ARCH and
# NACL_BUILD_SUBARCH are defined by the PNaCl toolchain.
defines += [ "NACL_BUILD_ARCH=pnacl" ]
}
}
config("nexe_defines") {
defines = [
"DYNAMIC_ANNOTATIONS_ENABLED=1",
"DYNAMIC_ANNOTATIONS_PREFIX=NACL_",
]
}
config("nacl_warnings") {
if (is_win) {
# Some NaCl code uses forward declarations of static const variables,
# with initialized definitions later on. (The alternative would be
# many, many more forward declarations of everything used in that
# const variable's initializer before the definition.) The Windows
# compiler is too stupid to notice that there is an initializer later
# in the file, and warns about the forward declaration.
cflags = [ "/wd4132" ]
}
}
# The base target that all targets in the NaCl build should depend on.
# This allows configs to be modified for everything in the NaCl build, even when
# the NaCl build is composed into the Chrome build. (GN has no functionality to
# add flags to everything in //native_client, so having a base target works
# around that limitation.)
source_set("nacl_base") {
public_configs = [
":nacl_defines",
":nacl_warnings",
]
if (current_os == "nacl") {
public_configs += [ ":nexe_defines" ]
}
}
config("compiler") {
configs = []
cflags = []
ldflags = []
libs = []
if (is_clang && current_cpu != "pnacl") {
# -no-integrated-as is the default in nacl-clang for historical
# compatibility with inline assembly code and so forth. But there
# are no such cases in Chromium code, and -integrated-as is nicer in
# general. Moreover, the IRT must be built using LLVM's assembler
# on x86-64 to preserve sandbox base address hiding. Use it
# everywhere for consistency (and possibly quicker builds).
cflags += [ "-integrated-as" ]
}
if (is_nacl_nonsfi) {
cflags += [ "--pnacl-allow-translate" ]
ldflags += [
"--pnacl-allow-translate",
"--pnacl-allow-native",
"-Wl,--noirt",
"-Wt,--noirt",
"-Wt,--noirtshim",
# The clang driver automatically injects -lpthread when using libc++, but
# the toolchain doesn't have it yet. To get around this, use
# -nodefaultlibs and make each executable target depend on
# "//native_client/src/nonsfi/irt:nacl_sys_private".
"-nodefaultlibs",
]
libs += [
"c++",
"m",
"c",
"pnaclmm",
]
include_dirs = [ "//native_client/src/public/linux_syscalls" ]
}
asmflags = cflags
}
config("compiler_codegen") {
cflags = []
if (is_nacl_irt) {
cflags += [
# A debugger should be able to unwind IRT call frames. This is
# the default behavior on x86-64 and when compiling C++ with
# exceptions enabled; the change is for the benefit of x86-32 C.
# The frame pointer is unnecessary when unwind tables are used.
"-fasynchronous-unwind-tables",
"-fomit-frame-pointer",
]
if (current_cpu == "x86") {
# The x86-32 IRT needs to be callable with an under-aligned
# stack; so we disable SSE instructions, which can fault on
# misaligned addresses. See
# https://code.google.com/p/nativeclient/issues/detail?id=3935
cflags += [
"-mstackrealign",
"-mno-sse",
]
}
}
asmflags = cflags
}
config("irt_optimize") {
cflags = [
# Optimize for space, keep the IRT nexe small.
"-Os",
# These are omitted from non-IRT libraries to keep the libraries
# themselves small.
"-ffunction-sections",
"-fdata-sections",
]
ldflags = [ "-Wl,--gc-sections" ]
}


@ -0,0 +1,52 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/features.gni")
declare_args() {
# Native Client supports both Newlib and Glibc C libraries where Newlib
# is assumed to be the default one; use this to determine whether Glibc
# is being used instead.
is_nacl_glibc = false
}
is_nacl_irt = false
is_nacl_nonsfi = false
if (enable_nacl) {
nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86"
if (is_nacl_glibc) {
if (current_cpu == "x86" || current_cpu == "x64") {
nacl_toolchain_package = "nacl_x86_glibc"
} else if (current_cpu == "arm") {
nacl_toolchain_package = "nacl_arm_glibc"
}
} else {
nacl_toolchain_package = "pnacl_newlib"
}
if (current_cpu == "pnacl") {
_nacl_tuple = "pnacl"
} else if (current_cpu == "x86" || current_cpu == "x64") {
_nacl_tuple = "x86_64-nacl"
} else if (current_cpu == "arm") {
_nacl_tuple = "arm-nacl"
} else if (current_cpu == "mipsel") {
_nacl_tuple = "mipsel-nacl"
}
nacl_toolchain_bindir = "${nacl_toolchain_dir}/${nacl_toolchain_package}/bin"
nacl_toolchain_tooldir =
"${nacl_toolchain_dir}/${nacl_toolchain_package}/${_nacl_tuple}"
nacl_toolprefix = "${nacl_toolchain_bindir}/${_nacl_tuple}-"
nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + target_cpu
is_nacl_irt = current_toolchain == nacl_irt_toolchain
# Non-SFI mode is a lightweight sandbox used by Chrome OS for running ARC
# applications.
nacl_nonsfi_toolchain = "//build/toolchain/nacl:newlib_pnacl_nonsfi"
is_nacl_nonsfi = current_toolchain == nacl_nonsfi_toolchain
}


@ -0,0 +1,185 @@
# Copyright 2015 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/features.gni")
import("//build/config/nacl/config.gni")
# Generate a nmf file
#
# Native Client Manifest (nmf) is a JSON file that tells the browser where to
# download and load Native Client application files and libraries.
#
# Variables:
# executables: .nexe/.pexe/.bc executables to generate nmf for
# lib_prefix: path to prepend to shared libraries in the nmf
# nmf: the name and the path of the output file
# nmfflags: additional flags for the nmf generator
# stage_dependencies: directory for staging libraries
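#
# Example usage (a minimal sketch; target and file names are hypothetical,
# not part of this change):
#
#   generate_nmf("hello_nmf") {
#     nmf = "$root_out_dir/hello.nmf"
#     executables = [ "$root_out_dir/hello.pexe" ]
#   }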
template("generate_nmf") {
assert(defined(invoker.executables), "Must define executables")
assert(defined(invoker.nmf), "Must define nmf")
action(target_name) {
forward_variables_from(invoker,
[
"deps",
"data_deps",
"executables",
"lib_prefix",
"nmf",
"nmfflags",
"public_deps",
"stage_dependencies",
"testonly",
"visibility",
])
if (!defined(nmfflags)) {
nmfflags = []
}
# TODO(phosek): Remove this conditional once
# https://bugs.chromium.org/p/nativeclient/issues/detail?id=4339 is
# resolved.
if (current_cpu == "pnacl") {
objdump = rebase_path("${nacl_toolchain_bindir}/x86_64-nacl-objdump")
} else {
objdump = rebase_path("${nacl_toolprefix}objdump")
}
if (host_os == "win") {
objdump += ".exe"
}
script = "//native_client_sdk/src/tools/create_nmf.py"
inputs = [
objdump,
]
sources = executables
outputs = [
nmf,
]
if (is_nacl_glibc) {
if (defined(stage_dependencies)) {
nmfflags += [ "--stage-dependencies=" +
rebase_path(stage_dependencies, root_build_dir) ]
lib_path = stage_dependencies
} else {
lib_path = root_build_dir
}
if (defined(lib_prefix)) {
nmfflags += [ "--lib-prefix=" + lib_prefix ]
lib_path += "/${lib_prefix}"
}
# Starts empty so the code below can use += everywhere.
data = []
nmfflags += [ "--library-path=" + rebase_path(root_out_dir) ]
# NOTE: There is no explicit dependency for the lib directories
# (lib32 and lib64 for x86/x64) created in the product directory.
# They are created as a side effect of nmf creation.
if (current_cpu != "x86" && current_cpu != "x64") {
nmfflags +=
[ "--library-path=" + rebase_path("${nacl_toolchain_tooldir}/lib") ]
if (current_cpu == "arm") {
data += [ "${lib_path}/libarm/" ]
} else {
data += [ "${lib_path}/lib/" ]
}
} else {
# For x86-32, the lib/ directory is called lib32/ instead.
if (current_cpu == "x86") {
nmfflags += [ "--library-path=" +
rebase_path("${nacl_toolchain_tooldir}/lib32") ]
data += [ "${lib_path}/lib32/" ]
}
# x86-32 Windows needs to build both x86-32 and x86-64 NaCl
# binaries into the same nmf covering both architectures. That
# gets handled at a higher level (see the nacl_test_data template),
# so a single generate_nmf invocation gets both x86-32 and x86-64
# nexes listed in executables.
if (current_cpu == "x64" || target_os == "win") {
# For x86-64, the lib/ directory is called lib64/ instead
# when copied by create_nmf.py.
glibc_tc = "//build/toolchain/nacl:glibc"
assert(current_toolchain == "${glibc_tc}_${current_cpu}")
if (current_cpu == "x64") {
x64_out_dir = root_out_dir
} else {
x64_out_dir = get_label_info(":${target_name}(${glibc_tc}_x64)",
"root_out_dir")
}
nmfflags += [
"--library-path=" + rebase_path(x64_out_dir),
"--library-path=" + rebase_path("${nacl_toolchain_tooldir}/lib"),
]
data += [ "${lib_path}/lib64/" ]
}
}
}
args = [
"--no-default-libpath",
"--objdump=" + objdump,
"--output=" + rebase_path(nmf, root_build_dir),
] + nmfflags + rebase_path(sources, root_build_dir)
if (is_nacl_glibc && current_cpu == "arm") {
deps += [ "//native_client/src/untrusted/elf_loader:elf_loader" ]
}
}
}
# Generate a nmf file for Non-SFI tests
#
# Non-SFI tests use a different manifest format from regular Native Client and
# as such requires a different generator.
#
# Variables:
# executable: Non-SFI .nexe executable to generate nmf for
# nmf: the name and the path of the output file
# nmfflags: additional flags for the nmf generator
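#
# Example usage (a minimal sketch; names are hypothetical, not part of this
# change):
#
#   generate_nonsfi_test_nmf("my_test_nmf") {
#     nmf = "$root_out_dir/my_test.nmf"
#     executable = "$root_out_dir/my_test.nexe"
#   }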
template("generate_nonsfi_test_nmf") {
assert(defined(invoker.executable), "Must define executable")
assert(defined(invoker.nmf), "Must define nmf")
action(target_name) {
forward_variables_from(invoker,
[
"deps",
"data_deps",
"executable",
"nmf",
"testonly",
"public_deps",
"visibility",
])
script = "//ppapi/tests/create_nonsfi_test_nmf.py"
sources = [
executable,
]
outputs = [
nmf,
]
# NOTE: We use target_cpu rather than current_cpu on purpose because
# current_cpu is always going to be pnacl for Non-SFI, but the Non-SFI
# .nexe executable is always translated to run on the target machine.
if (target_cpu == "x86") {
arch = "x86-32"
} else if (target_cpu == "x64") {
arch = "x86-64"
} else {
arch = target_cpu
}
args = [
"--program=" + rebase_path(executable, root_build_dir),
"--arch=${arch}",
"--output=" + rebase_path(nmf, root_build_dir),
]
if (defined(invoker.nmfflags)) {
args += invoker.nmfflags
}
}
}


@ -0,0 +1,12 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/toolchain/goma.gni")
declare_args() {
# Precompiled header support is available by default, but is disabled when
# using a distributed build system (like goma) or when doing official builds.
enable_precompiled_headers = !is_official_build && !use_goma
}


@ -0,0 +1,43 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/clang/clang.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/sysroot.gni")
import("//build/toolchain/toolchain.gni")
assert(is_posix)
group("posix") {
visibility = [ "//:optimize_gn_gen" ]
}
# This is included by reference in the //build/config/compiler:runtime_library
# config that is applied to all targets. It is here to separate out the logic
# that is Posix-only. Please see that target for advice on what should go in
# :runtime_library vs. :compiler.
config("runtime_library") {
if (!is_mac && !is_ios && sysroot != "") {
# Pass the sysroot to all C compiler variants, the assembler, and linker.
cflags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ]
asmflags = cflags
ldflags = cflags
# Need to get some linker flags out of the sysroot.
ld_paths = exec_script("sysroot_ld_path.py",
[
rebase_path("//build/linux/sysroot_ld_path.sh",
root_build_dir),
rebase_path(sysroot),
],
"list lines")
foreach(ld_path, ld_paths) {
ld_path = rebase_path(ld_path, root_build_dir)
ldflags += [
"-L" + ld_path,
"-Wl,-rpath-link=" + ld_path,
]
}
}
}


@ -0,0 +1,21 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script takes two arguments: the relative location of the shell script
# that does the checking, and the path of the sysroot.
# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in
# Python in this file.
import subprocess
import sys
if len(sys.argv) != 3:
print "Need two arguments"
sys.exit(1)
result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip()
result = result.replace(" ", "\n")
if result != "":
print result


@ -0,0 +1,579 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build_overrides/build.gni")
import("//build/config/chrome_build.gni")
import("//build/config/chromecast_build.gni")
import("//build/config/clang/clang.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/toolchain/toolchain.gni")
if (is_ios) {
import("//build/config/ios/ios_sdk.gni")
}
# Contains the dependencies needed for sanitizers to link into
# executables and shared_libraries. Unconditionally depend upon
# "//build/config:exe_and_shlib_deps" to pull in this target.
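# For reference, a linked target elsewhere in the build would typically pull
# this in indirectly via something like (illustrative, not part of this file):
#
#   executable("my_tool") {
#     deps = [ "//build/config:exe_and_shlib_deps" ]
#   }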
group("deps") {
visibility = [ "//build/config:exe_and_shlib_deps" ]
if (using_sanitizer) {
public_configs = [
":sanitizer_options_link_helper",
# Even when a target removes default_sanitizer_flags, it may be depending
# on a library that did not remove default_sanitizer_flags. Thus, we need
# to add the ldflags here as well as in default_sanitizer_flags.
":default_sanitizer_ldflags",
]
deps = [
":options_sources",
]
if (is_win) {
exe = ".exe"
} else {
exe = ""
}
data = [
"//tools/valgrind/asan/",
"$clang_base_path/bin/llvm-symbolizer${exe}",
]
if (is_linux) {
# llvm-symbolizer needs this.
data += [ "$clang_base_path/lib/libstdc++.so.6" ]
}
if (use_prebuilt_instrumented_libraries ||
use_locally_built_instrumented_libraries) {
deps += [ "//third_party/instrumented_libraries:deps" ]
}
# ASAN is supported on iOS but the runtime library depends on the compiler
# used (Chromium version of clang versus Xcode version of clang). Only copy
# the ASAN runtime on iOS if building with Chromium clang.
if (is_win || is_mac || (is_ios && !use_xcode_clang)) {
data_deps = [
":copy_asan_runtime",
]
}
if (is_mac || (is_ios && !use_xcode_clang)) {
public_deps = [
":asan_runtime_bundle_data",
]
}
}
}
if ((is_mac || is_win || (is_ios && !use_xcode_clang)) && using_sanitizer) {
if (is_mac) {
_clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib"
} else if (is_ios) {
_clang_rt_dso_path = "darwin/libclang_rt.asan_iossim_dynamic.dylib"
} else if (is_win && target_cpu == "x86") {
_clang_rt_dso_path = "windows/clang_rt.asan_dynamic-i386.dll"
} else if (is_win && target_cpu == "x64") {
_clang_rt_dso_path = "windows/clang_rt.asan_dynamic-x86_64.dll"
}
_clang_rt_dso_full_path =
"$clang_base_path/lib/clang/$clang_version/lib/$_clang_rt_dso_path"
if (!is_ios) {
copy("copy_asan_runtime") {
sources = [
_clang_rt_dso_full_path,
]
outputs = [
"$root_out_dir/{{source_file_part}}",
]
}
} else {
# On iOS, the runtime library needs to be code signed (ad hoc signature)
# starting with Xcode 8, so use an action instead of a copy on iOS.
action("copy_asan_runtime") {
script = "//build/config/ios/codesign.py"
sources = [
_clang_rt_dso_full_path,
]
outputs = [
"$root_out_dir/" + get_path_info(sources[0], "file"),
]
args = [
"code-sign-file",
"--identity=" + ios_code_signing_identity,
"--output=" + rebase_path(outputs[0], root_build_dir),
rebase_path(sources[0], root_build_dir),
]
}
}
if (is_mac || is_ios) {
bundle_data("asan_runtime_bundle_data") {
sources = get_target_outputs(":copy_asan_runtime")
outputs = [
"{{bundle_executable_dir}}/{{source_file_part}}",
]
public_deps = [
":copy_asan_runtime",
]
}
}
}
config("sanitizer_options_link_helper") {
if (is_mac || is_ios) {
ldflags = [ "-Wl,-U,_sanitizer_options_link_helper" ]
} else if (!is_win) {
ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ]
}
}
static_library("options_sources") {
# This is a static_library instead of a source_set, as it shouldn't be
# unconditionally linked into targets.
visibility = [
":deps",
"//:gn_visibility",
]
sources = [
"//build/sanitizers/sanitizer_options.cc",
]
# Don't compile this target with any sanitizer code. It can be called from
# the sanitizer runtimes, so instrumenting these functions could cause
# recursive calls into the runtime if there is an error.
configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
if (is_asan) {
if (!defined(asan_suppressions_file)) {
asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
}
sources += [ asan_suppressions_file ]
}
if (is_lsan) {
if (!defined(lsan_suppressions_file)) {
lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc"
}
sources += [ lsan_suppressions_file ]
}
if (is_tsan) {
if (!defined(tsan_suppressions_file)) {
tsan_suppressions_file = "//build/sanitizers/tsan_suppressions.cc"
}
sources += [ tsan_suppressions_file ]
}
}
# Applies linker flags necessary when either :deps or :default_sanitizer_flags
# are used.
config("default_sanitizer_ldflags") {
visibility = [
":default_sanitizer_flags",
":deps",
]
if (is_posix) {
ldflags = []
if (is_asan) {
ldflags += [ "-fsanitize=address" ]
if (is_mac) {
# https://crbug.com/708707
ldflags += [ "-fno-sanitize-address-use-after-scope" ]
} else {
ldflags += [ "-fsanitize-address-use-after-scope" ]
}
}
if (is_lsan) {
ldflags += [ "-fsanitize=leak" ]
}
if (is_tsan) {
ldflags += [ "-fsanitize=thread" ]
}
if (is_msan) {
ldflags += [ "-fsanitize=memory" ]
}
if (is_ubsan || is_ubsan_security) {
ldflags += [ "-fsanitize=undefined" ]
}
if (is_ubsan_null) {
ldflags += [ "-fsanitize=null" ]
}
if (is_ubsan_vptr) {
ldflags += [ "-fsanitize=vptr" ]
}
if (use_sanitizer_coverage) {
ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ]
}
if (is_cfi && !is_nacl) {
ldflags += [ "-fsanitize=cfi-vcall" ]
if (use_cfi_cast) {
ldflags += [
"-fsanitize=cfi-derived-cast",
"-fsanitize=cfi-unrelated-cast",
]
}
if (use_cfi_diag) {
ldflags += [
"-fno-sanitize-trap=cfi",
"-fsanitize-recover=cfi",
]
}
}
} else if (is_win && is_asan) {
# Windows directly calls link.exe instead of the compiler driver when
# linking. Hence, pass the runtime libraries instead of -fsanitize=address.
# In the static-library build, libraries are different for executables
# and dlls; see link_executable and link_shared_library below.
# This block handles only the component build.
if (target_cpu == "x64") {
# Windows 64-bit. TODO(etienneb): Remove the assert when this is ready.
if (is_component_build) {
assert(false, "win/asan does not work in 64-bit yet")
libs = [
"clang_rt.asan_dynamic-x86_64.lib",
"clang_rt.asan_dynamic_runtime_thunk-x86_64.lib",
]
}
} else {
assert(target_cpu == "x86", "WinASan unsupported architecture")
if (is_component_build) {
libs = [
"clang_rt.asan_dynamic-i386.lib",
"clang_rt.asan_dynamic_runtime_thunk-i386.lib",
]
}
}
}
}
config("common_sanitizer_flags") {
cflags = []
# Sanitizers need line table info for stack traces. They don't need type info
# or variable info, so we can leave that out to speed up the build (unless
# it's explicitly asked for by setting |sanitizer_keep_symbols| to true).
if (using_sanitizer) {
assert(is_clang, "sanitizers only supported with clang")
if (!sanitizer_keep_symbols) {
cflags += [ "-gline-tables-only" ]
}
cflags += [
# Column info in debug data confuses Visual Studio's debugger, so don't
# use this by default. However, clusterfuzz needs it for good attribution
# of reports to CLs, so turn it on there.
"-gcolumn-info",
]
}
# Common options for AddressSanitizer, LeakSanitizer, ThreadSanitizer,
# MemorySanitizer and non-official CFI builds.
if (using_sanitizer || (is_cfi && !is_official_build)) {
if (is_posix) {
cflags += [ "-fno-omit-frame-pointer" ]
} else {
cflags += [ "/Oy-" ]
}
}
}
# TODO(thomasanderson): Move this out of build/config/sanitizers.
config("libcxx_flags") {
if (use_custom_libcxx) {
prefix = "//buildtools/third_party"
include = "trunk/include"
cflags_cc = [
"-nostdinc++",
"-isystem" + rebase_path("$prefix/libc++/$include", root_build_dir),
"-isystem" + rebase_path("$prefix/libc++abi/$include", root_build_dir),
]
}
}
config("asan_flags") {
cflags = []
if (is_asan) {
cflags += [ "-fsanitize=address" ]
if (!is_mac) {
cflags += [ "-fsanitize-address-use-after-scope" ]
} else {
# https://crbug.com/708707
cflags += [ "-fno-sanitize-address-use-after-scope" ]
}
if (!asan_globals) {
cflags += [
"-mllvm",
"-asan-globals=0",
]
}
if (is_win) {
if (!defined(asan_win_blacklist_path)) {
asan_win_blacklist_path =
rebase_path("//tools/memory/asan/blacklist_win.txt", root_build_dir)
}
cflags += [ "-fsanitize-blacklist=$asan_win_blacklist_path" ]
} else {
# TODO(rnk): Remove this as discussed in http://crbug.com/427202.
if (!defined(asan_blacklist_path)) {
asan_blacklist_path =
rebase_path("//tools/memory/asan/blacklist.txt", root_build_dir)
}
cflags += [ "-fsanitize-blacklist=$asan_blacklist_path" ]
}
}
}
config("link_executable") {
if (is_asan && is_win && !is_component_build) {
if (target_cpu == "x64") {
# Windows 64-bit. TODO(etienneb): Remove the assert when this is ready.
assert(false, "win/asan does not work in 64-bit yet")
libs = [ "clang_rt.asan-x86_64.lib" ]
ldflags = [ "-wholearchive:clang_rt.asan-x86_64.lib" ]
} else {
assert(target_cpu == "x86", "WinASan unsupported architecture")
libs = [ "clang_rt.asan-i386.lib" ]
ldflags = [ "-wholearchive:clang_rt.asan-i386.lib" ]
}
}
}
config("link_shared_library") {
if (is_asan && is_win && !is_component_build) {
if (target_cpu == "x64") {
# Windows 64-bit. TODO(etienneb): Remove the assert when this is ready.
assert(false, "win/asan does not work in 64-bit yet")
libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ]
} else {
assert(target_cpu == "x86", "WinASan unsupported architecture")
libs = [ "clang_rt.asan_dll_thunk-i386.lib" ]
}
}
}
config("cfi_flags") {
cflags = []
if (is_cfi && !is_nacl) {
if (!defined(cfi_blacklist_path)) {
cfi_blacklist_path =
rebase_path("//tools/cfi/blacklist.txt", root_build_dir)
}
cflags += [
"-fsanitize=cfi-vcall",
"-fsanitize-blacklist=$cfi_blacklist_path",
]
if (use_cfi_cast) {
cflags += [
"-fsanitize=cfi-derived-cast",
"-fsanitize=cfi-unrelated-cast",
]
}
if (use_cfi_icall) {
cflags += [ "-fsanitize=cfi-icall" ]
}
if (use_cfi_diag) {
cflags += [
"-fno-sanitize-trap=cfi",
"-fsanitize-recover=cfi",
"-fno-inline-functions",
"-fno-inline",
"-fno-omit-frame-pointer",
"-O1",
]
} else {
defines = [ "CFI_ENFORCEMENT" ]
}
}
}
config("coverage_flags") {
cflags = []
if (use_sanitizer_coverage) {
cflags += [
"-fsanitize-coverage=$sanitizer_coverage_flags",
"-mllvm",
"-sanitizer-coverage-prune-blocks=1",
]
if (current_cpu == "arm") {
# http://crbug.com/517105
cflags += [
"-mllvm",
"-sanitizer-coverage-block-threshold=0",
]
}
defines = [ "SANITIZER_COVERAGE" ]
}
}
config("lsan_flags") {
if (is_lsan) {
cflags = [ "-fsanitize=leak" ]
}
}
config("msan_flags") {
if (is_msan) {
assert(is_linux, "msan only supported on linux x86_64")
if (!defined(msan_blacklist_path)) {
msan_blacklist_path =
rebase_path("//tools/msan/blacklist.txt", root_build_dir)
}
cflags = [
"-fsanitize=memory",
"-fsanitize-memory-track-origins=$msan_track_origins",
"-fsanitize-blacklist=$msan_blacklist_path",
]
}
}
config("tsan_flags") {
if (is_tsan) {
assert(is_linux, "tsan only supported on linux x86_64")
if (!defined(tsan_blacklist_path)) {
tsan_blacklist_path =
rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir)
}
cflags = [
"-fsanitize=thread",
"-fsanitize-blacklist=$tsan_blacklist_path",
]
}
}
config("ubsan_flags") {
cflags = []
if (is_ubsan) {
if (!defined(ubsan_blacklist_path)) {
ubsan_blacklist_path =
rebase_path("//tools/ubsan/blacklist.txt", root_build_dir)
}
cflags += [
# Yasm dies with an "Illegal instruction" error when bounds checking is
# enabled. See http://crbug.com/489901
# "-fsanitize=bounds",
"-fsanitize=float-divide-by-zero",
"-fsanitize=integer-divide-by-zero",
"-fsanitize=null",
"-fsanitize=object-size",
"-fsanitize=return",
"-fsanitize=returns-nonnull-attribute",
"-fsanitize=shift-exponent",
"-fsanitize=signed-integer-overflow",
"-fsanitize=unreachable",
"-fsanitize=vla-bound",
"-fsanitize-blacklist=$ubsan_blacklist_path",
]
# Chromecast ubsan builds fail to compile with these
# experimental flags, so only add them to non-chromecast ubsan builds.
if (!is_chromecast) {
cflags += [
# Employ the experimental PBQP register allocator to avoid slow
# compilation on files with too many basic blocks.
# See http://crbug.com/426271.
"-mllvm",
"-regalloc=pbqp",
# Speculatively use coalescing to slightly improve the code generated
# by PBQP regallocator. May increase compile time.
"-mllvm",
"-pbqp-coalescing",
]
}
}
}
config("ubsan_no_recover") {
if (is_ubsan_no_recover) {
cflags = [ "-fno-sanitize-recover=undefined" ]
}
}
config("ubsan_security_flags") {
if (is_ubsan_security) {
if (!defined(ubsan_security_blacklist_path)) {
ubsan_security_blacklist_path =
rebase_path("//tools/ubsan/security_blacklist.txt", root_build_dir)
}
cflags = [
"-fsanitize=signed-integer-overflow,shift,vptr,function,vla-bound",
"-fsanitize-blacklist=$ubsan_security_blacklist_path",
]
}
}
config("ubsan_null_flags") {
if (is_ubsan_null) {
cflags = [ "-fsanitize=null" ]
}
}
config("ubsan_vptr_flags") {
if (is_ubsan_vptr) {
if (!defined(ubsan_vptr_blacklist_path)) {
ubsan_vptr_blacklist_path =
rebase_path("//tools/ubsan/vptr_blacklist.txt", root_build_dir)
}
cflags = [
"-fsanitize=vptr",
"-fsanitize-blacklist=$ubsan_vptr_blacklist_path",
]
}
}
config("fuzzing_build_mode") {
if (use_libfuzzer || use_afl) {
defines = [ "FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION" ]
}
}
all_sanitizer_configs = [
":common_sanitizer_flags",
":libcxx_flags",
":coverage_flags",
":default_sanitizer_ldflags",
":asan_flags",
":cfi_flags",
":lsan_flags",
":msan_flags",
":tsan_flags",
":ubsan_flags",
":ubsan_no_recover",
":ubsan_null_flags",
":ubsan_security_flags",
":ubsan_vptr_flags",
":fuzzing_build_mode",
]
# This config is applied by default to all targets. It sets the compiler flags
# for sanitizer usage, or, if no sanitizer is set, does nothing.
#
# This needs to be in a separate config so that targets can opt out of
# sanitizers (by removing the config) if they desire. Even if a target
# removes this config, executables & shared libraries should still depend on
# :deps if any of their dependencies have not opted out of sanitizers.
# Keep this list in sync with default_sanitizer_flags_but_ubsan_vptr.
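# For example, a target that must not be instrumented (as options_sources
# does above) can opt out with:
#
#   configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]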
config("default_sanitizer_flags") {
configs = all_sanitizer_configs
}
# This config is equivalent to default_sanitizer_flags, but excludes ubsan_vptr.
# This allows ubsan_vptr to be disabled selectively when needed, e.g. for
# third_party code that must be compiled without RTTI, which ubsan_vptr
# requires.
config("default_sanitizer_flags_but_ubsan_vptr") {
configs = all_sanitizer_configs - [ ":ubsan_vptr_flags" ]
}
config("default_sanitizer_flags_but_coverage") {
configs = all_sanitizer_configs - [ ":coverage_flags" ]
}


@ -0,0 +1,2 @@
mmoroz@chromium.org
ochang@chromium.org


@ -0,0 +1,203 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/chrome_build.gni")
import("//build/toolchain/toolchain.gni")
declare_args() {
# Compile for Address Sanitizer to find memory bugs.
is_asan = false
# Compile for Leak Sanitizer to find leaks.
is_lsan = false
# Compile for Memory Sanitizer to find uninitialized reads.
is_msan = false
# Compile for Thread Sanitizer to find threading bugs.
is_tsan = false
# Compile for Undefined Behaviour Sanitizer to find various types of
# undefined behaviour (excludes vptr checks).
is_ubsan = false
# Halt the program if a problem is detected.
is_ubsan_no_recover = false
# Compile for Undefined Behaviour Sanitizer's null pointer checks.
is_ubsan_null = false
# Compile for Undefined Behaviour Sanitizer's vptr checks.
is_ubsan_vptr = false
# Track where uninitialized memory originates from. From fastest to slowest:
# 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
# chain of stores leading from allocation site to use site.
msan_track_origins = 2
# Use dynamic libraries instrumented by one of the sanitizers instead of the
# standard system libraries. Set this flag to download prebuilt binaries from
# GCS.
use_prebuilt_instrumented_libraries = false
# Use dynamic libraries instrumented by one of the sanitizers instead of the
# standard system libraries. Set this flag to build the libraries from source.
use_locally_built_instrumented_libraries = false
# Enable building with SyzyAsan which can find certain types of memory
# errors. Only works on Windows. See
# https://github.com/google/syzygy/wiki/SyzyASanHowTo
is_syzyasan = false
# Compile with Control Flow Integrity to protect virtual calls and casts.
# See http://clang.llvm.org/docs/ControlFlowIntegrity.html
#
# TODO(pcc): Remove this flag if/when CFI is enabled in all official builds.
is_cfi = target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
is_official_build && allow_posix_link_time_opt
# Enable checks for bad casts: derived cast and unrelated cast.
# TODO(krasin): remove this, when we're ready to add these checks by default.
# https://crbug.com/626794
use_cfi_cast = false
# Enable checks for indirect function calls via a function pointer.
# TODO(pcc): remove this when we're ready to add these checks by default.
# https://crbug.com/701919
use_cfi_icall = false
# By default, Control Flow Integrity will crash the program if it detects a
# violation. Set this to true to print detailed diagnostics instead.
use_cfi_diag = false
# Compile for fuzzing with LLVM LibFuzzer.
# See http://www.chromium.org/developers/testing/libfuzzer
use_libfuzzer = false
# Compile for fuzzing with AFL.
use_afl = false
# Enables core ubsan security features. Will later be removed once it matches
# is_ubsan.
is_ubsan_security = false
# Compile for fuzzing with Dr. Fuzz
# See http://www.chromium.org/developers/testing/dr-fuzz
use_drfuzz = false
# Helper variable for testing builds with disabled libfuzzer.
# Not for client use.
disable_libfuzzer = false
# Value for -fsanitize-coverage flag. Setting this causes
# use_sanitizer_coverage to be enabled.
# Default value when unset and use_afl=true or use_libfuzzer=true:
# trace-pc-guard
# Default value when unset and use_sanitizer_coverage=true:
# trace-pc-guard,indirect-calls
sanitizer_coverage_flags = ""
# Keep symbol level when building with sanitizers. When sanitizers are
# enabled, the default is to compile with the minimum debug info level
# necessary, overriding any other symbol level arguments that may be set.
# Setting this to true prevents this.
sanitizer_keep_symbols = false
}
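# An illustrative args.gn for a typical ASan build (see the assertions at the
# end of this file for why is_debug should be false):
#
#   is_asan = true
#   is_clang = true
#   is_debug = false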
# Disable sanitizers for non-default toolchains.
if (current_toolchain != default_toolchain) {
is_asan = false
is_cfi = false
is_lsan = false
is_msan = false
is_syzyasan = false
is_tsan = false
is_ubsan = false
is_ubsan_null = false
is_ubsan_no_recover = false
is_ubsan_security = false
is_ubsan_vptr = false
msan_track_origins = 0
sanitizer_coverage_flags = ""
use_afl = false
use_cfi_diag = false
use_custom_libcxx = false
use_drfuzz = false
use_libfuzzer = false
use_prebuilt_instrumented_libraries = false
use_locally_built_instrumented_libraries = false
use_sanitizer_coverage = false
}
# Args that are in turn dependent on other args must be in a separate
# declare_args block. User overrides are only applied at the end of a
# declare_args block.
declare_args() {
# Use libc++ (buildtools/third_party/libc++ and
# buildtools/third_party/libc++abi) instead of stdlibc++ as standard library.
# This is intended to be used for instrumented builds.
use_custom_libcxx =
(is_asan && is_linux && !is_chromeos) || is_tsan || is_msan || is_ubsan ||
is_ubsan_security || use_libfuzzer || use_afl
# Enable -fsanitize-coverage.
use_sanitizer_coverage =
use_libfuzzer || use_afl || sanitizer_coverage_flags != ""
# Detect overflow/underflow for global objects.
#
# Mac: http://crbug.com/352073
asan_globals = !is_mac
}
if ((use_afl || use_libfuzzer) && sanitizer_coverage_flags == "") {
sanitizer_coverage_flags = "trace-pc-guard"
} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
}
using_sanitizer =
is_asan || is_lsan || is_tsan || is_msan || is_ubsan || is_ubsan_null ||
is_ubsan_vptr || is_ubsan_security || use_sanitizer_coverage
assert(!using_sanitizer || is_clang,
"Sanitizers (is_*san) require setting is_clang = true in 'gn args'")
prebuilt_instrumented_libraries_available =
is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
if (use_libfuzzer && is_linux) {
if (is_asan) {
# We do leak checking with libFuzzer on Linux. Set is_lsan for code that
# relies on LEAK_SANITIZER define to avoid false positives.
is_lsan = true
}
if (is_msan) {
use_prebuilt_instrumented_libraries = true
}
}
# MSan only links Chrome properly in release builds (brettw -- 9/1/2015). The
# same is possibly true for the other non-ASan sanitizers. But regardless of
# whether it links, one would normally never run a sanitizer in debug mode.
# Running in debug mode probably indicates you forgot to set the "is_debug =
# false" flag in the build args. ASan seems to run fine in debug mode.
#
# If you find a use-case where you want to compile a sanitizer in debug mode
# and have verified it works, ask brettw and we can consider removing it from
# this condition. We may also be able to find another way to enable your case
# without having people accidentally get broken builds by compiling an
# unsupported or unadvisable configuration.
#
# For one-off testing, just comment this assertion out.
assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr),
"Sanitizers should generally be used in release (set is_debug=false).")
assert(!is_msan || (is_linux && current_cpu == "x64"),
"MSan currently only works on 64-bit Linux and ChromeOS builds.")
# The ASan build on Windows does not work in debug mode. Intercepting memory
# allocation functions is hard on Windows and not yet implemented in LLVM.
assert(!is_win || !is_debug || !is_asan,
"ASan on Windows doesn't work in debug (set is_debug=false).")


@ -0,0 +1,91 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This header file defines the "sysroot" variable which is the absolute path
# of the sysroot. If no sysroot applies, the variable will be an empty string.
import("//build/config/chrome_build.gni")
declare_args() {
# The absolute path of the sysroot that is applied when compiling using
# the target toolchain.
target_sysroot = ""
# The absolute path to directory containing linux sysroot images
target_sysroot_dir = "//build/linux"
use_sysroot = current_cpu != "s390x" && current_cpu != "s390" &&
current_cpu != "ppc64" && current_cpu != "ppc"
}
if (current_os == target_os && current_cpu == target_cpu &&
target_sysroot != "") {
sysroot = target_sysroot
} else if (is_android) {
import("//build/config/android/config.gni")
if (current_cpu == "x86") {
sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
} else if (current_cpu == "arm") {
sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
} else if (current_cpu == "mipsel") {
sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
} else if (current_cpu == "x64") {
sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
} else if (current_cpu == "arm64") {
sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
} else if (current_cpu == "mips64el") {
sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
} else {
assert(false, "No android sysroot for cpu: $target_cpu")
}
} else if (is_linux && use_sysroot) {
# By default build against a sysroot image downloaded from Cloud Storage
# during gclient runhooks.
if (current_cpu == "x64") {
sysroot = "$target_sysroot_dir/debian_jessie_amd64-sysroot"
} else if (current_cpu == "x86") {
sysroot = "$target_sysroot_dir/debian_jessie_i386-sysroot"
} else if (current_cpu == "mipsel") {
sysroot = "$target_sysroot_dir/debian_jessie_mips-sysroot"
} else if (current_cpu == "arm") {
sysroot = "$target_sysroot_dir/debian_jessie_arm-sysroot"
} else if (current_cpu == "arm64") {
sysroot = "$target_sysroot_dir/debian_jessie_arm64-sysroot"
} else {
assert(false, "No linux sysroot for cpu: $target_cpu")
}
if (sysroot != "") {
_script_arch = current_cpu
if (_script_arch == "x86") {
_script_arch = "i386"
} else if (_script_arch == "x64") {
_script_arch = "amd64"
}
assert(
exec_script("//build/dir_exists.py",
[ rebase_path(sysroot) ],
"string") == "True",
"Missing sysroot ($sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=$_script_arch")
}
} else if (is_mac) {
import("//build/config/mac/mac_sdk.gni")
sysroot = mac_sdk_path
} else if (is_ios) {
import("//build/config/ios/ios_sdk.gni")
sysroot = ios_sdk_path
} else if (is_fuchsia) {
import("//build/config/fuchsia/config.gni")
if (current_cpu == "arm64") {
sysroot = fuchsia_sdk + "/sysroot/aarch64-fuchsia"
} else if (current_cpu == "x64") {
sysroot = fuchsia_sdk + "/sysroot/x86_64-fuchsia"
} else {
sysroot = ""
}
sysroot_stamp = rebase_path("$sysroot/.stamp")
sysroot_version = read_file(sysroot_stamp, "trim string")
} else {
sysroot = ""
}

Some files were not shown because too many files changed in this diff.