gyp: update to 78b26f7
author    Timothy J Fontaine <tjfontaine@gmail.com>
Thu, 12 Dec 2013 04:06:11 +0000 (20:06 -0800)
committer Timothy J Fontaine <tjfontaine@gmail.com>
Thu, 12 Dec 2013 04:06:11 +0000 (20:06 -0800)
16 files changed:
tools/gyp/PRESUBMIT.py
tools/gyp/buildbot/buildbot_run.py
tools/gyp/gyp
tools/gyp/gyptest.py
tools/gyp/pylib/gyp/MSVSSettings.py
tools/gyp/pylib/gyp/MSVSVersion.py
tools/gyp/pylib/gyp/generator/android.py
tools/gyp/pylib/gyp/generator/cmake.py [new file with mode: 0644]
tools/gyp/pylib/gyp/generator/make.py
tools/gyp/pylib/gyp/generator/msvs.py
tools/gyp/pylib/gyp/generator/ninja.py
tools/gyp/pylib/gyp/input.py
tools/gyp/pylib/gyp/mac_tool.py
tools/gyp/pylib/gyp/msvs_emulation.py
tools/gyp/pylib/gyp/win_tool.py
tools/gyp/pylib/gyp/xcode_emulation.py

diff --git a/tools/gyp/PRESUBMIT.py b/tools/gyp/PRESUBMIT.py
index 5567b88..9c474eb 100644 (file)
@@ -97,14 +97,19 @@ def CheckChangeOnCommit(input_api, output_api):
       'http://gyp-status.appspot.com/status',
       'http://gyp-status.appspot.com/current'))
 
+  import os
   import sys
   old_sys_path = sys.path
   try:
     sys.path = ['pylib', 'test/lib'] + sys.path
+    blacklist = PYLINT_BLACKLIST
+    if sys.platform == 'win32':
+      blacklist = [os.path.normpath(x).replace('\\', '\\\\')
+                   for x in PYLINT_BLACKLIST]
     report.extend(input_api.canned_checks.RunPylint(
         input_api,
         output_api,
-        black_list=PYLINT_BLACKLIST,
+        black_list=blacklist,
         disabled_warnings=PYLINT_DISABLED_WARNINGS))
   finally:
     sys.path = old_sys_path
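The hunk above normalizes and double-escapes the pylint blacklist on Windows: os.path.normpath turns '/' into '\', and because the presubmit black_list entries are treated as regular expressions, each backslash must be escaped again. A minimal sketch of that transformation; the entries below are illustrative, not the real PYLINT_BLACKLIST:

import os
import sys

# Illustrative entries; the real list is PYLINT_BLACKLIST in PRESUBMIT.py.
blacklist = ['test/lib/TestCmd.py', 'test/lib/TestCommon.py']

def EscapeBlacklistForPlatform(entries):
  """Return regex-safe blacklist entries for the current platform."""
  if sys.platform != 'win32':
    return entries
  # normpath yields 'test\lib\TestCmd.py'; doubling the backslashes keeps
  # them literal when the entry is interpreted as a regular expression.
  return [os.path.normpath(x).replace('\\', '\\\\') for x in entries]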
diff --git a/tools/gyp/buildbot/buildbot_run.py b/tools/gyp/buildbot/buildbot_run.py
index 398eb87..979073c 100755 (executable)
@@ -23,6 +23,8 @@ BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
 TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
 ROOT_DIR = os.path.dirname(TRUNK_DIR)
 ANDROID_DIR = os.path.join(ROOT_DIR, 'android')
+CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
+CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
 OUT_DIR = os.path.join(TRUNK_DIR, 'out')
 
 
@@ -34,6 +36,43 @@ def CallSubProcess(*args, **kwargs):
     sys.exit(1)
 
 
+def PrepareCmake():
+  """Build CMake 2.8.8 since the version in Precise is 2.8.7."""
+  if os.environ['BUILDBOT_CLOBBER'] == '1':
+    print '@@@BUILD_STEP Clobber CMake checkout@@@'
+    shutil.rmtree(CMAKE_DIR)
+
+  # We always build CMake 2.8.8, so no need to do anything
+  # if the directory already exists.
+  if os.path.isdir(CMAKE_DIR):
+    return
+
+  print '@@@BUILD_STEP Initialize CMake checkout@@@'
+  os.mkdir(CMAKE_DIR)
+  CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
+  CallSubProcess(['git', 'config', '--global',
+                  'user.email', 'chrome-bot@google.com'])
+  CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
+
+  print '@@@BUILD_STEP Sync CMake@@@'
+  CallSubProcess(
+      ['git', 'clone',
+       '--depth', '1',
+       '--single-branch',
+       '--branch', 'v2.8.8',
+       '--',
+       'git://cmake.org/cmake.git',
+       CMAKE_DIR],
+      cwd=CMAKE_DIR)
+
+  print '@@@BUILD_STEP Build CMake@@@'
+  CallSubProcess(
+      ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
+      cwd=CMAKE_DIR)
+
+  CallSubProcess(['make', 'cmake'], cwd=CMAKE_DIR)
+
+
 def PrepareAndroidTree():
   """Prepare an Android tree to run 'android' format tests."""
   if os.environ['BUILDBOT_CLOBBER'] == '1':
@@ -91,6 +130,7 @@ def GypTestFormat(title, format=None, msvs_version=None):
        '--all',
        '--passed',
        '--format', format,
+       '--path', CMAKE_BIN_DIR,
        '--chdir', 'trunk'])
   if format == 'android':
     # gyptest needs the environment setup from envsetup/lunch in order to build
@@ -124,6 +164,8 @@ def GypBuild():
   elif sys.platform.startswith('linux'):
     retcode += GypTestFormat('ninja')
     retcode += GypTestFormat('make')
+    PrepareCmake()
+    retcode += GypTestFormat('cmake')
   elif sys.platform == 'darwin':
     retcode += GypTestFormat('ninja')
     retcode += GypTestFormat('xcode')
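One caveat worth noting: PrepareCmake and PrepareAndroidTree above read os.environ['BUILDBOT_CLOBBER'] directly, which raises KeyError if the variable is absent. That is presumably fine on the bots, which export it, but a local run of the script would want the more defensive form sketched here (CMAKE_DIR stands in for the constant defined above):

import os
import shutil

CMAKE_DIR = 'cmake'  # stands in for the CMAKE_DIR computed above

# Hedged variant for running buildbot_run.py outside the buildbot, where
# BUILDBOT_CLOBBER may not be set at all.
if os.environ.get('BUILDBOT_CLOBBER') == '1':
  shutil.rmtree(CMAKE_DIR, ignore_errors=True)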
diff --git a/tools/gyp/gyp b/tools/gyp/gyp
index a157f34..b53a6dd 100755 (executable)
@@ -3,5 +3,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-BASE=`dirname $0`
-python $BASE/gyp_main.py "$@"
+set -e
+base=$(dirname "$0")
+exec python "${base}/gyp_main.py" "$@"
diff --git a/tools/gyp/gyptest.py b/tools/gyp/gyptest.py
index a80dfbf..8f3ee0f 100755 (executable)
@@ -176,7 +176,7 @@ def main(argv=None):
   if opts.path:
     extra_path = [os.path.abspath(p) for p in opts.path]
     extra_path = os.pathsep.join(extra_path)
-    os.environ['PATH'] += os.pathsep + extra_path
+    os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
 
   if not args:
     if not opts.all:
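The gyptest.py change above prepends --path entries to PATH instead of appending them, so a locally built tool, such as the CMake that buildbot_run.py bootstraps into cmake/bin, shadows any system copy. A small sketch of the idea; the directory names are illustrative:

import os

def PrependToPath(extra_dirs, environ):
  """Put extra_dirs ahead of the existing PATH so their binaries win."""
  extra_path = os.pathsep.join(os.path.abspath(p) for p in extra_dirs)
  environ['PATH'] = extra_path + os.pathsep + environ['PATH']

env = {'PATH': '/usr/bin'}          # where the system cmake lives
PrependToPath(['cmake/bin'], env)   # where the bootstrapped cmake lives
# PATH now starts with .../cmake/bin, so 'cmake' resolves to the freshly
# built 2.8.8 binary rather than the distribution's 2.8.7.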
diff --git a/tools/gyp/pylib/gyp/MSVSSettings.py b/tools/gyp/pylib/gyp/MSVSSettings.py
index e8be386..0c9532d 100644 (file)
@@ -812,6 +812,8 @@ _Same(_link, 'UACExecutionLevel',
       _Enumeration(['AsInvoker',  # /level='asInvoker'
                     'HighestAvailable',  # /level='highestAvailable'
                     'RequireAdministrator']))  # /level='requireAdministrator'
+_Same(_link, 'MinimumRequiredVersion', _string)
+_Same(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX
 
 
 # Options found in MSVS that have been renamed in MSBuild.
@@ -850,8 +852,6 @@ _MSBuildOnly(_link, 'LinkStatus', _boolean)  # /LTCG:STATUS
 _MSBuildOnly(_link, 'PreventDllBinding', _boolean)  # /ALLOWBIND
 _MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean)  # /DELAY:NOBIND
 _MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX
-_MSBuildOnly(_link, 'MinimumRequiredVersion', _string)
 _MSBuildOnly(_link, 'MSDOSStubFileName', _file_name)  # /STUB Visible='false'
 _MSBuildOnly(_link, 'SectionAlignment', _integer)  # /ALIGN
 _MSBuildOnly(_link, 'SpecifySectionAttributes', _string)  # /SECTION
@@ -985,6 +985,7 @@ _Same(_lib, 'OutputFile', _file_name)  # /OUT
 _Same(_lib, 'SuppressStartupBanner', _boolean)  # /NOLOGO
 _Same(_lib, 'UseUnicodeResponseFiles', _boolean)
 _Same(_lib, 'LinkTimeCodeGeneration', _boolean)  # /LTCG
+_Same(_lib, 'TargetMachine', _target_machine_enumeration)
 
 # TODO(jeanluc) _link defines the same value that gets moved to
 # ProjectReference.  We may want to validate that they are consistent.
@@ -1003,7 +1004,6 @@ _MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
 _MSBuildOnly(_lib, 'Name', _file_name)  # /NAME
 _MSBuildOnly(_lib, 'RemoveObjects', _file_list)  # /REMOVE
 _MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
-_MSBuildOnly(_lib, 'TargetMachine', _target_machine_enumeration)
 _MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
 _MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean)  # /WX
 _MSBuildOnly(_lib, 'Verbose', _boolean)
diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py
index 956fa01..bb30a7b 100644 (file)
@@ -10,6 +10,7 @@ import re
 import subprocess
 import sys
 import gyp
+import glob
 
 
 class VisualStudioVersion(object):
@@ -339,13 +340,13 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
       path = _ConvertToCygpath(path)
       # Check for full.
       full_path = os.path.join(path, 'devenv.exe')
-      express_path = os.path.join(path, 'vcexpress.exe')
+      express_path = os.path.join(path, '*express.exe')
       if not force_express and os.path.exists(full_path):
         # Add this one.
         versions.append(_CreateVersion(version_to_year[version],
             os.path.join(path, '..', '..')))
       # Check for express.
-      elif os.path.exists(express_path):
+      elif glob.glob(express_path):
         # Add this one.
         versions.append(_CreateVersion(version_to_year[version] + 'e',
             os.path.join(path, '..', '..')))
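The MSVSVersion.py hunk above replaces the exact check for vcexpress.exe with a glob for '*express.exe'; newer Express editions (for example Visual Studio 2012 Express for Windows Desktop) ship WDExpress.exe rather than VCExpress.exe, and the glob catches both. A sketch with an illustrative install path:

import glob
import os

# Illustrative path; the real candidates come from the registry lookup above.
ide_path = r'C:\Program Files (x86)\Microsoft Visual Studio 11.0\Common7\IDE'

express_path = os.path.join(ide_path, '*express.exe')
if glob.glob(express_path):
  # Matches VCExpress.exe as well as WDExpress.exe.
  print('Found an Express edition of Visual Studio')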
diff --git a/tools/gyp/pylib/gyp/generator/android.py b/tools/gyp/pylib/gyp/generator/android.py
index 9476a1d..63036bb 100644 (file)
@@ -452,7 +452,7 @@ class AndroidMkWriter(object):
                      (output, path))
         self.WriteLn('\t@echo Copying: $@')
         self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
-        self.WriteLn('\t$(hide) $(ACP) -r $< $@')
+        self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
         self.WriteLn()
         outputs.append(output)
     self.WriteLn('%s = %s' % (variable,
diff --git a/tools/gyp/pylib/gyp/generator/cmake.py b/tools/gyp/pylib/gyp/generator/cmake.py
new file mode 100644 (file)
index 0000000..1611899
--- /dev/null
@@ -0,0 +1,1150 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cmake output module
+
+This module is under development and should be considered experimental.
+
+This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
+created for each configuration.
+
+This module's original purpose was to support editing in IDEs like KDevelop
+which use CMake for project management. It is also possible to use CMake to
+generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
+will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
+but build using CMake. As a result QtCreator editor is unaware of compiler
+defines. The generated CMakeLists.txt can also be used to build on Linux. There
+is currently no support for building on platforms other than Linux.
+
+The generated CMakeLists.txt should properly compile all projects. However,
+there is a mismatch between gyp and cmake with regard to linking. All attempts
+are made to work around this, but CMake sometimes sees -Wl,--start-group as a
+library and incorrectly repeats it. As a result the output of this generator
+should not be relied on for building.
+
+When using with kdevelop, use version 4.4+. Previous versions of kdevelop will
+not be able to find the header file directories described in the generated
+CMakeLists.txt file.
+"""
+
+import multiprocessing
+import os
+import signal
+import string
+import subprocess
+import gyp.common
+
+generator_default_variables = {
+  'EXECUTABLE_PREFIX': '',
+  'EXECUTABLE_SUFFIX': '',
+  'STATIC_LIB_PREFIX': 'lib',
+  'STATIC_LIB_SUFFIX': '.a',
+  'SHARED_LIB_PREFIX': 'lib',
+  'SHARED_LIB_SUFFIX': '.so',
+  'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
+  'LIB_DIR': '${obj}.${TOOLSET}',
+  'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
+  'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
+  'PRODUCT_DIR': '${builddir}',
+  'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
+  'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
+  'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
+  'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
+  'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
+  'CONFIGURATION_NAME': '${configuration}',
+}
+
+FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}', '${builddir}', '${obj}')
+
+generator_supports_multiple_toolsets = True
+generator_wants_static_library_dependencies_adjusted = True
+
+COMPILABLE_EXTENSIONS = {
+  '.c': 'cc',
+  '.cc': 'cxx',
+  '.cpp': 'cxx',
+  '.cxx': 'cxx',
+  '.s': 's', # cc
+  '.S': 's', # cc
+}
+
+
+def RemovePrefix(a, prefix):
+  """Returns 'a' without 'prefix' if it starts with 'prefix'."""
+  return a[len(prefix):] if a.startswith(prefix) else a
+
+
+def CalculateVariables(default_variables, params):
+  """Calculate additional variables for use in the build (called by gyp)."""
+  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+
+
+def Compilable(filename):
+  """Return true if the file is compilable (should be in OBJS)."""
+  return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
+
+
+def Linkable(filename):
+  """Return true if the file is linkable (should be on the link line)."""
+  return filename.endswith('.o')
+
+
+def NormjoinPathForceCMakeSource(base_path, rel_path):
+  """Resolves rel_path against base_path and returns the result.
+
+  If rel_path is an absolute path it is returned unchanged.
+  Otherwise it is resolved against base_path and normalized.
+  If the result is a relative path, it is forced to be relative to the
+  CMakeLists.txt.
+  """
+  if os.path.isabs(rel_path):
+    return rel_path
+  if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
+    return rel_path
+  # TODO: do we need to check base_path for absolute variables as well?
+  return os.path.join('${CMAKE_SOURCE_DIR}',
+                      os.path.normpath(os.path.join(base_path, rel_path)))
+
+
+def NormjoinPath(base_path, rel_path):
+  """Resolves rel_path against base_path and returns the result.
+  TODO: what is this really used for?
+  If rel_path begins with '$' it is returned unchanged.
+  Otherwise it is resolved against base_path if relative, then normalized.
+  """
+  if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
+    return rel_path
+  return os.path.normpath(os.path.join(base_path, rel_path))
+
+
+def EnsureDirectoryExists(path):
+  """Python version of 'mkdir -p'."""
+  dirPath = os.path.dirname(path)
+  if dirPath and not os.path.exists(dirPath):
+    os.makedirs(dirPath)
+
+
+def CMakeStringEscape(a):
+  """Escapes the string 'a' for use inside a CMake string.
+
+  This means escaping
+  '\' otherwise it may be seen as modifying the next character
+  '"' otherwise it will end the string
+  ';' otherwise the string becomes a list
+
+  The following do not need to be escaped
+  '#' when the lexer is in string state, this does not start a comment
+
+  The following are yet unknown
+  '$' generator variables (like ${obj}) must not be escaped,
+      but text $ should be escaped
+      what is wanted is to know which $ come from generator variables
+  """
+  return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
+
+
+def SetFileProperty(output, source_name, property_name, values, sep):
+  """Given a set of source file, sets the given property on them."""
+  output.write('set_source_files_properties(')
+  output.write(source_name)
+  output.write(' PROPERTIES ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('")\n')
+
+
+def SetFilesProperty(output, source_names, property_name, values, sep):
+  """Given a set of source files, sets the given property on them."""
+  output.write('set_source_files_properties(\n')
+  for source_name in source_names:
+    output.write('  ')
+    output.write(source_name)
+    output.write('\n')
+  output.write(' PROPERTIES\n  ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('"\n)\n')
+
+
+def SetTargetProperty(output, target_name, property_name, values, sep=''):
+  """Given a target, sets the given property."""
+  output.write('set_target_properties(')
+  output.write(target_name)
+  output.write(' PROPERTIES ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('")\n')
+
+
+def SetVariable(output, variable_name, value):
+  """Sets a CMake variable."""
+  output.write('set(')
+  output.write(variable_name)
+  output.write(' "')
+  output.write(CMakeStringEscape(value))
+  output.write('")\n')
+
+
+def SetVariableList(output, variable_name, values):
+  """Sets a CMake variable to a list."""
+  if not values:
+    return SetVariable(output, variable_name, "")
+  if len(values) == 1:
+    return SetVariable(output, variable_name, values[0])
+  output.write('list(APPEND ')
+  output.write(variable_name)
+  output.write('\n  "')
+  output.write('"\n  "'.join([CMakeStringEscape(value) for value in values]))
+  output.write('")\n')
+
+
+def UnsetVariable(output, variable_name):
+  """Unsets a CMake variable."""
+  output.write('unset(')
+  output.write(variable_name)
+  output.write(')\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+  if prepend:
+    output.write(prepend)
+  output.write('${')
+  output.write(variable_name)
+  output.write('}')
+
+
+class CMakeTargetType:
+  def __init__(self, command, modifier, property_modifier):
+    self.command = command
+    self.modifier = modifier
+    self.property_modifier = property_modifier
+
+
+cmake_target_type_from_gyp_target_type = {
+  'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
+  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
+  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
+  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
+  'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
+}
+
+
+def StringToCMakeTargetName(a):
+  """Converts the given string 'a' to a valid CMake target name.
+
+  All invalid characters are replaced by '_'.
+  Invalid for cmake: ' ', '/', '(', ')'
+  Invalid for make: ':'
+  Invalid for unknown reasons but cause failures: '.'
+  """
+  return a.translate(string.maketrans(' /():.', '______'))
+
+
+def WriteActions(target_name, actions, extra_sources, extra_deps,
+                 path_to_gyp, output):
+  """Write CMake for the 'actions' in the target.
+
+  Args:
+    target_name: the name of the CMake target being generated.
+    actions: the Gyp 'actions' dict for this target.
+    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+    extra_deps: [<cmake_target>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+  """
+  for action in actions:
+    action_name = StringToCMakeTargetName(action['action_name'])
+    action_target_name = '%s__%s' % (target_name, action_name)
+
+    inputs = action['inputs']
+    inputs_name = action_target_name + '__input'
+    SetVariableList(output, inputs_name,
+        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+
+    outputs = action['outputs']
+    cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
+                     for out in outputs]
+    outputs_name = action_target_name + '__output'
+    SetVariableList(output, outputs_name, cmake_outputs)
+
+    # Build up a list of outputs.
+    # Collect the output dirs we'll need.
+    dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+    if int(action.get('process_outputs_as_sources', False)):
+      extra_sources.extend(zip(cmake_outputs, outputs))
+
+    # add_custom_command
+    output.write('add_custom_command(OUTPUT ')
+    WriteVariable(output, outputs_name)
+    output.write('\n')
+
+    if len(dirs) > 0:
+      for directory in dirs:
+        output.write('  COMMAND ${CMAKE_COMMAND} -E make_directory ')
+        output.write(directory)
+        output.write('\n')
+
+    output.write('  COMMAND ')
+    output.write(gyp.common.EncodePOSIXShellList(action['action']))
+    output.write('\n')
+
+    output.write('  DEPENDS ')
+    WriteVariable(output, inputs_name)
+    output.write('\n')
+
+    output.write('  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+    output.write(path_to_gyp)
+    output.write('\n')
+
+    output.write('  COMMENT ')
+    if 'message' in action:
+      output.write(action['message'])
+    else:
+      output.write(action_target_name)
+    output.write('\n')
+
+    output.write('  VERBATIM\n')
+    output.write(')\n')
+
+    # add_custom_target
+    output.write('add_custom_target(')
+    output.write(action_target_name)
+    output.write('\n  DEPENDS ')
+    WriteVariable(output, outputs_name)
+    output.write('\n  SOURCES ')
+    WriteVariable(output, inputs_name)
+    output.write('\n)\n')
+
+    extra_deps.append(action_target_name)
+
+
+def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
+  if rel_path.startswith(("${RULE_INPUT_PATH}","${RULE_INPUT_DIRNAME}")):
+    if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
+      return rel_path
+  return NormjoinPathForceCMakeSource(base_path, rel_path)
+
+
+def WriteRules(target_name, rules, extra_sources, extra_deps,
+               path_to_gyp, output):
+  """Write CMake for the 'rules' in the target.
+
+  Args:
+    target_name: the name of the CMake target being generated.
+    rules: the Gyp 'rules' dict for this target.
+    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+    extra_deps: [<cmake_target>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+  """
+  for rule in rules:
+    rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
+
+    inputs = rule.get('inputs', [])
+    inputs_name = rule_name + '__input'
+    SetVariableList(output, inputs_name,
+        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+    outputs = rule['outputs']
+    var_outputs = []
+
+    for count, rule_source in enumerate(rule.get('rule_sources', [])):
+      action_name = rule_name + '_' + str(count)
+
+      rule_source_dirname, rule_source_basename = os.path.split(rule_source)
+      rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
+
+      SetVariable(output, 'RULE_INPUT_PATH', rule_source)
+      SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
+      SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
+      SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
+      SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
+
+      # Build up a list of outputs.
+      # Collect the output dirs we'll need.
+      dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+      # Create variables for the output, as 'local' variable will be unset.
+      these_outputs = []
+      for output_index, out in enumerate(outputs):
+        output_name = action_name + '_' + str(output_index)
+        SetVariable(output, output_name,
+                     NormjoinRulePathForceCMakeSource(path_to_gyp, out,
+                                                      rule_source))
+        if int(rule.get('process_outputs_as_sources', False)):
+          extra_sources.append(('${' + output_name + '}', out))
+        these_outputs.append('${' + output_name + '}')
+        var_outputs.append('${' + output_name + '}')
+
+      # add_custom_command
+      output.write('add_custom_command(OUTPUT\n')
+      for out in these_outputs:
+        output.write('  ')
+        output.write(out)
+        output.write('\n')
+
+      for directory in dirs:
+        output.write('  COMMAND ${CMAKE_COMMAND} -E make_directory ')
+        output.write(directory)
+        output.write('\n')
+
+      output.write('  COMMAND ')
+      output.write(gyp.common.EncodePOSIXShellList(rule['action']))
+      output.write('\n')
+
+      output.write('  DEPENDS ')
+      WriteVariable(output, inputs_name)
+      output.write(' ')
+      output.write(NormjoinPath(path_to_gyp, rule_source))
+      output.write('\n')
+
+      # CMAKE_SOURCE_DIR is where the CMakeLists.txt lives.
+      # The cwd is the current build directory.
+      output.write('  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+      output.write(path_to_gyp)
+      output.write('\n')
+
+      output.write('  COMMENT ')
+      if 'message' in rule:
+        output.write(rule['message'])
+      else:
+        output.write(action_name)
+      output.write('\n')
+
+      output.write('  VERBATIM\n')
+      output.write(')\n')
+
+      UnsetVariable(output, 'RULE_INPUT_PATH')
+      UnsetVariable(output, 'RULE_INPUT_DIRNAME')
+      UnsetVariable(output, 'RULE_INPUT_NAME')
+      UnsetVariable(output, 'RULE_INPUT_ROOT')
+      UnsetVariable(output, 'RULE_INPUT_EXT')
+
+    # add_custom_target
+    output.write('add_custom_target(')
+    output.write(rule_name)
+    output.write(' DEPENDS\n')
+    for out in var_outputs:
+      output.write('  ')
+      output.write(out)
+      output.write('\n')
+    output.write('SOURCES ')
+    WriteVariable(output, inputs_name)
+    output.write('\n')
+    for rule_source in rule.get('rule_sources', []):
+      output.write('  ')
+      output.write(NormjoinPath(path_to_gyp, rule_source))
+      output.write('\n')
+    output.write(')\n')
+
+    extra_deps.append(rule_name)
+
+
+def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
+  """Write CMake for the 'copies' in the target.
+
+  Args:
+    target_name: the name of the CMake target being generated.
+    copies: the Gyp 'copies' list for this target.
+    extra_deps: [<cmake_target>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+  """
+  copy_name = target_name + '__copies'
+
+  # CMake gets upset when a custom command specifies no OUTPUT files.
+  have_copies = any(copy['files'] for copy in copies)
+  if not have_copies:
+    output.write('add_custom_target(')
+    output.write(copy_name)
+    output.write(')\n')
+    extra_deps.append(copy_name)
+    return
+
+  class Copy:
+    def __init__(self, ext, command):
+      self.cmake_inputs = []
+      self.cmake_outputs = []
+      self.gyp_inputs = []
+      self.gyp_outputs = []
+      self.ext = ext
+      self.inputs_name = None
+      self.outputs_name = None
+      self.command = command
+
+  file_copy = Copy('', 'copy')
+  dir_copy = Copy('_dirs', 'copy_directory')
+
+  for copy in copies:
+    files = copy['files']
+    destination = copy['destination']
+    for src in files:
+      path = os.path.normpath(src)
+      basename = os.path.split(path)[1]
+      dst = os.path.join(destination, basename)
+
+      copy = file_copy if os.path.basename(src) else dir_copy
+
+      copy.cmake_inputs.append(NormjoinPath(path_to_gyp, src))
+      copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
+      copy.gyp_inputs.append(src)
+      copy.gyp_outputs.append(dst)
+
+  for copy in (file_copy, dir_copy):
+    if copy.cmake_inputs:
+      copy.inputs_name = copy_name + '__input' + copy.ext
+      SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
+
+      copy.outputs_name = copy_name + '__output' + copy.ext
+      SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
+
+  # add_custom_command
+  output.write('add_custom_command(\n')
+
+  output.write('OUTPUT')
+  for copy in (file_copy, dir_copy):
+    if copy.outputs_name:
+      WriteVariable(output, copy.outputs_name, ' ')
+  output.write('\n')
+
+  for copy in (file_copy, dir_copy):
+    for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
+      # 'cmake -E copy src dst' will create the 'dst' directory if needed.
+      output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
+      output.write(src)
+      output.write(' ')
+      output.write(dst)
+      output.write("\n")
+
+  output.write('DEPENDS')
+  for copy in (file_copy, dir_copy):
+    if copy.inputs_name:
+      WriteVariable(output, copy.inputs_name, ' ')
+  output.write('\n')
+
+  output.write('WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+  output.write(path_to_gyp)
+  output.write('\n')
+
+  output.write('COMMENT Copying for ')
+  output.write(target_name)
+  output.write('\n')
+
+  output.write('VERBATIM\n')
+  output.write(')\n')
+
+  # add_custom_target
+  output.write('add_custom_target(')
+  output.write(copy_name)
+  output.write('\n  DEPENDS')
+  for copy in (file_copy, dir_copy):
+    if copy.outputs_name:
+      WriteVariable(output, copy.outputs_name, ' ')
+  output.write('\n  SOURCES')
+  if file_copy.inputs_name:
+    WriteVariable(output, file_copy.inputs_name, ' ')
+  output.write('\n)\n')
+
+  extra_deps.append(copy_name)
+
+
+def CreateCMakeTargetBaseName(qualified_target):
+  """This is the name we would like the target to have."""
+  _, gyp_target_name, gyp_target_toolset = (
+      gyp.common.ParseQualifiedTarget(qualified_target))
+  cmake_target_base_name = gyp_target_name
+  if gyp_target_toolset and gyp_target_toolset != 'target':
+    cmake_target_base_name += '_' + gyp_target_toolset
+  return StringToCMakeTargetName(cmake_target_base_name)
+
+
+def CreateCMakeTargetFullName(qualified_target):
+  """An unambiguous name for the target."""
+  gyp_file, gyp_target_name, gyp_target_toolset = (
+      gyp.common.ParseQualifiedTarget(qualified_target))
+  cmake_target_full_name = gyp_file + ':' + gyp_target_name
+  if gyp_target_toolset and gyp_target_toolset != 'target':
+    cmake_target_full_name += '_' + gyp_target_toolset
+  return StringToCMakeTargetName(cmake_target_full_name)
+
+
+class CMakeNamer(object):
+  """Converts Gyp target names into CMake target names.
+
+  CMake requires that target names be globally unique. One way to ensure
+  this is to fully qualify the names of the targets. Unfortunately, this
+  ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
+  of just "chrome". If this generator were only interested in building, it
+  would be possible to fully qualify all target names, then create
+  unqualified target names which depend on all qualified targets which
+  should have had that name. This is more or less what the 'make' generator
+  does with aliases. However, one goal of this generator is to create CMake
+  files for use with IDEs, and fully qualified names are not as user
+  friendly.
+
+  Since target name collision is rare, we do the above only when required.
+
+  Toolset variants are always qualified from the base, as this is required for
+  building. However, it also makes sense for an IDE, as it is possible for
+  defines to be different.
+  """
+  def __init__(self, target_list):
+    self.cmake_target_base_names_conflicting = set()
+
+    cmake_target_base_names_seen = set()
+    for qualified_target in target_list:
+      cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
+
+      if cmake_target_base_name not in cmake_target_base_names_seen:
+        cmake_target_base_names_seen.add(cmake_target_base_name)
+      else:
+        self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
+
+  def CreateCMakeTargetName(self, qualified_target):
+    base_name = CreateCMakeTargetBaseName(qualified_target)
+    if base_name in self.cmake_target_base_names_conflicting:
+      return CreateCMakeTargetFullName(qualified_target)
+    return base_name
+
+
+def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+                options, generator_flags, all_qualified_targets, output):
+
+  # The make generator does this always.
+  # TODO: It would be nice to be able to tell CMake all dependencies.
+  circular_libs = generator_flags.get('circular', True)
+
+  if not generator_flags.get('standalone', False):
+    output.write('\n#')
+    output.write(qualified_target)
+    output.write('\n')
+
+  gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+  rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
+  rel_gyp_dir = os.path.dirname(rel_gyp_file)
+
+  # Relative path from build dir to top dir.
+  build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
+  # Relative path from build dir to gyp dir.
+  build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
+
+  path_from_cmakelists_to_gyp = build_to_gyp
+
+  spec = target_dicts.get(qualified_target, {})
+  config = spec.get('configurations', {}).get(config_to_use, {})
+
+  target_name = spec.get('target_name', '<missing target name>')
+  target_type = spec.get('type', '<missing target type>')
+  target_toolset = spec.get('toolset')
+
+  SetVariable(output, 'TARGET', target_name)
+  SetVariable(output, 'TOOLSET', target_toolset)
+
+  cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
+
+  extra_sources = []
+  extra_deps = []
+
+  # Actions must come first, since they can generate more OBJs for use below.
+  if 'actions' in spec:
+    WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
+                 path_from_cmakelists_to_gyp, output)
+
+  # Rules must be early like actions.
+  if 'rules' in spec:
+    WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
+               path_from_cmakelists_to_gyp, output)
+
+  # Copies
+  if 'copies' in spec:
+    WriteCopies(cmake_target_name, spec['copies'], extra_deps,
+                path_from_cmakelists_to_gyp, output)
+
+  # Target and sources
+  srcs = spec.get('sources', [])
+
+  # Gyp separates the sheep from the goats based on file extensions.
+  def partition(l, p):
+    return reduce(lambda x, e: x[not p(e)].append(e) or x, l, ([], []))
+  compilable_srcs, other_srcs = partition(srcs, Compilable)
+
+  # CMake gets upset when executable targets provide no sources.
+  if target_type == 'executable' and not compilable_srcs and not extra_sources:
+    print ('Executable %s has no compilable sources, treating as "none".' %
+           target_name)
+    target_type = 'none'
+
+  cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
+  if cmake_target_type is None:
+    print ('Target %s has unknown target type %s, skipping.' %
+           (target_name, target_type))
+    return
+
+  other_srcs_name = None
+  if other_srcs:
+    other_srcs_name = cmake_target_name + '__other_srcs'
+    SetVariableList(output, other_srcs_name,
+        [NormjoinPath(path_from_cmakelists_to_gyp, src) for src in other_srcs])
+
+  # CMake is opposed to setting linker directories and considers the practice
+  # of setting linker directories dangerous. Instead, it favors the use of
+  # find_library and passing absolute paths to target_link_libraries.
+  # However, CMake does provide the command link_directories, which adds
+  # link directories to targets defined after it is called.
+  # As a result, link_directories must come before the target definition.
+  # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
+  library_dirs = config.get('library_dirs')
+  if library_dirs is not None:
+    output.write('link_directories(')
+    for library_dir in library_dirs:
+      output.write(' ')
+      output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
+      output.write('\n')
+    output.write(')\n')
+
+  output.write(cmake_target_type.command)
+  output.write('(')
+  output.write(cmake_target_name)
+
+  if cmake_target_type.modifier is not None:
+    output.write(' ')
+    output.write(cmake_target_type.modifier)
+
+  if other_srcs_name:
+    WriteVariable(output, other_srcs_name, ' ')
+
+  output.write('\n')
+
+  for src in compilable_srcs:
+    output.write('  ')
+    output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
+    output.write('\n')
+  for extra_source in extra_sources:
+    output.write('  ')
+    src, _ = extra_source
+    output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
+    output.write('\n')
+
+  output.write(')\n')
+
+  # Output name and location.
+  if target_type != 'none':
+    # Mark uncompiled sources as uncompiled.
+    if other_srcs_name:
+      output.write('set_source_files_properties(')
+      WriteVariable(output, other_srcs_name, '')
+      output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
+
+    # Output directory
+    target_output_directory = spec.get('product_dir')
+    if target_output_directory is None:
+      if target_type in ('executable', 'loadable_module'):
+        target_output_directory = generator_default_variables['PRODUCT_DIR']
+      elif target_type == 'shared_library':
+        target_output_directory = '${builddir}/lib.${TOOLSET}'
+      elif spec.get('standalone_static_library', False):
+        target_output_directory = generator_default_variables['PRODUCT_DIR']
+      else:
+        base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
+                                            options.toplevel_dir)
+        target_output_directory = '${obj}.${TOOLSET}'
+        target_output_directory = (
+            os.path.join(target_output_directory, base_path))
+
+    cmake_target_output_directory = NormjoinPathForceCMakeSource(
+                                        path_from_cmakelists_to_gyp,
+                                        target_output_directory)
+    SetTargetProperty(output,
+        cmake_target_name,
+        cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
+        cmake_target_output_directory)
+
+    # Output name
+    default_product_prefix = ''
+    default_product_name = target_name
+    default_product_ext = ''
+    if target_type == 'static_library':
+      static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
+      default_product_name = RemovePrefix(default_product_name,
+                                          static_library_prefix)
+      default_product_prefix = static_library_prefix
+      default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
+
+    elif target_type in ('loadable_module', 'shared_library'):
+      shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
+      default_product_name = RemovePrefix(default_product_name,
+                                          shared_library_prefix)
+      default_product_prefix = shared_library_prefix
+      default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
+
+    elif target_type != 'executable':
+      print ('ERROR: What output file should be generated?',
+              'type', target_type, 'target', target_name)
+
+    product_prefix = spec.get('product_prefix', default_product_prefix)
+    product_name = spec.get('product_name', default_product_name)
+    product_ext = spec.get('product_extension')
+    if product_ext:
+      product_ext = '.' + product_ext
+    else:
+      product_ext = default_product_ext
+
+    SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
+    SetTargetProperty(output, cmake_target_name,
+                        cmake_target_type.property_modifier + '_OUTPUT_NAME',
+                        product_name)
+    SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
+
+    # Make the output of this target referenceable as a source.
+    cmake_target_output_basename = product_prefix + product_name + product_ext
+    cmake_target_output = os.path.join(cmake_target_output_directory,
+                                       cmake_target_output_basename)
+    SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
+
+  # Let CMake know if the 'all' target should depend on this target.
+  exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
+                             else 'FALSE')
+  SetTargetProperty(output, cmake_target_name,
+                      'EXCLUDE_FROM_ALL', exclude_from_all)
+  for extra_target_name in extra_deps:
+    SetTargetProperty(output, extra_target_name,
+                        'EXCLUDE_FROM_ALL', exclude_from_all)
+
+  # Includes
+  includes = config.get('include_dirs')
+  if includes:
+    # This (target include directories) is what requires CMake 2.8.8
+    includes_name = cmake_target_name + '__include_dirs'
+    SetVariableList(output, includes_name,
+        [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
+         for include in includes])
+    output.write('set_property(TARGET ')
+    output.write(cmake_target_name)
+    output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
+    WriteVariable(output, includes_name, '')
+    output.write(')\n')
+
+  # Defines
+  defines = config.get('defines')
+  if defines is not None:
+    SetTargetProperty(output,
+                        cmake_target_name,
+                        'COMPILE_DEFINITIONS',
+                        defines,
+                        ';')
+
+  # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
+  # CMake currently does not have target C and CXX flags.
+  # So, instead of doing...
+
+  # cflags_c = config.get('cflags_c')
+  # if cflags_c is not None:
+  #   SetTargetProperty(output, cmake_target_name,
+  #                       'C_COMPILE_FLAGS', cflags_c, ' ')
+
+  # cflags_cc = config.get('cflags_cc')
+  # if cflags_cc is not None:
+  #   SetTargetProperty(output, cmake_target_name,
+  #                       'CXX_COMPILE_FLAGS', cflags_cc, ' ')
+
+  # Instead we must...
+  s_sources = []
+  c_sources = []
+  cxx_sources = []
+  for src in srcs:
+    _, ext = os.path.splitext(src)
+    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+    if src_type == 's':
+      s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cc':
+      c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cxx':
+      cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+  for extra_source in extra_sources:
+    src, real_source = extra_source
+    _, ext = os.path.splitext(real_source)
+    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+    if src_type == 's':
+      s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cc':
+      c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cxx':
+      cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+  cflags = config.get('cflags', [])
+  cflags_c = config.get('cflags_c', [])
+  cflags_cxx = config.get('cflags_cc', [])
+  if c_sources and not (s_sources or cxx_sources):
+    flags = []
+    flags.extend(cflags)
+    flags.extend(cflags_c)
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+  elif cxx_sources and not (s_sources or c_sources):
+    flags = []
+    flags.extend(cflags)
+    flags.extend(cflags_cxx)
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+  else:
+    if s_sources and cflags:
+      SetFilesProperty(output, s_sources, 'COMPILE_FLAGS', cflags, ' ')
+
+    if c_sources and (cflags or cflags_c):
+      flags = []
+      flags.extend(cflags)
+      flags.extend(cflags_c)
+      SetFilesProperty(output, c_sources, 'COMPILE_FLAGS', flags, ' ')
+
+    if cxx_sources and (cflags or cflags_cxx):
+      flags = []
+      flags.extend(cflags)
+      flags.extend(cflags_cxx)
+      SetFilesProperty(output, cxx_sources, 'COMPILE_FLAGS', flags, ' ')
+
+  # Have assembly link as c if there are no other files
+  if not c_sources and not cxx_sources and s_sources:
+    SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
+
+  # Linker flags
+  ldflags = config.get('ldflags')
+  if ldflags is not None:
+    SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+
+  # Note on Dependencies and Libraries:
+  # CMake wants to handle link order, resolving the link line up front.
+  # Gyp does not retain or enforce specifying enough information to do so.
+  # So do as other gyp generators and use --start-group and --end-group.
+  # Give CMake as little information as possible so that it doesn't mess it up.
+
+  # Dependencies
+  rawDeps = spec.get('dependencies', [])
+
+  static_deps = []
+  shared_deps = []
+  other_deps = []
+  for rawDep in rawDeps:
+    dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
+    dep_spec = target_dicts.get(rawDep, {})
+    dep_target_type = dep_spec.get('type', None)
+
+    if dep_target_type == 'static_library':
+      static_deps.append(dep_cmake_name)
+    elif dep_target_type ==  'shared_library':
+      shared_deps.append(dep_cmake_name)
+    else:
+      other_deps.append(dep_cmake_name)
+
+  # Ensure all external dependencies are complete before internal dependencies;
+  # extra_deps currently only depend on their own deps, so they would otherwise
+  # run early.
+  if static_deps or shared_deps or other_deps:
+    for extra_dep in extra_deps:
+      output.write('add_dependencies(')
+      output.write(extra_dep)
+      output.write('\n')
+      for deps in (static_deps, shared_deps, other_deps):
+        for dep in gyp.common.uniquer(deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+      output.write(')\n')
+
+  linkable = target_type in ('executable', 'loadable_module', 'shared_library')
+  other_deps.extend(extra_deps)
+  if other_deps or (not linkable and (static_deps or shared_deps)):
+    output.write('add_dependencies(')
+    output.write(cmake_target_name)
+    output.write('\n')
+    for dep in gyp.common.uniquer(other_deps):
+      output.write('  ')
+      output.write(dep)
+      output.write('\n')
+    if not linkable:
+      for deps in (static_deps, shared_deps):
+        for lib_dep in gyp.common.uniquer(deps):
+          output.write('  ')
+          output.write(lib_dep)
+          output.write('\n')
+    output.write(')\n')
+
+  # Libraries
+  if linkable:
+    external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
+    if external_libs or static_deps or shared_deps:
+      output.write('target_link_libraries(')
+      output.write(cmake_target_name)
+      output.write('\n')
+      if static_deps:
+        write_group = circular_libs and len(static_deps) > 1
+        if write_group:
+          output.write('-Wl,--start-group\n')
+        for dep in gyp.common.uniquer(static_deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+        if write_group:
+          output.write('-Wl,--end-group\n')
+      if shared_deps:
+        for dep in gyp.common.uniquer(shared_deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+      if external_libs:
+        for lib in gyp.common.uniquer(external_libs):
+          output.write('  ')
+          output.write(lib)
+          output.write('\n')
+
+      output.write(')\n')
+
+  UnsetVariable(output, 'TOOLSET')
+  UnsetVariable(output, 'TARGET')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data,
+                            params, config_to_use):
+  options = params['options']
+  generator_flags = params['generator_flags']
+
+  # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
+  # Each Gyp configuration creates a different CMakeLists.txt file
+  # to avoid incompatibilities between Gyp and CMake configurations.
+  generator_dir = os.path.relpath(options.generator_output or '.')
+
+  # output_dir: relative path from generator_dir to the build directory.
+  output_dir = generator_flags.get('output_dir', 'out')
+
+  # build_dir: relative path from source root to our output files.
+  # e.g. "out/Debug"
+  build_dir = os.path.normpath(os.path.join(generator_dir,
+                                            output_dir,
+                                            config_to_use))
+
+  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+  output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
+  EnsureDirectoryExists(output_file)
+
+  output = open(output_file, 'w')
+  output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+  output.write('cmake_policy(VERSION 2.8.8)\n')
+
+  _, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
+  output.write('project(')
+  output.write(project_target)
+  output.write(')\n')
+
+  SetVariable(output, 'configuration', config_to_use)
+
+  # The following appears to be as-yet undocumented.
+  # http://public.kitware.com/Bug/view.php?id=8392
+  output.write('enable_language(ASM)\n')
+  # ASM-ATT does not support .S files.
+  # output.write('enable_language(ASM-ATT)\n')
+
+  SetVariable(output, 'builddir', '${CMAKE_BINARY_DIR}')
+  SetVariable(output, 'obj', '${builddir}/obj')
+  output.write('\n')
+
+  # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
+  # CMake by default names the object resulting from foo.c to be foo.c.o.
+  # Gyp traditionally names the object resulting from foo.c foo.o.
+  # This should be irrelevant, but some targets extract .o files from .a
+  # and depend on the name of the extracted .o files.
+  output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
+  output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
+  output.write('\n')
+
+  namer = CMakeNamer(target_list)
+
+  # The list of targets upon which the 'all' target should depend.
+  # CMake has its own implicit 'all' target; one is not created explicitly.
+  all_qualified_targets = set()
+  for build_file in params['build_files']:
+    for qualified_target in gyp.common.AllTargets(target_list,
+                                                  target_dicts,
+                                                  os.path.normpath(build_file)):
+      all_qualified_targets.add(qualified_target)
+
+  for qualified_target in target_list:
+    WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+                options, generator_flags, all_qualified_targets, output)
+
+  output.close()
+
+
+def PerformBuild(data, configurations, params):
+  options = params['options']
+  generator_flags = params['generator_flags']
+
+  # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
+  generator_dir = os.path.relpath(options.generator_output or '.')
+
+  # output_dir: relative path from generator_dir to the build directory.
+  output_dir = generator_flags.get('output_dir', 'out')
+
+  for config_name in configurations:
+    # build_dir: relative path from source root to our output files.
+    # e.g. "out/Debug"
+    build_dir = os.path.normpath(os.path.join(generator_dir,
+                                              output_dir,
+                                              config_name))
+    arguments = ['cmake', '-G', 'Ninja']
+    print 'Generating [%s]: %s' % (config_name, arguments)
+    subprocess.check_call(arguments, cwd=build_dir)
+
+    arguments = ['ninja', '-C', build_dir]
+    print 'Building [%s]: %s' % (config_name, arguments)
+    subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+  # Ignore the interrupt signal so that the parent process catches it and
+  # kills all multiprocessing children.
+  signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+  target_list, target_dicts, data, params, config_name = arglist
+  GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+  user_config = params.get('generator_flags', {}).get('config', None)
+  if user_config:
+    GenerateOutputForConfig(target_list, target_dicts, data,
+                            params, user_config)
+  else:
+    config_names = target_dicts[target_list[0]]['configurations'].keys()
+    if params['parallel']:
+      try:
+        pool = multiprocessing.Pool(len(config_names))
+        arglists = []
+        for config_name in config_names:
+          arglists.append((target_list, target_dicts, data,
+                           params, config_name))
+        pool.map(CallGenerateOutputForConfig, arglists)
+      except KeyboardInterrupt, e:
+        pool.terminate()
+        raise e
+    else:
+      for config_name in config_names:
+        GenerateOutputForConfig(target_list, target_dicts, data,
+                                params, config_name)
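With the generator above in place, a project can be generated and built in the same way PerformBuild does it. A hedged usage sketch, assuming the gyp wrapper script is on PATH, a hypothetical hello.gyp at the top level that defines a Debug configuration, and the default 'out' output directory; the -f/--format and --depth flags are standard gyp options:

import subprocess

# Write out/<configuration>/CMakeLists.txt for every configuration.
subprocess.check_call(['gyp', '-f', 'cmake', '--depth=.', 'hello.gyp'])

# Configure and build one configuration; this follows the same pattern as
# PerformBuild, with the source directory passed explicitly.
subprocess.check_call(['cmake', '.', '-G', 'Ninja'], cwd='out/Debug')
subprocess.check_call(['ninja', '-C', 'out/Debug'])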
diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py
index 7bef217..d407843 100644 (file)
@@ -57,6 +57,7 @@ generator_wants_sorted_dependencies = False
 generator_additional_non_configuration_keys = []
 generator_additional_path_sections = []
 generator_extra_sources_for_rules = []
+generator_filelist_paths = None
 
 
 def CalculateVariables(default_variables, params):
@@ -103,6 +104,18 @@ def CalculateGeneratorInputInfo(params):
     global generator_wants_sorted_dependencies
     generator_wants_sorted_dependencies = True
 
+  output_dir = params['options'].generator_output or \
+               params['options'].toplevel_dir
+  builddir_name = generator_flags.get('output_dir', 'out')
+  qualified_out_dir = os.path.normpath(os.path.join(
+    output_dir, builddir_name, 'gypfiles'))
+
+  global generator_filelist_paths
+  generator_filelist_paths = {
+    'toplevel': params['options'].toplevel_dir,
+    'qualified_out_dir': qualified_out_dir,
+  }
+
 
 def ensure_directory_exists(path):
   dir = os.path.dirname(path)
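The make.py hunk above publishes generator_filelist_paths, presumably so that generated file lists land under the build output rather than next to the sources. A worked example of the resulting values, assuming a top-level directory of /src/node, no --generator-output, and the default output_dir of 'out':

import os

toplevel_dir = '/src/node'     # illustrative
generator_output = None        # no --generator-output was given
builddir_name = 'out'          # default 'output_dir' generator flag

output_dir = generator_output or toplevel_dir
qualified_out_dir = os.path.normpath(
    os.path.join(output_dir, builddir_name, 'gypfiles'))

generator_filelist_paths = {
    'toplevel': toplevel_dir,                # '/src/node'
    'qualified_out_dir': qualified_out_dir,  # '/src/node/out/gypfiles'
}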
diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py
index 2f2c3cf..c48a953 100644 (file)
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import collections
 import copy
 import ntpath
 import os
@@ -86,6 +87,46 @@ cached_username = None
 cached_domain = None
 
 
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(collections.MutableSet):
+  def __init__(self, iterable=None):
+    self.end = end = []
+    end += [None, end, end]         # sentinel node for doubly linked list
+    self.map = {}                   # key --> [key, prev, next]
+    if iterable is not None:
+      self |= iterable
+
+  def __len__(self):
+    return len(self.map)
+
+  def discard(self, key):
+    if key in self.map:
+      key, prev, next = self.map.pop(key)
+      prev[2] = next
+      next[1] = prev
+
+  def __contains__(self, key):
+    return key in self.map
+
+  def add(self, key):
+    if key not in self.map:
+      end = self.end
+      curr = end[1]
+      curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+  def update(self, iterable):
+    for i in iterable:
+      if i not in self:
+        self.add(i)
+
+  def __iter__(self):
+    end = self.end
+    curr = end[2]
+    while curr is not end:
+      yield curr[0]
+      curr = curr[2]
+
+
 # TODO(gspencer): Switch the os.environ calls to be
 # win32api.GetDomainName() and win32api.GetUserName() once the
 # python version in depot_tools has been updated to work on Vista
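The OrderedSet above behaves like a set but iterates in insertion order, presumably so the generated project content follows the order in which sources and actions are listed rather than hash order. A small usage sketch with the class defined above; the file names are illustrative:

# Iteration follows insertion order; duplicates are ignored.
s = OrderedSet()
s.update(['zlib.c', 'adler32.c', 'crc32.c'])
s.add('zlib.c')
assert list(s) == ['zlib.c', 'adler32.c', 'crc32.c']
s.discard('adler32.c')
assert list(s) == ['zlib.c', 'crc32.c']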
@@ -179,7 +220,7 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
   if not prefix: prefix = []
   result = []
   excluded_result = []
-  folders = dict()
+  folders = collections.OrderedDict()
   # Gather files into the final result, excluded, or folders.
   for s in sources:
     if len(s) == 1:
@@ -415,13 +456,13 @@ def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
         dicts describing the actions attached to that input file.
   """
   for primary_input in actions_dict:
-    inputs = set()
-    outputs = set()
+    inputs = OrderedSet()
+    outputs = OrderedSet()
     descriptions = []
     commands = []
     for action in actions_dict[primary_input]:
-      inputs.update(set(action['inputs']))
-      outputs.update(set(action['outputs']))
+      inputs.update(OrderedSet(action['inputs']))
+      outputs.update(OrderedSet(action['outputs']))
       descriptions.append(action['description'])
       commands.append(action['command'])
     # Add the custom build step for one input file.
@@ -477,8 +518,8 @@ def _RuleInputsAndOutputs(rule, trigger_file):
   """
   raw_inputs = _FixPaths(rule.get('inputs', []))
   raw_outputs = _FixPaths(rule.get('outputs', []))
-  inputs = set()
-  outputs = set()
+  inputs = OrderedSet()
+  outputs = OrderedSet()
   inputs.add(trigger_file)
   for i in raw_inputs:
     inputs.add(_RuleExpandPath(i, trigger_file))
@@ -549,16 +590,16 @@ def _GenerateExternalRules(rules, output_dir, spec,
   mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
   mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
   # Gather stuff needed to emit all: target.
-  all_inputs = set()
-  all_outputs = set()
-  all_output_dirs = set()
+  all_inputs = OrderedSet()
+  all_outputs = OrderedSet()
+  all_output_dirs = OrderedSet()
   first_outputs = []
   for rule in rules:
     trigger_files = _FindRuleTriggerFiles(rule, sources)
     for tf in trigger_files:
       inputs, outputs = _RuleInputsAndOutputs(rule, tf)
-      all_inputs.update(set(inputs))
-      all_outputs.update(set(outputs))
+      all_inputs.update(OrderedSet(inputs))
+      all_outputs.update(OrderedSet(outputs))
       # Only use one target from each rule as the dependency for
       # 'all' so we don't try to build each rule multiple times.
       first_outputs.append(list(outputs)[0])
@@ -799,8 +840,8 @@ def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
       trigger_files = _FindRuleTriggerFiles(rule, sources)
       for trigger_file in trigger_files:
         inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
-        inputs = set(_FixPaths(inputs))
-        outputs = set(_FixPaths(outputs))
+        inputs = OrderedSet(_FixPaths(inputs))
+        outputs = OrderedSet(_FixPaths(outputs))
         inputs.remove(_FixPath(trigger_file))
         sources.update(inputs)
         if not spec.get('msvs_external_builder'):
@@ -817,7 +858,7 @@ def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
   Returns:
     excluded_sources with files that have actions attached removed.
   """
-  must_keep = set(_FixPaths(actions_to_add.keys()))
+  must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
   return [s for s in excluded_sources if s not in must_keep]
 
 
@@ -965,7 +1006,7 @@ def _GetUniquePlatforms(spec):
     The MSVSUserFile object created.
   """
   # Gather list of unique platforms.
-  platforms = set()
+  platforms = OrderedSet()
   for configuration in spec['configurations']:
     platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
   platforms = list(platforms)
@@ -1152,7 +1193,7 @@ def _GetLibraries(spec):
   # in libraries that are assumed to be in the default library path).
   # Also remove duplicate entries, leaving only the last duplicate, while
   # preserving order.
-  found = set()
+  found = OrderedSet()
   unique_libraries_list = []
   for entry in reversed(libraries):
     library = re.sub('^\-l', '', entry)
@@ -1331,8 +1372,7 @@ def _GetMSVSAttributes(spec, config, config_type):
 
 
 def _AddNormalizedSources(sources_set, sources_array):
-  sources = [_NormalizedSource(s) for s in sources_array]
-  sources_set.update(set(sources))
+  sources_set.update(_NormalizedSource(s) for s in sources_array)
 
 
 def _PrepareListOfSources(spec, generator_flags, gyp_file):
@@ -1350,9 +1390,9 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
     A pair of (list of sources, list of excluded sources).
     The sources will be relative to the gyp file.
   """
-  sources = set()
+  sources = OrderedSet()
   _AddNormalizedSources(sources, spec.get('sources', []))
-  excluded_sources = set()
+  excluded_sources = OrderedSet()
   # Add in the gyp file.
   if not generator_flags.get('standalone'):
     sources.add(gyp_file)
@@ -1362,7 +1402,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
     inputs = a['inputs']
     inputs = [_NormalizedSource(i) for i in inputs]
     # Add all inputs to sources and excluded sources.
-    inputs = set(inputs)
+    inputs = OrderedSet(inputs)
     sources.update(inputs)
     if not spec.get('msvs_external_builder'):
       excluded_sources.update(inputs)
@@ -1391,7 +1431,7 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
                path of excluded IDL file)
   """
   # Exclude excluded sources coming into the generator.
-  excluded_sources.update(set(spec.get('sources_excluded', [])))
+  excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
   # Add excluded sources into sources for good measure.
   sources.update(excluded_sources)
   # Convert to proper windows form.
@@ -1412,6 +1452,11 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
   sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
                                              list_excluded=list_excluded)
 
+  # Prune filters with a single child to flatten ugly directory structures
+  # such as ../../src/modules/module1 etc.
+  while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
+    sources = sources[0].contents
+
   return sources, excluded_sources, excluded_idl
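To illustrate the pruning loop just added, here is a standalone sketch with a stand-in Filter class (the real class lives in gyp's MSVSProject module; only the contents attribute is modeled):

    class Filter(object):
      # Stand-in for MSVSProject.Filter, just enough to show the flattening.
      def __init__(self, name, contents):
        self.name = name
        self.contents = contents

    sources = [Filter('..', [Filter('src', [Filter('modules', ['a.cc', 'b.cc'])])])]
    while len(sources) == 1 and isinstance(sources[0], Filter):
      sources = sources[0].contents
    print(sources)  # ['a.cc', 'b.cc'] -- the single-child filter chain is gone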
 
 
@@ -1479,7 +1524,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
 
 def _AddToolFilesToMSVS(p, spec):
   # Add in tool files (rules).
-  tool_files = set()
+  tool_files = OrderedSet()
   for _, config in spec['configurations'].iteritems():
     for f in config.get('msvs_tool_files', []):
       tool_files.add(f)
@@ -3202,16 +3247,16 @@ def _GenerateActionsForMSBuild(spec, actions_to_add):
   Returns:
     A pair of (action specification, the sources handled by this action).
   """
-  sources_handled_by_action = set()
+  sources_handled_by_action = OrderedSet()
   actions_spec = []
   for primary_input, actions in actions_to_add.iteritems():
-    inputs = set()
-    outputs = set()
+    inputs = OrderedSet()
+    outputs = OrderedSet()
     descriptions = []
     commands = []
     for action in actions:
-      inputs.update(set(action['inputs']))
-      outputs.update(set(action['outputs']))
+      inputs.update(OrderedSet(action['inputs']))
+      outputs.update(OrderedSet(action['outputs']))
       descriptions.append(action['description'])
       cmd = action['command']
       # For most actions, add 'call' so that actions that invoke batch files
index 23fb9b8..a40c7fe 100644 (file)
@@ -814,15 +814,18 @@ class NinjaWriter:
       cflags_c = self.msvs_settings.GetCflagsC(config_name)
       cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
       extra_defines = self.msvs_settings.GetComputedDefines(config_name)
-      pdbpath = self.msvs_settings.GetCompilerPdbName(
+      # See the comment at cc_command for why there are two .pdb files.

+      pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
           config_name, self.ExpandSpecial)
-      if not pdbpath:
+      if not pdbpath_c:
         obj = 'obj'
         if self.toolset != 'target':
           obj += '.' + self.toolset
-        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir,
-                                                self.name + '.pdb'))
-      self.WriteVariableList(ninja_file, 'pdbname', [pdbpath])
+        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
+        pdbpath_c = pdbpath + '.c.pdb'
+        pdbpath_cc = pdbpath + '.cc.pdb'
+      self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
+      self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
       self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
     else:
       cflags = config.get('cflags', [])
@@ -1578,18 +1581,24 @@ def _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental):
 def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental):
   """Adds link rules for Windows platform to |master_ninja|."""
   def FullLinkCommand(ldcmd, out, binary_type):
-    cmd = ('cmd /c %(ldcmd)s'
-           ' && %(python)s gyp-win-tool manifest-wrapper $arch'
-           ' cmd /c if exist %(out)s.manifest del %(out)s.manifest'
-           ' && %(python)s gyp-win-tool manifest-wrapper $arch'
-           ' $mt -nologo -manifest $manifests')
+    """Returns a one-liner written for cmd.exe to handle multiphase linker
+    operations including manifest file generation. The command will be
+    structured as follows:
+      cmd /c (linkcmd1 a b) && (linkcmd2 x y) && ... &&
+      if not "$manifests"=="" ((manifestcmd1 a b) && (manifestcmd2 x y) && ... )
+    Note that $manifests becomes empty when no manifest file is generated."""
+    link_commands = ['%(ldcmd)s',
+                     'if exist %(out)s.manifest del %(out)s.manifest']
+    mt_cmd = ('%(python)s gyp-win-tool manifest-wrapper'
+              ' $arch $mt -nologo -manifest $manifests')
     if embed_manifest and not link_incremental:
       # Embed manifest into a binary. If incremental linking is enabled,
       # embedding is postponed to the re-linking stage (see below).
-      cmd += ' -outputresource:%(out)s;%(resname)s'
+      mt_cmd += ' -outputresource:%(out)s;%(resname)s'
     else:
       # Save manifest as an external file.
-      cmd += ' -out:%(out)s.manifest'
+      mt_cmd += ' -out:%(out)s.manifest'
+    manifest_commands = [mt_cmd]
     if link_incremental:
       # There is no point in generating separate rule for the case when
       # incremental linking is enabled, but manifest embedding is disabled.
@@ -1597,11 +1606,14 @@ def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental):
       # See also implementation of _GetWinLinkRuleNameSuffix().
       assert embed_manifest
       # Make .rc file out of manifest, compile it to .res file and re-link.
-      cmd += (' && %(python)s gyp-win-tool manifest-to-rc $arch'
-              ' %(out)s.manifest %(out)s.manifest.rc %(resname)s'
-              ' && %(python)s gyp-win-tool rc-wrapper $arch $rc'
-              ' %(out)s.manifest.rc'
-              ' && %(ldcmd)s %(out)s.manifest.res')
+      manifest_commands += [
+        ('%(python)s gyp-win-tool manifest-to-rc $arch %(out)s.manifest'
+         ' %(out)s.manifest.rc %(resname)s'),
+        '%(python)s gyp-win-tool rc-wrapper $arch $rc %(out)s.manifest.rc',
+        '%(ldcmd)s %(out)s.manifest.res']
+    cmd = 'cmd /c %s && if not "$manifests"=="" (%s)' % (
+      ' && '.join(['(%s)' % c for c in link_commands]),
+      ' && '.join(['(%s)' % c for c in manifest_commands]))
     resource_name = {
       'exe': '1',
       'dll': '2',
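For reference, the snippet below reproduces what the new code assembles for the embed-manifest, non-incremental case; the %(...)s placeholders are left unexpanded, exactly as in the rule template above:

    link_commands = ['%(ldcmd)s',
                     'if exist %(out)s.manifest del %(out)s.manifest']
    manifest_commands = [('%(python)s gyp-win-tool manifest-wrapper $arch $mt'
                          ' -nologo -manifest $manifests'
                          ' -outputresource:%(out)s;%(resname)s')]
    cmd = 'cmd /c %s && if not "$manifests"=="" (%s)' % (
        ' && '.join(['(%s)' % c for c in link_commands]),
        ' && '.join(['(%s)' % c for c in manifest_commands]))
    print(cmd)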
@@ -1656,9 +1668,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
 
   toplevel_build = os.path.join(options.toplevel_dir, build_dir)
 
-  master_ninja = ninja_syntax.Writer(
-      OpenOutput(os.path.join(toplevel_build, 'build.ninja')),
-      width=120)
+  master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
+  master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
 
   # Put build-time support tools in out/{config_name}.
   gyp.common.CopyTool(flavor, toplevel_build)
@@ -1679,8 +1690,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
     ld = 'link.exe'
     ld_host = '$ld'
   else:
-    cc = 'gcc'
-    cxx = 'g++'
+    cc = 'cc'
+    cxx = 'c++'
     ld = '$cc'
     ldxx = '$cxx'
     ld_host = '$cc_host'
@@ -1798,14 +1809,20 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
       depfile='$out.d',
       deps=deps)
   else:
+    # TODO(scottmg) Separate pdb names are a test to see if they work around
+    # http://crbug.com/142362. It seems there's a race between the creation of
+    # the .pdb by the precompiled header step for .cc and the compilation of
+    # .c files. This should be handled by mspdbsrv, but it rarely errors out
+    # with:  c1xx : fatal error C1033: cannot open program database
+    # By making the rules target separate pdb files, this might be avoided.
     cc_command = ('ninja -t msvc -e $arch ' +
                   '-- '
                   '$cc /nologo /showIncludes /FC '
-                  '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+                  '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
     cxx_command = ('ninja -t msvc -e $arch ' +
                    '-- '
                    '$cxx /nologo /showIncludes /FC '
-                   '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+                   '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
     master_ninja.rule(
       'cc',
       description='CC $out',
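A small sketch of the fallback naming introduced in the NinjaWriter hunk above, for a hypothetical target 'foo' in src/bar built with the 'host' toolset; the two resulting paths feed $pdbname_c and $pdbname_cc:

    import os

    obj, base_dir, name, toolset = 'obj', 'src/bar', 'foo', 'host'  # hypothetical
    if toolset != 'target':
      obj += '.' + toolset
    pdbpath = os.path.normpath(os.path.join(obj, base_dir, name))
    print(pdbpath + '.c.pdb')   # -> $pdbname_c,  e.g. obj.host/src/bar/foo.c.pdb
    print(pdbpath + '.cc.pdb')  # -> $pdbname_cc, e.g. obj.host/src/bar/foo.cc.pdb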
@@ -2098,6 +2115,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
     master_ninja.build('all', 'phony', list(all_outputs))
     master_ninja.default(generator_flags.get('default_target', 'all'))
 
+  master_ninja_file.close()
+
 
 def PerformBuild(data, configurations, params):
   options = params['options']
index 45e791d..9bc449d 100644 (file)
@@ -2278,6 +2278,7 @@ def ProcessListFiltersInDict(name, the_dict):
       continue
 
     if not isinstance(the_dict[list_key], list):
+      value = the_dict[list_key]
       raise ValueError, name + ' key ' + list_key + \
                         ' must be list, not ' + \
                         value.__class__.__name__ + ' when applying ' + \
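The one-line fix above matters because the error message already references value; a minimal sketch of the now-working error path, using hypothetical input (the real message also names the operation being applied):

    the_dict = {'sources!': 'not-a-list'}   # hypothetical malformed input
    list_key, name = 'sources!', 'example.gyp'
    if not isinstance(the_dict[list_key], list):
      value = the_dict[list_key]           # must be bound before the raise below
      try:
        raise ValueError(name + ' key ' + list_key + ' must be list, not ' +
                         value.__class__.__name__)
      except ValueError as e:
        print(e)  # example.gyp key sources! must be list, not str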
index 20b3a48..c61a3ef 100755 (executable)
@@ -9,6 +9,8 @@ These functions are executed via gyp-mac-tool when using the Makefile generator.
 """
 
 import fcntl
+import fnmatch
+import glob
 import json
 import os
 import plistlib
@@ -17,6 +19,7 @@ import shutil
 import string
 import subprocess
 import sys
+import tempfile
 
 
 def main(args):
@@ -259,6 +262,249 @@ class MacTool(object):
       os.remove(link)
     os.symlink(dest, link)
 
+  def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
+    """Code sign a bundle.
+
+    This function tries to code sign an iOS bundle, following the same
+    algorithm as Xcode:
+      1. copy ResourceRules.plist from the user or the SDK into the bundle,
+      2. pick the provisioning profile that best matches the bundle identifier,
+         and copy it into the bundle as embedded.mobileprovision,
+      3. copy Entitlements.plist from user or SDK next to the bundle,
+      4. code sign the bundle.
+    """
+    resource_rules_path = self._InstallResourceRules(resource_rules)
+    substitutions, overrides = self._InstallProvisioningProfile(
+        provisioning, self._GetCFBundleIdentifier())
+    entitlements_path = self._InstallEntitlements(
+        entitlements, substitutions, overrides)
+    subprocess.check_call([
+        'codesign', '--force', '--sign', key, '--resource-rules',
+        resource_rules_path, '--entitlements', entitlements_path,
+        os.path.join(
+            os.environ['TARGET_BUILD_DIR'],
+            os.environ['FULL_PRODUCT_NAME'])])
+
+  def _InstallResourceRules(self, resource_rules):
+    """Installs ResourceRules.plist from user or SDK into the bundle.
+
+    Args:
+      resource_rules: string, optional, path to the ResourceRules.plist file
+        to use; defaults to "${SDKROOT}/ResourceRules.plist"
+
+    Returns:
+      Path to the copy of ResourceRules.plist inside the bundle.
+    """
+    source_path = resource_rules
+    target_path = os.path.join(
+        os.environ['BUILT_PRODUCTS_DIR'],
+        os.environ['CONTENTS_FOLDER_PATH'],
+        'ResourceRules.plist')
+    if not source_path:
+      source_path = os.path.join(
+          os.environ['SDKROOT'], 'ResourceRules.plist')
+    shutil.copy2(source_path, target_path)
+    return target_path
+
+  def _InstallProvisioningProfile(self, profile, bundle_identifier):
+    """Installs embedded.mobileprovision into the bundle.
+
+    Args:
+      profile: string, optional, short name of the .mobileprovision file
+        to use; if empty or the file is missing, the best installed file
+        will be used
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+    Returns:
+      A tuple containing two dictionaries: variable substitutions and values
+      to override when generating the entitlements file.
+    """
+    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
+        profile, bundle_identifier)
+    target_path = os.path.join(
+        os.environ['BUILT_PRODUCTS_DIR'],
+        os.environ['CONTENTS_FOLDER_PATH'],
+        'embedded.mobileprovision')
+    shutil.copy2(source_path, target_path)
+    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
+    return substitutions, provisioning_data['Entitlements']
+
+  def _FindProvisioningProfile(self, profile, bundle_identifier):
+    """Finds the .mobileprovision file to use for signing the bundle.
+
+    Checks all the installed provisioning profiles (or, if the user specified
+    the PROVISIONING_PROFILE variable, only that one) and selects the most
+    specific one that corresponds to the bundle identifier.
+
+    Args:
+      profile: string, optional, short name of the .mobileprovision file
+        to use; if empty or the file is missing, the best installed file
+        will be used
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+    Returns:
+      A tuple of the path to the selected provisioning profile, the data of
+      the embedded plist in the provisioning profile and the team identifier
+      to use for code signing.
+
+    Raises:
+      SystemExit: if no .mobileprovision can be used to sign the bundle.
+    """
+    profiles_dir = os.path.join(
+        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+    if not os.path.isdir(profiles_dir):
+      print >>sys.stderr, (
+          'cannot find mobile provisioning for %s' % bundle_identifier)
+      sys.exit(1)
+    provisioning_profiles = None
+    if profile:
+      profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
+      if os.path.exists(profile_path):
+        provisioning_profiles = [profile_path]
+    if not provisioning_profiles:
+      provisioning_profiles = glob.glob(
+          os.path.join(profiles_dir, '*.mobileprovision'))
+    valid_provisioning_profiles = {}
+    for profile_path in provisioning_profiles:
+      profile_data = self._LoadProvisioningProfile(profile_path)
+      app_id_pattern = profile_data.get(
+          'Entitlements', {}).get('application-identifier', '')
+      for team_identifier in profile_data.get('TeamIdentifier', []):
+        app_id = '%s.%s' % (team_identifier, bundle_identifier)
+        if fnmatch.fnmatch(app_id, app_id_pattern):
+          valid_provisioning_profiles[app_id_pattern] = (
+              profile_path, profile_data, team_identifier)
+    if not valid_provisioning_profiles:
+      print >>sys.stderr, (
+          'cannot find mobile provisioning for %s' % bundle_identifier)
+      sys.exit(1)
+    # If the user has multiple provisioning profiles installed that can be
+    # used for ${bundle_identifier}, pick the most specific one (i.e. the
+    # provisioning profile whose pattern is the longest).
+    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
+    return valid_provisioning_profiles[selected_key]
+
+  def _LoadProvisioningProfile(self, profile_path):
+    """Extracts the plist embedded in a provisioning profile.
+
+    Args:
+      profile_path: string, path to the .mobileprovision file
+
+    Returns:
+      Content of the plist embedded in the provisioning profile as a dictionary.
+    """
+    with tempfile.NamedTemporaryFile() as temp:
+      subprocess.check_call([
+          'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
+      return self._LoadPlistMaybeBinary(temp.name)
+
+  def _LoadPlistMaybeBinary(self, plist_path):
+    """Loads into memory a plist possibly encoded in binary format.
+
+    This is a wrapper around plistlib.readPlist that tries to convert the
+    plist to the XML format if it can't be parsed (assuming that it is in
+    the binary format).
+
+    Args:
+      plist_path: string, path to a plist file, in XML or binary format
+
+    Returns:
+      Content of the plist as a dictionary.
+    """
+    try:
+      # First, try to read the file using plistlib, which only supports XML,
+      # and if an exception is raised, convert a temporary copy to XML and
+      # load that copy.
+      return plistlib.readPlist(plist_path)
+    except:
+      pass
+    with tempfile.NamedTemporaryFile() as temp:
+      shutil.copy2(plist_path, temp.name)
+      subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
+      return plistlib.readPlist(temp.name)
+
+  def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
+    """Constructs a dictionary of variable substitutions for Entitlements.plist.
+
+    Args:
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+      app_identifier_prefix: string, value for AppIdentifierPrefix
+
+    Returns:
+      Dictionary of substitutions to apply when generating Entitlements.plist.
+    """
+    return {
+      'CFBundleIdentifier': bundle_identifier,
+      'AppIdentifierPrefix': app_identifier_prefix,
+    }
+
+  def _GetCFBundleIdentifier(self):
+    """Extracts CFBundleIdentifier value from Info.plist in the bundle.
+
+    Returns:
+      Value of CFBundleIdentifier in the Info.plist located in the bundle.
+    """
+    info_plist_path = os.path.join(
+        os.environ['TARGET_BUILD_DIR'],
+        os.environ['INFOPLIST_PATH'])
+    info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
+    return info_plist_data['CFBundleIdentifier']
+
+  def _InstallEntitlements(self, entitlements, substitutions, overrides):
+    """Generates and installs the ${BundleName}.xcent entitlements file.
+
+    Expands the "$(variable)" patterns in the source entitlements file, adds
+    the extra entitlements defined in the .mobileprovision file and copies
+    the generated plist to "${BundlePath}.xcent".
+
+    Args:
+      entitlements: string, optional, path to the Entitlements.plist template
+        to use, defaults to "${SDKROOT}/Entitlements.plist"
+      substitutions: dictionary, variable substitutions
+      overrides: dictionary, values to add to the entitlements
+
+    Returns:
+      Path to the generated entitlements file.
+    """
+    source_path = entitlements
+    target_path = os.path.join(
+        os.environ['BUILT_PRODUCTS_DIR'],
+        os.environ['PRODUCT_NAME'] + '.xcent')
+    if not source_path:
+      source_path = os.path.join(
+          os.environ['SDKROOT'],
+          'Entitlements.plist')
+    shutil.copy2(source_path, target_path)
+    data = self._LoadPlistMaybeBinary(target_path)
+    data = self._ExpandVariables(data, substitutions)
+    if overrides:
+      for key in overrides:
+        if key not in data:
+          data[key] = overrides[key]
+    plistlib.writePlist(data, target_path)
+    return target_path
+
+  def _ExpandVariables(self, data, substitutions):
+    """Expands variables "$(variable)" in data.
+
+    Args:
+      data: object, can be either string, list or dictionary
+      substitutions: dictionary, variable substitutions to perform
+
+    Returns:
+      Copy of data where each reference to "$(variable)" has been replaced
+      by the corresponding value found in substitutions, or left intact if
+      the key was not found.
+    """
+    if isinstance(data, str):
+      for key, value in substitutions.iteritems():
+        data = data.replace('$(%s)' % key, value)
+      return data
+    if isinstance(data, list):
+      return [self._ExpandVariables(v, substitutions) for v in data]
+    if isinstance(data, dict):
+      return {k: self._ExpandVariables(data[k], substitutions) for k in data}
+    return data
 
 if __name__ == '__main__':
   sys.exit(main(sys.argv[1:]))
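The selection logic in _FindProvisioningProfile boils down to fnmatch'ing the composed application identifier against each profile's pattern and keeping the longest matching pattern; a standalone sketch with hypothetical team and bundle identifiers:

    import fnmatch

    bundle_identifier = 'com.example.app'   # hypothetical CFBundleIdentifier
    team_identifier = 'ABCDE12345'          # hypothetical TeamIdentifier
    app_id = '%s.%s' % (team_identifier, bundle_identifier)

    # application-identifier patterns from three hypothetical installed profiles.
    patterns = ['ABCDE12345.*',
                'ABCDE12345.com.example.*',
                'ABCDE12345.com.other.app']
    matching = [p for p in patterns if fnmatch.fnmatch(app_id, p)]
    print(max(matching, key=len))  # ABCDE12345.com.example.* -- most specific wins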
index 3ac153d..723201e 100644 (file)
@@ -420,6 +420,7 @@ class MsvsSettings(object):
     libflags.extend(self._GetAdditionalLibraryDirectories(
         'VCLibrarianTool', config, gyp_to_build_path))
     lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
+    lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
     lib('AdditionalOptions')
     return libflags
 
@@ -441,6 +442,17 @@ class MsvsSettings(object):
     if def_file:
       ldflags.append('/DEF:"%s"' % def_file)
 
+  def GetPGDName(self, config, expand_special):
+    """Gets the explicitly overridden pgd name for a target or returns None
+    if it's not overridden."""
+    config = self._TargetConfig(config)
+    output_file = self._Setting(
+        ('VCLinkerTool', 'ProfileGuidedDatabase'), config)
+    if output_file:
+      output_file = expand_special(self.ConvertVSMacros(
+          output_file, config=config))
+    return output_file
+
   def GetLdflags(self, config, gyp_to_build_path, expand_special,
                  manifest_base_name, is_executable):
     """Returns the flags that need to be added to link commands, and the
@@ -455,20 +467,35 @@ class MsvsSettings(object):
     ldflags.extend(self._GetAdditionalLibraryDirectories(
         'VCLinkerTool', config, gyp_to_build_path))
     ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
+    ld('TreatLinkerWarningAsErrors', prefix='/WX',
+       map={'true': '', 'false': ':NO'})
     out = self.GetOutputName(config, expand_special)
     if out:
       ldflags.append('/OUT:' + out)
     pdb = self.GetPDBName(config, expand_special)
     if pdb:
       ldflags.append('/PDB:' + pdb)
+    pgd = self.GetPGDName(config, expand_special)
+    if pgd:
+      ldflags.append('/PGD:' + pgd)
     map_file = self.GetMapFileName(config, expand_special)
     ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
         else '/MAP'})
     ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
     ld('AdditionalOptions', prefix='')
-    ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
+
+    minimum_required_version = self._Setting(
+        ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
+    if minimum_required_version:
+      minimum_required_version = ',' + minimum_required_version
+    ld('SubSystem',
+       map={'1': 'CONSOLE%s' % minimum_required_version,
+            '2': 'WINDOWS%s' % minimum_required_version},
+       prefix='/SUBSYSTEM:')
+
     ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
     ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
+    ld('BaseAddress', prefix='/BASE:')
     ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
     ld('RandomizedBaseAddress',
         map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
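As a concrete illustration of the /SUBSYSTEM change above (setting values are hypothetical): with SubSystem '1' and MinimumRequiredVersion '5.02', the emitted flag gains a version suffix:

    minimum_required_version = '5.02'   # hypothetical VCLinkerTool setting
    if minimum_required_version:
      minimum_required_version = ',' + minimum_required_version
    subsystem_map = {'1': 'CONSOLE%s' % minimum_required_version,
                     '2': 'WINDOWS%s' % minimum_required_version}
    print('/SUBSYSTEM:' + subsystem_map['1'])  # /SUBSYSTEM:CONSOLE,5.02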
@@ -476,7 +503,10 @@ class MsvsSettings(object):
         map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
     ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
     ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
-    ld('LinkTimeCodeGeneration', map={'1': '/LTCG'})
+    ld('LinkTimeCodeGeneration',
+        map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
+             '4': ':PGUPDATE'},
+        prefix='/LTCG')
     ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
     ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
     ld('EntryPointSymbol', prefix='/ENTRY:')
@@ -501,18 +531,26 @@ class MsvsSettings(object):
       ldflags.append('/NXCOMPAT')
 
     have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
-    manifest_flags, intermediate_manifest_file = self._GetLdManifestFlags(
-        config, manifest_base_name, is_executable and not have_def_file)
+    manifest_flags, manifest_files = self._GetLdManifestFlags(
+        config, manifest_base_name, gyp_to_build_path,
+        is_executable and not have_def_file)
     ldflags.extend(manifest_flags)
-    manifest_files = self._GetAdditionalManifestFiles(config, gyp_to_build_path)
-    manifest_files.append(intermediate_manifest_file)
-
     return ldflags, manifest_files
 
-  def _GetLdManifestFlags(self, config, name, allow_isolation):
+  def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
+                          allow_isolation):
     """Returns the set of flags that need to be added to the link to generate
-    a default manifest, as well as the name of the generated file."""
-    # The manifest is generated by default.
+    a default manifest, as well as the list of all the manifest files to be
+    merged by the manifest tool."""
+    generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
+                                      config,
+                                      default='true')
+    if generate_manifest != 'true':
+      # This means not only that the linker should not generate the intermediate
+      # manifest but also that the manifest tool should do nothing even when
+      # additional manifests are specified.
+      return ['/MANIFEST:NO'], []
+
     output_name = name + '.intermediate.manifest'
     flags = [
       '/MANIFEST',
@@ -540,7 +578,11 @@ class MsvsSettings(object):
 
     if allow_isolation:
       flags.append('/ALLOWISOLATION')
-    return flags, output_name
+
+    manifest_files = [output_name]
+    manifest_files += self._GetAdditionalManifestFiles(config,
+                                                       gyp_to_build_path)
+    return flags, manifest_files
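A simplified sketch of the new return contract of _GetLdManifestFlags; the real function also emits /ManifestFile and the UAC-related flags, which are omitted here:

    def ld_manifest_flags(generate_manifest, name, additional_manifests):
      if generate_manifest != 'true':
        # Neither the linker nor the manifest tool produces/merges anything.
        return ['/MANIFEST:NO'], []
      output_name = name + '.intermediate.manifest'
      return ['/MANIFEST'], [output_name] + list(additional_manifests)

    print(ld_manifest_flags('false', 'foo', ['extra.manifest']))
    # (['/MANIFEST:NO'], [])
    print(ld_manifest_flags('true', 'foo', ['extra.manifest']))
    # (['/MANIFEST'], ['foo.intermediate.manifest', 'extra.manifest'])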
 
   def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
     """Gets additional manifest files that are added to the default one
index 3424c01..7f3b0a5 100755 (executable)
@@ -10,12 +10,16 @@ These functions are executed via gyp-win-tool when using the ninja generator.
 """
 
 import os
+import re
 import shutil
 import subprocess
 import sys
 
 BASE_DIR = os.path.dirname(os.path.abspath(__file__))
 
+# A regex matching an argument corresponding to a PDB filename passed as an
+# argument to link.exe.
+_LINK_EXE_PDB_ARG = re.compile('/PDB:(?P<pdb>.+\.exe\.pdb)$', re.IGNORECASE)
 
 def main(args):
   executor = WinTool()
@@ -28,6 +32,35 @@ class WinTool(object):
   """This class performs all the Windows tooling steps. The methods can either
   be executed directly, or dispatched from an argument list."""
 
+  def _MaybeUseSeparateMspdbsrv(self, env, args):
+    """Allows a dedicated instance of mspdbsrv.exe to be used for the linkers
+    linking an .exe target when GYP_USE_SEPARATE_MSPDBSRV has been set."""
+    if not os.environ.get('GYP_USE_SEPARATE_MSPDBSRV'):
+      return
+
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    if args[0] != 'link.exe':
+      return
+
+    # Checks if this linker produces a PDB for an .exe target. If so, use the
+    # name of this PDB to generate an endpoint name for mspdbsrv.exe.
+    endpoint_name = None
+    for arg in args:
+      m = _LINK_EXE_PDB_ARG.match(arg)
+      if m:
+        endpoint_name = '%s_%d' % (m.group('pdb'), os.getpid())
+        break
+
+    if endpoint_name is None:
+      return
+
+    # Adds the appropriate environment variable. This will be read by link.exe
+    # to know which instance of mspdbsrv.exe it should connect to (if it's
+    # not set then the default endpoint is used).
+    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
+
   def Dispatch(self, args):
     """Dispatches a string command to a method."""
     if len(args) < 1:
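A self-contained sketch of the endpoint naming used by _MaybeUseSeparateMspdbsrv: the regex only matches PDBs of .exe targets, and the endpoint ties the PDB name to the current process id (the /PDB argument below is hypothetical):

    import os
    import re

    _LINK_EXE_PDB_ARG = re.compile(r'/PDB:(?P<pdb>.+\.exe\.pdb)$', re.IGNORECASE)

    args = ['link.exe', '/OUT:foo.exe', '/PDB:obj\\foo.exe.pdb']  # hypothetical
    endpoint_name = None
    for arg in args:
      m = _LINK_EXE_PDB_ARG.match(arg)
      if m:
        endpoint_name = '%s_%d' % (m.group('pdb'), os.getpid())
        break
    print(endpoint_name)  # e.g. obj\foo.exe.pdb_1234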
@@ -71,13 +104,17 @@ class WinTool(object):
     This happens when there are exports from the dll or exe.
     """
     env = self._GetEnv(arch)
-    popen = subprocess.Popen(args, shell=True, env=env,
-                              stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = popen.communicate()
+    self._MaybeUseSeparateMspdbsrv(env, args)
+    link = subprocess.Popen(args,
+                            shell=True,
+                            env=env,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.STDOUT)
+    out, _ = link.communicate()
     for line in out.splitlines():
       if not line.startswith('   Creating library '):
         print line
-    return popen.returncode
+    return link.returncode
 
   def ExecManifestWrapper(self, arch, *args):
     """Run manifest tool with environment set. Strip out undesirable warning
@@ -168,9 +205,7 @@ class WinTool(object):
     env = self._GetEnv(arch)
     args = open(rspfile).read()
     dir = dir[0] if dir else None
-    popen = subprocess.Popen(args, shell=True, env=env, cwd=dir)
-    popen.wait()
-    return popen.returncode
+    return subprocess.call(args, shell=True, env=env, cwd=dir)
 
 if __name__ == '__main__':
   sys.exit(main(sys.argv[1:]))
index f9cec33..520dcc4 100644 (file)
@@ -9,11 +9,13 @@ other build systems, such as make and ninja.
 
 import copy
 import gyp.common
+import os
 import os.path
 import re
 import shlex
 import subprocess
 import sys
+import tempfile
 from gyp.common import GypError
 
 class XcodeSettings(object):
@@ -22,6 +24,7 @@ class XcodeSettings(object):
   # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
   # at class-level for efficiency.
   _sdk_path_cache = {}
+  _sdk_root_cache = {}
 
   # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
   # cached at class-level for efficiency.
@@ -31,6 +34,10 @@ class XcodeSettings(object):
   # cached at class-level for efficiency.
   _codesigning_key_cache = {}
 
+  # Populated lazily by _XcodeVersion.  Shared by all XcodeSettings, so cached
+  # at class-level for efficiency.
+  _xcode_version_cache = ()
+
   def __init__(self, spec):
     self.spec = spec
 
@@ -262,7 +269,7 @@ class XcodeSettings(object):
     """Returns the architectures this target should be built for."""
     # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
     # CURRENT_ARCH / NATIVE_ARCH env vars?
-    return self.xcode_settings[configname].get('ARCHS', ['i386'])
+    return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
 
   def _GetStdout(self, cmdlist):
     job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
@@ -284,9 +291,14 @@ class XcodeSettings(object):
     sdk_root = self._SdkRoot(configname)
     if sdk_root.startswith('/'):
       return sdk_root
+    return self._XcodeSdkPath(sdk_root)
+
+  def _XcodeSdkPath(self, sdk_root):
     if sdk_root not in XcodeSettings._sdk_path_cache:
-      XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
-          sdk_root, 'Path')
+      sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
+      XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
+      if sdk_root:
+        XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
     return XcodeSettings._sdk_path_cache[sdk_root]
 
   def _AppendPlatformVersionMinFlags(self, lst):
@@ -377,7 +389,7 @@ class XcodeSettings(object):
     if arch is not None:
       archs = [arch]
     else:
-      archs = self._Settings().get('ARCHS', ['i386'])
+      archs = self._Settings().get('ARCHS', [self._DefaultArch()])
     if len(archs) != 1:
       # TODO: Supporting fat binaries will be annoying.
       self._WarnUnimplemented('ARCHS')
@@ -630,7 +642,7 @@ class XcodeSettings(object):
     if arch is not None:
       archs = [arch]
     else:
-      archs = self._Settings().get('ARCHS', ['i386'])
+      archs = self._Settings().get('ARCHS', [self._DefaultArch()])
     if len(archs) != 1:
       # TODO: Supporting fat binaries will be annoying.
       self._WarnUnimplemented('ARCHS')
@@ -780,33 +792,38 @@ class XcodeSettings(object):
     if not (self.isIOS and self.spec['type'] == "executable"):
       return []
 
-    identity = self.xcode_settings[configname].get('CODE_SIGN_IDENTITY', '')
-    if identity == '':
+    settings = self.xcode_settings[configname]
+    key = self._GetIOSCodeSignIdentityKey(settings)
+    if not key:
       return []
+
+    # Warn for any unimplemented signing xcode keys.
+    unimpl = ['OTHER_CODE_SIGN_FLAGS']
+    unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
+    if unimpl:
+      print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
+          ', '.join(sorted(unimpl)))
+
+    return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
+        os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+        settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
+        settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+        settings.get('PROVISIONING_PROFILE', ''))
+    ]
+
+  def _GetIOSCodeSignIdentityKey(self, settings):
+    identity = settings.get('CODE_SIGN_IDENTITY')
+    if not identity:
+      return None
     if identity not in XcodeSettings._codesigning_key_cache:
-      proc = subprocess.Popen(['security', 'find-identity', '-p', 'codesigning',
-                               '-v'], stdout=subprocess.PIPE)
-      output = proc.communicate()[0].strip()
-      key = None
-      for item in output.split("\n"):
-        if identity in item:
-          assert key == None, (
-              "Multiple codesigning identities for identity: %s" %
-              identity)
-          key = item.split(' ')[1]
-      XcodeSettings._codesigning_key_cache[identity] = key
-    key = XcodeSettings._codesigning_key_cache[identity]
-    if key:
-      # Warn for any unimplemented signing xcode keys.
-      unimpl = ['CODE_SIGN_RESOURCE_RULES_PATH', 'OTHER_CODE_SIGN_FLAGS',
-                'CODE_SIGN_ENTITLEMENTS']
-      keys = set(self.xcode_settings[configname].keys())
-      unimpl = set(unimpl) & keys
-      if unimpl:
-        print 'Warning: Some codesign keys not implemented, ignoring:', \
-            ' '.join(unimpl)
-      return ['codesign --force --sign %s %s' % (key, output_binary)]
-    return []
+      output = subprocess.check_output(
+          ['security', 'find-identity', '-p', 'codesigning', '-v'])
+      for line in output.splitlines():
+        if identity in line:
+          assert identity not in XcodeSettings._codesigning_key_cache, (
+              "Multiple codesigning identities for identity: %s" % identity)
+          XcodeSettings._codesigning_key_cache[identity] = line.split()[1]
+    return XcodeSettings._codesigning_key_cache.get(identity, '')
 
   def AddImplicitPostbuilds(self, configname, output, output_binary,
                             postbuilds=[], quiet=False):
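For clarity, a sketch of how the rewritten identity lookup parses the tool output; the identity and hash below are made up, and the real code obtains the output via subprocess.check_output(['security', 'find-identity', '-p', 'codesigning', '-v']):

    # Hypothetical output of: security find-identity -p codesigning -v
    output = ('  1) A1B2C3D4E5F60718293A4B5C6D7E8F9012345678'
              ' "iPhone Developer: Jane Doe (TEAM123456)"\n'
              '     1 valid identities found')
    identity = 'iPhone Developer: Jane Doe (TEAM123456)'
    key = None
    for line in output.splitlines():
      if identity in line:
        key = line.split()[1]   # the hash passed to 'codesign --force --sign'
        break
    print(key)  # A1B2C3D4E5F60718293A4B5C6D7E8F9012345678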
@@ -848,14 +865,16 @@ class XcodeSettings(object):
     #    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
     #    BuildVersion: 10M2518
     # Convert that to '0463', '4H1503'.
-    version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
-    version = version_list[0]
-    build = version_list[-1]
-    # Be careful to convert "4.2" to "0420":
-    version = version.split()[-1].replace('.', '')
-    version = (version + '0' * (3 - len(version))).zfill(4)
-    build = build.split()[-1]
-    return version, build
+    if len(XcodeSettings._xcode_version_cache) == 0:
+      version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
+      version = version_list[0]
+      build = version_list[-1]
+      # Be careful to convert "4.2" to "0420":
+      version = version.split()[-1].replace('.', '')
+      version = (version + '0' * (3 - len(version))).zfill(4)
+      build = build.split()[-1]
+      XcodeSettings._xcode_version_cache = (version, build)
+    return XcodeSettings._xcode_version_cache
 
   def _XcodeIOSDeviceFamily(self, configname):
     family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
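The cache above stores the already-normalized version string; the normalization itself (unchanged from the original code) pads and zero-fills, for example:

    def normalize_xcode_version(version):
      # Same transform as above: "4.2" -> "0420", "4.6.3" -> "0463".
      version = version.replace('.', '')
      return (version + '0' * (3 - len(version))).zfill(4)

    print(normalize_xcode_version('4.2'))    # 0420
    print(normalize_xcode_version('4.6.3'))  # 0463
    print(normalize_xcode_version('5.0.2'))  # 0502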
@@ -872,6 +891,8 @@ class XcodeSettings(object):
       cache['DTXcodeBuild'] = xcode_build
 
       sdk_root = self._SdkRoot(configname)
+      if not sdk_root:
+        sdk_root = self._DefaultSdkRoot()
       cache['DTSDKName'] = sdk_root
       if xcode >= '0430':
         cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
@@ -896,6 +917,51 @@ class XcodeSettings(object):
       items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
     return items
 
+  def _DefaultSdkRoot(self):
+    """Returns the default SDKROOT to use.
+
+    Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
+    project, the environment variable was left empty. Starting with Xcode
+    5.0.0, it defaults to the name of the newest installed SDK.
+    """
+    if self._XcodeVersion() < '0500':
+      return ''
+    default_sdk_path = self._XcodeSdkPath('')
+    default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
+    if default_sdk_root:
+      return default_sdk_root
+    all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
+    for line in all_sdks.splitlines():
+      items = line.split()
+      if len(items) >= 3 and items[-2] == '-sdk':
+        sdk_root = items[-1]
+        sdk_path = self._XcodeSdkPath(sdk_root)
+        if sdk_path == default_sdk_path:
+          return sdk_root
+    return ''
+
+  def _DefaultArch(self):
+    # For Mac projects, Xcode changed the default value used when ARCHS is not
+    # set from "i386" to "x86_64".
+    #
+    # For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
+    # building for a device, and the simulator binaries are always built for
+    # "i386".
+    #
+    # For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
+    # which corresponds to "armv7 armv7s arm64", and when building for the simulator
+    # the architecture is either "i386" or "x86_64" depending on the simulated
+    # device (respectively 32-bit or 64-bit device).
+    #
+    # Since the value returned by this function is only used when ARCHS is not
+    # set, on iOS we return "i386", as the default Xcode project generator
+    # does not set ARCHS if it is not set in the .gyp file.
+    if self.isIOS:
+      return 'i386'
+    version, build = self._XcodeVersion()
+    if version >= '0500':
+      return 'x86_64'
+    return 'i386'
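Condensed into a standalone helper, the decision implemented by _DefaultArch looks like this:

    def default_arch(is_ios, xcode_version):
      # xcode_version uses the normalized '0XYZ' form returned by _XcodeVersion.
      if is_ios:
        return 'i386'
      return 'x86_64' if xcode_version >= '0500' else 'i386'

    print(default_arch(False, '0463'))  # i386
    print(default_arch(False, '0510'))  # x86_64
    print(default_arch(True, '0510'))   # i386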
 
 class MacPrefixHeader(object):
   """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.