1 # Copyright (c) 2012 Google Inc. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 from compiler.ast import Const
6 from compiler.ast import Dict
7 from compiler.ast import Discard
8 from compiler.ast import List
9 from compiler.ast import Module
10 from compiler.ast import Node
11 from compiler.ast import Stmt
14 import gyp.simple_copy
15 import multiprocessing
26 from gyp.common import GypError
27 from gyp.common import OrderedSet
# A list of types that are treated as linkable.
linkable_types = ['executable', 'shared_library', 'loadable_module']

# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']

# base_path_sections is a list of sections defined by GYP that contain
# pathnames. The generators can provide more keys, the two lists are merged
# into path_sections, but you should call IsPathSection instead of using either
# NOTE(review): the entries of this list (and its closing bracket) are not
# shown in this listing.
base_path_sections = [
def IsPathSection(section):
  """Returns whether |section| is one whose values are path names.

  A section counts as a path section if it is in the module-level
  path_sections collection, or if its name follows the *_dir/*_file/*_path
  naming convention checked below.
  """
  # If section ends in one of the '=+?!' characters, it's applied to a section
  # without the trailing characters. '/' is notably absent from this list,
  # because there's no way for a regular expression to be treated as a path.
  while section[-1:] in '=+?!':
    section = section[:-1]

  if section in path_sections:
    # NOTE(review): the body of this branch is elided in this listing.

  # Sections mathing the regexp '_(dir|file|path)s?$' are also
  # considered PathSections. Using manual string matching since that
  # is much faster than the regexp and this can be called hundreds of
  # thousands of times so micro performance matters.
  # NOTE(review): the lines computing |tail| from |section| are elided here.
  if tail[-5:] in ('_file', '_path'):
    # NOTE(review): branch body elided in this listing.
  return tail[-4:] == '_dir'
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations. It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
# NOTE(review): several entries and the closing bracket of this list are not
# shown in this listing.
base_non_configuration_keys = [
  # Sections that must exist inside targets and not configurations.
  'default_configuration',
  'dependencies_original',
  'standalone_static_library',

# Sections that can be found inside targets or configurations, but that
# should not be propagated from targets into their configurations.
# Populated (merged with generator-provided keys) elsewhere in the file.
non_configuration_keys = []

# Keys that do not belong inside a configuration dictionary.
# NOTE(review): several entries and the closing bracket of this list are not
# shown in this listing.
invalid_configuration_keys = [
  'all_dependent_settings',
  'direct_dependent_settings',
  'standalone_static_library',

# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False

# Paths for converting filelist paths to output paths: {
#   qualified_output_dir,
# NOTE(review): the remainder of this comment describing the dict keys is
# elided in this listing.
generator_filelist_paths = None
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly. Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file. Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers. It
  is used for recursion.

  The returned list will not contain any duplicate entries. Each build file
  in the list will be relative to the current directory.
  """
  # NOTE(review): the initialization of |included| for the top-level call
  # (replacing the None default) is elided in this listing.
  if build_file_path in included:
    # NOTE(review): early-return body elided in this listing.
  included.append(build_file_path)

  # Recurse into every file this one included, accumulating into |included|.
  for included_build_file in aux_data[build_file_path].get('included', []):
    GetIncludedBuildFiles(included_build_file, aux_data, included)
  # NOTE(review): the return statement is elided in this listing.
def CheckedEval(file_contents):
  """Return the eval of a gyp file.

  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.

  Note that this is slower than eval() is.
  """
  # Parse with the Python 2 'compiler' module and walk the AST by hand so
  # duplicate dict keys can be rejected (plain eval() silently keeps the
  # last one).
  ast = compiler.parse(file_contents)
  assert isinstance(ast, Module)
  c1 = ast.getChildren()
  assert isinstance(c1[1], Stmt)
  c2 = c1[1].getChildren()
  assert isinstance(c2[0], Discard)
  c3 = c2[0].getChildren()
  # The single expression in the file is validated/converted recursively.
  return CheckNode(c3[0], [])
def CheckNode(node, keypath):
  """Converts one 'compiler' AST node to a plain Python value.

  Only Dict, List, and Const nodes are accepted; repeated dict keys raise
  GypError. |keypath| is the path of keys/indices used in error messages.
  """
  if isinstance(node, Dict):
    c = node.getChildren()
    # NOTE(review): the accumulator initialization (shadowing the builtin
    # name 'dict') is elided in this listing.
    # getChildren() alternates key, value — step by 2 over the pairs.
    for n in range(0, len(c), 2):
      assert isinstance(c[n], Const)
      key = c[n].getChildren()[0]
      # NOTE(review): the duplicate-key check guarding this raise is elided.
        raise GypError("Key '" + key + "' repeated at level " +
                       repr(len(keypath) + 1) + " with key path '" +
                       '.'.join(keypath) + "'")
      kp = list(keypath)  # Make a copy of the list for descending this node.
      dict[key] = CheckNode(c[n + 1], kp)
    # NOTE(review): the return of the accumulated dict is elided.
  elif isinstance(node, List):
    c = node.getChildren()
    # NOTE(review): the 'children' list initialization is elided.
    for index, child in enumerate(c):
      kp = list(keypath)  # Copy list.
      kp.append(repr(index))
      children.append(CheckNode(child, kp))
    # NOTE(review): the return of |children| is elided.
  elif isinstance(node, Const):
    return node.getChildren()[0]
  # NOTE(review): the enclosing 'else:' and the continuation line of this
  # raise are elided in this listing.
    raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
def LoadOneBuildFile(build_file_path, data, aux_data, includes,
                     # NOTE(review): the remaining parameters of this
                     # signature (per the calls below: is_target, check)
                     # are elided in this listing.
  """Loads one .gyp/.gypi file, merges its includes, and caches it in |data|.

  Returns the (possibly cached) dict evaluated from the file's contents.
  Raises GypError if the file is missing or does not evaluate to a dict.
  """
  # Return the cached copy if this file was already loaded.
  if build_file_path in data:
    return data[build_file_path]

  if os.path.exists(build_file_path):
    build_file_contents = open(build_file_path).read()
    # NOTE(review): the 'else:' introducing this raise is elided.
    raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))

  build_file_data = None
  # NOTE(review): the try/if-check wrapper around these two evaluation paths
  # is elided; CheckedEval validates, eval() is the fast path with builtins
  # disabled.
      build_file_data = CheckedEval(build_file_contents)
      build_file_data = eval(build_file_contents, {'__builtins__': None},
  except SyntaxError, e:
    # Attach the file name so the user sees which file failed to parse.
    e.filename = build_file_path
    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)

  if type(build_file_data) is not dict:
    raise GypError("%s does not evaluate to a dictionary." % build_file_path)

  data[build_file_path] = build_file_data
  aux_data[build_file_path] = {}

  # Scan for includes and merge them in.
  if ('skip_includes' not in build_file_data or
      not build_file_data['skip_includes']):
    # NOTE(review): the try/branch structure selecting between these two
    # calls (passing |includes| vs None) is elided in this listing.
      LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                    aux_data, includes, check)
      LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                    aux_data, None, check)
      gyp.common.ExceptionAppend(e,
                                 'while reading includes of ' + build_file_path)

  return build_file_data
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                                  # NOTE(review): the remaining parameters
                                  # (per recursive calls: includes, check)
                                  # are elided in this listing.
  """Merges 'includes' files into |subdict| and recurses into its children.

  Records each merged include under aux_data[subdict_path]['included'].
  """
  # NOTE(review): the includes_list initialization and None-guard are elided.
    includes_list.extend(includes)
  if 'includes' in subdict:
    for include in subdict['includes']:
      # "include" is specified relative to subdict_path, so compute the real
      # path to include by appending the provided "include" to the directory
      # in which subdict_path resides.
      # NOTE(review): the 'relative_include = \' line is elided.
          os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
      includes_list.append(relative_include)
    # Unhook the includes list, it's no longer needed.
    del subdict['includes']

  # Merge in the included files.
  for include in includes_list:
    if not 'included' in aux_data[subdict_path]:
      aux_data[subdict_path]['included'] = []
    aux_data[subdict_path]['included'].append(include)

    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)

    # NOTE(review): the 'MergeDicts(subdict,' opener of this call is elided.
               LoadOneBuildFile(include, data, aux_data, None, False, check),
               subdict_path, include)

  # Recurse into subdictionaries.
  for k, v in subdict.iteritems():
    # NOTE(review): the 'if type(v) is dict:' guard is elided here.
      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
    elif type(v) is list:
      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
  """Recurses through |sublist| processing includes in any nested dicts."""
  # NOTE(review): the 'for item in sublist:' loop header is elided in this
  # listing.
    if type(item) is dict:
      LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
    elif type(item) is list:
      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
312 # Processes toolsets in all the targets. This recurses into condition entries
313 # since they can contain toolsets as well.
314 def ProcessToolsetsInDict(data):
315 if 'targets' in data:
316 target_list = data['targets']
318 for target in target_list:
319 # If this target already has an explicit 'toolset', and no 'toolsets'
320 # list, don't modify it further.
321 if 'toolset' in target and 'toolsets' not in target:
322 new_target_list.append(target)
324 if multiple_toolsets:
325 toolsets = target.get('toolsets', ['target'])
327 toolsets = ['target']
328 # Make sure this 'toolsets' definition is only processed once.
329 if 'toolsets' in target:
330 del target['toolsets']
331 if len(toolsets) > 0:
332 # Optimization: only do copies if more than one toolset is specified.
333 for build in toolsets[1:]:
334 new_target = gyp.simple_copy.deepcopy(target)
335 new_target['toolset'] = build
336 new_target_list.append(new_target)
337 target['toolset'] = toolsets[0]
338 new_target_list.append(target)
339 data['targets'] = new_target_list
340 if 'conditions' in data:
341 for condition in data['conditions']:
342 if type(condition) is list:
343 for condition_dict in condition[1:]:
344 ProcessToolsetsInDict(condition_dict)
# TODO(mark): I don't love this name. It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
                        depth, check, load_dependencies):
  """Loads a target-bearing build file and (optionally) its dependencies.

  Expands toolsets, applies "pre"/"early" variable and condition
  processing, merges target_defaults into each target, and resolves the
  'dependencies' sections. Returns (build_file_path, dependencies) when
  load_dependencies is False; dependency loading is recursive otherwise.
  """
  # If depth is set, predefine the DEPTH variable to be a relative path from
  # this build file's directory to the directory identified by depth.
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
      variables['DEPTH'] = '.'
      # NOTE(review): the if/else on the computed relative path |d| is
      # elided around these two assignments.
      variables['DEPTH'] = d.replace('\\', '/')

  if build_file_path in data['target_build_files']:
    # NOTE(review): the already-loaded early return is elided.
  data['target_build_files'].add(build_file_path)

  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                  "Loading Target Build File '%s'", build_file_path)

  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
                                     includes, True, check)

  # Store DEPTH for later use in generators.
  build_file_data['_DEPTH'] = depth

  # Set up the included_files key indicating which .gyp files contributed to
  if 'included_files' in build_file_data:
    raise GypError(build_file_path + ' must not contain included_files key')

  included = GetIncludedBuildFiles(build_file_path, aux_data)
  build_file_data['included_files'] = []
  for included_file in included:
    # included_file is relative to the current directory, but it needs to
    # be made relative to build_file_path's directory.
    included_relative = \
        gyp.common.RelativePath(included_file,
                                os.path.dirname(build_file_path))
    build_file_data['included_files'].append(included_relative)

  # Do a first round of toolsets expansion so that conditions can be defined
  ProcessToolsetsInDict(build_file_data)

  # Apply "pre"/"early" variable expansions and condition evaluations.
  ProcessVariablesAndConditionsInDict(
      build_file_data, PHASE_EARLY, variables, build_file_path)

  # Since some toolsets might have been defined conditionally, perform
  # a second round of toolsets expansion now.
  ProcessToolsetsInDict(build_file_data)

  # Look at each project's target_defaults dict, and merge settings into
  if 'target_defaults' in build_file_data:
    if 'targets' not in build_file_data:
      raise GypError("Unable to find targets in build file %s" %
    # NOTE(review): the index initialization is elided before this loop.
    while index < len(build_file_data['targets']):
      # This procedure needs to give the impression that target_defaults is
      # used as defaults, and the individual targets inherit from that.
      # The individual targets need to be merged into the defaults. Make
      # a deep copy of the defaults for each target, merge the target dict
      # as found in the input file into that copy, and then hook up the
      # copy with the target-specific data merged into it as the replacement
      old_target_dict = build_file_data['targets'][index]
      new_target_dict = gyp.simple_copy.deepcopy(
          build_file_data['target_defaults'])
      MergeDicts(new_target_dict, old_target_dict,
                 build_file_path, build_file_path)
      build_file_data['targets'][index] = new_target_dict
      # NOTE(review): the index increment is elided here.
    del build_file_data['target_defaults']

  # Look for dependencies. This means that dependency resolution occurs
  # after "pre" conditionals and variable expansion, but before "post" -
  # in other words, you can't put a "dependencies" section inside a "post"
  # conditional within a target.
  # NOTE(review): the dependencies-list initialization is elided.
  if 'targets' in build_file_data:
    for target_dict in build_file_data['targets']:
      if 'dependencies' not in target_dict:
        # NOTE(review): 'continue' elided.
      for dependency in target_dict['dependencies']:
        # NOTE(review): the 'dependencies.append(' opener is elided.
            gyp.common.ResolveTarget(build_file_path, dependency, None)[0])

  if load_dependencies:
    for dependency in dependencies:
      # NOTE(review): the try wrapper around this recursive load is elided.
        LoadTargetBuildFile(dependency, data, aux_data, variables,
                            includes, depth, check, load_dependencies)
        gyp.common.ExceptionAppend(
            e, 'while loading dependencies of %s' % build_file_path)
  # NOTE(review): the 'else:' selecting this return is elided.
    return (build_file_path, dependencies)
def CallLoadTargetBuildFile(global_flags,
                            build_file_path, data,
                            # NOTE(review): the aux_data/variables parameter
                            # line is elided from this signature.
                            includes, depth, check,
                            generator_input_info):
  """Wrapper around LoadTargetBuildFile for parallel processing.

  This wrapper is used when LoadTargetBuildFile is executed in
  a worker process; only the keys added by this call are sent back.
  """
  # NOTE(review): the enclosing try is elided; body runs at worker startup.
    # Let the parent process handle Ctrl-C; workers ignore it.
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    # Apply globals so that the worker process behaves the same.
    for key, value in global_flags.iteritems():
      globals()[key] = value

    # Save the keys so we can return data that changed.
    data_keys = set(data)
    aux_data_keys = set(aux_data)

    SetGeneratorGlobals(generator_input_info)
    result = LoadTargetBuildFile(build_file_path, data,
                                 includes, depth, check, False)
    # NOTE(review): the empty-result early return is elided.

    (build_file_path, dependencies) = result

    # Only ship back keys that were added by this load.
    # NOTE(review): the data_out initialization and 'for key in data:' loop
    # header are elided around these lines.
      if key == 'target_build_files':
      if key not in data_keys:
        data_out[key] = data[key]
      if key not in aux_data_keys:
        aux_data_out[key] = aux_data[key]

    # This gets serialized and sent back to the main process via a pipe.
    # It's handled in LoadTargetBuildFileCallback.
    return (build_file_path,
  # NOTE(review): the except clauses selecting these handlers are elided.
    sys.stderr.write("gyp: %s\n" % e)
    print >>sys.stderr, 'Exception:', e
    print >>sys.stderr, traceback.format_exc()
class ParallelProcessingError(Exception):
  """Error raised while loading build files in parallel.

  NOTE(review): raise sites are outside this excerpt — confirm against the
  full file.
  """
class ParallelState(object):
  """Class to keep track of state when processing input files in parallel.

  If build files are loaded in parallel, use this to keep track of
  state during farming out and processing parallel jobs. It's stored
  in a global so that the callback function can have access to it.
  """

  # NOTE(review): the 'def __init__(self):' header is elided before these
  # attribute initializations.
    # The multiprocessing pool.
    # The condition variable used to protect this object and notify
    # the main loop when there might be more data to process.
    self.condition = None
    # The "data" dict that was passed to LoadTargetBuildFileParallel
    # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
    # The number of parallel calls outstanding; decremented when a response
    # The set of all build files that have been scheduled, so we don't
    # schedule the same one twice.
    self.scheduled = set()
    # A list of dependency build file paths that haven't been scheduled yet.
    self.dependencies = []
    # Flag to indicate if there was an error in a child process.

  def LoadTargetBuildFileCallback(self, result):
    """Handle the results of running LoadTargetBuildFile in another process.
    """
    self.condition.acquire()
    # NOTE(review): the failure branch ('if not result:' and the error-flag
    # set) is elided around this notify/release pair.
      self.condition.notify()
      self.condition.release()
    (build_file_path0, data0, aux_data0, dependencies0) = result
    self.data['target_build_files'].add(build_file_path0)
    # NOTE(review): the 'for key in data0:' header is elided here.
      self.data[key] = data0[key]
    for key in aux_data0:
      self.aux_data[key] = aux_data0[key]
    # Queue up any dependencies we have not seen before.
    for new_dependency in dependencies0:
      if new_dependency not in self.scheduled:
        self.scheduled.add(new_dependency)
        self.dependencies.append(new_dependency)
    # NOTE(review): the pending-count decrement is elided here.
    self.condition.notify()
    self.condition.release()
def LoadTargetBuildFilesParallel(build_files, data, aux_data,
                                 variables, includes, depth, check,
                                 generator_input_info):
  """Loads |build_files| and their dependencies using a process pool.

  Workers run CallLoadTargetBuildFile; results are folded back into |data|
  and |aux_data| by ParallelState.LoadTargetBuildFileCallback.
  """
  parallel_state = ParallelState()
  parallel_state.condition = threading.Condition()
  # Make copies of the build_files argument that we can modify while working.
  parallel_state.dependencies = list(build_files)
  parallel_state.scheduled = set(build_files)
  parallel_state.pending = 0
  parallel_state.data = data
  parallel_state.aux_data = aux_data

  # NOTE(review): the enclosing 'try:' is elided before this acquire.
    parallel_state.condition.acquire()
    while parallel_state.dependencies or parallel_state.pending:
      if parallel_state.error:
        # NOTE(review): the 'break' is elided here.
      if not parallel_state.dependencies:
        # Nothing to schedule right now; wait for a worker callback.
        parallel_state.condition.wait()
        # NOTE(review): the 'continue' is elided here.

      dependency = parallel_state.dependencies.pop()

      parallel_state.pending += 1
      # NOTE(review): the data_in/aux_data_in dict initializations are
      # elided around this line.
      data_in['target_build_files'] = data['target_build_files']
      # NOTE(review): the 'global_flags = {' opener is elided; these mirror
      # module-level globals into the worker.
          'path_sections': globals()['path_sections'],
          'non_configuration_keys': globals()['non_configuration_keys'],
          'multiple_toolsets': globals()['multiple_toolsets']}

      # Lazily create the pool on first use.
      if not parallel_state.pool:
        parallel_state.pool = multiprocessing.Pool(8)
      parallel_state.pool.apply_async(
          CallLoadTargetBuildFile,
          args = (global_flags, dependency,
                  data_in, aux_data_in,
                  variables, includes, depth, check, generator_input_info),
          callback = parallel_state.LoadTargetBuildFileCallback)
  except KeyboardInterrupt, e:
    parallel_state.pool.terminate()
    # NOTE(review): the re-raise is elided here.

  parallel_state.condition.release()

  parallel_state.pool.close()
  parallel_state.pool.join()
  parallel_state.pool = None

  if parallel_state.error:
    # NOTE(review): the error-exit body is elided in this listing.
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple. For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
LBRACKETS= set('{[(')
BRACKETS = {'}': '{', ']': '[', ')': '('}
def FindEnclosingBracketGroup(input_str):
  """Returns (start, end) of the first balanced bracket group in |input_str|.

  NOTE(review): the stack/start initialization and the mismatch/not-found
  return paths are elided in this listing.
  """
  for index, char in enumerate(input_str):
    if char in LBRACKETS:
      # NOTE(review): the push onto the bracket stack is elided here.
    elif char in BRACKETS:
      # Closing bracket must match the most recent opener.
      if stack.pop() != BRACKETS[char]:
        # NOTE(review): the mismatch return is elided here.
        return (start, index + 1)
def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string. Non-string
  inputs always return False. Empty strings, leading zeros ('042'),
  a bare '-', and '-0' are not canonical.
  """
  if type(string) is not str:
    return False
  # This function is called a lot so for maximum performance, avoid
  # involving regexps which would otherwise make the code much
  # shorter. Regexps would need twice the time of this function.
  if not string:
    return False
  if string == '0':
    return True
  if string[0] == '-':
    string = string[1:]
    # '-' alone and '-0' do not round-trip through int().
    if not string or string == '0':
      return False
  if '1' <= string[0] <= '9':
    return string.isdigit()
  return False
# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
# In the last case, the inner "<()" is captured in match['content'].
#
# Raw strings are used so the regex escapes (\|, \(, \s, \[, \], \)) reach
# the re engine verbatim rather than relying on Python preserving unknown
# string escapes (a DeprecationWarning in modern Python).
early_variable_re = re.compile(
    r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
    r'\((?P<is_array>\s*\[?)'
    r'(?P<content>.*?)(\]?)\))')

# This matches the same as early_variable_re, but with '>' instead of '<'.
late_variable_re = re.compile(
    r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
    r'\((?P<is_array>\s*\[?)'
    r'(?P<content>.*?)(\]?)\))')

# This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
    r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
    r'\((?P<is_array>\s*\[?)'
    r'(?P<content>.*?)(\]?)\))')

# Global cache of results from running commands so they don't have to be run
# more than once. Keyed on (command string, directory).
cached_command_results = {}
def FixupPlatformCommand(cmd):
  """Rewrites a POSIX-style command for the host platform and returns it.

  On Windows, a leading 'cat ' is replaced with the cmd.exe equivalent
  'type '. |cmd| may be either a command string or an argv-style list;
  the same form is returned. As shown in the listing the function fell
  through without returning |cmd|; the else/return are restored here.
  """
  if sys.platform == 'win32':
    if type(cmd) is list:
      # Only the program token (first element) can carry the 'cat ' prefix.
      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
    else:
      cmd = re.sub('^cat ', 'type ', cmd)
  return cmd
def ExpandVariables(input, phase, variables, build_file):
  """Expands GYP variable/command references in |input| for |phase|.

  Handles <(var)/>(var)/^(var) lookups, <!(cmd) command execution (cached),
  <|(file ...) filelist generation, and @-variants that expand to lists.
  Returns a str, int, or list. NOTE(review): numerous structural lines
  (if/else/try headers, initializations) are elided in this listing.
  """
  # Look for the pattern that gets expanded into variables
  if phase == PHASE_EARLY:
    variable_re = early_variable_re
    expansion_symbol = '<'
  elif phase == PHASE_LATE:
    variable_re = late_variable_re
    expansion_symbol = '>'
  elif phase == PHASE_LATELATE:
    variable_re = latelate_variable_re
    expansion_symbol = '^'
  # NOTE(review): the 'else: assert' arm is elided here.

  input_str = str(input)
  if IsStrCanonicalInt(input_str):
    return int(input_str)

  # Do a quick scan to determine if an expensive regex search is warranted.
  if expansion_symbol not in input_str:
    # NOTE(review): the early return of input_str is elided.

  # Get the entire list of matches as a list of MatchObject instances.
  # (using findall here would return strings instead of MatchObjects).
  matches = list(variable_re.finditer(input_str))
  # NOTE(review): the no-matches early return and the output/matches.reverse
  # setup are elided here.

  # Reverse the list of matches so that replacements are done right-to-left.
  # That ensures that earlier replacements won't mess up the string in a
  # way that causes later calls to find the earlier substituted text instead
  # of what's intended for replacement.
  for match_group in matches:
    match = match_group.groupdict()
    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
    # match['replace'] is the substring to look for, match['type']
    # is the character code for the replacement type (< > <! >! <| >| <@
    # >@ <!@ >!@), match['is_array'] contains a '[' for command
    # arrays, and match['content'] is the name of the variable (< >)
    # or command to run (<! >!). match['command_string'] is an optional
    # command string. Currently, only 'pymod_do_main' is supported.

    # run_command is true if a ! variant is used.
    run_command = '!' in match['type']
    command_string = match['command_string']

    # file_list is true if a | variant is used.
    file_list = '|' in match['type']

    # Capture these now so we can adjust them later.
    replace_start = match_group.start('replace')
    replace_end = match_group.end('replace')

    # Find the ending paren, and re-evaluate the contained string.
    (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])

    # Adjust the replacement range to match the entire command
    # found by FindEnclosingBracketGroup (since the variable_re
    # probably doesn't match the entire command if it contained
    replace_end = replace_start + c_end

    # Find the "real" replacement, matching the appropriate closing
    # paren, and adjust the replacement start and end.
    replacement = input_str[replace_start:replace_end]

    # Figure out what the contents of the variable parens are.
    contents_start = replace_start + c_start + 1
    contents_end = replace_end - 1
    contents = input_str[contents_start:contents_end]

    # Do filter substitution now for <|().
    # Admittedly, this is different than the evaluation order in other
    # contexts. However, since filtration has no chance to run on <|(),
    # this seems like the only obvious way to give them access to filters.
    # NOTE(review): the 'if file_list:' guard is elided here.
      processed_variables = gyp.simple_copy.deepcopy(variables)
      ProcessListFiltersInDict(contents, processed_variables)
      # Recurse to expand variables in the contents
      contents = ExpandVariables(contents, phase,
                                 processed_variables, build_file)
      # NOTE(review): 'else:' elided around this alternative recursion.
      # Recurse to expand variables in the contents
      contents = ExpandVariables(contents, phase, variables, build_file)

    # Strip off leading/trailing whitespace so that variable matches are
    # simpler below (and because they are rarely needed).
    contents = contents.strip()

    # expand_to_list is true if an @ variant is used. In that case,
    # the expansion should result in a list. Note that the caller
    # is to be expecting a list in return, and not all callers do
    # because not all are working in list context. Also, for list
    # expansions, there can be no other text besides the variable
    # expansion in the input string.
    expand_to_list = '@' in match['type'] and input_str == replacement

    if run_command or file_list:
      # Find the build file's directory, so commands can be run or file lists
      # generated relative to it.
      build_file_dir = os.path.dirname(build_file)
      if build_file_dir == '' and not file_list:
        # If build_file is just a leaf filename indicating a file in the
        # current directory, build_file_dir might be an empty string. Set
        # it to None to signal to subprocess.Popen that it should run the
        # command in the current directory.
        build_file_dir = None

    # Support <|(listfile.txt ...) which generates a file
    # containing items from a gyp list, generated at gyp time.
    # This works around actions/rules which have more inputs than will
    # fit on the command line.
    # NOTE(review): the 'if file_list:' guard is elided here.
      if type(contents) is list:
        contents_list = contents
        # NOTE(review): 'else:' elided — string form is split on spaces.
        contents_list = contents.split(' ')
      replacement = contents_list[0]
      if os.path.isabs(replacement):
        raise GypError('| cannot handle absolute paths, got "%s"' % replacement)

      if not generator_filelist_paths:
        path = os.path.join(build_file_dir, replacement)
        # NOTE(review): 'else:' elided — generator-provided output dirs.
        if os.path.isabs(build_file_dir):
          toplevel = generator_filelist_paths['toplevel']
          rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
          # NOTE(review): 'else:' elided here.
          rel_build_file_dir = build_file_dir
        qualified_out_dir = generator_filelist_paths['qualified_out_dir']
        path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
        gyp.common.EnsureDirExists(path)

      replacement = gyp.common.RelativePath(path, build_file_dir)
      f = gyp.common.WriteOnDiff(path)
      for i in contents_list[1:]:
        # NOTE(review): the per-item write and f.close() are elided.

    # NOTE(review): the 'elif run_command:' arm header and use_shell setup
    # are elided before this block.
      if match['is_array']:
        contents = eval(contents)
        # NOTE(review): the use_shell=False assignment is elided.

      # Check for a cached value to avoid executing commands, or generating
      # file lists more than once. The cache key contains the command to be
      # run as well as the directory to run it from, to account for commands
      # that depend on their current directory.
      # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
      # someone could author a set of GYP files where each time the command
      # is invoked it produces different output by design. When the need
      # arises, the syntax should be extended to support no caching off a
      # command's output so it is run every time.
      cache_key = (str(contents), build_file_dir)
      cached_value = cached_command_results.get(cache_key, None)
      if cached_value is None:
        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                        "Executing command '%s' in directory '%s'",
                        contents, build_file_dir)

        if command_string == 'pymod_do_main':
          # <!pymod_do_main(modulename param eters) loads |modulename| as a
          # python module and then calls that module's DoMain() function,
          # passing ["param", "eters"] as a single list argument. For modules
          # that don't load quickly, this can be faster than
          # <!(python modulename param eters). Do this in |build_file_dir|.
          oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
          if build_file_dir:  # build_file_dir may be None (see above).
            os.chdir(build_file_dir)
          # NOTE(review): the try/finally restoring the cwd is elided.
            parsed_contents = shlex.split(contents)
            # NOTE(review): the nested try importing the module is elided.
              py_module = __import__(parsed_contents[0])
            except ImportError as e:
              raise GypError("Error importing pymod_do_main"
                             "module (%s): %s" % (parsed_contents[0], e))
            replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
          assert replacement != None
        # NOTE(review): the 'elif command_string:' guard is elided before
        # this unknown-command raise.
          raise GypError("Unknown command string '%s' in '%s'." %
                         (command_string, contents))
          # Fix up command with platform specific workarounds.
          contents = FixupPlatformCommand(contents)
          p = subprocess.Popen(contents, shell=use_shell,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,

          p_stdout, p_stderr = p.communicate('')

          if p.wait() != 0 or p_stderr:
            sys.stderr.write(p_stderr)
            # Simulate check_call behavior, since check_call only exists
            # in python 2.5 and later.
            raise GypError("Call to '%s' returned exit status %d." %
                           (contents, p.returncode))
          replacement = p_stdout.rstrip()

        cached_command_results[cache_key] = replacement
        # NOTE(review): 'else:' elided — cache hit path follows.
        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
                        "Had cache value for command '%s' in directory '%s'",
                        contents,build_file_dir)
        replacement = cached_value

    # NOTE(review): the plain-variable 'else:' arm header is elided here.
      if not contents in variables:
        if contents[-1] in ['!', '/']:
          # In order to allow cross-compiles (nacl) to happen more naturally,
          # we will allow references to >(sources/) etc. to resolve to
          # and empty list if undefined. This allows actions to:
          # NOTE(review): the empty-list fallback assignment is elided.
          raise GypError('Undefined variable ' + contents +
        # NOTE(review): 'else:' elided before this lookup.
        replacement = variables[contents]

    if type(replacement) is list:
      for item in replacement:
        if not contents[-1] == '/' and type(item) not in (str, int):
          raise GypError('Variable ' + contents +
                         ' must expand to a string or list of strings; ' +
                         item.__class__.__name__)
      # Run through the list and handle variable expansions in it. Since
      # the list is guaranteed not to contain dicts, this won't do anything
      # with conditions sections.
      ProcessVariablesAndConditionsInList(replacement, phase, variables,
    elif type(replacement) not in (str, int):
      raise GypError('Variable ' + contents +
                     ' must expand to a string or list of strings; ' +
                     'found a ' + replacement.__class__.__name__)

    # NOTE(review): the 'if expand_to_list:' guard is elided here.
      # Expanding in list context. It's guaranteed that there's only one
      # replacement to do in |input_str| and that it's this replacement. See
      if type(replacement) is list:
        # If it's already a list, make a copy.
        output = replacement[:]
        # Split it the same way sh would split arguments.
        output = shlex.split(str(replacement))
      # Expanding in string context.
      encoded_replacement = ''
      if type(replacement) is list:
        # When expanding a list into string context, turn the list items
        # into a string in a way that will work with a subprocess call.
        #
        # TODO(mark): This isn't completely correct. This should
        # call a generator-provided function that observes the
        # proper list-to-argument quoting rules on a specific
        # platform instead of just calling the POSIX encoding
        encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
        # NOTE(review): 'else:' elided before this passthrough.
        encoded_replacement = replacement

      output = output[:replace_start] + str(encoded_replacement) + \
    # Prepare for the next match iteration.
    # NOTE(review): the 'input_str = output' update is elided.

  # Look for more matches now that we've replaced some, to deal with
  # expanding local variables (variables defined in the same
  # variables block as this one).
  gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
  if type(output) is list:
    if output and type(output[0]) is list:
      # Leave output alone if it's a list of lists.
      # We don't want such lists to be stringified.
      # NOTE(review): the per-item re-expansion loop opener is elided
      # around this continuation line.
            ExpandVariables(item, phase, variables, build_file))
      output = ExpandVariables(output, phase, variables, build_file)

  # Convert all strings that are canonically-represented integers into integers.
  if type(output) is list:
    for index in xrange(0, len(output)):
      if IsStrCanonicalInt(output[index]):
        output[index] = int(output[index])
  elif IsStrCanonicalInt(output):
    output = int(output)
  # NOTE(review): the final 'return output' is elided in this listing.
# The same condition is often evaluated over and over again so it
# makes sense to cache as much as possible between evaluations.
cached_conditions_asts = {}

def EvalCondition(condition, conditions_key, phase, variables, build_file):
  """Returns the dict that should be used or None if the result was
  that nothing should be used."""
  if type(condition) is not list:
    raise GypError(conditions_key + ' must be a list')
  if len(condition) != 2 and len(condition) != 3:
    # It's possible that condition[0] won't work in which case this
    # attempt will raise its own IndexError. That's probably fine.
    raise GypError(conditions_key + ' ' + condition[0] +
                   ' must be length 2 or 3, not ' + str(len(condition)))

  [cond_expr, true_dict] = condition[0:2]
  # NOTE(review): the false_dict = None default is elided here.
  if len(condition) == 3:
    false_dict = condition[2]

  # Do expansions on the condition itself. Since the conditon can naturally
  # contain variable references without needing to resort to GYP expansion
  # syntax, this is of dubious value for variables, but someone might want to
  # use a command expansion directly inside a condition.
  cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
  if type(cond_expr_expanded) not in (str, int):
    # NOTE(review): the 'raise ValueError(' opener is elided before these
    # message fragments.
        'Variable expansion in this context permits str and int ' + \
        'only, found ' + cond_expr_expanded.__class__.__name__

  # NOTE(review): the enclosing 'try:' is elided before this cache lookup.
    if cond_expr_expanded in cached_conditions_asts:
      ast_code = cached_conditions_asts[cond_expr_expanded]
      # NOTE(review): 'else:' elided — compile once and memoize.
      ast_code = compile(cond_expr_expanded, '<string>', 'eval')
      cached_conditions_asts[cond_expr_expanded] = ast_code
    # Evaluate with builtins disabled; |variables| is the namespace.
    if eval(ast_code, {'__builtins__': None}, variables):
      # NOTE(review): the true_dict/false_dict returns are elided.
  except SyntaxError, e:
    syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
                               'at character %d.' %
                               (str(e.args[0]), e.text, build_file, e.offset),
                               e.filename, e.lineno, e.offset, e.text)
    # NOTE(review): the raise of syntax_error is elided here.
  except NameError, e:
    gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
                               (cond_expr_expanded, build_file))
    # NOTE(review): the re-raise is elided here.
# NOTE(review): non-contiguous numbering — lines such as the PHASE_LATELATE
# early return and the closing 'build_file)' of the EvalCondition call appear
# elided from this listing; verify against upstream before editing.
1077 def ProcessConditionsInDict(the_dict, phase, variables, build_file):
1078 # Process a 'conditions' or 'target_conditions' section in the_dict,
1079 # depending on phase.
1080 # early -> conditions
1081 # late -> target_conditions
1082 # latelate -> no conditions
1084 # Each item in a conditions list consists of cond_expr, a string expression
1085 # evaluated as the condition, and true_dict, a dict that will be merged into
1086 # the_dict if cond_expr evaluates to true. Optionally, a third item,
1087 # false_dict, may be present. false_dict is merged into the_dict if
1088 # cond_expr evaluates to false.
1090 # Any dict merged into the_dict will be recursively processed for nested
1091 # conditionals and other expansions, also according to phase, immediately
1092 # prior to being merged.
1094 if phase == PHASE_EARLY:
1095 conditions_key = 'conditions'
1096 elif phase == PHASE_LATE:
1097 conditions_key = 'target_conditions'
1098 elif phase == PHASE_LATELATE:
1103 if not conditions_key in the_dict:
1106 conditions_list = the_dict[conditions_key]
1107 # Unhook the conditions list, it's no longer needed.
1108 del the_dict[conditions_key]
1110 for condition in conditions_list:
1111 merge_dict = EvalCondition(condition, conditions_key, phase, variables,
# merge_dict is None when the condition selected no dict (e.g. a false
# condition with no false_dict).
1114 if merge_dict != None:
1115 # Expand variables and nested conditionals in the merge_dict before
1117 ProcessVariablesAndConditionsInDict(merge_dict, phase,
1118 variables, build_file)
1120 MergeDicts(the_dict, merge_dict, build_file, build_file)
def LoadAutomaticVariablesFromDict(variables, the_dict):
  """Loads "automatic" variables from the_dict into variables.

  Every key of the_dict whose value is a plain string, integer, or list
  becomes an automatic variable; its name is the key with a leading "_"
  prepended.  Other value types (dicts, etc.) are ignored.
  """
  automatic_value_types = (str, int, list)
  for name, val in the_dict.iteritems():
    if type(val) in automatic_value_types:
      variables['_' + name] = val
# NOTE(review): non-contiguous numbering — the 'continue' statements after the
# type check and after the "already set" comment appear elided; verify against
# upstream before editing.
1131 def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
1132 # Any keys in the_dict's "variables" dict, if it has one, becomes a
1133 # variable. The variable name is the key name in the "variables" dict.
1134 # Variables that end with the % character are set only if they are unset in
1135 # the variables dict. the_dict_key is the name of the key that accesses
1136 # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
1137 # (it could be a list or it could be parentless because it is a root dict),
1138 # the_dict_key will be None.
1139 for key, value in the_dict.get('variables', {}).iteritems():
1140 if type(value) not in (str, int, list):
1143 if key.endswith('%'):
1144 variable_name = key[:-1]
1145 if variable_name in variables:
1146 # If the variable is already set, don't set it.
# NOTE(review): 'is' compares identity, not equality; this only works because
# CPython interns the short literal 'variables'. Should be '==' — flagged
# rather than fixed since this is a documentation-only pass.
1148 if the_dict_key is 'variables' and variable_name in the_dict:
1149 # If the variable is set without a % in the_dict, and the_dict is a
1150 # variables dict (making |variables| a variables sub-dict of a
1151 # variables dict), use the_dict's definition.
1152 value = the_dict[variable_name]
1156 variables[variable_name] = value
# NOTE(review): non-contiguous numbering — parts of the docstring, the
# raise statements' opening lines, and several 'continue'/argument lines
# appear elided from this listing; verify against upstream before editing.
1159 def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
1160 build_file, the_dict_key=None):
1161 """Handle all variable and command expansion and conditional evaluation.
1163 This function is the public entry point for all variable expansions and
1164 conditional evaluations. The variables_in dictionary will not be modified
1168 # Make a copy of the variables_in dict that can be modified during the
1169 # loading of automatics and the loading of the variables dict.
1170 variables = variables_in.copy()
1171 LoadAutomaticVariablesFromDict(variables, the_dict)
1173 if 'variables' in the_dict:
1174 # Make sure all the local variables are added to the variables
1175 # list before we process them so that you can reference one
1176 # variable from another. They will be fully expanded by recursion
1177 # in ExpandVariables.
1178 for key, value in the_dict['variables'].iteritems():
1179 variables[key] = value
1181 # Handle the associated variables dict first, so that any variable
1182 # references within can be resolved prior to using them as variables.
1183 # Pass a copy of the variables dict to avoid having it be tainted.
1184 # Otherwise, it would have extra automatics added for everything that
1185 # should just be an ordinary variable in this scope.
1186 ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
1187 variables, build_file, 'variables')
1189 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
# First pass: expand every plain-string value in the_dict itself.
1191 for key, value in the_dict.iteritems():
1192 # Skip "variables", which was already processed if present.
1193 if key != 'variables' and type(value) is str:
1194 expanded = ExpandVariables(value, phase, variables, build_file)
1195 if type(expanded) not in (str, int):
1197 'Variable expansion in this context permits str and int ' + \
1198 'only, found ' + expanded.__class__.__name__ + ' for ' + key
1199 the_dict[key] = expanded
1201 # Variable expansion may have resulted in changes to automatics. Reload.
1202 # TODO(mark): Optimization: only reload if no changes were made.
1203 variables = variables_in.copy()
1204 LoadAutomaticVariablesFromDict(variables, the_dict)
1205 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1207 # Process conditions in this dict. This is done after variable expansion
1208 # so that conditions may take advantage of expanded variables. For example,
1209 # if the_dict contains:
1210 # {'type': '<(library_type)',
1211 # 'conditions': [['_type=="static_library"', { ... }]]},
1212 # _type, as used in the condition, will only be set to the value of
1213 # library_type if variable expansion is performed before condition
1214 # processing. However, condition processing should occur prior to recursion
1215 # so that variables (both automatic and "variables" dict type) may be
1216 # adjusted by conditions sections, merged into the_dict, and have the
1217 # intended impact on contained dicts.
1219 # This arrangement means that a "conditions" section containing a "variables"
1220 # section will only have those variables effective in subdicts, not in
1221 # the_dict. The workaround is to put a "conditions" section within a
1222 # "variables" section. For example:
1223 # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
1224 # 'defines': ['<(define)'],
1225 # 'my_subdict': {'defines': ['<(define)']}},
1226 # will not result in "IS_MAC" being appended to the "defines" list in the
1227 # current scope but would result in it being appended to the "defines" list
1228 # within "my_subdict". By comparison:
1229 # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
1230 # 'defines': ['<(define)'],
1231 # 'my_subdict': {'defines': ['<(define)']}},
1232 # will append "IS_MAC" to both "defines" lists.
1234 # Evaluate conditions sections, allowing variable expansions within them
1235 # as well as nested conditionals. This will process a 'conditions' or
1236 # 'target_conditions' section, perform appropriate merging and recursive
1237 # conditional and variable processing, and then remove the conditions section
1238 # from the_dict if it is present.
1239 ProcessConditionsInDict(the_dict, phase, variables, build_file)
1241 # Conditional processing may have resulted in changes to automatics or the
1242 # variables dict. Reload.
1243 variables = variables_in.copy()
1244 LoadAutomaticVariablesFromDict(variables, the_dict)
1245 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1247 # Recurse into child dicts, or process child lists which may result in
1248 # further recursion into descendant dicts.
1249 for key, value in the_dict.iteritems():
1250 # Skip "variables" and string values, which were already processed if
1252 if key == 'variables' or type(value) is str:
1254 if type(value) is dict:
1255 # Pass a copy of the variables dict so that subdicts can't influence
1257 ProcessVariablesAndConditionsInDict(value, phase, variables,
1259 elif type(value) is list:
1260 # The list itself can't influence the variables dict, and
1261 # ProcessVariablesAndConditionsInList will make copies of the variables
1262 # dict if it needs to pass it to something that can influence it. No
1263 # copy is necessary here.
1264 ProcessVariablesAndConditionsInList(value, phase, variables,
1266 elif type(value) is not int:
1267 raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
# Expands variables (and, via recursion into dicts, conditions) for every item
# of the_list in place. String expansions that yield a list are spliced into
# the_list at the same position.
# NOTE(review): non-contiguous numbering — the signature's 'build_file)'
# continuation, 'index = 0', 'continue', and 'index += 1' lines appear elided;
# verify against upstream before editing.
1271 def ProcessVariablesAndConditionsInList(the_list, phase, variables,
1273 # Iterate using an index so that new values can be assigned into the_list.
1275 while index < len(the_list):
1276 item = the_list[index]
1277 if type(item) is dict:
1278 # Make a copy of the variables dict so that it won't influence anything
1279 # outside of its own scope.
1280 ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
1281 elif type(item) is list:
1282 ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
1283 elif type(item) is str:
1284 expanded = ExpandVariables(item, phase, variables, build_file)
1285 if type(expanded) in (str, int):
1286 the_list[index] = expanded
1287 elif type(expanded) is list:
# Splice the expanded list into place and skip over it.
1288 the_list[index:index+1] = expanded
1289 index += len(expanded)
1291 # index now identifies the next item to examine. Continue right now
1292 # without falling into the index increment below.
1296 'Variable expansion in this context permits strings and ' + \
1297 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
1299 elif type(item) is not int:
# NOTE(review): "' at index ' + index" concatenates str + int and would itself
# raise TypeError with the wrong message; should be str(index). Flagged only,
# since this is a documentation-only pass.
1300 raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
1301 ' at index ' + index
# NOTE(review): non-contiguous numbering — the docstring closer, the
# 'targets = {}' initializer, the toolset argument line, and the final
# 'return targets' appear elided; verify against upstream before editing.
1305 def BuildTargetsDict(data):
1306 """Builds a dict mapping fully-qualified target names to their target dicts.
1308 |data| is a dict mapping loaded build files by pathname relative to the
1309 current directory. Values in |data| are build file contents. For each
1310 |data| value with a "targets" key, the value of the "targets" key is taken
1311 as a list containing target dicts. Each target's fully-qualified name is
1312 constructed from the pathname of the build file (|data| key) and its
1313 "target_name" property. These fully-qualified names are used as the keys
1314 in the returned dict. These keys provide access to the target dicts,
1315 the dicts in the "targets" lists.
1319 for build_file in data['target_build_files']:
1320 for target in data[build_file].get('targets', []):
1321 target_name = gyp.common.QualifiedTarget(build_file,
1322 target['target_name'],
# Two targets may not share a fully-qualified name.
1324 if target_name in targets:
1325 raise GypError('Duplicate target definitions for ' + target_name)
1326 targets[target_name] = target
1331 def QualifyDependencies(targets):
1332 """Make dependency links fully-qualified relative to the current directory.
1334 |targets| is a dict mapping fully-qualified target names to their target
1335 dicts. For each target in this dict, keys known to contain dependency
1336 links are examined, and any dependencies referenced will be rewritten
1337 so that they are fully-qualified and relative to the current directory.
1338 All rewritten dependencies are suitable for use as keys to |targets| or a
# Dependency sections may carry the '!' (exclusion) or '/' (regex) list
# operators as suffixes; include those variants too.
1342 all_dependency_sections = [dep + op
1343 for dep in dependency_sections
1344 for op in ('', '!', '/')]
1346 for target, target_dict in targets.iteritems():
1347 target_build_file = gyp.common.BuildFile(target)
1348 toolset = target_dict['toolset']
1349 for dependency_key in all_dependency_sections:
1350 dependencies = target_dict.get(dependency_key, [])
1351 for index in xrange(0, len(dependencies)):
1352 dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
1353 target_build_file, dependencies[index], toolset)
1354 if not multiple_toolsets:
1355 # Ignore toolset specification in the dependency if it is specified.
1356 dep_toolset = toolset
# NOTE(review): the dep_target/dep_toolset argument lines of this call appear
# elided from this listing; verify against upstream before editing.
1357 dependency = gyp.common.QualifiedTarget(dep_file,
1360 dependencies[index] = dependency
1362 # Make sure anything appearing in a list other than "dependencies" also
1363 # appears in the "dependencies" list.
1364 if dependency_key != 'dependencies' and \
1365 dependency not in target_dict['dependencies']:
1366 raise GypError('Found ' + dependency + ' in ' + dependency_key +
1367 ' of ' + target + ', but not in dependencies')
# NOTE(review): non-contiguous numbering — 'index = 0', 'index += 1',
# 'continue' lines, and the docstring closer appear elided from this listing;
# verify against upstream before editing.
1370 def ExpandWildcardDependencies(targets, data):
1371 """Expands dependencies specified as build_file:*.
1373 For each target in |targets|, examines sections containing links to other
1374 targets. If any such section contains a link of the form build_file:*, it
1375 is taken as a wildcard link, and is expanded to list each target in
1376 build_file. The |data| dict provides access to build file dicts.
1378 Any target that does not wish to be included by wildcard can provide an
1379 optional "suppress_wildcard" key in its target dict. When present and
1380 true, a wildcard dependency link will not include such targets.
1382 All dependency names, including the keys to |targets| and the values in each
1383 dependency list, must be qualified when this function is called.
1386 for target, target_dict in targets.iteritems():
1387 toolset = target_dict['toolset']
1388 target_build_file = gyp.common.BuildFile(target)
1389 for dependency_key in dependency_sections:
1390 dependencies = target_dict.get(dependency_key, [])
1392 # Loop this way instead of "for dependency in" or "for index in xrange"
1393 # because the dependencies list will be modified within the loop body.
1395 while index < len(dependencies):
1396 (dependency_build_file, dependency_target, dependency_toolset) = \
1397 gyp.common.ParseQualifiedTarget(dependencies[index])
1398 if dependency_target != '*' and dependency_toolset != '*':
1399 # Not a wildcard. Keep it moving.
1403 if dependency_build_file == target_build_file:
1404 # It's an error for a target to depend on all other targets in
1405 # the same file, because a target cannot depend on itself.
1406 raise GypError('Found wildcard in ' + dependency_key + ' of ' +
1407 target + ' referring to same build file')
1409 # Take the wildcard out and adjust the index so that the next
1410 # dependency in the list will be processed the next time through the
1412 del dependencies[index]
1415 # Loop through the targets in the other build file, adding them to
1416 # this target's list of dependencies in place of the removed
1418 dependency_target_dicts = data[dependency_build_file]['targets']
1419 for dependency_target_dict in dependency_target_dicts:
# Targets may opt out of wildcard inclusion via 'suppress_wildcard'.
1420 if int(dependency_target_dict.get('suppress_wildcard', False)):
1422 dependency_target_name = dependency_target_dict['target_name']
1423 if (dependency_target != '*' and
1424 dependency_target != dependency_target_name):
1426 dependency_target_toolset = dependency_target_dict['toolset']
1427 if (dependency_toolset != '*' and
1428 dependency_toolset != dependency_target_toolset):
1430 dependency = gyp.common.QualifiedTarget(dependency_build_file,
1431 dependency_target_name,
1432 dependency_target_toolset)
1434 dependencies.insert(index, dependency)
# NOTE(review): the 'def Unify(l):' header and the 'seen = {}' initializer for
# this function appear elided from this listing; only the docstring and return
# are visible. Verify against upstream before editing.
1440 """Removes duplicate elements from l, keeping the first element."""
# setdefault records each first occurrence in 'seen' while the 'not in seen'
# filter drops subsequent duplicates, preserving original order.
1442 return [seen.setdefault(e, e) for e in l if e not in seen]
# NOTE(review): the docstring closer and an 'if dependencies:' guard appear
# elided from this listing; verify against upstream before editing.
1445 def RemoveDuplicateDependencies(targets):
1446 """Makes sure every dependency appears only once in all targets's dependency
1448 for target_name, target_dict in targets.iteritems():
1449 for dependency_key in dependency_sections:
1450 dependencies = target_dict.get(dependency_key, [])
# Rewrite the section with duplicates removed, first occurrence kept.
1452 target_dict[dependency_key] = Unify(dependencies)
# NOTE(review): the 'res = {}' initializer appears elided from this listing;
# verify against upstream before editing.
1455 def Filter(l, item):
1456 """Removes item from l."""
# Returns a new list preserving order; 'res' deduplication mirrors Unify.
1458 return [res.setdefault(e, e) for e in l if e != item]
# NOTE(review): the docstring closer and an 'if dependencies:' guard appear
# elided from this listing; verify against upstream before editing.
1461 def RemoveSelfDependencies(targets):
1462 """Remove self dependencies from targets that have the prune_self_dependency
1464 for target_name, target_dict in targets.iteritems():
1465 for dependency_key in dependency_sections:
1466 dependencies = target_dict.get(dependency_key, [])
1468 for t in dependencies:
1469 if t == target_name:
# Only prune when the target explicitly opts in via its variables dict.
1470 if targets[t].get('variables', {}).get('prune_self_dependency', 0):
1471 target_dict[dependency_key] = Filter(dependencies, target_name)
# NOTE(review): the docstring closer and an 'if dependencies:' guard appear
# elided from this listing; verify against upstream before editing.
1474 def RemoveLinkDependenciesFromNoneTargets(targets):
1475 """Remove dependencies having the 'link_dependency' attribute from the 'none'
1477 for target_name, target_dict in targets.iteritems():
1478 for dependency_key in dependency_sections:
1479 dependencies = target_dict.get(dependency_key, [])
1481 for t in dependencies:
# 'none' targets produce no linked output, so link-only dependencies are
# meaningless for them and are filtered out.
1482 if target_dict.get('type', None) == 'none':
1483 if targets[t].get('variables', {}).get('link_dependency', 0):
1484 target_dict[dependency_key] = \
1485 Filter(target_dict[dependency_key], t)
# Node in the target/build-file dependency graph. A synthetic root node with
# ref=None anchors every node that has no dependencies of its own.
# NOTE(review): this listing has non-contiguous numbering — the class
# docstring delimiters, __repr__'s 'def' line, __init__'s 'self.ref = ref',
# most of FindCycles' body, several 'return'/'continue' statements, and other
# interior lines appear elided; verify against upstream before editing.
1488 class DependencyGraphNode(object):
1492 ref: A reference to an object that this DependencyGraphNode represents.
1493 dependencies: List of DependencyGraphNodes on which this one depends.
1494 dependents: List of DependencyGraphNodes that depend on this one.
# Raised when a cycle prevents a full topological ordering of the graph.
1497 class CircularException(GypError):
1500 def __init__(self, ref):
1502 self.dependencies = []
1503 self.dependents = []
1506 return '<DependencyGraphNode: %r>' % self.ref
# Topologically sorts the graph (Kahn's algorithm) starting from this node,
# which is expected to be the synthetic root.
1508 def FlattenToList(self):
1509 # flat_list is the sorted list of dependencies - actually, the list items
1510 # are the "ref" attributes of DependencyGraphNodes. Every target will
1511 # appear in flat_list after all of its dependencies, and before all of its
1513 flat_list = OrderedSet()
1515 # in_degree_zeros is the list of DependencyGraphNodes that have no
1516 # dependencies not in flat_list. Initially, it is a copy of the children
1517 # of this node, because when the graph was built, nodes with no
1518 # dependencies were made implicit dependents of the root node.
1519 in_degree_zeros = set(self.dependents[:])
1521 while in_degree_zeros:
1522 # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
1523 # can be appended to flat_list. Take these nodes out of in_degree_zeros
1524 # as work progresses, so that the next node to process from the list can
1525 # always be accessed at a consistent position.
1526 node = in_degree_zeros.pop()
1527 flat_list.add(node.ref)
1529 # Look at dependents of the node just added to flat_list. Some of them
1530 # may now belong in in_degree_zeros.
1531 for node_dependent in node.dependents:
1532 is_in_degree_zero = True
1533 # TODO: We want to check through the
1534 # node_dependent.dependencies list but if it's long and we
1535 # always start at the beginning, then we get O(n^2) behaviour.
1536 for node_dependent_dependency in node_dependent.dependencies:
1537 if not node_dependent_dependency.ref in flat_list:
1538 # The dependent one or more dependencies not in flat_list. There
1539 # will be more chances to add it to flat_list when examining
1540 # it again as a dependent of those other dependencies, provided
1541 # that there are no cycles.
1542 is_in_degree_zero = False
1545 if is_in_degree_zero:
1546 # All of the dependent's dependencies are already in flat_list. Add
1547 # it to in_degree_zeros where it will be processed in a future
1548 # iteration of the outer loop.
1549 in_degree_zeros.add(node_dependent)
1551 return list(flat_list)
# NOTE(review): most of this method's body (path/cycle bookkeeping) is elided
# from this listing.
1553 def FindCycles(self, path=None):
1555 Returns a list of cycles in the graph, where each cycle is its own list.
1561 for node in self.dependents:
1568 results.append(tuple(cycle))
1570 results.extend(node.FindCycles([node] + path))
1572 return list(set(results))
1574 def DirectDependencies(self, dependencies=None):
1575 """Returns a list of just direct dependencies."""
1576 if dependencies == None:
1579 for dependency in self.dependencies:
1580 # Check for None, corresponding to the root node.
1581 if dependency.ref != None and dependency.ref not in dependencies:
1582 dependencies.append(dependency.ref)
1586 def _AddImportedDependencies(self, targets, dependencies=None):
1587 """Given a list of direct dependencies, adds indirect dependencies that
1588 other dependencies have declared to export their settings.
1590 This method does not operate on self. Rather, it operates on the list
1591 of dependencies in the |dependencies| argument. For each dependency in
1592 that list, if any declares that it exports the settings of one of its
1593 own dependencies, those dependencies whose settings are "passed through"
1594 are added to the list. As new items are added to the list, they too will
1595 be processed, so it is possible to import settings through multiple levels
1598 This method is not terribly useful on its own, it depends on being
1599 "primed" with a list of direct dependencies such as one provided by
1600 DirectDependencies. DirectAndImportedDependencies is intended to be the
1604 if dependencies == None:
1608 while index < len(dependencies):
1609 dependency = dependencies[index]
1610 dependency_dict = targets[dependency]
1611 # Add any dependencies whose settings should be imported to the list
1612 # if not already present. Newly-added items will be checked for
1613 # their own imports when the list iteration reaches them.
1614 # Rather than simply appending new items, insert them after the
1615 # dependency that exported them. This is done to more closely match
1616 # the depth-first method used by DeepDependencies.
1618 for imported_dependency in \
1619 dependency_dict.get('export_dependent_settings', []):
1620 if imported_dependency not in dependencies:
1621 dependencies.insert(index + add_index, imported_dependency)
1622 add_index = add_index + 1
1627 def DirectAndImportedDependencies(self, targets, dependencies=None):
1628 """Returns a list of a target's direct dependencies and all indirect
1629 dependencies that a dependency has advertised settings should be exported
1630 through the dependency for.
1633 dependencies = self.DirectDependencies(dependencies)
1634 return self._AddImportedDependencies(targets, dependencies)
1636 def DeepDependencies(self, dependencies=None):
1637 """Returns an OrderedSet of all of a target's dependencies, recursively."""
1638 if dependencies is None:
1639 # Using a list to get ordered output and a set to do fast "is it
1640 # already added" checks.
1641 dependencies = OrderedSet()
1643 for dependency in self.dependencies:
1644 # Check for None, corresponding to the root node.
1645 if dependency.ref is None:
1647 if dependency.ref not in dependencies:
1648 dependencies.add(dependency.ref)
1649 dependency.DeepDependencies(dependencies)
1653 def _LinkDependenciesInternal(self, targets, include_shared_libraries,
1654 dependencies=None, initial=True):
1655 """Returns an OrderedSet of dependency targets that are linked
1658 This function has a split personality, depending on the setting of
1659 |initial|. Outside callers should always leave |initial| at its default
1662 When adding a target to the list of dependencies, this function will
1663 recurse into itself with |initial| set to False, to collect dependencies
1664 that are linked into the linkable target for which the list is being built.
1666 If |include_shared_libraries| is False, the resulting dependencies will not
1667 include shared_library targets that are linked into this target.
1669 if dependencies is None:
1670 # Using a list to get ordered output and a set to do fast "is it
1671 # already added" checks.
1672 dependencies = OrderedSet()
1674 # Check for None, corresponding to the root node.
1675 if self.ref is None:
1678 # It's kind of sucky that |targets| has to be passed into this function,
1679 # but that's presently the easiest way to access the target dicts so that
1680 # this function can find target types.
1682 if 'target_name' not in targets[self.ref]:
1683 raise GypError("Missing 'target_name' field in target.")
1685 if 'type' not in targets[self.ref]:
1686 raise GypError("Missing 'type' field in target %s" %
1687 targets[self.ref]['target_name'])
1689 target_type = targets[self.ref]['type']
1691 is_linkable = target_type in linkable_types
1693 if initial and not is_linkable:
1694 # If this is the first target being examined and it's not linkable,
1695 # return an empty list of link dependencies, because the link
1696 # dependencies are intended to apply to the target itself (initial is
1697 # True) and this target won't be linked.
1700 # Don't traverse 'none' targets if explicitly excluded.
1701 if (target_type == 'none' and
1702 not targets[self.ref].get('dependencies_traverse', True)):
1703 dependencies.add(self.ref)
1706 # Executables and loadable modules are already fully and finally linked.
1707 # Nothing else can be a link dependency of them, there can only be
1708 # dependencies in the sense that a dependent target might run an
1709 # executable or load the loadable_module.
1710 if not initial and target_type in ('executable', 'loadable_module'):
1713 # Shared libraries are already fully linked. They should only be included
1714 # in |dependencies| when adjusting static library dependencies (in order to
1715 # link against the shared_library's import lib), but should not be included
1716 # in |dependencies| when propagating link_settings.
1717 # The |include_shared_libraries| flag controls which of these two cases we
1719 if (not initial and target_type == 'shared_library' and
1720 not include_shared_libraries):
1723 # The target is linkable, add it to the list of link dependencies.
1724 if self.ref not in dependencies:
1725 dependencies.add(self.ref)
1726 if initial or not is_linkable:
1727 # If this is a subsequent target and it's linkable, don't look any
1728 # further for linkable dependencies, as they'll already be linked into
1729 # this target linkable. Always look at dependencies of the initial
1730 # target, and always look at dependencies of non-linkables.
1731 for dependency in self.dependencies:
1732 dependency._LinkDependenciesInternal(targets,
1733 include_shared_libraries,
1734 dependencies, False)
1738 def DependenciesForLinkSettings(self, targets):
1740 Returns a list of dependency targets whose link_settings should be merged
1744 # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
1745 # link_settings are propagated. So for now, we will allow it, unless the
1746 # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
1747 # False. Once chrome is fixed, we can remove this flag.
1748 include_shared_libraries = \
1749 targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
1750 return self._LinkDependenciesInternal(targets, include_shared_libraries)
1752 def DependenciesToLinkAgainst(self, targets):
1754 Returns a list of dependency targets that are linked into this target.
1756 return self._LinkDependenciesInternal(targets, True)
# Builds the target-level dependency graph and returns [dependency_nodes,
# flat_list]: a dict of target name -> DependencyGraphNode, and a
# topologically sorted list of target names.
1759 def BuildDependencyList(targets):
1760 # Create a DependencyGraphNode for each target. Put it into a dict for easy
1762 dependency_nodes = {}
1763 for target, spec in targets.iteritems():
1764 if target not in dependency_nodes:
1765 dependency_nodes[target] = DependencyGraphNode(target)
1767 # Set up the dependency links. Targets that have no dependencies are treated
1768 # as dependent on root_node.
1769 root_node = DependencyGraphNode(None)
1770 for target, spec in targets.iteritems():
1771 target_node = dependency_nodes[target]
1772 target_build_file = gyp.common.BuildFile(target)
1773 dependencies = spec.get('dependencies')
1774 if not dependencies:
1775 target_node.dependencies = [root_node]
1776 root_node.dependents.append(target_node)
1778 for dependency in dependencies:
1779 dependency_node = dependency_nodes.get(dependency)
1780 if not dependency_node:
1781 raise GypError("Dependency '%s' not found while "
1782 "trying to load target %s" % (dependency, target))
1783 target_node.dependencies.append(dependency_node)
1784 dependency_node.dependents.append(target_node)
1786 flat_list = root_node.FlattenToList()
1788 # If there's anything left unvisited, there must be a circular dependency
1789 # (cycle). If you need to figure out what's wrong, look for elements of
1790 # targets that are not in flat_list.
1791 if len(flat_list) != len(targets):
1792 raise DependencyGraphNode.CircularException(
1793 'Some targets not reachable, cycle in dependency graph detected: ' +
1794 ' '.join(set(flat_list) ^ set(targets)))
1796 return [dependency_nodes, flat_list]
# Builds a build-file-level dependency graph (one node per .gyp file) and
# raises CircularException if the files form a cycle.
# NOTE(review): non-contiguous numbering — the try/except wrapper around
# BuildFile, 'continue', 'bad_files = []'/'cycles = []' initializers, and the
# inner cycle loop header appear elided; verify against upstream before
# editing. The runtime string "Dependancy" is a typo preserved here because a
# documentation-only pass must not alter program output.
1799 def VerifyNoGYPFileCircularDependencies(targets):
1800 # Create a DependencyGraphNode for each gyp file containing a target. Put
1801 # it into a dict for easy access.
1802 dependency_nodes = {}
1803 for target in targets.iterkeys():
1804 build_file = gyp.common.BuildFile(target)
1805 if not build_file in dependency_nodes:
1806 dependency_nodes[build_file] = DependencyGraphNode(build_file)
1808 # Set up the dependency links.
1809 for target, spec in targets.iteritems():
1810 build_file = gyp.common.BuildFile(target)
1811 build_file_node = dependency_nodes[build_file]
1812 target_dependencies = spec.get('dependencies', [])
1813 for dependency in target_dependencies:
1815 dependency_build_file = gyp.common.BuildFile(dependency)
1817 gyp.common.ExceptionAppend(
1818 e, 'while computing dependencies of .gyp file %s' % build_file)
1821 if dependency_build_file == build_file:
1822 # A .gyp file is allowed to refer back to itself.
1824 dependency_node = dependency_nodes.get(dependency_build_file)
1825 if not dependency_node:
1826 raise GypError("Dependancy '%s' not found" % dependency_build_file)
1827 if dependency_node not in build_file_node.dependencies:
1828 build_file_node.dependencies.append(dependency_node)
1829 dependency_node.dependents.append(build_file_node)
1832 # Files that have no dependencies are treated as dependent on root_node.
1833 root_node = DependencyGraphNode(None)
1834 for build_file_node in dependency_nodes.itervalues():
1835 if len(build_file_node.dependencies) == 0:
1836 build_file_node.dependencies.append(root_node)
1837 root_node.dependents.append(build_file_node)
1839 flat_list = root_node.FlattenToList()
1841 # If there's anything left unvisited, there must be a circular dependency
1843 if len(flat_list) != len(dependency_nodes):
1845 for file in dependency_nodes.iterkeys():
1846 if not file in flat_list:
1847 bad_files.append(file)
# Shorten paths in the error message by stripping the common prefix.
1848 common_path_prefix = os.path.commonprefix(dependency_nodes)
1850 for cycle in root_node.FindCycles():
1851 simplified_paths = []
1853 assert(node.ref.startswith(common_path_prefix))
1854 simplified_paths.append(node.ref[len(common_path_prefix):])
1855 cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
1856 raise DependencyGraphNode.CircularException, \
1857 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
# Merges each dependency's |key| settings dict (all_dependent_settings,
# direct_dependent_settings, or link_settings) into the dependent targets,
# walking targets in topological (flat_list) order.
# NOTE(review): non-contiguous numbering — the 'dependencies =' assignment
# lines, the 'else:' before the raise, and a 'continue' appear elided from
# this listing; verify against upstream before editing.
1860 def DoDependentSettings(key, flat_list, targets, dependency_nodes):
1861 # key should be one of all_dependent_settings, direct_dependent_settings,
1864 for target in flat_list:
1865 target_dict = targets[target]
1866 build_file = gyp.common.BuildFile(target)
# The set of dependencies consulted depends on which settings key is being
# propagated: deep for all_dependent_settings, direct+imported for
# direct_dependent_settings, link dependencies for link_settings.
1868 if key == 'all_dependent_settings':
1869 dependencies = dependency_nodes[target].DeepDependencies()
1870 elif key == 'direct_dependent_settings':
1872 dependency_nodes[target].DirectAndImportedDependencies(targets)
1873 elif key == 'link_settings':
1875 dependency_nodes[target].DependenciesForLinkSettings(targets)
1877 raise GypError("DoDependentSettings doesn't know how to determine "
1878 'dependencies for ' + key)
1880 for dependency in dependencies:
1881 dependency_dict = targets[dependency]
1882 if not key in dependency_dict:
1884 dependency_build_file = gyp.common.BuildFile(dependency)
1885 MergeDicts(target_dict, dependency_dict[key],
1886 build_file, dependency_build_file)
def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    sort_dependencies):
  # Recompute target "dependencies" properties.  For each static library
  # target, remove "dependencies" entries referring to other static libraries,
  # unless the dependency has the "hard_dependency" attribute set.  For each
  # linkable target, add a "dependencies" entry referring to all of the
  # target's computed list of link dependencies (including static libraries)
  # if no such entry is already present.
  for target in flat_list:
    target_dict = targets[target]
    target_type = target_dict['type']

    if target_type == 'static_library':
      if not 'dependencies' in target_dict:
        continue

      target_dict['dependencies_original'] = target_dict.get(
          'dependencies', [])[:]

      # A static library should not depend on another static library unless
      # the dependency relationship is "hard," which should only be done when
      # a dependent relies on some side effect other than just the build
      # product, like a rule or action output.  Further, if a target has a
      # non-hard dependency, but that dependency exports a hard dependency,
      # the non-hard dependency can safely be removed, but the exported hard
      # dependency must be added to the target to keep the same dependency
      # relationship intact.
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
      index = 0
      while index < len(dependencies):
        dependency = dependencies[index]
        dependency_dict = targets[dependency]

        # Remove every non-hard static library dependency and remove every
        # non-static library dependency that isn't a direct dependency.
        if (dependency_dict['type'] == 'static_library' and \
            not dependency_dict.get('hard_dependency', False)) or \
           (dependency_dict['type'] != 'static_library' and \
            not dependency in target_dict['dependencies']):
          # Take the dependency out of the list, and don't increment index
          # because the next dependency to analyze will shift into the index
          # formerly occupied by the one being removed.
          del dependencies[index]
        else:
          index = index + 1

      # Update the dependencies. If the dependencies list is empty, it's not
      # needed, so unhook it.
      if len(dependencies) > 0:
        target_dict['dependencies'] = dependencies
      else:
        del target_dict['dependencies']

    elif target_type in linkable_types:
      # Get a list of dependency targets that should be linked into this
      # target.  Add them to the dependencies list if they're not already
      # present.
      link_dependencies = \
          dependency_nodes[target].DependenciesToLinkAgainst(targets)
      for dependency in link_dependencies:
        if dependency == target:
          continue
        if not 'dependencies' in target_dict:
          target_dict['dependencies'] = []
        if not dependency in target_dict['dependencies']:
          target_dict['dependencies'].append(dependency)
      # Sort the dependencies list in the order from dependents to dependencies.
      # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
      # Note: flat_list is already sorted in the order from dependencies to
      # dependents.
      if sort_dependencies and 'dependencies' in target_dict:
        target_dict['dependencies'] = [dep for dep in reversed(flat_list)
                                       if dep in target_dict['dependencies']]
# Initialize this here to speed up MakePathRelative.
# Matches any |item| that MakePathRelative must return unmodified: an
# optional leading quote character followed by one of the special prefix
# characters -, /, $, <, >, or ^ (see the comment block in MakePathRelative
# for what each prefix means).
exception_re = re.compile(r'''["']?[-/$<>^]''')
def MakePathRelative(to_file, fro_file, item):
  # If item is a relative path, it's relative to the build file dict that it's
  # coming from.  Fix it up to make it relative to the build file dict that
  # it's going into.
  # Exception: any |item| that begins with these special characters is
  # returned without modification.
  #   /   Used when a path is already absolute (shortcut optimization;
  #       such paths would be returned as absolute anyway)
  #   $   Used for build environment variables
  #   -   Used for some build environment flags (such as -lapr-1 in a
  #       "libraries" section)
  #   <   Used for our own variable and command expansions (see ExpandVariables)
  #   >   Used for our own variable and command expansions (see ExpandVariables)
  #   ^   Used for our own variable and command expansions (see ExpandVariables)
  #
  #   "/' Used when a value is quoted.  If these are present, then we
  #       check the second character instead.
  #
  if to_file == fro_file or exception_re.match(item):
    return item
  else:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    ret = os.path.normpath(os.path.join(
        gyp.common.RelativePath(os.path.dirname(fro_file),
                                os.path.dirname(to_file)),
        item)).replace('\\', '/')
    # normpath strips a trailing slash; preserve it, since it may be
    # significant (e.g. for directory paths).  endswith avoids an IndexError
    # on an empty item.
    if item.endswith('/'):
      ret += '/'
    return ret
def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  """Merges list |fro| into list |to| in place.

  Strings and ints are copied directly (path-fixed via MakePathRelative when
  is_paths is set); dicts and lists are deep-merged via MergeDicts/MergeLists.
  Strings not beginning with "-" are singletons: appended at most once, or
  moved to the front when prepending.  append selects append vs. prepend.
  Raises TypeError for unsupported item types.
  """
  # Python documentation recommends objects which do not support hash
  # set this value to None. Python library objects follow this rule.
  is_hashable = lambda val: val.__hash__

  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
  def is_in_set_or_list(x, s, l):
    if is_hashable(x):
      return x in s
    return x in l

  prepend_index = 0

  # Make membership testing of hashables in |to| (in particular, strings)
  # faster.
  hashable_to_set = set(x for x in to if is_hashable(x))
  for item in fro:
    singleton = False
    if type(item) in (str, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item

      if not (type(item) is str and item.startswith('-')):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif type(item) is dict:
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif type(item) is list:
      # Recurse, making a copy of the list.  If the list contains any
      # descendant dicts, path fixing will occur.  Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      raise TypeError(
          'Attempt to merge list item of unsupported type ' +
          item.__class__.__name__)

    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
        to.append(to_item)
        if is_hashable(to_item):
          hashable_to_set.add(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend.  This ensures that the
      # item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)

      # Don't just insert everything at index 0.  That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      if is_hashable(to_item):
        hashable_to_set.add(to_item)
      prepend_index = prepend_index + 1
def MergeDicts(to, fro, to_file, fro_file):
  """Merges dict |fro| into dict |to| in place, with copy semantics.

  Scalars overwrite; dicts recurse; list keys honor the =/+/? policy
  suffixes.  Path-type sections (per IsPathSection) are rebased from
  fro_file's directory to to_file's.  Raises TypeError on incompatible
  value types and GypError on conflicting list policies.
  """
  # I wanted to name the parameter "from" but it's a Python keyword...
  for k, v in fro.items():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
    # copy semantics.  Something else may want to merge from the |fro| dict
    # later, and having the same dict ref pointed to twice in the tree isn't
    # what anyone wants considering that the dicts may subsequently be
    # modified.
    if k in to:
      bad_merge = False
      if type(v) in (str, int):
        if type(to[k]) not in (str, int):
          bad_merge = True
      elif type(v) is not type(to[k]):
        bad_merge = True

      if bad_merge:
        raise TypeError(
            'Attempt to merge dict value of type ' + v.__class__.__name__ +
            ' into incompatible type ' + to[k].__class__.__name__ +
            ' for key ' + k)
    if type(v) in (str, int):
      # Overwrite the existing value, if any.  Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif type(v) is dict:
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif type(v) is list:
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has          ...the to-list will have this action
      # this character appended:...  applied when receiving the from-list:
      #                                  append
      #   =                              replace
      #   +                              prepend
      #   ?                              set, only if to-list does not yet exist
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
      ext = k[-1]
      append = True
      if ext == '=':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '?']
        to[list_base] = []
      elif ext == '+':
        list_base = k[:-1]
        lists_incompatible = [list_base + '=', list_base + '?']
        append = False
      elif ext == '?':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
      else:
        list_base = k
        lists_incompatible = [list_base + '=', list_base + '?']

      # Some combinations of merge policies appearing together are meaningless.
      # It's stupid to replace and append simultaneously, for example.  Append
      # and prepend are the only policies that can coexist.
      for list_incompatible in lists_incompatible:
        if list_incompatible in fro:
          raise GypError('Incompatible list policies ' + k + ' and ' +
                         list_incompatible)

      if list_base in to:
        if ext == '?':
          # If the key ends in "?", the list will only be merged if it doesn't
          # already exist.
          continue
        elif type(to[list_base]) is not list:
          # This may not have been checked above if merging in a list with an
          # extension character.
          raise TypeError(
              'Attempt to merge dict value of type ' + v.__class__.__name__ +
              ' into incompatible type ' + to[list_base].__class__.__name__ +
              ' for key ' + list_base + '(' + k + ')')
      else:
        to[list_base] = []

      # Call MergeLists, which will make copies of objects that require it.
      # MergeLists can recurse back into MergeDicts, although this will be
      # to make copies of dicts (with paths fixed), there will be no
      # subsequent dict "merging" once entering a list because lists are
      # always replaced, appended to, or prepended to.
      is_paths = IsPathSection(list_base)
      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
    else:
      raise TypeError(
          'Attempt to merge dict value of unsupported type ' +
          v.__class__.__name__ + ' for key ' + k)
def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  """Merges |configuration| (and, recursively, everything it inherits from)
  into new_configuration_dict, then drops the 'abstract' marker.

  visited carries the inheritance chain seen so far so that inheritance
  cycles terminate instead of recursing forever.
  """
  # Skip if previously visited.
  if configuration in visited:
    return

  # Look at this configuration.
  configuration_dict = target_dict['configurations'][configuration]

  # Merge in parents first so this configuration's own settings win.
  for parent in configuration_dict.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])

  # Merge it into the new config.
  MergeDicts(new_configuration_dict, configuration_dict,
             build_file, build_file)

  # Drop abstract: the merged result is concrete by construction.
  if 'abstract' in new_configuration_dict:
    del new_configuration_dict['abstract']
def SetUpConfigurations(target, target_dict):
  """Builds each concrete configuration of |target| by copying the target's
  non-configuration keys into it and merging in its inheritance chain, then
  strips those keys (and abstract configurations) from the target dict.

  Raises GypError if a configuration contains a key from
  invalid_configuration_keys.
  """
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /).  Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']

  build_file = gyp.common.BuildFile(target)

  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    concrete = [i for (i, config) in target_dict['configurations'].items()
                if not config.get('abstract')]
    target_dict['default_configuration'] = sorted(concrete)[0]

  merged_configurations = {}
  configs = target_dict['configurations']
  for (configuration, old_configuration_dict) in configs.items():
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = {}
    for (key, target_val) in target_dict.items():
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if not key_base in non_configuration_keys:
        new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)

    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])

    merged_configurations[configuration] = new_configuration_dict

  # Put the new configurations back into the target dict as a configuration.
  for configuration in merged_configurations.keys():
    target_dict['configurations'][configuration] = (
        merged_configurations[configuration])

  # Now drop all the abstract ones.  Iterate over a snapshot of the keys
  # because entries are deleted during the loop.
  for configuration in list(target_dict['configurations'].keys()):
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]

  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]

  # Check the configurations to see if they contain invalid keys.
  for configuration in target_dict['configurations'].keys():
    configuration_dict = target_dict['configurations'][configuration]
    for key in configuration_dict.keys():
      if key in invalid_configuration_keys:
        raise GypError('%s not allowed in the %s configuration, found in '
                       'target %s' % (key, configuration, target))
def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!".  Every item in such a list is removed from the associated
  main list, which in this example, would be "sources".  Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
  _win.cc.  The second filter then includes all files ending in _mac.cc that
  are now or were once in the "sources" list.  Items matching an "exclude"
  filter are subject to the same processing as would occur if they were listed
  by name in an exclusion list (ending in "!").  Items matching an "include"
  filter are brought back into the main list if previously excluded by an
  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
  patterns can still cause items to be excluded after matching an "include".
  """

  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists.  Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # then into |lists|.  This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.
  lists = []
  del_lists = []
  for key, value in the_dict.items():
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue

    if type(value) is not list:
      raise ValueError(name + ' key ' + key + ' must be list, not ' +
                       value.__class__.__name__)

    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no corresponding
      # "sources" list.  Since there's nothing for it to operate on, queue up
      # the "sources!" list for deletion now.
      del_lists.append(key)
      continue

    if type(the_dict[list_key]) is not list:
      value = the_dict[list_key]
      raise ValueError(name + ' key ' + list_key +
                       ' must be list, not ' +
                       value.__class__.__name__ + ' when applying ' +
                       {'!': 'exclusion', '/': 'regex'}[operation])

    if not list_key in lists:
      lists.append(list_key)

  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]

  for list_key in lists:
    the_list = the_dict[list_key]

    # Initialize the list_actions list, which is parallel to the_list.  Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied.  Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1.  Includes and
    # excludes override previous actions.  All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))

    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in range(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0

      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]

    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)

        if action == 'exclude':
          # This item matches an exclude regex, so set its value to 0 (exclude).
          action_value = 0
        elif action == 'include':
          # This item matches an include regex, so set its value to 1 (include).
          action_value = 1
        else:
          # This is an action that doesn't make any sense.
          raise ValueError('Unrecognized action ' + action + ' in ' + name +
                           ' key ' + regex_key)

        for index in range(0, len(the_list)):
          list_item = the_list[index]
          if list_actions[index] == action_value:
            # Even if the regex matches, nothing will change so continue (regex
            # searches are expensive).
            continue
          if pattern_re.search(list_item):
            # Regular expression match.
            list_actions[index] = action_value

      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]

    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded").  The exclude_key list is input and it was already
    # processed and deleted; the excluded_key list is output and it's about
    # to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise GypError(name + ' key ' + excluded_key +
                     ' must not be present prior '
                     ' to applying exclusion/regex filters for ' + list_key)

    excluded_list = []

    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift.  That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in range(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude).  Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]

    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list

  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.items():
    if type(value) is dict:
      ProcessListFiltersInDict(key, value)
    elif type(value) is list:
      ProcessListFiltersInList(key, value)
def ProcessListFiltersInList(name, the_list):
  """Recursively applies list filters to any dicts found inside |the_list|.

  Scalar entries are left untouched; dict entries are handed off to
  ProcessListFiltersInDict, and nested lists are walked recursively.
  """
  for entry in the_list:
    entry_type = type(entry)
    if entry_type is dict:
      ProcessListFiltersInDict(name, entry)
    elif entry_type is list:
      ProcessListFiltersInList(name, entry)
def ValidateTargetType(target, target_dict):
  """Ensures the 'type' field on the target is one of the known types.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec.

  Raises an exception on error.
  """
  VALID_TARGET_TYPES = ('executable', 'loadable_module',
                        'static_library', 'shared_library',
                        'none')
  target_type = target_dict.get('type', None)
  if target_type not in VALID_TARGET_TYPES:
    raise GypError("Target %s has an invalid target type '%s'. "
                   "Must be one of %s." %
                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
  # standalone_static_library only makes sense on a static_library target.
  if (target_dict.get('standalone_static_library', 0) and
      not target_type == 'static_library'):
    raise GypError('Target %s has type %s but standalone_static_library flag is'
                   ' only valid for static_library type.' % (target,
                                                             target_type))
def ValidateSourcesInTarget(target, target_dict, build_file):
  """Ensures a library target has no two compiled sources with the same
  basename; some build systems (e.g. MSVC08) would clobber their outputs.

  Raises GypError after printing the offending files if duplicates exist.
  """
  # TODO: Check if MSVC allows this for loadable_module targets.
  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
    return
  sources = target_dict.get('sources', [])
  basenames = {}
  for source in sources:
    name, ext = os.path.splitext(source)
    is_compiled_file = ext in [
        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
    if not is_compiled_file:
      # Headers and other non-compiled files can't collide on output names.
      continue
    basename = os.path.basename(name)  # Don't include extension.
    basenames.setdefault(basename, []).append(source)

  error = ''
  for basename, files in basenames.items():
    if len(files) > 1:
      error += ' %s: %s\n' % (basename, ' '.join(files))

  if error:
    print('static library %s has several files with the same basename:\n' %
          target + error + 'Some build systems, e.g. MSVC08, '
          'cannot handle that.')
    raise GypError('Duplicate basenames in sources section, see list above')
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.
  """

  # Dicts to map between values found in rules' 'rule_name' and 'extension'
  # keys and the rule dicts themselves.
  rule_names = {}
  rule_extensions = {}

  rules = target_dict.get('rules', [])
  for rule in rules:
    # Make sure that there's no conflict among rule names and extensions.
    rule_name = rule['rule_name']
    if rule_name in rule_names:
      raise GypError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    rule_names[rule_name] = rule

    rule_extension = rule['extension']
    if rule_extension.startswith('.'):
      # Extensions are matched without the leading dot.
      rule_extension = rule_extension[1:]
    if rule_extension in rule_extensions:
      raise GypError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (rule_extension, target,
                      rule_extensions[rule_extension]['rule_name'],
                      rule_name))
    rule_extensions[rule_extension] = rule

    # Make sure rule_sources isn't already there.  It's going to be
    # created below if needed.
    if 'rule_sources' in rule:
      raise GypError(
          'rule_sources must not exist in input, target %s rule %s' %
          (target, rule_name))

    rule_sources = []
    source_keys = ['sources']
    source_keys.extend(extra_sources_for_rules)
    for source_key in source_keys:
      for source in target_dict.get(source_key, []):
        (source_root, source_extension) = os.path.splitext(source)
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == rule_extension:
          # The source file's extension matches this rule; the rule applies.
          rule_sources.append(source)

    if len(rule_sources) > 0:
      rule['rule_sources'] = rule_sources
def ValidateRunAsInTarget(target, target_dict, build_file):
  """Validates the optional 'run_as' section of a target.

  run_as must be a dict with a list-valued 'action', an optional string
  'working_directory', and an optional dict 'environment'.  Raises GypError
  on any violation; absent run_as is fine.
  """
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  if not run_as:
    return
  if type(run_as) is not dict:
    raise GypError("The 'run_as' in target %s from file %s should be a "
                   'dictionary.' %
                   (target_name, build_file))
  action = run_as.get('action')
  if not action:
    raise GypError("The 'run_as' in target %s from file %s must have an "
                   "'action' section." %
                   (target_name, build_file))
  if type(action) is not list:
    raise GypError("The 'action' for 'run_as' in target %s from file %s "
                   'must be a list.' %
                   (target_name, build_file))
  working_directory = run_as.get('working_directory')
  if working_directory and type(working_directory) is not str:
    raise GypError("The 'working_directory' for 'run_as' in target %s "
                   "in file %s should be a string." %
                   (target_name, build_file))
  environment = run_as.get('environment')
  if environment and type(environment) is not dict:
    raise GypError("The 'environment' for 'run_as' in target %s "
                   "in file %s should be a dictionary." %
                   (target_name, build_file))
def ValidateActionsInTarget(target, target_dict, build_file):
  '''Validates the inputs to the actions in a target.

  Every action must carry an 'action_name', an 'inputs' key (which may be an
  empty list), and a non-empty command word if 'action' is present.  Raises
  GypError on violation.
  '''
  target_name = target_dict.get('target_name')
  actions = target_dict.get('actions', [])
  for action in actions:
    action_name = action.get('action_name')
    if not action_name:
      raise GypError("Anonymous action in target %s.  "
                     "An action must have an 'action_name' field." %
                     target_name)
    inputs = action.get('inputs', None)
    if inputs is None:
      raise GypError('Action in target %s has no inputs.' % target_name)
    action_command = action.get('action')
    if action_command and not action_command[0]:
      raise GypError("Empty action as command in target %s." % target_name)
def TurnIntIntoStrInDict(the_dict):
  """Given dict the_dict, recursively converts all integers into strings.
  """
  # Use items instead of iteritems because there's no need to try to look at
  # reinserted keys and their associated values.  The list() copy is required
  # because integer keys are deleted and re-added as strings while iterating.
  for k, v in list(the_dict.items()):
    if type(v) is int:
      # Rebind v so the int-key branch below re-inserts the converted value.
      v = str(v)
      the_dict[k] = v
    elif type(v) is dict:
      TurnIntIntoStrInDict(v)
    elif type(v) is list:
      TurnIntIntoStrInList(v)

    if type(k) is int:
      del the_dict[k]
      the_dict[str(k)] = v
def TurnIntIntoStrInList(the_list):
  """Given list the_list, recursively converts all integers into strings.
  """
  # enumerate instead of xrange/index arithmetic: works on both Python 2
  # and 3, and mutating by index during enumerate is safe.
  for index, item in enumerate(the_list):
    if type(item) is int:
      the_list[index] = str(item)
    elif type(item) is dict:
      TurnIntIntoStrInDict(item)
    elif type(item) is list:
      TurnIntIntoStrInList(item)
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
                         data):
  """Return only the targets that are deep dependencies of |root_targets|.

  Returns a (wanted_targets, wanted_flat_list) pair and also prunes the
  per-build-file 'targets' lists inside |data| to match.  Raises GypError
  if a root target cannot be found in flat_list.
  """
  qualified_root_targets = []
  for target in root_targets:
    target = target.strip()
    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
    if not qualified_targets:
      raise GypError("Could not find target %s" % target)
    qualified_root_targets.extend(qualified_targets)

  wanted_targets = {}
  for target in qualified_root_targets:
    wanted_targets[target] = targets[target]
    for dependency in dependency_nodes[target].DeepDependencies():
      wanted_targets[dependency] = targets[dependency]

  wanted_flat_list = [t for t in flat_list if t in wanted_targets]

  # Prune unwanted targets from each build_file's data dict.
  for build_file in data['target_build_files']:
    if not 'targets' in data[build_file]:
      continue
    new_targets = []
    for target in data[build_file]['targets']:
      qualified_name = gyp.common.QualifiedTarget(build_file,
                                                  target['target_name'],
                                                  target['toolset'])
      if qualified_name in wanted_targets:
        new_targets.append(target)
    data[build_file]['targets'] = new_targets

  return wanted_targets, wanted_flat_list
def VerifyNoCollidingTargets(targets):
  """Verify that no two targets in the same directory share the same name.

  Arguments:
    targets: A list of targets in the form 'path/to/file.gyp:target_name'.

  Raises GypError on the first collision found.
  """
  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
  used = {}
  for target in targets:
    # Separate out 'path/to/file.gyp, 'target_name' from
    # 'path/to/file.gyp:target_name'.
    path, name = target.rsplit(':', 1)
    # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
    subdir, gyp = os.path.split(path)
    # Use '.' for the current directory '', so that the error messages make
    # more sense.
    if not subdir:
      subdir = '.'
    # Prepare a key like 'path/to:target_name'.
    key = subdir + ':' + name
    if key in used:
      # Complain if this target is already used.
      raise GypError('Duplicate target name "%s" in directory "%s" used both '
                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
    used[key] = gyp
def SetGeneratorGlobals(generator_input_info):
  """Populates module-level settings from generator-supplied data.

  Combines the module's default path_sections and non_configuration_keys
  with the generator-specific additions, and records the generator's
  toolset support flag and filelist paths.
  """
  global path_sections
  global non_configuration_keys
  global multiple_toolsets
  global generator_filelist_paths

  # Set up path_sections and non_configuration_keys with the default data plus
  # the generator-specific data.
  path_sections = set(base_path_sections)
  path_sections.update(generator_input_info['path_sections'])

  non_configuration_keys = base_non_configuration_keys[:]
  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])

  multiple_toolsets = \
      generator_input_info['generator_supports_multiple_toolsets']

  generator_filelist_paths = generator_input_info['generator_filelist_paths']
2712 def Load(build_files, variables, includes, depth, generator_input_info, check,
2713 circular_check, parallel, root_targets):
2714 SetGeneratorGlobals(generator_input_info)
2715 # A generator can have other lists (in addition to sources) be processed
2717 extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
2719 # Load build files. This loads every target-containing build file into
2720 # the |data| dictionary such that the keys to |data| are build file names,
2721 # and the values are the entire build file contents after "early" or "pre"
2722 # processing has been done and includes have been resolved.
2723 # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
2724 # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
2725 # track of the keys corresponding to "target" files.
2726 data = {'target_build_files': set()}
2728 # Normalize paths everywhere. This is important because paths will be
2729 # used as keys to the data dict and for references between input files.
2730 build_files = set(map(os.path.normpath, build_files))
2732 LoadTargetBuildFilesParallel(build_files, data, aux_data,
2733 variables, includes, depth, check,
2734 generator_input_info)
2736 for build_file in build_files:
2738 LoadTargetBuildFile(build_file, data, aux_data,
2739 variables, includes, depth, check, True)
2740 except Exception, e:
2741 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
2744 # Build a dict to access each target's subdict by qualified name.
2745 targets = BuildTargetsDict(data)
2747 # Fully qualify all dependency links.
2748 QualifyDependencies(targets)
2750 # Remove self-dependencies from targets that have 'prune_self_dependencies'
2752 RemoveSelfDependencies(targets)
2754 # Expand dependencies specified as build_file:*.
2755 ExpandWildcardDependencies(targets, data)
2757 # Remove all dependencies marked as 'link_dependency' from the targets of
2759 RemoveLinkDependenciesFromNoneTargets(targets)
2761 # Apply exclude (!) and regex (/) list filters only for dependency_sections.
2762 for target_name, target_dict in targets.iteritems():
2764 for key_base in dependency_sections:
2765 for op in ('', '!', '/'):
2767 if key in target_dict:
2768 tmp_dict[key] = target_dict[key]
2769 del target_dict[key]
2770 ProcessListFiltersInDict(target_name, tmp_dict)
2771 # Write the results back to |target_dict|.
2772 for key in tmp_dict:
2773 target_dict[key] = tmp_dict[key]
2775 # Make sure every dependency appears at most once.
2776 RemoveDuplicateDependencies(targets)
2779 # Make sure that any targets in a.gyp don't contain dependencies in other
2780 # .gyp files that further depend on a.gyp.
2781 VerifyNoGYPFileCircularDependencies(targets)
2783 [dependency_nodes, flat_list] = BuildDependencyList(targets)
2786 # Remove, from |targets| and |flat_list|, the targets that are not deep
2787 # dependencies of the targets specified in |root_targets|.
2788 targets, flat_list = PruneUnwantedTargets(
2789 targets, flat_list, dependency_nodes, root_targets, data)
2791 # Check that no two targets in the same directory have the same name.
2792 VerifyNoCollidingTargets(flat_list)
2794 # Handle dependent settings of various types.
2795 for settings_type in ['all_dependent_settings',
2796 'direct_dependent_settings',
2798 DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
2800 # Take out the dependent settings now that they've been published to all
2801 # of the targets that require them.
2802 for target in flat_list:
2803 if settings_type in targets[target]:
2804 del targets[target][settings_type]
2806 # Make sure static libraries don't declare dependencies on other static
2807 # libraries, but that linkables depend on all unlinked static libraries
2808 # that they need so that their link steps will be correct.
2809 gii = generator_input_info
2810 if gii['generator_wants_static_library_dependencies_adjusted']:
2811 AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
2812 gii['generator_wants_sorted_dependencies'])
2814 # Apply "post"/"late"/"target" variable expansions and condition evaluations.
2815 for target in flat_list:
2816 target_dict = targets[target]
2817 build_file = gyp.common.BuildFile(target)
2818 ProcessVariablesAndConditionsInDict(
2819 target_dict, PHASE_LATE, variables, build_file)
2821 # Move everything that can go into a "configurations" section into one.
2822 for target in flat_list:
2823 target_dict = targets[target]
2824 SetUpConfigurations(target, target_dict)
2826 # Apply exclude (!) and regex (/) list filters.
2827 for target in flat_list:
2828 target_dict = targets[target]
2829 ProcessListFiltersInDict(target, target_dict)
2831 # Apply "latelate" variable expansions and condition evaluations.
2832 for target in flat_list:
2833 target_dict = targets[target]
2834 build_file = gyp.common.BuildFile(target)
2835 ProcessVariablesAndConditionsInDict(
2836 target_dict, PHASE_LATELATE, variables, build_file)
2838 # Make sure that the rules make sense, and build up rule_sources lists as
2839 # needed. Not all generators will need to use the rule_sources lists, but
2840 # some may, and it seems best to build the list in a common spot.
2841 # Also validate actions and run_as elements in targets.
2842 for target in flat_list:
2843 target_dict = targets[target]
2844 build_file = gyp.common.BuildFile(target)
2845 ValidateTargetType(target, target_dict)
2846 # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
2847 # scalesystemdependent_arm_additions.c or similar.
2848 if 'arm' not in variables.get('target_arch', ''):
2849 ValidateSourcesInTarget(target, target_dict, build_file)
2850 ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
2851 ValidateRunAsInTarget(target, target_dict, build_file)
2852 ValidateActionsInTarget(target, target_dict, build_file)
2854 # Generators might not expect ints. Turn them into strs.
2855 TurnIntIntoStrInDict(data)
2857 # TODO(mark): Return |data| for now because the generator needs a list of
2858 # build files that came in. In the future, maybe it should just accept
2859 # a list, and not the whole data dict.
2860 return [flat_list, targets, data]