1 # Copyright (c) 2012 Google Inc. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 from compiler.ast import Const
6 from compiler.ast import Dict
7 from compiler.ast import Discard
8 from compiler.ast import List
9 from compiler.ast import Module
10 from compiler.ast import Node
11 from compiler.ast import Stmt
15 import multiprocessing
26 from gyp.common import GypError
29 # A list of types that are treated as linkable.
30 linkable_types = ['executable', 'shared_library', 'loadable_module']
32 # A list of sections that contain links to other targets.
33 dependency_sections = ['dependencies', 'export_dependent_settings']
35 # base_path_sections is a list of sections defined by GYP that contain
36 # pathnames. The generators can provide more keys, the two lists are merged
37 # into path_sections, but you should call IsPathSection instead of using either
# list directly.  NOTE(review): the body of base_path_sections is elided in
# this excerpt; only the opening bracket is visible.
39 base_path_sections = [
# Characters that may trail a section name to control list merging; they are
# stripped by IsPathSection before the section name is classified.
50 is_path_section_charset = set('=+?!')
# Heuristic: section names ending in _dir(s)/_file(s)/_path(s) hold pathnames.
51 is_path_section_match_re = re.compile('_(dir|file|path)s?$')
def IsPathSection(section):
  """Returns True if |section| names a list of paths.

  A section is path-valued if, after stripping any trailing merge-control
  characters ('=', '+', '?', '!'), its name appears in the module-level
  path_sections set or ends in a _dir/_file/_path style suffix.
  """
  # If section ends in one of these characters, it's applied to a section
  # without the trailing characters. '/' is notably absent from this list,
  # because there's no way for a regular expression to be treated as a path.
  while section[-1:] in is_path_section_charset:
    section = section[:-1]
  # bool() normalizes the result: previously this returned either a bool
  # (membership test) or a MatchObject/None (regex search), depending on
  # which test decided.  Callers only use truthiness, so this is compatible.
  return section in path_sections or \
         bool(is_path_section_match_re.search(section))
61 # base_non_configuration_keys is a list of key names that belong in the target
62 # itself and should not be propagated into its configurations. It is merged
63 # with a list that can come from the generator to
64 # create non_configuration_keys.
65 base_non_configuration_keys = [
66 # Sections that must exist inside targets and not configurations.
# NOTE(review): most entries of this list are elided in this excerpt; only a
# sample of the keys is visible below.
70 'default_configuration',
72 'dependencies_original',
82 'standalone_static_library',
89 # Sections that can be found inside targets or configurations, but that
90 # should not be propagated from targets into their configurations.
# Populated later by merging base_non_configuration_keys with
# generator-provided keys (merge code not visible in this excerpt).
93 non_configuration_keys = []
95 # Keys that do not belong inside a configuration dictionary.
96 invalid_configuration_keys = [
98 'all_dependent_settings',
101 'direct_dependent_settings',
105 'standalone_static_library',
110 # Controls whether or not the generator supports multiple toolsets.
111 multiple_toolsets = False
113 # Paths for converting filelist paths to output paths: {
115 # qualified_output_dir,
# None until a generator supplies toplevel/qualified_out_dir paths; consumed
# by the <|(listfile) handling in ExpandVariables.
117 generator_filelist_paths = None
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly. Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file. Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers. It
  is used for recursion.

  The returned list will not contain any duplicate entries. Each build file
  in the list will be relative to the current directory.
  """
  # Accumulator is created fresh per top-level call; a mutable default
  # argument would be shared across calls.
  if included is None:
    included = []

  # Already visited: stop, which both deduplicates and breaks include cycles.
  if build_file_path in included:
    return included

  included.append(build_file_path)

  for included_build_file in aux_data[build_file_path].get('included', []):
    GetIncludedBuildFiles(included_build_file, aux_data, included)

  return included
152 def CheckedEval(file_contents):
153 """Return the eval of a gyp file.
155 The gyp file is restricted to dictionaries and lists only, and
156 repeated keys are not allowed.
158 Note that this is slower than eval() is.
# NOTE(review): relies on the Python-2-only `compiler` package (removed in
# Python 3); a port would need to use the `ast` module instead.
161 ast = compiler.parse(file_contents)
162 assert isinstance(ast, Module)
163 c1 = ast.getChildren()
# A Module's children are (docstring, Stmt); descend Module -> Stmt ->
# Discard to reach the single top-level expression node of the gyp file.
165 assert isinstance(c1[1], Stmt)
166 c2 = c1[1].getChildren()
167 assert isinstance(c2[0], Discard)
168 c3 = c2[0].getChildren()
# CheckNode validates the tree (dict/list/const only, no repeated keys)
# and converts it to plain Python values.
170 return CheckNode(c3[0], [])
# Recursively converts a `compiler` AST node into plain Python values,
# rejecting anything other than dict/list/constant literals.  keypath is the
# list of keys/indices leading to `node`, used only for error messages.
173 def CheckNode(node, keypath):
174 if isinstance(node, Dict):
175 c = node.getChildren()
# A Dict node's children alternate key, value, key, value, ...
177 for n in range(0, len(c), 2):
178 assert isinstance(c[n], Const)
179 key = c[n].getChildren()[0]
# A repeated key in a literal would silently drop data, so it is an error.
181 raise GypError("Key '" + key + "' repeated at level " +
182 repr(len(keypath) + 1) + " with key path '" +
183 '.'.join(keypath) + "'")
184 kp = list(keypath) # Make a copy of the list for descending this node.
# NOTE(review): `dict` shadows the builtin here; presumably initialized to {}
# on an elided line above -- confirm against the full source.
186 dict[key] = CheckNode(c[n + 1], kp)
188 elif isinstance(node, List):
189 c = node.getChildren()
191 for index, child in enumerate(c):
192 kp = list(keypath) # Copy list.
193 kp.append(repr(index))
194 children.append(CheckNode(child, kp))
196 elif isinstance(node, Const):
197 return node.getChildren()[0]
# Python 2 raise syntax; any node that is not Dict/List/Const is rejected.
199 raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
# Reads, evals, and caches one .gyp/.gypi file, then merges its includes.
# Returns the file's dict; results are memoized in `data` keyed by path.
203 def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
205 if build_file_path in data:
206 return data[build_file_path]
208 if os.path.exists(build_file_path):
209 build_file_contents = open(build_file_path).read()
211 raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
213 build_file_data = None
# CheckedEval rejects repeated keys; the plain eval() path (taken on an
# elided branch) runs with builtins disabled so a gyp file cannot call
# arbitrary functions.
216 build_file_data = CheckedEval(build_file_contents)
218 build_file_data = eval(build_file_contents, {'__builtins__': None},
# Python 2 except syntax throughout this function.
220 except SyntaxError, e:
# Attach the file name so the user sees which file failed to parse.
221 e.filename = build_file_path
224 gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
227 if not isinstance(build_file_data, dict):
228 raise GypError("%s does not evaluate to a dictionary." % build_file_path)
230 data[build_file_path] = build_file_data
231 aux_data[build_file_path] = {}
233 # Scan for includes and merge them in.
234 if ('skip_includes' not in build_file_data or
235 not build_file_data['skip_includes']):
# First call merges the caller-supplied `includes`; the second call (on an
# elided alternate branch) handles the no-extra-includes case.
238 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
239 aux_data, variables, includes, check)
241 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
242 aux_data, variables, None, check)
244 gyp.common.ExceptionAppend(e,
245 'while reading includes of ' + build_file_path)
248 return build_file_data
# Merges every file named by `includes` plus subdict['includes'] into
# subdict, recording each included path in aux_data[subdict_path]['included']
# so GetIncludedBuildFiles can enumerate them later.
251 def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
252 variables, includes, check):
# NOTE(review): includes_list is initialized on an elided line above.
255 includes_list.extend(includes)
256 if 'includes' in subdict:
257 for include in subdict['includes']:
258 # "include" is specified relative to subdict_path, so compute the real
259 # path to include by appending the provided "include" to the directory
260 # in which subdict_path resides.
262 os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
263 includes_list.append(relative_include)
264 # Unhook the includes list, it's no longer needed.
265 del subdict['includes']
267 # Merge in the included files.
268 for include in includes_list:
269 if not 'included' in aux_data[subdict_path]:
270 aux_data[subdict_path]['included'] = []
271 aux_data[subdict_path]['included'].append(include)
273 gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
# The loaded include is merged into subdict by an elided MergeDicts call.
276 LoadOneBuildFile(include, data, aux_data, variables, None,
278 subdict_path, include)
280 # Recurse into subdictionaries.
281 for k, v in subdict.iteritems():
282 if v.__class__ == dict:
283 LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
285 elif v.__class__ == list:
286 LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
290 # This recurses into lists so that it can look for dicts.
291 def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
# Only dicts can carry an 'includes' key, so list elements are simply walked
# looking for nested dicts/lists (the `for item in sublist` header is elided
# in this excerpt).
294 if item.__class__ == dict:
295 LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
296 variables, None, check)
297 elif item.__class__ == list:
298 LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
def ProcessToolsetsInDict(data):
  """Expands each target in data['targets'] into one copy per toolset.

  A target that already has an explicit 'toolset' and no 'toolsets' list is
  left untouched.  Otherwise its 'toolsets' list (default ['target'], forced
  to ['target'] when the generator does not support multiple toolsets) is
  consumed: one deep copy of the target is emitted per additional toolset,
  each copy tagged with a single 'toolset' key.  Also recurses into
  'conditions' entries, since their true/false dicts may define targets.
  """
  if 'targets' in data:
    target_list = data['targets']
    new_target_list = []
    for target in target_list:
      # If this target already has an explicit 'toolset', and no 'toolsets'
      # list, don't modify it further.
      if 'toolset' in target and 'toolsets' not in target:
        new_target_list.append(target)
        continue
      if multiple_toolsets:
        toolsets = target.get('toolsets', ['target'])
      else:
        toolsets = ['target']
      # Make sure this 'toolsets' definition is only processed once.
      if 'toolsets' in target:
        del target['toolsets']
      if len(toolsets) > 0:
        # Optimization: only do copies if more than one toolset is specified.
        for build in toolsets[1:]:
          new_target = copy.deepcopy(target)
          new_target['toolset'] = build
          new_target_list.append(new_target)
        target['toolset'] = toolsets[0]
        new_target_list.append(target)
    data['targets'] = new_target_list
  if 'conditions' in data:
    for condition in data['conditions']:
      if isinstance(condition, list):
        # condition[0] is the expression string; the remaining entries are
        # the true/false dicts, either of which may define targets.
        for condition_dict in condition[1:]:
          ProcessToolsetsInDict(condition_dict)
336 # TODO(mark): I don't love this name. It just means that it's going to load
337 # a build file that contains targets and is expected to provide a targets dict
338 # that contains the targets...
# Returns (build_file_path, dependencies) where dependencies is the list of
# other build files referenced by this file's targets.
339 def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
340 depth, check, load_dependencies):
341 # If depth is set, predefine the DEPTH variable to be a relative path from
342 # this build file's directory to the directory identified by depth.
344 # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
345 # temporary measure. This should really be addressed by keeping all paths
346 # in POSIX until actual project generation.
347 d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
349 variables['DEPTH'] = '.'
351 variables['DEPTH'] = d.replace('\\', '/')
# Each target build file is processed at most once.
353 if build_file_path in data['target_build_files']:
356 data['target_build_files'].add(build_file_path)
358 gyp.DebugOutput(gyp.DEBUG_INCLUDES,
359 "Loading Target Build File '%s'", build_file_path)
361 build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
362 includes, True, check)
364 # Store DEPTH for later use in generators.
365 build_file_data['_DEPTH'] = depth
367 # Set up the included_files key indicating which .gyp files contributed to
369 if 'included_files' in build_file_data:
370 raise GypError(build_file_path + ' must not contain included_files key')
372 included = GetIncludedBuildFiles(build_file_path, aux_data)
373 build_file_data['included_files'] = []
374 for included_file in included:
375 # included_file is relative to the current directory, but it needs to
376 # be made relative to build_file_path's directory.
377 included_relative = \
378 gyp.common.RelativePath(included_file,
379 os.path.dirname(build_file_path))
380 build_file_data['included_files'].append(included_relative)
382 # Do a first round of toolsets expansion so that conditions can be defined
384 ProcessToolsetsInDict(build_file_data)
386 # Apply "pre"/"early" variable expansions and condition evaluations.
387 ProcessVariablesAndConditionsInDict(
388 build_file_data, PHASE_EARLY, variables, build_file_path)
390 # Since some toolsets might have been defined conditionally, perform
391 # a second round of toolsets expansion now.
392 ProcessToolsetsInDict(build_file_data)
394 # Look at each project's target_defaults dict, and merge settings into
396 if 'target_defaults' in build_file_data:
397 if 'targets' not in build_file_data:
398 raise GypError("Unable to find targets in build file %s" %
402 while index < len(build_file_data['targets']):
403 # This procedure needs to give the impression that target_defaults is
404 # used as defaults, and the individual targets inherit from that.
405 # The individual targets need to be merged into the defaults. Make
406 # a deep copy of the defaults for each target, merge the target dict
407 # as found in the input file into that copy, and then hook up the
408 # copy with the target-specific data merged into it as the replacement
410 old_target_dict = build_file_data['targets'][index]
411 new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
412 MergeDicts(new_target_dict, old_target_dict,
413 build_file_path, build_file_path)
414 build_file_data['targets'][index] = new_target_dict
# target_defaults has been folded into every target; drop it so it is not
# treated as a target itself.
418 del build_file_data['target_defaults']
420 # Look for dependencies. This means that dependency resolution occurs
421 # after "pre" conditionals and variable expansion, but before "post" -
422 # in other words, you can't put a "dependencies" section inside a "post"
423 # conditional within a target.
426 if 'targets' in build_file_data:
427 for target_dict in build_file_data['targets']:
428 if 'dependencies' not in target_dict:
430 for dependency in target_dict['dependencies']:
# Each dependency is resolved to the build file that defines it; the
# accumulating list construction is partially elided here.
432 gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
434 if load_dependencies:
# Serial mode: recurse immediately.  In parallel mode the caller schedules
# the returned dependency list instead.
435 for dependency in dependencies:
437 LoadTargetBuildFile(dependency, data, aux_data, variables,
438 includes, depth, check, load_dependencies)
440 gyp.common.ExceptionAppend(
441 e, 'while loading dependencies of %s' % build_file_path)
444 return (build_file_path, dependencies)
447 def CallLoadTargetBuildFile(global_flags,
448 build_file_path, data,
450 includes, depth, check,
451 generator_input_info):
452 """Wrapper around LoadTargetBuildFile for parallel processing.
454 This wrapper is used when LoadTargetBuildFile is executed in
# a worker process; it returns only the data that changed so the result can
# be pickled back to the main process.
# Ignore SIGINT in the worker: the main process handles Ctrl-C and
# terminates the pool.
459 signal.signal(signal.SIGINT, signal.SIG_IGN)
461 # Apply globals so that the worker process behaves the same.
462 for key, value in global_flags.iteritems():
463 globals()[key] = value
465 # Save the keys so we can return data that changed.
466 data_keys = set(data)
467 aux_data_keys = set(aux_data)
469 SetGeneratorGlobals(generator_input_info)
470 result = LoadTargetBuildFile(build_file_path, data,
472 includes, depth, check, False)
476 (build_file_path, dependencies) = result
# Only newly-added keys are shipped back; 'target_build_files' is excluded
# because the main process maintains its own copy.
480 if key == 'target_build_files':
482 if key not in data_keys:
483 data_out[key] = data[key]
486 if key not in aux_data_keys:
487 aux_data_out[key] = aux_data[key]
489 # This gets serialized and sent back to the main process via a pipe.
490 # It's handled in LoadTargetBuildFileCallback.
491 return (build_file_path,
# Error paths: GypError is reported tersely; anything else dumps a full
# traceback (Python 2 print syntax).
496 sys.stderr.write("gyp: %s\n" % e)
499 print >>sys.stderr, 'Exception:', e
500 print >>sys.stderr, traceback.format_exc()
class ParallelProcessingError(Exception):
  """Raised in the main process when a parallel build-file load fails.

  The class body is intentionally empty; the exception exists only as a
  distinct type for callers of the parallel loader to catch.
  """
  pass
508 class ParallelState(object):
509 """Class to keep track of state when processing input files in parallel.
511 If build files are loaded in parallel, use this to keep track of
512 state during farming out and processing parallel jobs. It's stored
513 in a global so that the callback function can have access to it.
517 # The multiprocessing pool.
519 # The condition variable used to protect this object and notify
520 # the main loop when there might be more data to process.
521 self.condition = None
522 # The "data" dict that was passed to LoadTargetBuildFileParallel
524 # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
526 # The number of parallel calls outstanding; decremented when a response
529 # The set of all build files that have been scheduled, so we don't
530 # schedule the same one twice.
531 self.scheduled = set()
532 # A list of dependency build file paths that haven't been scheduled yet.
533 self.dependencies = []
534 # Flag to indicate if there was an error in a child process.
537 def LoadTargetBuildFileCallback(self, result):
538 """Handle the results of running LoadTargetBuildFile in another process.
# Runs on the pool's result thread; everything below is done under
# self.condition so the main loop in LoadTargetBuildFilesParallel sees a
# consistent view.  The first acquire/notify/release presumably belongs to
# an elided error branch (result is None) -- confirm against full source.
540 self.condition.acquire()
543 self.condition.notify()
544 self.condition.release()
546 (build_file_path0, data0, aux_data0, dependencies0) = result
547 self.data['target_build_files'].add(build_file_path0)
# Fold the worker's new data/aux_data keys back into the main dicts.
549 self.data[key] = data0[key]
550 for key in aux_data0:
551 self.aux_data[key] = aux_data0[key]
552 for new_dependency in dependencies0:
553 if new_dependency not in self.scheduled:
554 self.scheduled.add(new_dependency)
555 self.dependencies.append(new_dependency)
# Wake the scheduler loop: new dependencies may be available and `pending`
# (decremented on an elided line) may have reached zero.
557 self.condition.notify()
558 self.condition.release()
# Drives the parallel load: schedules each pending build file on a process
# pool and waits (on a condition variable) for callbacks to deliver results
# and newly-discovered dependencies.
561 def LoadTargetBuildFilesParallel(build_files, data, aux_data,
562 variables, includes, depth, check,
563 generator_input_info):
564 parallel_state = ParallelState()
565 parallel_state.condition = threading.Condition()
566 # Make copies of the build_files argument that we can modify while working.
567 parallel_state.dependencies = list(build_files)
568 parallel_state.scheduled = set(build_files)
569 parallel_state.pending = 0
570 parallel_state.data = data
571 parallel_state.aux_data = aux_data
574 parallel_state.condition.acquire()
# Loop until every scheduled file has been processed and no calls remain
# outstanding.
575 while parallel_state.dependencies or parallel_state.pending:
576 if parallel_state.error:
578 if not parallel_state.dependencies:
# Nothing to schedule yet; wait for a callback to notify us.
579 parallel_state.condition.wait()
582 dependency = parallel_state.dependencies.pop()
584 parallel_state.pending += 1
586 data_in['target_build_files'] = data['target_build_files']
# Module-level settings the worker must replicate (see
# CallLoadTargetBuildFile, which installs them via globals()).
589 'path_sections': globals()['path_sections'],
590 'non_configuration_keys': globals()['non_configuration_keys'],
591 'multiple_toolsets': globals()['multiple_toolsets']}
593 if not parallel_state.pool:
# NOTE(review): pool size is hard-coded to 8 rather than derived from
# multiprocessing.cpu_count().
594 parallel_state.pool = multiprocessing.Pool(8)
595 parallel_state.pool.apply_async(
596 CallLoadTargetBuildFile,
597 args = (global_flags, dependency,
598 data_in, aux_data_in,
599 variables, includes, depth, check, generator_input_info),
600 callback = parallel_state.LoadTargetBuildFileCallback)
601 except KeyboardInterrupt, e:
# On Ctrl-C, kill the workers before propagating (re-raise is elided).
602 parallel_state.pool.terminate()
605 parallel_state.condition.release()
607 parallel_state.pool.close()
608 parallel_state.pool.join()
609 parallel_state.pool = None
# A worker failure was recorded by the callback; surface it here
# (presumably by raising ParallelProcessingError on an elided line).
611 if parallel_state.error:
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple. For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
LBRACKETS = set('{[(')
BRACKETS = {'}': '{', ']': '[', ')': '('}


def FindEnclosingBracketGroup(input_str):
  """Return (start, end) of the first balanced bracket group in |input_str|.

  end is one past the matching close bracket, so input_str[start:end] spans
  the whole group including both brackets.  Returns (-1, -1) when no
  balanced group is found (no opening bracket, unmatched close, mismatched
  bracket types, or end of string reached while brackets remain open).
  """
  stack = []
  start = -1
  for index, char in enumerate(input_str):
    if char in LBRACKETS:
      stack.append(char)
      if start == -1:
        start = index
    elif char in BRACKETS:
      if not stack:
        # Close bracket before any open bracket.
        return (-1, -1)
      if stack.pop() != BRACKETS[char]:
        # Mismatched bracket type, e.g. "(]".
        return (-1, -1)
      if not stack:
        # The first group just closed.
        return (start, index + 1)
  return (-1, -1)
# Matches integers with no redundant leading zeros and no '-0'; match() is
# implicitly anchored at the start, '$' anchors the end.
canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')


def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string.
  """
  # bool() because re.match returns a MatchObject or None, while the
  # docstring promises True/False.  Non-string input is never canonical.
  return isinstance(string, str) and bool(canonical_int_re.match(string))
650 # This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
651 # "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
652 # In the last case, the inner "<()" is captured in match['content'].
# Groups: 'replace' = full expansion token, 'type' = sigil incl. !/@/| flags,
# 'command_string' = optional interpreter name, 'is_array' = '[' when the
# content is a literal list, 'content' = variable name or command text.
# NOTE(review): patterns should be raw strings (r'...') to avoid relying on
# Python passing unknown escapes like \| through unchanged.
653 early_variable_re = re.compile(
654 '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
655 '(?P<command_string>[-a-zA-Z0-9_.]+)?'
656 '\((?P<is_array>\s*\[?)'
657 '(?P<content>.*?)(\]?)\))')
659 # This matches the same as early_variable_re, but with '>' instead of '<'.
660 late_variable_re = re.compile(
661 '(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
662 '(?P<command_string>[-a-zA-Z0-9_.]+)?'
663 '\((?P<is_array>\s*\[?)'
664 '(?P<content>.*?)(\]?)\))')
666 # This matches the same as early_variable_re, but with '^' instead of '<'.
667 latelate_variable_re = re.compile(
668 '(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
669 '(?P<command_string>[-a-zA-Z0-9_.]+)?'
670 '\((?P<is_array>\s*\[?)'
671 '(?P<content>.*?)(\]?)\))')
673 # Global cache of results from running commands so they don't have to be run
# more than once; keyed by str(contents) of the <!() command (see
# ExpandVariables below).
675 cached_command_results = {}
def FixupPlatformCommand(cmd):
  """Rewrites a POSIX-flavored command for the host platform.

  On Windows, a leading 'cat ' is replaced with the equivalent 'type '.
  cmd may be a string or an argv-style list; the same shape is returned.
  On other platforms cmd is returned unchanged.
  """
  if sys.platform == 'win32':
    if isinstance(cmd, list):
      # Only the first element can carry the program name.
      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
    else:
      cmd = re.sub('^cat ', 'type ', cmd)
  return cmd
# Expands one level of <(...) / >(...) / ^(...) references in `input`,
# selecting the regex family by `phase`.  Handles variable lookups, command
# execution (<!), pymod_do_main, file lists (<|), and list (@) expansion.
# Returns a str, int, or list.
692 def ExpandVariables(input, phase, variables, build_file):
693 # Look for the pattern that gets expanded into variables
694 if phase == PHASE_EARLY:
695 variable_re = early_variable_re
696 expansion_symbol = '<'
697 elif phase == PHASE_LATE:
698 variable_re = late_variable_re
699 expansion_symbol = '>'
700 elif phase == PHASE_LATELATE:
701 variable_re = latelate_variable_re
702 expansion_symbol = '^'
# Canonical integer strings are returned as ints immediately.
706 input_str = str(input)
707 if IsStrCanonicalInt(input_str):
708 return int(input_str)
710 # Do a quick scan to determine if an expensive regex search is warranted.
711 if expansion_symbol not in input_str:
714 # Get the entire list of matches as a list of MatchObject instances.
715 # (using findall here would return strings instead of MatchObjects).
716 matches = list(variable_re.finditer(input_str))
721 # Reverse the list of matches so that replacements are done right-to-left.
722 # That ensures that earlier replacements won't mess up the string in a
723 # way that causes later calls to find the earlier substituted text instead
724 # of what's intended for replacement.
726 for match_group in matches:
727 match = match_group.groupdict()
728 gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
729 # match['replace'] is the substring to look for, match['type']
730 # is the character code for the replacement type (< > <! >! <| >| <@
731 # >@ <!@ >!@), match['is_array'] contains a '[' for command
732 # arrays, and match['content'] is the name of the variable (< >)
733 # or command to run (<! >!). match['command_string'] is an optional
734 # command string. Currently, only 'pymod_do_main' is supported.
736 # run_command is true if a ! variant is used.
737 run_command = '!' in match['type']
738 command_string = match['command_string']
740 # file_list is true if a | variant is used.
741 file_list = '|' in match['type']
743 # Capture these now so we can adjust them later.
744 replace_start = match_group.start('replace')
745 replace_end = match_group.end('replace')
747 # Find the ending paren, and re-evaluate the contained string.
748 (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
750 # Adjust the replacement range to match the entire command
751 # found by FindEnclosingBracketGroup (since the variable_re
752 # probably doesn't match the entire command if it contained
754 replace_end = replace_start + c_end
756 # Find the "real" replacement, matching the appropriate closing
757 # paren, and adjust the replacement start and end.
758 replacement = input_str[replace_start:replace_end]
760 # Figure out what the contents of the variable parens are.
761 contents_start = replace_start + c_start + 1
762 contents_end = replace_end - 1
763 contents = input_str[contents_start:contents_end]
765 # Do filter substitution now for <|().
766 # Admittedly, this is different than the evaluation order in other
767 # contexts. However, since filtration has no chance to run on <|(),
768 # this seems like the only obvious way to give them access to filters.
770 processed_variables = copy.deepcopy(variables)
771 ProcessListFiltersInDict(contents, processed_variables)
772 # Recurse to expand variables in the contents
773 contents = ExpandVariables(contents, phase,
774 processed_variables, build_file)
776 # Recurse to expand variables in the contents
777 contents = ExpandVariables(contents, phase, variables, build_file)
779 # Strip off leading/trailing whitespace so that variable matches are
780 # simpler below (and because they are rarely needed).
781 contents = contents.strip()
783 # expand_to_list is true if an @ variant is used. In that case,
784 # the expansion should result in a list. Note that the caller
785 # is to be expecting a list in return, and not all callers do
786 # because not all are working in list context. Also, for list
787 # expansions, there can be no other text besides the variable
788 # expansion in the input string.
789 expand_to_list = '@' in match['type'] and input_str == replacement
791 if run_command or file_list:
792 # Find the build file's directory, so commands can be run or file lists
793 # generated relative to it.
794 build_file_dir = os.path.dirname(build_file)
795 if build_file_dir == '' and not file_list:
796 # If build_file is just a leaf filename indicating a file in the
797 # current directory, build_file_dir might be an empty string. Set
798 # it to None to signal to subprocess.Popen that it should run the
799 # command in the current directory.
800 build_file_dir = None
802 # Support <|(listfile.txt ...) which generates a file
803 # containing items from a gyp list, generated at gyp time.
804 # This works around actions/rules which have more inputs than will
805 # fit on the command line.
807 if type(contents) == list:
808 contents_list = contents
810 contents_list = contents.split(' ')
811 replacement = contents_list[0]
812 if os.path.isabs(replacement):
813 raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
# Without generator_filelist_paths, the list file is written next to the
# build file; otherwise it goes under the generator's qualified out dir.
815 if not generator_filelist_paths:
816 path = os.path.join(build_file_dir, replacement)
818 if os.path.isabs(build_file_dir):
819 toplevel = generator_filelist_paths['toplevel']
820 rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
822 rel_build_file_dir = build_file_dir
823 qualified_out_dir = generator_filelist_paths['qualified_out_dir']
824 path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
825 gyp.common.EnsureDirExists(path)
827 replacement = gyp.common.RelativePath(path, build_file_dir)
# WriteOnDiff avoids touching the file (and triggering rebuilds) when the
# contents are unchanged.
828 f = gyp.common.WriteOnDiff(path)
829 for i in contents_list[1:]:
# Command execution path (<! variants) begins here.
835 if match['is_array']:
# NOTE(review): eval() of gyp-file-authored text; consistent with gyp's
# trust model for build files, but not safe for untrusted input.
836 contents = eval(contents)
839 # Check for a cached value to avoid executing commands, or generating
840 # file lists more than once.
841 # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
842 # possible that the command being invoked depends on the current
843 # directory. For that case the syntax needs to be extended so that the
844 # directory is also used in cache_key (it becomes a tuple).
845 # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
846 # someone could author a set of GYP files where each time the command
847 # is invoked it produces different output by design. When the need
848 # arises, the syntax should be extended to support no caching off a
849 # command's output so it is run every time.
850 cache_key = str(contents)
851 cached_value = cached_command_results.get(cache_key, None)
852 if cached_value is None:
853 gyp.DebugOutput(gyp.DEBUG_VARIABLES,
854 "Executing command '%s' in directory '%s'",
855 contents, build_file_dir)
859 if command_string == 'pymod_do_main':
860 # <!pymod_do_main(modulename param eters) loads |modulename| as a
861 # python module and then calls that module's DoMain() function,
862 # passing ["param", "eters"] as a single list argument. For modules
863 # that don't load quickly, this can be faster than
864 # <!(python modulename param eters). Do this in |build_file_dir|.
865 oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
866 if build_file_dir: # build_file_dir may be None (see above).
867 os.chdir(build_file_dir)
870 parsed_contents = shlex.split(contents)
872 py_module = __import__(parsed_contents[0])
873 except ImportError as e:
874 raise GypError("Error importing pymod_do_main"
875 "module (%s): %s" % (parsed_contents[0], e))
876 replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
# DoMain returning None would stringify to 'None'; guard against it.
879 assert replacement != None
881 raise GypError("Unknown command string '%s' in '%s'." %
882 (command_string, contents))
884 # Fix up command with platform specific workarounds.
885 contents = FixupPlatformCommand(contents)
# use_shell is set on an elided line (False for argv lists).
886 p = subprocess.Popen(contents, shell=use_shell,
887 stdout=subprocess.PIPE,
888 stderr=subprocess.PIPE,
889 stdin=subprocess.PIPE,
892 p_stdout, p_stderr = p.communicate('')
894 if p.wait() != 0 or p_stderr:
895 sys.stderr.write(p_stderr)
896 # Simulate check_call behavior, since check_call only exists
897 # in python 2.5 and later.
898 raise GypError("Call to '%s' returned exit status %d." %
899 (contents, p.returncode))
900 replacement = p_stdout.rstrip()
902 cached_command_results[cache_key] = replacement
904 gyp.DebugOutput(gyp.DEBUG_VARIABLES,
905 "Had cache value for command '%s' in directory '%s'",
906 contents,build_file_dir)
907 replacement = cached_value
# Plain variable lookup path (no ! and no |).
910 if not contents in variables:
911 if contents[-1] in ['!', '/']:
912 # In order to allow cross-compiles (nacl) to happen more naturally,
913 # we will allow references to >(sources/) etc. to resolve to
914 # and empty list if undefined. This allows actions to:
923 raise GypError('Undefined variable ' + contents +
926 replacement = variables[contents]
928 if isinstance(replacement, list):
929 for item in replacement:
930 if (not contents[-1] == '/' and
931 not isinstance(item, str) and not isinstance(item, int)):
932 raise GypError('Variable ' + contents +
933 ' must expand to a string or list of strings; ' +
935 item.__class__.__name__)
936 # Run through the list and handle variable expansions in it. Since
937 # the list is guaranteed not to contain dicts, this won't do anything
938 # with conditions sections.
939 ProcessVariablesAndConditionsInList(replacement, phase, variables,
941 elif not isinstance(replacement, str) and \
942 not isinstance(replacement, int):
943 raise GypError('Variable ' + contents +
944 ' must expand to a string or list of strings; ' +
945 'found a ' + replacement.__class__.__name__)
948 # Expanding in list context. It's guaranteed that there's only one
949 # replacement to do in |input_str| and that it's this replacement. See
951 if isinstance(replacement, list):
952 # If it's already a list, make a copy.
953 output = replacement[:]
955 # Split it the same way sh would split arguments.
956 output = shlex.split(str(replacement))
958 # Expanding in string context.
959 encoded_replacement = ''
960 if isinstance(replacement, list):
961 # When expanding a list into string context, turn the list items
962 # into a string in a way that will work with a subprocess call.
964 # TODO(mark): This isn't completely correct. This should
965 # call a generator-provided function that observes the
966 # proper list-to-argument quoting rules on a specific
967 # platform instead of just calling the POSIX encoding
969 encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
971 encoded_replacement = replacement
# Splice the replacement into the working string at the match location.
973 output = output[:replace_start] + str(encoded_replacement) + \
975 # Prepare for the next match iteration.
978 # Look for more matches now that we've replaced some, to deal with
979 # expanding local variables (variables defined in the same
980 # variables block as this one).
981 gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
982 if isinstance(output, list):
983 if output and isinstance(output[0], list):
984 # Leave output alone if it's a list of lists.
985 # We don't want such lists to be stringified.
991 ExpandVariables(item, phase, variables, build_file))
994 output = ExpandVariables(output, phase, variables, build_file)
996 # Convert all strings that are canonically-represented integers into integers.
997 if isinstance(output, list):
998 for index in xrange(0, len(output)):
999 if IsStrCanonicalInt(output[index]):
1000 output[index] = int(output[index])
1001 elif IsStrCanonicalInt(output):
1002 output = int(output)
1007 def ProcessConditionsInDict(the_dict, phase, variables, build_file):
1008 # Process a 'conditions' or 'target_conditions' section in the_dict,
1009 # depending on phase.
1010 # early -> conditions
1011 # late -> target_conditions
1012 # latelate -> no conditions
1014 # Each item in a conditions list consists of cond_expr, a string expression
1015 # evaluated as the condition, and true_dict, a dict that will be merged into
1016 # the_dict if cond_expr evaluates to true. Optionally, a third item,
1017 # false_dict, may be present. false_dict is merged into the_dict if
1018 # cond_expr evaluates to false.
1020 # Any dict merged into the_dict will be recursively processed for nested
1021 # conditionals and other expansions, also according to phase, immediately
1022 # prior to being merged.
1024 if phase == PHASE_EARLY:
1025 conditions_key = 'conditions'
1026 elif phase == PHASE_LATE:
1027 conditions_key = 'target_conditions'
1028 elif phase == PHASE_LATELATE:
  # No conditions are processed in the latelate phase; the early return for
  # this branch is elided in this listing -- confirm against the full file.
1033 if not conditions_key in the_dict:
1036 conditions_list = the_dict[conditions_key]
1037 # Unhook the conditions list, it's no longer needed.
1038 del the_dict[conditions_key]
1040 for condition in conditions_list:
1041 if not isinstance(condition, list):
1042 raise GypError(conditions_key + ' must be a list')
1043 if len(condition) != 2 and len(condition) != 3:
1044 # It's possible that condition[0] won't work in which case this
1045 # attempt will raise its own IndexError. That's probably fine.
1046 raise GypError(conditions_key + ' ' + condition[0] +
1047 ' must be length 2 or 3, not ' + str(len(condition)))
1049 [cond_expr, true_dict] = condition[0:2]
1051 if len(condition) == 3:
1052 false_dict = condition[2]
1054 # Do expansions on the condition itself. Since the conditon can naturally
1055 # contain variable references without needing to resort to GYP expansion
1056 # syntax, this is of dubious value for variables, but someone might want to
1057 # use a command expansion directly inside a condition.
1058 cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
1060 if not isinstance(cond_expr_expanded, str) and \
1061 not isinstance(cond_expr_expanded, int):
1063 'Variable expansion in this context permits str and int ' + \
1064 'only, found ' + expanded.__class__.__name__
  # NOTE(review): 'expanded' is not defined anywhere in this function; the
  # message above presumably meant cond_expr_expanded.  If this error path
  # is ever reached it raises NameError instead of the intended error --
  # confirm and fix upstream.
  # The condition is evaluated as a Python expression with builtins
  # disabled; only the names in |variables| are visible to it.
1067 ast_code = compile(cond_expr_expanded, '<string>', 'eval')
1069 if eval(ast_code, {'__builtins__': None}, variables):
1070 merge_dict = true_dict
1072 merge_dict = false_dict
  # Python 2 "except ExcType, name" syntax below; re-raises a SyntaxError
  # that names the offending condition and build file with position info.
1073 except SyntaxError, e:
1074 syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
1075 'at character %d.' %
1076 (str(e.args[0]), e.text, build_file, e.offset),
1077 e.filename, e.lineno, e.offset, e.text)
1079 except NameError, e:
1080 gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
1081 (cond_expr_expanded, build_file))
1084 if merge_dict != None:
1085 # Expand variables and nested conditinals in the merge_dict before
  # merging it into the_dict in this same phase.
1087 ProcessVariablesAndConditionsInDict(merge_dict, phase,
1088 variables, build_file)
1090 MergeDicts(the_dict, merge_dict, build_file, build_file)
def LoadAutomaticVariablesFromDict(variables, the_dict):
  """Adds automatic variables to |variables| from |the_dict|.

  Any key in the_dict whose value is a plain string, int, or list becomes an
  automatic variable.  The variable's name is the key's name with a "_"
  character prepended.  Values of other types (e.g. dicts) are skipped.
  """
  # items() behaves the same as iteritems() here and also works on Python 3;
  # a single isinstance with a tuple replaces the chained checks.
  for key, value in the_dict.items():
    if isinstance(value, (str, int, list)):
      variables['_' + key] = value
1102 def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
1103 # Any keys in the_dict's "variables" dict, if it has one, becomes a
1104 # variable. The variable name is the key name in the "variables" dict.
1105 # Variables that end with the % character are set only if they are unset in
1106 # the variables dict. the_dict_key is the name of the key that accesses
1107 # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
1108 # (it could be a list or it could be parentless because it is a root dict),
1109 # the_dict_key will be None.
1110 for key, value in the_dict.get('variables', {}).iteritems():
1111 if not isinstance(value, str) and not isinstance(value, int) and \
1112 not isinstance(value, list):
1115 if key.endswith('%'):
1116 variable_name = key[:-1]
1117 if variable_name in variables:
1118 # If the variable is already set, don't set it.
1120 if the_dict_key is 'variables' and variable_name in the_dict:
1121 # If the variable is set without a % in the_dict, and the_dict is a
1122 # variables dict (making |variables| a varaibles sub-dict of a
1123 # variables dict), use the_dict's definition.
1124 value = the_dict[variable_name]
1128 variables[variable_name] = value
1131 def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
1132 build_file, the_dict_key=None):
1133 """Handle all variable and command expansion and conditional evaluation.
1135 This function is the public entry point for all variable expansions and
1136 conditional evaluations. The variables_in dictionary will not be modified
  by this function; a copy is made before any modification.

  the_dict is processed in place.  the_dict_key is the key by which
  the_dict's parent refers to it (None for a parentless dict); it is passed
  through to LoadVariablesFromVariablesDict.
  """
1140 # Make a copy of the variables_in dict that can be modified during the
1141 # loading of automatics and the loading of the variables dict.
1142 variables = variables_in.copy()
1143 LoadAutomaticVariablesFromDict(variables, the_dict)
1145 if 'variables' in the_dict:
1146 # Make sure all the local variables are added to the variables
1147 # list before we process them so that you can reference one
1148 # variable from another. They will be fully expanded by recursion
1149 # in ExpandVariables.
1150 for key, value in the_dict['variables'].iteritems():
1151 variables[key] = value
1153 # Handle the associated variables dict first, so that any variable
1154 # references within can be resolved prior to using them as variables.
1155 # Pass a copy of the variables dict to avoid having it be tainted.
1156 # Otherwise, it would have extra automatics added for everything that
1157 # should just be an ordinary variable in this scope.
1158 ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
1159 variables, build_file, 'variables')
1161 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1163 for key, value in the_dict.iteritems():
1164 # Skip "variables", which was already processed if present.
1165 if key != 'variables' and isinstance(value, str):
1166 expanded = ExpandVariables(value, phase, variables, build_file)
1167 if not isinstance(expanded, str) and not isinstance(expanded, int):
1169 'Variable expansion in this context permits str and int ' + \
1170 'only, found ' + expanded.__class__.__name__ + ' for ' + key
  # NOTE(review): the raise statement carrying the message above is elided
  # in this listing.
1171 the_dict[key] = expanded
1173 # Variable expansion may have resulted in changes to automatics. Reload.
1174 # TODO(mark): Optimization: only reload if no changes were made.
1175 variables = variables_in.copy()
1176 LoadAutomaticVariablesFromDict(variables, the_dict)
1177 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1179 # Process conditions in this dict. This is done after variable expansion
1180 # so that conditions may take advantage of expanded variables. For example,
1181 # if the_dict contains:
1182 # {'type': '<(library_type)',
1183 # 'conditions': [['_type=="static_library"', { ... }]]},
1184 # _type, as used in the condition, will only be set to the value of
1185 # library_type if variable expansion is performed before condition
1186 # processing. However, condition processing should occur prior to recursion
1187 # so that variables (both automatic and "variables" dict type) may be
1188 # adjusted by conditions sections, merged into the_dict, and have the
1189 # intended impact on contained dicts.
1191 # This arrangement means that a "conditions" section containing a "variables"
1192 # section will only have those variables effective in subdicts, not in
1193 # the_dict. The workaround is to put a "conditions" section within a
1194 # "variables" section. For example:
1195 # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
1196 # 'defines': ['<(define)'],
1197 # 'my_subdict': {'defines': ['<(define)']}},
1198 # will not result in "IS_MAC" being appended to the "defines" list in the
1199 # current scope but would result in it being appended to the "defines" list
1200 # within "my_subdict". By comparison:
1201 # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
1202 # 'defines': ['<(define)'],
1203 # 'my_subdict': {'defines': ['<(define)']}},
1204 # will append "IS_MAC" to both "defines" lists.
1206 # Evaluate conditions sections, allowing variable expansions within them
1207 # as well as nested conditionals. This will process a 'conditions' or
1208 # 'target_conditions' section, perform appropriate merging and recursive
1209 # conditional and variable processing, and then remove the conditions section
1210 # from the_dict if it is present.
1211 ProcessConditionsInDict(the_dict, phase, variables, build_file)
1213 # Conditional processing may have resulted in changes to automatics or the
1214 # variables dict. Reload.
1215 variables = variables_in.copy()
1216 LoadAutomaticVariablesFromDict(variables, the_dict)
1217 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1219 # Recurse into child dicts, or process child lists which may result in
1220 # further recursion into descendant dicts.
1221 for key, value in the_dict.iteritems():
1222 # Skip "variables" and string values, which were already processed if
  # present.
1224 if key == 'variables' or isinstance(value, str):
1226 if isinstance(value, dict):
1227 # Pass a copy of the variables dict so that subdicts can't influence
  # parents.
1229 ProcessVariablesAndConditionsInDict(value, phase, variables,
1231 elif isinstance(value, list):
1232 # The list itself can't influence the variables dict, and
1233 # ProcessVariablesAndConditionsInList will make copies of the variables
1234 # dict if it needs to pass it to something that can influence it. No
1235 # copy is necessary here.
1236 ProcessVariablesAndConditionsInList(value, phase, variables,
1238 elif not isinstance(value, int):
1239 raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
def ProcessVariablesAndConditionsInList(the_list, phase, variables,
                                        build_file):
  """Expands variables and evaluates conditions in each item of |the_list|.

  Strings are expanded in place; a string that expands to a list is spliced
  into the_list at its position.  Dicts and lists are processed recursively.
  Ints pass through untouched; any other item type is an error.
  """
  # Iterate using an index so that new values can be assigned into the_list.
  index = 0
  while index < len(the_list):
    item = the_list[index]
    if isinstance(item, dict):
      # Make a copy of the variables dict so that it won't influence anything
      # outside of its own scope.
      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
    elif isinstance(item, list):
      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
    elif isinstance(item, str):
      expanded = ExpandVariables(item, phase, variables, build_file)
      if isinstance(expanded, str) or isinstance(expanded, int):
        the_list[index] = expanded
      elif isinstance(expanded, list):
        the_list[index:index+1] = expanded
        index += len(expanded)
        # index now identifies the next item to examine.  Continue right now
        # without falling into the index increment below.
        continue
      else:
        # BUG FIX: the int |index| was concatenated directly to a str, which
        # raised TypeError and masked the intended error; convert explicitly.
        raise ValueError(
            'Variable expansion in this context permits strings and '
            'lists only, found ' + expanded.__class__.__name__ + ' at ' +
            str(index))
    elif not isinstance(item, int):
      # BUG FIX: same int-to-str concatenation problem as above.
      raise TypeError('Unknown type ' + item.__class__.__name__ +
                      ' at index ' + str(index))
    index += 1
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| is a dict mapping loaded build files by pathname relative to the
  current directory.  Values in |data| are build file contents.  For each
  |data| value with a "targets" key, the value of the "targets" key is taken
  as a list containing target dicts.  Each target's fully-qualified name is
  constructed from the pathname of the build file (|data| key) and its
  "target_name" property.  These fully-qualified names are used as the keys
  in the returned dict.  These keys provide access to the target dicts,
  the dicts in the "targets" lists.

  Raises:
    GypError: if two targets resolve to the same fully-qualified name.
  """
  targets = {}
  for build_file in data['target_build_files']:
    for target in data[build_file].get('targets', []):
      target_name = gyp.common.QualifiedTarget(build_file,
                                               target['target_name'],
                                               target['toolset'])
      if target_name in targets:
        raise GypError('Duplicate target definitions for ' + target_name)
      targets[target_name] = target

  return targets
1303 def QualifyDependencies(targets):
1304 """Make dependency links fully-qualified relative to the current directory.
1306 |targets| is a dict mapping fully-qualified target names to their target
1307 dicts. For each target in this dict, keys known to contain dependency
1308 links are examined, and any dependencies referenced will be rewritten
1309 so that they are fully-qualified and relative to the current directory.
1310 All rewritten dependencies are suitable for use as keys to |targets| or a
  similar dict.
  """
  # Each dependency section also appears with a '!' or '/' suffix appended
  # (presumably GYP's list-exclusion and regex list forms -- confirm);
  # entries in every variant are qualified.
1314 all_dependency_sections = [dep + op
1315 for dep in dependency_sections
1316 for op in ('', '!', '/')]
1318 for target, target_dict in targets.iteritems():
1319 target_build_file = gyp.common.BuildFile(target)
1320 toolset = target_dict['toolset']
1321 for dependency_key in all_dependency_sections:
1322 dependencies = target_dict.get(dependency_key, [])
1323 for index in xrange(0, len(dependencies)):
1324 dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
1325 target_build_file, dependencies[index], toolset)
1326 if not multiple_toolsets:
1327 # Ignore toolset specification in the dependency if it is specified.
1328 dep_toolset = toolset
  # NOTE(review): the remaining QualifiedTarget arguments (dep_target,
  # dep_toolset) are elided in this listing.
1329 dependency = gyp.common.QualifiedTarget(dep_file,
1332 dependencies[index] = dependency
1334 # Make sure anything appearing in a list other than "dependencies" also
1335 # appears in the "dependencies" list.
1336 if dependency_key != 'dependencies' and \
1337 dependency not in target_dict['dependencies']:
1338 raise GypError('Found ' + dependency + ' in ' + dependency_key +
1339 ' of ' + target + ', but not in dependencies')
1342 def ExpandWildcardDependencies(targets, data):
1343 """Expands dependencies specified as build_file:*.
1345 For each target in |targets|, examines sections containing links to other
1346 targets. If any such section contains a link of the form build_file:*, it
1347 is taken as a wildcard link, and is expanded to list each target in
1348 build_file. The |data| dict provides access to build file dicts.
1350 Any target that does not wish to be included by wildcard can provide an
1351 optional "suppress_wildcard" key in its target dict. When present and
1352 true, a wildcard dependency link will not include such targets.
1354 All dependency names, including the keys to |targets| and the values in each
1355 dependency list, must be qualified when this function is called.
  """
1358 for target, target_dict in targets.iteritems():
1359 toolset = target_dict['toolset']
1360 target_build_file = gyp.common.BuildFile(target)
1361 for dependency_key in dependency_sections:
1362 dependencies = target_dict.get(dependency_key, [])
1364 # Loop this way instead of "for dependency in" or "for index in xrange"
1365 # because the dependencies list will be modified within the loop body.
  # NOTE(review): the index initialization and the per-iteration
  # 'continue'/'index += 1' bookkeeping lines are elided in this listing;
  # the loop advances index manually.
1367 while index < len(dependencies):
1368 (dependency_build_file, dependency_target, dependency_toolset) = \
1369 gyp.common.ParseQualifiedTarget(dependencies[index])
1370 if dependency_target != '*' and dependency_toolset != '*':
1371 # Not a wildcard. Keep it moving.
1375 if dependency_build_file == target_build_file:
1376 # It's an error for a target to depend on all other targets in
1377 # the same file, because a target cannot depend on itself.
1378 raise GypError('Found wildcard in ' + dependency_key + ' of ' +
1379 target + ' referring to same build file')
1381 # Take the wildcard out and adjust the index so that the next
1382 # dependency in the list will be processed the next time through the
  # loop.
1384 del dependencies[index]
1387 # Loop through the targets in the other build file, adding them to
1388 # this target's list of dependencies in place of the removed
  # wildcard.
1390 dependency_target_dicts = data[dependency_build_file]['targets']
1391 for dependency_target_dict in dependency_target_dicts:
1392 if int(dependency_target_dict.get('suppress_wildcard', False)):
1394 dependency_target_name = dependency_target_dict['target_name']
1395 if (dependency_target != '*' and
1396 dependency_target != dependency_target_name):
1398 dependency_target_toolset = dependency_target_dict['toolset']
1399 if (dependency_toolset != '*' and
1400 dependency_toolset != dependency_target_toolset):
1402 dependency = gyp.common.QualifiedTarget(dependency_build_file,
1403 dependency_target_name,
1404 dependency_target_toolset)
1406 dependencies.insert(index, dependency)
def Unify(l):
  """Removes duplicate elements from l, keeping the first element."""
  seen = {}
  # seen.setdefault(e, e) both records e and evaluates to e, so the
  # comprehension filters duplicates and preserves order in a single pass.
  return [seen.setdefault(e, e) for e in l if e not in seen]
def RemoveDuplicateDependencies(targets):
  """Makes sure every dependency appears only once in all targets's dependency
  lists."""
  for target_name, target_dict in targets.iteritems():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if dependencies:
        # Unify drops later duplicates while preserving order.
        target_dict[dependency_key] = Unify(dependencies)
def Filter(l, item):
  """Removes item from l, returning a new list."""
  # A plain comprehension is equivalent to the historical dict-setdefault
  # implementation for hashable elements, and also works for unhashable
  # ones (e.g. lists).
  return [e for e in l if e != item]
1433 def RemoveSelfDependencies(targets):
1434 """Remove self dependencies from targets that have the prune_self_dependency
  variable set in their 'variables' dict.
  """
1436 for target_name, target_dict in targets.iteritems():
1437 for dependency_key in dependency_sections:
1438 dependencies = target_dict.get(dependency_key, [])
1440 for t in dependencies:
1441 if t == target_name:
  # Only prune when the target opts in via prune_self_dependency.
1442 if targets[t].get('variables', {}).get('prune_self_dependency', 0):
1443 target_dict[dependency_key] = Filter(dependencies, target_name)
1446 class DependencyGraphNode(object):
  """
  Attributes:
1450 ref: A reference to an object that this DependencyGraphNode represents.
1451 dependencies: List of DependencyGraphNodes on which this one depends.
1452 dependents: List of DependencyGraphNodes that depend on this one.
  """
  # Raised when a cycle makes a full topological flattening impossible.
1455 class CircularException(GypError):
1458 def __init__(self, ref):
  # NOTE(review): the assignment "self.ref = ref" is elided in this
  # listing; every method below reads self.ref.
1460 self.dependencies = []
1461 self.dependents = []
  # __repr__ (its def line is elided in this listing):
1464 return '<DependencyGraphNode: %r>' % self.ref
1466 def FlattenToList(self):
1467 # flat_list is the sorted list of dependencies - actually, the list items
1468 # are the "ref" attributes of DependencyGraphNodes. Every target will
1469 # appear in flat_list after all of its dependencies, and before all of its
  # dependents.  (The initialization of flat_list is elided in this
  # listing.)
1473 # in_degree_zeros is the list of DependencyGraphNodes that have no
1474 # dependencies not in flat_list. Initially, it is a copy of the children
1475 # of this node, because when the graph was built, nodes with no
1476 # dependencies were made implicit dependents of the root node.
1477 in_degree_zeros = set(self.dependents[:])
1479 while in_degree_zeros:
1480 # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
1481 # can be appended to flat_list. Take these nodes out of in_degree_zeros
1482 # as work progresses, so that the next node to process from the list can
1483 # always be accessed at a consistent position.
1484 node = in_degree_zeros.pop()
1485 flat_list.append(node.ref)
1487 # Look at dependents of the node just added to flat_list. Some of them
1488 # may now belong in in_degree_zeros.
1489 for node_dependent in node.dependents:
1490 is_in_degree_zero = True
1491 for node_dependent_dependency in node_dependent.dependencies:
1492 if not node_dependent_dependency.ref in flat_list:
1493 # The dependent one or more dependencies not in flat_list. There
1494 # will be more chances to add it to flat_list when examining
1495 # it again as a dependent of those other dependencies, provided
1496 # that there are no cycles.
1497 is_in_degree_zero = False
1500 if is_in_degree_zero:
1501 # All of the dependent's dependencies are already in flat_list. Add
1502 # it to in_degree_zeros where it will be processed in a future
1503 # iteration of the outer loop.
1504 in_degree_zeros.add(node_dependent)
  # (The "return flat_list" statement is elided in this listing.)
1508 def FindCycles(self, path=None):
  """
1510 Returns a list of cycles in the graph, where each cycle is its own list.
  """
  # NOTE(review): most of this method's body (path setup, cycle detection
  # on revisit) is elided in this listing; only the recursion skeleton
  # remains visible.
1516 for node in self.dependents:
1523 results.append(tuple(cycle))
1525 results.extend(node.FindCycles([node] + path))
  # Deduplicate cycles found through multiple paths.
1527 return list(set(results))
1529 def DirectDependencies(self, dependencies=None):
1530 """Returns a list of just direct dependencies."""
1531 if dependencies == None:
1534 for dependency in self.dependencies:
1535 # Check for None, corresponding to the root node.
1536 if dependency.ref != None and dependency.ref not in dependencies:
1537 dependencies.append(dependency.ref)
1541 def _AddImportedDependencies(self, targets, dependencies=None):
1542 """Given a list of direct dependencies, adds indirect dependencies that
1543 other dependencies have declared to export their settings.
1545 This method does not operate on self. Rather, it operates on the list
1546 of dependencies in the |dependencies| argument. For each dependency in
1547 that list, if any declares that it exports the settings of one of its
1548 own dependencies, those dependencies whose settings are "passed through"
1549 are added to the list. As new items are added to the list, they too will
1550 be processed, so it is possible to import settings through multiple levels
  of dependencies.

1553 This method is not terribly useful on its own, it depends on being
1554 "primed" with a list of direct dependencies such as one provided by
1555 DirectDependencies. DirectAndImportedDependencies is intended to be the
  public interface to this method.
  """
1559 if dependencies == None:
1563 while index < len(dependencies):
1564 dependency = dependencies[index]
1565 dependency_dict = targets[dependency]
1566 # Add any dependencies whose settings should be imported to the list
1567 # if not already present. Newly-added items will be checked for
1568 # their own imports when the list iteration reaches them.
1569 # Rather than simply appending new items, insert them after the
1570 # dependency that exported them. This is done to more closely match
1571 # the depth-first method used by DeepDependencies.
1573 for imported_dependency in \
1574 dependency_dict.get('export_dependent_settings', []):
1575 if imported_dependency not in dependencies:
1576 dependencies.insert(index + add_index, imported_dependency)
1577 add_index = add_index + 1
1582 def DirectAndImportedDependencies(self, targets, dependencies=None):
1583 """Returns a list of a target's direct dependencies and all indirect
1584 dependencies that a dependency has advertised settings should be exported
1585 through the dependency for.
  """
1588 dependencies = self.DirectDependencies(dependencies)
1589 return self._AddImportedDependencies(targets, dependencies)
1591 def DeepDependencies(self, dependencies=None):
1592 """Returns a list of all of a target's dependencies, recursively."""
1593 if dependencies == None:
1596 for dependency in self.dependencies:
1597 # Check for None, corresponding to the root node.
1598 if dependency.ref != None and dependency.ref not in dependencies:
1599 dependencies.append(dependency.ref)
1600 dependency.DeepDependencies(dependencies)
1604 def _LinkDependenciesInternal(self, targets, include_shared_libraries,
1605 dependencies=None, initial=True):
1606 """Returns a list of dependency targets that are linked into this target.
1608 This function has a split personality, depending on the setting of
1609 |initial|. Outside callers should always leave |initial| at its default
  setting.

1612 When adding a target to the list of dependencies, this function will
1613 recurse into itself with |initial| set to False, to collect dependencies
1614 that are linked into the linkable target for which the list is being built.
1616 If |include_shared_libraries| is False, the resulting dependencies will not
1617 include shared_library targets that are linked into this target.
  """
1619 if dependencies == None:
1622 # Check for None, corresponding to the root node.
1623 if self.ref == None:
1626 # It's kind of sucky that |targets| has to be passed into this function,
1627 # but that's presently the easiest way to access the target dicts so that
1628 # this function can find target types.
1630 if 'target_name' not in targets[self.ref]:
1631 raise GypError("Missing 'target_name' field in target.")
1633 if 'type' not in targets[self.ref]:
1634 raise GypError("Missing 'type' field in target %s" %
1635 targets[self.ref]['target_name'])
1637 target_type = targets[self.ref]['type']
1639 is_linkable = target_type in linkable_types
1641 if initial and not is_linkable:
1642 # If this is the first target being examined and it's not linkable,
1643 # return an empty list of link dependencies, because the link
1644 # dependencies are intended to apply to the target itself (initial is
1645 # True) and this target won't be linked.
1648 # Don't traverse 'none' targets if explicitly excluded.
1649 if (target_type == 'none' and
1650 not targets[self.ref].get('dependencies_traverse', True)):
1651 if self.ref not in dependencies:
1652 dependencies.append(self.ref)
1655 # Executables and loadable modules are already fully and finally linked.
1656 # Nothing else can be a link dependency of them, there can only be
1657 # dependencies in the sense that a dependent target might run an
1658 # executable or load the loadable_module.
1659 if not initial and target_type in ('executable', 'loadable_module'):
1662 # Shared libraries are already fully linked. They should only be included
1663 # in |dependencies| when adjusting static library dependencies (in order to
1664 # link against the shared_library's import lib), but should not be included
1665 # in |dependencies| when propagating link_settings.
1666 # The |include_shared_libraries| flag controls which of these two cases we
  # need.
1668 if (not initial and target_type == 'shared_library' and
1669 not include_shared_libraries):
1672 # The target is linkable, add it to the list of link dependencies.
1673 if self.ref not in dependencies:
1674 dependencies.append(self.ref)
1675 if initial or not is_linkable:
1676 # If this is a subsequent target and it's linkable, don't look any
1677 # further for linkable dependencies, as they'll already be linked into
1678 # this target linkable. Always look at dependencies of the initial
1679 # target, and always look at dependencies of non-linkables.
1680 for dependency in self.dependencies:
1681 dependency._LinkDependenciesInternal(targets,
1682 include_shared_libraries,
1683 dependencies, False)
1687 def DependenciesForLinkSettings(self, targets):
  """
1689 Returns a list of dependency targets whose link_settings should be merged
  into this target.
  """
1693 # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
1694 # link_settings are propagated. So for now, we will allow it, unless the
1695 # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
1696 # False. Once chrome is fixed, we can remove this flag.
1697 include_shared_libraries = \
1698 targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
1699 return self._LinkDependenciesInternal(targets, include_shared_libraries)
1701 def DependenciesToLinkAgainst(self, targets):
  """
1703 Returns a list of dependency targets that are linked into this target.
  """
1705 return self._LinkDependenciesInternal(targets, True)
def BuildDependencyList(targets):
  """Builds a dependency graph over |targets| and flattens it topologically.

  Creates a DependencyGraphNode for each target, links the nodes according
  to each target's 'dependencies' list (targets without dependencies are
  made dependent on an implicit root node), and flattens the graph.

  Returns:
    [dependency_nodes, flat_list]: dependency_nodes maps target names to
    DependencyGraphNode objects; flat_list orders every target after all
    of its dependencies.

  Raises:
    GypError: if a listed dependency has no corresponding target.
    DependencyGraphNode.CircularException: if the graph contains a cycle.
  """
  # Create a DependencyGraphNode for each target.  Put it into a dict for
  # easy access.
  dependency_nodes = {}
  for target in targets:
    if target not in dependency_nodes:
      dependency_nodes[target] = DependencyGraphNode(target)

  # Set up the dependency links.  Targets that have no dependencies are
  # treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for target, spec in targets.iteritems():
    target_node = dependency_nodes[target]
    dependencies = spec.get('dependencies')
    if not dependencies:
      target_node.dependencies = [root_node]
      root_node.dependents.append(target_node)
    else:
      for dependency in dependencies:
        dependency_node = dependency_nodes.get(dependency)
        if not dependency_node:
          raise GypError("Dependency '%s' not found while "
                         "trying to load target %s" % (dependency, target))
        target_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(target_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).  If you need to figure out what's wrong, look for elements of
  # targets that are not in flat_list.
  if len(flat_list) != len(targets):
    raise DependencyGraphNode.CircularException(
        'Some targets not reachable, cycle in dependency graph detected: ' +
        ' '.join(set(flat_list) ^ set(targets)))

  return [dependency_nodes, flat_list]
1748 def VerifyNoGYPFileCircularDependencies(targets):
1749 # Create a DependencyGraphNode for each gyp file containing a target. Put
1750 # it into a dict for easy access.
1751 dependency_nodes = {}
1752 for target in targets.iterkeys():
1753 build_file = gyp.common.BuildFile(target)
1754 if not build_file in dependency_nodes:
1755 dependency_nodes[build_file] = DependencyGraphNode(build_file)
1757 # Set up the dependency links.
1758 for target, spec in targets.iteritems():
1759 build_file = gyp.common.BuildFile(target)
1760 build_file_node = dependency_nodes[build_file]
1761 target_dependencies = spec.get('dependencies', [])
1762 for dependency in target_dependencies:
  # NOTE(review): the try/except wrapping around this BuildFile call (and
  # its re-raise) is elided in this listing; on failure the exception is
  # annotated with the .gyp file being processed.
1764 dependency_build_file = gyp.common.BuildFile(dependency)
1766 gyp.common.ExceptionAppend(
1767 e, 'while computing dependencies of .gyp file %s' % build_file)
1770 if dependency_build_file == build_file:
1771 # A .gyp file is allowed to refer back to itself.
1773 dependency_node = dependency_nodes.get(dependency_build_file)
1774 if not dependency_node:
  # NOTE(review): "Dependancy" is a typo in this runtime message
  # ("Dependency"); left unchanged here because changing a user-visible
  # string is a behavior change.
1775 raise GypError("Dependancy '%s' not found" % dependency_build_file)
1776 if dependency_node not in build_file_node.dependencies:
1777 build_file_node.dependencies.append(dependency_node)
1778 dependency_node.dependents.append(build_file_node)
1781 # Files that have no dependencies are treated as dependent on root_node.
1782 root_node = DependencyGraphNode(None)
1783 for build_file_node in dependency_nodes.itervalues():
1784 if len(build_file_node.dependencies) == 0:
1785 build_file_node.dependencies.append(root_node)
1786 root_node.dependents.append(build_file_node)
1788 flat_list = root_node.FlattenToList()
1790 # If there's anything left unvisited, there must be a circular dependency
  # (cycle).
1792 if len(flat_list) != len(dependency_nodes):
  # NOTE(review): the initializations of bad_files and cycles are elided
  # in this listing.
1794 for file in dependency_nodes.iterkeys():
1795 if not file in flat_list:
1796 bad_files.append(file)
1797 common_path_prefix = os.path.commonprefix(dependency_nodes)
1799 for cycle in root_node.FindCycles():
1800 simplified_paths = []
1802 assert(node.ref.startswith(common_path_prefix))
1803 simplified_paths.append(node.ref[len(common_path_prefix):])
1804 cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
  # Python 2 "raise ExcType, value" syntax below.
1805 raise DependencyGraphNode.CircularException, \
1806 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
  """Merges dependencies' |key| settings into each target in |flat_list|.

  key should be one of all_dependent_settings, direct_dependent_settings,
  or link_settings.  For every target, the set of dependencies appropriate
  to |key| is computed, and each such dependency's |key| dict is merged
  into the depending target's dict.

  Raises:
    GypError: if |key| is not one of the three recognized section names.
  """
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)

    # Pick the dependency set that matches the scope implied by |key|.
    if key == 'all_dependent_settings':
      dependencies = dependency_nodes[target].DeepDependencies()
    elif key == 'direct_dependent_settings':
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
    elif key == 'link_settings':
      dependencies = \
          dependency_nodes[target].DependenciesForLinkSettings(targets)
    else:
      raise GypError("DoDependentSettings doesn't know how to determine "
                     'dependencies for ' + key)

    for dependency in dependencies:
      dependency_dict = targets[dependency]
      if not key in dependency_dict:
        # This dependency has no settings of this kind to contribute.
        continue
      dependency_build_file = gyp.common.BuildFile(dependency)
      MergeDicts(target_dict, dependency_dict[key],
                 build_file, dependency_build_file)
def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    sort_dependencies):
  """Rewrite "dependencies" lists for static-library link semantics.

  Recompute target "dependencies" properties.  For each static library
  target, remove "dependencies" entries referring to other static libraries,
  unless the dependency has the "hard_dependency" attribute set.  For each
  linkable target, add a "dependencies" entry referring to all of the
  target's computed list of link dependencies (including static libraries)
  if no such entry is already present.
  """
  for target in flat_list:
    target_dict = targets[target]
    target_type = target_dict['type']

    if target_type == 'static_library':
      if not 'dependencies' in target_dict:
        continue

      # Preserve the pre-adjustment list for generators that need it.
      target_dict['dependencies_original'] = target_dict.get(
          'dependencies', [])[:]

      # A static library should not depend on another static library unless
      # the dependency relationship is "hard," which should only be done when
      # a dependent relies on some side effect other than just the build
      # product, like a rule or action output.  Further, if a target has a
      # non-hard dependency, but that dependency exports a hard dependency,
      # the non-hard dependency can safely be removed, but the exported hard
      # dependency must be added to the target to keep the same dependency
      # ordering.
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
      index = 0
      while index < len(dependencies):
        dependency = dependencies[index]
        dependency_dict = targets[dependency]

        # Remove every non-hard static library dependency and remove every
        # non-static library dependency that isn't a direct dependency.
        if (dependency_dict['type'] == 'static_library' and \
            not dependency_dict.get('hard_dependency', False)) or \
           (dependency_dict['type'] != 'static_library' and \
            not dependency in target_dict['dependencies']):
          # Take the dependency out of the list, and don't increment index
          # because the next dependency to analyze will shift into the index
          # formerly occupied by the one being removed.
          del dependencies[index]
        else:
          index = index + 1

      # Update the dependencies. If the dependencies list is empty, it's not
      # needed, so unhook it.
      if len(dependencies) > 0:
        target_dict['dependencies'] = dependencies
      else:
        del target_dict['dependencies']

    elif target_type in linkable_types:
      # Get a list of dependency targets that should be linked into this
      # target.  Add them to the dependencies list if they're not already
      # present.
      link_dependencies = \
          dependency_nodes[target].DependenciesToLinkAgainst(targets)
      for dependency in link_dependencies:
        if dependency == target:
          continue
        if not 'dependencies' in target_dict:
          target_dict['dependencies'] = []
        if not dependency in target_dict['dependencies']:
          target_dict['dependencies'].append(dependency)

    # Sort the dependencies list in the order from dependents to dependencies.
    # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
    # Note: flat_list is already sorted in the order from dependencies to
    # dependents.
    if sort_dependencies and 'dependencies' in target_dict:
      target_dict['dependencies'] = [dep for dep in reversed(flat_list)
                                     if dep in target_dict['dependencies']]
# Initialize this here to speed up MakePathRelative.
exception_re = re.compile(r'''["']?[-/$<>^]''')


def MakePathRelative(to_file, fro_file, item):
  # If item is a relative path, it's relative to the build file dict that it's
  # coming from.  Fix it up to make it relative to the build file dict that
  # it's going into.
  # Exception: any |item| that begins with these special characters is
  # returned without modification.
  #   /   Used when a path is already absolute (shortcut optimization;
  #       such paths would be returned as absolute anyway)
  #   $   Used for build environment variables
  #   -   Used for some build environment flags (such as -lapr-1 in a
  #       "libraries" section)
  #   <   Used for our own variable and command expansions (see ExpandVariables)
  #   >   Used for our own variable and command expansions (see ExpandVariables)
  #   ^   Used for our own variable and command expansions (see ExpandVariables)
  #
  #   "/' Used when a value is quoted.  If these are present, then we
  #       check the second character instead.
  #
  if to_file == fro_file or exception_re.match(item):
    return item
  else:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    ret = os.path.normpath(os.path.join(
        gyp.common.RelativePath(os.path.dirname(fro_file),
                                os.path.dirname(to_file)),
        item)).replace('\\', '/')
    if item[-1] == '/':
      # Preserve a trailing slash, which can be significant for directories.
      ret += '/'
    return ret
def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  """Merge list |fro| into list |to|, copying and path-fixing items as needed.

  is_paths: treat string items as paths to be rebased from fro_file's
      directory to to_file's directory.
  append: when True, new items go at the end of |to|; when False, they are
      prepended (in original order).
  """
  # Python documentation recommends objects which do not support hash
  # set this value to None. Python library objects follow this rule.
  is_hashable = lambda val: val.__hash__

  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
  def is_in_set_or_list(x, s, l):
    if is_hashable(x):
      return x in s
    return x in l

  prepend_index = 0

  # Make membership testing of hashables in |to| (in particular, strings)
  # faster.
  hashable_to_set = set(x for x in to if is_hashable(x))
  for item in fro:
    singleton = False
    if isinstance(item, str) or isinstance(item, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item

      if not isinstance(item, str) or not item.startswith('-'):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif isinstance(item, dict):
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif isinstance(item, list):
      # Recurse, making a copy of the list.  If the list contains any
      # descendant dicts, path fixing will occur.  Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      raise TypeError(
          'Attempt to merge list item of unsupported type ' + \
          item.__class__.__name__)

    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
        to.append(to_item)
        if is_hashable(to_item):
          hashable_to_set.add(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend.  This ensures that the
      # item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)

      # Don't just insert everything at index 0.  That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      if is_hashable(to_item):
        hashable_to_set.add(to_item)
      prepend_index = prepend_index + 1
def MergeDicts(to, fro, to_file, fro_file):
  """Merge dict |fro| into dict |to| with copy semantics and path fixing.

  List-valued keys may carry a merge-policy suffix (=, +, ?) and companion
  filter keys (!, /); see the comments in the list branch below.
  """
  # I wanted to name the parameter "from" but it's a Python keyword...
  # .items() (rather than Python 2's .iteritems()) keeps this portable;
  # |fro| is never mutated during the iteration.
  for k, v in fro.items():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
    # copy semantics.  Something else may want to merge from the |fro| dict
    # later, and having the same dict ref pointed to twice in the tree isn't
    # what anyone wants considering that the dicts may subsequently be
    # modified.
    if k in to:
      bad_merge = False
      if isinstance(v, str) or isinstance(v, int):
        if not (isinstance(to[k], str) or isinstance(to[k], int)):
          bad_merge = True
      elif v.__class__ != to[k].__class__:
        bad_merge = True

      if bad_merge:
        raise TypeError(
            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
            ' into incompatible type ' + to[k].__class__.__name__ + \
            ' for key ' + k)
    if isinstance(v, str) or isinstance(v, int):
      # Overwrite the existing value, if any.  Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif isinstance(v, dict):
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif isinstance(v, list):
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has          ...the to-list will have this action
      # this character appended:...  applied when receiving the from-list:
      #                           =  replace
      #                           +  prepend
      #                           ?  set, only if to-list does not yet exist
      #                      (none)  append
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
      ext = k[-1]
      append = True
      if ext == '=':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '?']
        to[list_base] = []
      elif ext == '+':
        list_base = k[:-1]
        lists_incompatible = [list_base + '=', list_base + '?']
        append = False
      elif ext == '?':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
      else:
        list_base = k
        lists_incompatible = [list_base + '=', list_base + '?']

      # Some combinations of merge policies appearing together are meaningless.
      # It's stupid to replace and append simultaneously, for example.  Append
      # and prepend are the only policies that can coexist.
      for list_incompatible in lists_incompatible:
        if list_incompatible in fro:
          raise GypError('Incompatible list policies ' + k + ' and ' +
                         list_incompatible)

      if list_base in to:
        if ext == '?':
          # If the key ends in "?", the list will only be merged if it doesn't
          # already exist.
          continue
        if not isinstance(to[list_base], list):
          # This may not have been checked above if merging in a list with an
          # extension character.
          raise TypeError(
              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
              ' into incompatible type ' + to[list_base].__class__.__name__ + \
              ' for key ' + list_base + '(' + k + ')')
      else:
        to[list_base] = []

      # Call MergeLists, which will make copies of objects that require it.
      # MergeLists can recurse back into MergeDicts, although this will be
      # to make copies of dicts (with paths fixed), there will be no
      # subsequent dict "merging" once entering a list because lists are
      # always replaced, appended to, or prepended to.
      is_paths = IsPathSection(list_base)
      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
    else:
      raise TypeError(
          'Attempt to merge dict value of unsupported type ' + \
          v.__class__.__name__ + ' for key ' + k)
def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  """Merge |configuration| and its inherit_from ancestors into
  new_configuration_dict, using |visited| to break inheritance cycles.
  """
  # Skip if previously visited.
  if configuration in visited:
    return

  # Look at this configuration.
  configuration_dict = target_dict['configurations'][configuration]

  # Merge parents in first so this configuration's own settings win.
  for parent in configuration_dict.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])

  # Merge it into the new config.
  MergeDicts(new_configuration_dict, configuration_dict,
             build_file, build_file)

  # A merged configuration is concrete; drop any inherited abstract marker.
  if 'abstract' in new_configuration_dict:
    del new_configuration_dict['abstract']
def SetUpConfigurations(target, target_dict):
  """Build concrete per-configuration dicts for |target| and prune the
  target-level keys that were moved into them.
  """
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /).  Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']

  build_file = gyp.common.BuildFile(target)

  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    concrete = [i for i in target_dict['configurations'].keys()
                if not target_dict['configurations'][i].get('abstract')]
    target_dict['default_configuration'] = sorted(concrete)[0]

  for configuration in list(target_dict['configurations'].keys()):
    old_configuration_dict = target_dict['configurations'][configuration]
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = copy.deepcopy(target_dict)

    # Take out the bits that don't belong in a "configurations" section.
    # Since configuration setup is done before conditional, exclude, and rules
    # processing, be careful with handling of the suffix characters used in
    # conditionals and rules.
    delete_keys = []
    for key in new_configuration_dict:
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if key_base in non_configuration_keys:
        delete_keys.append(key)
    for key in delete_keys:
      del new_configuration_dict[key]

    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])

    # Put the new result back into the target dict as a configuration.
    target_dict['configurations'][configuration] = new_configuration_dict

  # Now drop all the abstract ones.
  for configuration in list(target_dict['configurations'].keys()):
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]

  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]

  # Check the configurations to see if they contain invalid keys.
  for configuration in target_dict['configurations'].keys():
    configuration_dict = target_dict['configurations'][configuration]
    for key in configuration_dict.keys():
      if key in invalid_configuration_keys:
        raise GypError('%s not allowed in the %s configuration, found in '
                       'target %s' % (key, configuration, target))
def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!".  Every item in such a list is removed from the associated
  main list, which in this example, would be "sources".  Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
  _win.cc.  The second filter then includes all files ending in _mac.cc that
  are now or were once in the "sources" list.  Items matching an "exclude"
  filter are subject to the same processing as would occur if they were listed
  by name in an exclusion list (ending in "!").  Items matching an "include"
  filter are brought back into the main list if previously excluded by an
  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
  patterns can still cause items to be excluded after matching an "include".
  """

  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists.  Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # then into |lists|.  This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.
  lists = []
  del_lists = []
  for key, value in the_dict.items():
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue

    if not isinstance(value, list):
      raise ValueError(name + ' key ' + key + ' must be list, not ' +
                       value.__class__.__name__)

    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no corresponding
      # "sources" list.  Since there's nothing for it to operate on, queue up
      # the "sources!" list for deletion now.
      del_lists.append(key)
      continue

    if not isinstance(the_dict[list_key], list):
      value = the_dict[list_key]
      raise ValueError(name + ' key ' + list_key +
                       ' must be list, not ' +
                       value.__class__.__name__ + ' when applying ' +
                       {'!': 'exclusion', '/': 'regex'}[operation])

    if not list_key in lists:
      lists.append(list_key)

  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]

  for list_key in lists:
    the_list = the_dict[list_key]

    # Initialize the list_actions list, which is parallel to the_list.  Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied.  Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1.  Includes and
    # excludes override previous actions.  All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))

    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in range(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0

      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]

    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)

        if action == 'exclude':
          # This item matches an exclude regex, so set its value to 0 (exclude).
          action_value = 0
        elif action == 'include':
          # This item matches an include regex, so set its value to 1 (include).
          action_value = 1
        else:
          # This is an action that doesn't make any sense.
          raise ValueError('Unrecognized action ' + action + ' in ' + name +
                           ' key ' + regex_key)

        for index in range(0, len(the_list)):
          list_item = the_list[index]
          if list_actions[index] == action_value:
            # Even if the regex matches, nothing will change so continue (regex
            # searches are expensive).
            continue
          if pattern_re.search(list_item):
            # Regular expression match.
            list_actions[index] = action_value

      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]

    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded").  The exclude_key list is input and it was already
    # processed and deleted; the excluded_key list is output and it's about
    # to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise GypError(name + ' key ' + excluded_key +
                     ' must not be present prior '
                     ' to applying exclusion/regex filters for ' + list_key)

    excluded_list = []

    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift.  That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in range(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude).  Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]

    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list

  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.items():
    if isinstance(value, dict):
      ProcessListFiltersInDict(key, value)
    elif isinstance(value, list):
      ProcessListFiltersInList(key, value)
def ProcessListFiltersInList(name, the_list):
  """Recurse into each dict or list nested in |the_list| so that any
  exclusion/regex filter keys they contain get applied.
  """
  for element in the_list:
    if isinstance(element, dict):
      ProcessListFiltersInDict(name, element)
    elif isinstance(element, list):
      ProcessListFiltersInList(name, element)
def ValidateTargetType(target, target_dict):
  """Ensures the 'type' field on the target is one of the known types.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec.

  Raises an exception on error.
  """
  VALID_TARGET_TYPES = ('executable', 'loadable_module',
                        'static_library', 'shared_library',
                        'none')
  target_type = target_dict.get('type', None)
  if target_type not in VALID_TARGET_TYPES:
    raise GypError("Target %s has an invalid target type '%s'. "
                   "Must be one of %s." %
                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
  # standalone_static_library is meaningless on any other target type.
  if (target_dict.get('standalone_static_library', 0) and
      not target_type == 'static_library'):
    raise GypError('Target %s has type %s but standalone_static_library flag is'
                   ' only valid for static_library type.' % (target,
                                                             target_type))
def ValidateSourcesInTarget(target, target_dict, build_file):
  """Ensure library targets have no two compiled sources with one basename.

  Some build systems (e.g. MSVC08) derive object file names from source
  basenames, so duplicates would collide.
  """
  # TODO: Check if MSVC allows this for loadable_module targets.
  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
    return
  sources = target_dict.get('sources', [])
  basenames = {}
  for source in sources:
    name, ext = os.path.splitext(source)
    is_compiled_file = ext in [
        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
    if not is_compiled_file:
      # Headers and other non-compiled files can't collide as objects.
      continue
    basename = os.path.basename(name)  # Don't include extension.
    basenames.setdefault(basename, []).append(source)

  error = ''
  for basename, files in basenames.items():
    if len(files) > 1:
      error += ' %s: %s\n' % (basename, ' '.join(files))

  if error:
    print('static library %s has several files with the same basename:\n' %
          target + error + 'Some build systems, e.g. MSVC08, '
          'cannot handle that.')
    raise GypError('Duplicate basenames in sources section, see list above')
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.
  """

  # Dicts to map between values found in rules' 'rule_name' and 'extension'
  # keys and the rule dicts themselves.
  rule_names = {}
  rule_extensions = {}

  rules = target_dict.get('rules', [])
  for rule in rules:
    # Make sure that there's no conflict among rule names and extensions.
    rule_name = rule['rule_name']
    if rule_name in rule_names:
      raise GypError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    rule_names[rule_name] = rule

    rule_extension = rule['extension']
    if rule_extension.startswith('.'):
      # Normalize so '.idl' and 'idl' refer to the same extension.
      rule_extension = rule_extension[1:]
    if rule_extension in rule_extensions:
      raise GypError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (rule_extension, target,
                      rule_extensions[rule_extension]['rule_name'],
                      rule_name))
    rule_extensions[rule_extension] = rule

    # Make sure rule_sources isn't already there.  It's going to be
    # created below if needed.
    if 'rule_sources' in rule:
      raise GypError(
          'rule_sources must not exist in input, target %s rule %s' %
          (target, rule_name))

    # Collect the sources (from 'sources' plus any extra keys) whose
    # extension matches this rule.
    rule_sources = []
    source_keys = ['sources']
    source_keys.extend(extra_sources_for_rules)
    for source_key in source_keys:
      for source in target_dict.get(source_key, []):
        (source_root, source_extension) = os.path.splitext(source)
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == rule_extension:
          rule_sources.append(source)

    if len(rule_sources) > 0:
      rule['rule_sources'] = rule_sources
def ValidateRunAsInTarget(target, target_dict, build_file):
  """Validates the optional 'run_as' section of a target, if present."""
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  if not run_as:
    # 'run_as' is optional; nothing to check.
    return
  if not isinstance(run_as, dict):
    raise GypError("The 'run_as' in target %s from file %s should be a "
                   "dictionary." %
                   (target_name, build_file))
  action = run_as.get('action')
  if not action:
    raise GypError("The 'run_as' in target %s from file %s must have an "
                   "'action' section." %
                   (target_name, build_file))
  if not isinstance(action, list):
    raise GypError("The 'action' for 'run_as' in target %s from file %s "
                   "must be a list." %
                   (target_name, build_file))
  working_directory = run_as.get('working_directory')
  if working_directory and not isinstance(working_directory, str):
    raise GypError("The 'working_directory' for 'run_as' in target %s "
                   "in file %s should be a string." %
                   (target_name, build_file))
  environment = run_as.get('environment')
  if environment and not isinstance(environment, dict):
    raise GypError("The 'environment' for 'run_as' in target %s "
                   "in file %s should be a dictionary." %
                   (target_name, build_file))
def ValidateActionsInTarget(target, target_dict, build_file):
  '''Validates the inputs to the actions in a target.'''
  target_name = target_dict.get('target_name')
  actions = target_dict.get('actions', [])
  for action in actions:
    action_name = action.get('action_name')
    if not action_name:
      raise GypError("Anonymous action in target %s. "
                     "An action must have an 'action_name' field." %
                     target_name)
    inputs = action.get('inputs', None)
    if inputs is None:
      # An empty list is acceptable; a missing key is not.
      raise GypError('Action in target %s has no inputs.' % target_name)
    action_command = action.get('action')
    if action_command and not action_command[0]:
      raise GypError("Empty action as command in target %s." % target_name)
def TurnIntIntoStrInDict(the_dict):
  """Given dict the_dict, recursively converts all integers into strings."""
  # Use items (materialized as a list) instead of iteritems because there's no
  # need to try to look at reinserted keys and their associated values, and
  # because the dict is mutated (int keys re-keyed) during the loop.
  for k, v in list(the_dict.items()):
    if isinstance(v, int):
      v = str(v)
      the_dict[k] = v
    elif isinstance(v, dict):
      TurnIntIntoStrInDict(v)
    elif isinstance(v, list):
      TurnIntIntoStrInList(v)

    if isinstance(k, int):
      # Re-key integer keys as strings, preserving the (converted) value.
      the_dict[str(k)] = v
      del the_dict[k]
def TurnIntIntoStrInList(the_list):
  """Given list the_list, recursively converts all integers into strings."""
  # enumerate gives the index needed for in-place replacement without the
  # Python 2-only xrange.
  for index, item in enumerate(the_list):
    if isinstance(item, int):
      the_list[index] = str(item)
    elif isinstance(item, dict):
      TurnIntIntoStrInDict(item)
    elif isinstance(item, list):
      TurnIntIntoStrInList(item)
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
                         data):
  """Return only the targets that are deep dependencies of |root_targets|."""
  qualified_root_targets = []
  for target in root_targets:
    target = target.strip()
    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
    if not qualified_targets:
      raise GypError("Could not find target %s" % target)
    qualified_root_targets.extend(qualified_targets)

  # Keep each root target plus everything it transitively depends on.
  wanted_targets = {}
  for target in qualified_root_targets:
    wanted_targets[target] = targets[target]
    for dependency in dependency_nodes[target].DeepDependencies():
      wanted_targets[dependency] = targets[dependency]

  wanted_flat_list = [t for t in flat_list if t in wanted_targets]

  # Prune unwanted targets from each build_file's data dict.
  for build_file in data['target_build_files']:
    if not 'targets' in data[build_file]:
      continue
    new_targets = []
    for target in data[build_file]['targets']:
      qualified_name = gyp.common.QualifiedTarget(build_file,
                                                  target['target_name'],
                                                  target['toolset'])
      if qualified_name in wanted_targets:
        new_targets.append(target)
    data[build_file]['targets'] = new_targets

  return wanted_targets, wanted_flat_list
def VerifyNoCollidingTargets(targets):
  """Verify that no two targets in the same directory share the same name.

  Arguments:
    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
  """
  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
  used = {}
  for target in targets:
    # Separate out 'path/to/file.gyp, 'target_name' from
    # 'path/to/file.gyp:target_name'.
    path, name = target.rsplit(':', 1)
    # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
    subdir, gyp = os.path.split(path)
    # Use '.' for the current directory '', so that the error messages make
    # more sense.
    if not subdir:
      subdir = '.'
    # Prepare a key like 'path/to:target_name'.
    key = subdir + ':' + name
    if key in used:
      # Complain if this target is already used.
      raise GypError('Duplicate target name "%s" in directory "%s" used both '
                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
    used[key] = gyp
def SetGeneratorGlobals(generator_input_info):
  """Seed the module-level settings from generator-supplied data."""
  # path_sections and non_configuration_keys start from the built-in defaults
  # and are extended with whatever the generator declares.
  global path_sections
  global non_configuration_keys
  path_sections = list(base_path_sections)
  path_sections.extend(generator_input_info['path_sections'])
  non_configuration_keys = list(base_non_configuration_keys)
  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])

  # The remaining globals are copied straight through from the generator.
  global multiple_toolsets
  multiple_toolsets = \
      generator_input_info['generator_supports_multiple_toolsets']

  global generator_filelist_paths
  generator_filelist_paths = generator_input_info['generator_filelist_paths']
2665 def Load(build_files, variables, includes, depth, generator_input_info, check,
2666 circular_check, parallel, root_targets):
2667 SetGeneratorGlobals(generator_input_info)
2668 # A generator can have other lists (in addition to sources) be processed
2670 extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
2672 # Load build files. This loads every target-containing build file into
2673 # the |data| dictionary such that the keys to |data| are build file names,
# NOTE(review): this span is the tail of gyp's Load() driver — the enclosing
# `def` is above this excerpt, and the stale listing numbers on each line show
# gaps (e.g. 2690, 2695, 2712, 2715, 2746), so some statements below have lost
# their companions; confirm against upstream gyp/input.py before editing.
# Python 2 syntax throughout (`except Exception, e`, `iteritems()`).
2674 # and the values are the entire build file contents after "early" or "pre"
2675 # processing has been done and includes have been resolved.
2676 # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
2677 # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
2678 # track of the keys corresponding to "target" files.
2679 data = {'target_build_files': set()}
2681 # Normalize paths everywhere. This is important because paths will be
2682 # used as keys to the data dict and for references between input files.
2683 build_files = set(map(os.path.normpath, build_files))
# NOTE(review): both a parallel and a serial load appear back to back below;
# presumably an `if parallel:`/`else:` selector sits on elided lines — confirm.
2685 LoadTargetBuildFilesParallel(build_files, data, aux_data,
2686 variables, includes, depth, check,
2687 generator_input_info)
2689 for build_file in build_files:
2691 LoadTargetBuildFile(build_file, data, aux_data,
2692 variables, includes, depth, check, True)
# NOTE(review): no matching `try:` is visible for this handler (listing line
# 2690 is absent), and the re-`raise` (2695) is also absent — as shown, errors
# would be annotated but swallowed; upstream annotates the exception with the
# offending build file and re-raises. Confirm the elided lines.
2693 except Exception, e:
2694 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
2697 # Build a dict to access each target's subdict by qualified name.
2698 targets = BuildTargetsDict(data)
2700 # Fully qualify all dependency links.
2701 QualifyDependencies(targets)
2703 # Remove self-dependencies from targets that have 'prune_self_dependencies'
2705 RemoveSelfDependencies(targets)
2707 # Expand dependencies specified as build_file:*.
2708 ExpandWildcardDependencies(targets, data)
2710 # Apply exclude (!) and regex (/) list filters only for dependency_sections.
2711 for target_name, target_dict in targets.iteritems():
# NOTE(review): `tmp_dict` initialization (2712) and the `key = key_base + op`
# assignment (2715) are absent from this listing; the intent — copy each
# dependency section (plus its '!'/'/' variants) into a scratch dict, filter
# it, then write it back — depends on those lines. Confirm upstream.
2713 for key_base in dependency_sections:
2714 for op in ('', '!', '/'):
2716 if key in target_dict:
2717 tmp_dict[key] = target_dict[key]
2718 del target_dict[key]
2719 ProcessListFiltersInDict(target_name, tmp_dict)
2720 # Write the results back to |target_dict|.
2721 for key in tmp_dict:
2722 target_dict[key] = tmp_dict[key]
2724 # Make sure every dependency appears at most once.
2725 RemoveDuplicateDependencies(targets)
2728 # Make sure that any targets in a.gyp don't contain dependencies in other
2729 # .gyp files that further depend on a.gyp.
2730 VerifyNoGYPFileCircularDependencies(targets)
# Topologically sort targets; |flat_list| is dependency order.
2732 [dependency_nodes, flat_list] = BuildDependencyList(targets)
# NOTE(review): upstream guards pruning with `if root_targets:` (listing line
# 2734 is absent here) — otherwise every load would prune. Confirm.
2735 # Remove, from |targets| and |flat_list|, the targets that are not deep
2736 # dependencies of the targets specified in |root_targets|.
2737 targets, flat_list = PruneUnwantedTargets(
2738 targets, flat_list, dependency_nodes, root_targets, data)
2740 # Check that no two targets in the same directory have the same name.
2741 VerifyNoCollidingTargets(flat_list)
2743 # Handle dependent settings of various types.
# NOTE(review): the list literal opened below has no visible closing element
# (listing line 2746, presumably `'link_settings']:`, is absent) — confirm.
2744 for settings_type in ['all_dependent_settings',
2745 'direct_dependent_settings',
2747 DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
2749 # Take out the dependent settings now that they've been published to all
2750 # of the targets that require them.
2751 for target in flat_list:
2752 if settings_type in targets[target]:
2753 del targets[target][settings_type]
2755 # Make sure static libraries don't declare dependencies on other static
2756 # libraries, but that linkables depend on all unlinked static libraries
2757 # that they need so that their link steps will be correct.
2758 gii = generator_input_info
2759 if gii['generator_wants_static_library_dependencies_adjusted']:
2760 AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
2761 gii['generator_wants_sorted_dependencies'])
2763 # Apply "post"/"late"/"target" variable expansions and condition evaluations.
2764 for target in flat_list:
2765 target_dict = targets[target]
2766 build_file = gyp.common.BuildFile(target)
2767 ProcessVariablesAndConditionsInDict(
2768 target_dict, PHASE_LATE, variables, build_file)
2770 # Move everything that can go into a "configurations" section into one.
2771 for target in flat_list:
2772 target_dict = targets[target]
2773 SetUpConfigurations(target, target_dict)
2775 # Apply exclude (!) and regex (/) list filters.
2776 for target in flat_list:
2777 target_dict = targets[target]
2778 ProcessListFiltersInDict(target, target_dict)
2780 # Apply "latelate" variable expansions and condition evaluations.
2781 for target in flat_list:
2782 target_dict = targets[target]
2783 build_file = gyp.common.BuildFile(target)
2784 ProcessVariablesAndConditionsInDict(
2785 target_dict, PHASE_LATELATE, variables, build_file)
2787 # Make sure that the rules make sense, and build up rule_sources lists as
2788 # needed. Not all generators will need to use the rule_sources lists, but
2789 # some may, and it seems best to build the list in a common spot.
2790 # Also validate actions and run_as elements in targets.
2791 for target in flat_list:
2792 target_dict = targets[target]
2793 build_file = gyp.common.BuildFile(target)
2794 ValidateTargetType(target, target_dict)
2795 # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
2796 # scalesystemdependent_arm_additions.c or similar.
2797 if 'arm' not in variables.get('target_arch', ''):
2798 ValidateSourcesInTarget(target, target_dict, build_file)
2799 ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
2800 ValidateRunAsInTarget(target, target_dict, build_file)
2801 ValidateActionsInTarget(target, target_dict, build_file)
2803 # Generators might not expect ints. Turn them into strs.
2804 TurnIntIntoStrInDict(data)
2806 # TODO(mark): Return |data| for now because the generator needs a list of
2807 # build files that came in. In the future, maybe it should just accept
2808 # a list, and not the whole data dict.
2809 return [flat_list, targets, data]