1 # Copyright (c) 2012 Google Inc. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 from compiler.ast import Const
6 from compiler.ast import Dict
7 from compiler.ast import Discard
8 from compiler.ast import List
9 from compiler.ast import Module
10 from compiler.ast import Node
11 from compiler.ast import Stmt
15 import multiprocessing
26 from gyp.common import GypError
# NOTE(review): this file is a line-numbered listing with many original
# lines elided; the leading integers are part of the captured text.
29 # A list of types that are treated as linkable.
30 linkable_types = ['executable', 'shared_library', 'loadable_module']
32 # A list of sections that contain links to other targets.
33 dependency_sections = ['dependencies', 'export_dependent_settings']
35 # base_path_sections is a list of sections defined by GYP that contain
36 # pathnames. The generators can provide more keys, the two lists are merged
37 # into path_sections, but you should call IsPathSection instead of using either
39 base_path_sections = [
# NOTE(review): the entries of base_path_sections (original lines 40-48)
# are not visible in this listing -- confirm against the full source.
50 is_path_section_charset = set('=+?!')
# Section names ending in _dir(s)/_file(s)/_path(s) are treated as paths.
51 is_path_section_match_re = re.compile('_(dir|file|path)s?$')
def IsPathSection(section):
  """Return a truthy value if |section| is a list of pathnames.

  A section name may carry trailing merge-operator characters
  ('=', '+', '?', '!'); those apply to the section whose real name lacks
  the suffix, so they are stripped before the lookup. '/' is deliberately
  not stripped, because there's no way for a regular expression to be
  treated as a path.
  """
  name = section
  while name and name[-1] in is_path_section_charset:
    name = name[:-1]
  if name in path_sections:
    return True
  return is_path_section_match_re.search(name)
61 # base_non_configuration_keys is a list of key names that belong in the target
62 # itself and should not be propagated into its configurations. It is merged
63 # with a list that can come from the generator to
64 # create non_configuration_keys.
65 base_non_configuration_keys = [
66 # Sections that must exist inside targets and not configurations.
# NOTE(review): most list entries (original lines 67-88) are elided from
# this listing; only a few survive below.
70 'default_configuration',
72 'dependencies_original',
82 'standalone_static_library',
89 # Sections that can be found inside targets or configurations, but that
90 # should not be propagated from targets into their configurations.
# Populated later (presumably by generator input merging -- the code that
# fills it is not visible here).
93 non_configuration_keys = []
95 # Keys that do not belong inside a configuration dictionary.
96 invalid_configuration_keys = [
98 'all_dependent_settings',
101 'direct_dependent_settings',
105 'standalone_static_library',
110 # Controls whether or not the generator supports multiple toolsets.
111 multiple_toolsets = False
113 # Paths for converting filelist paths to output paths: {
115 # qualified_output_dir,
117 generator_filelist_paths = None
119 def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
120 """Return a list of all build files included into build_file_path.
122 The returned list will contain build_file_path as well as all other files
123 that it included, either directly or indirectly. Note that the list may
124 contain files that were included into a conditional section that evaluated
125 to false and was not merged into build_file_path's dict.
127 aux_data is a dict containing a key for each build file or included build
128 file. Those keys provide access to dicts whose "included" keys contain
129 lists of all other files included by the build file.
131 included should be left at its default None value by external callers. It
132 is used for recursion.
134 The returned list will not contain any duplicate entries. Each build file
135 in the list will be relative to the current directory.
# NOTE(review): lines 136-140 are elided; presumably they close the
# docstring and initialize |included| to [] on the outermost call --
# confirm against the full source.
141 if build_file_path in included:
# NOTE(review): the early return for an already-visited file (~line 142)
# is elided.
144 included.append(build_file_path)
# Recurse into every file this one pulled in, accumulating into the
# shared |included| list.
146 for included_build_file in aux_data[build_file_path].get('included', []):
147 GetIncludedBuildFiles(included_build_file, aux_data, included)
# NOTE(review): the final 'return included' (~line 149) is elided.
152 def CheckedEval(file_contents):
153 """Return the eval of a gyp file.
155 The gyp file is restricted to dictionaries and lists only, and
156 repeated keys are not allowed.
158 Note that this is slower than eval() is.
# Uses the Python 2 'compiler' module (removed in Python 3) to parse the
# text, then walks the AST Module -> Stmt -> Discard down to the single
# literal expression, which CheckNode validates and converts.
161 ast = compiler.parse(file_contents)
162 assert isinstance(ast, Module)
163 c1 = ast.getChildren()
165 assert isinstance(c1[1], Stmt)
166 c2 = c1[1].getChildren()
167 assert isinstance(c2[0], Discard)
168 c3 = c2[0].getChildren()
170 return CheckNode(c3[0], [])
173 def CheckNode(node, keypath):
# Recursively validate a compiler.ast literal tree and convert it into
# plain Python dicts/lists/constants. |keypath| tracks the path through
# the structure for error messages.
174 if isinstance(node, Dict):
175 c = node.getChildren()
# NOTE(review): the initialization of the result mapping (~line 176) is
# elided from this listing.
177 for n in range(0, len(c), 2):
178 assert isinstance(c[n], Const)
179 key = c[n].getChildren()[0]
# Repeated keys are an error: plain eval() would silently keep the last
# occurrence.
181 raise GypError("Key '" + key + "' repeated at level " +
182 repr(len(keypath) + 1) + " with key path '" +
183 '.'.join(keypath) + "'")
184 kp = list(keypath) # Make a copy of the list for descending this node.
# NOTE(review): appending |key| to kp (~line 185) appears to be elided.
186 dict[key] = CheckNode(c[n + 1], kp)
188 elif isinstance(node, List):
189 c = node.getChildren()
# NOTE(review): the 'children = []' initialization (~line 190) is elided.
191 for index, child in enumerate(c):
192 kp = list(keypath) # Copy list.
193 kp.append(repr(index))
194 children.append(CheckNode(child, kp))
196 elif isinstance(node, Const):
197 return node.getChildren()[0]
# Anything other than Dict/List/Const is not a valid gyp literal.
199 raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
203 def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
# Load (or return the cached copy of) one gyp file, evaluate it to a
# dict, and merge in any files it 'includes'. NOTE(review): several lines
# are elided from this listing, including the parameter tail and the
# try/except scaffolding around the eval and include-merging below.
205 if build_file_path in data:
206 return data[build_file_path]
208 if os.path.exists(build_file_path):
209 build_file_contents = open(build_file_path).read()
211 raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
213 build_file_data = None
# CheckedEval rejects repeated keys; the plain eval path (presumably the
# non-check branch) runs with builtins disabled.
216 build_file_data = CheckedEval(build_file_contents)
218 build_file_data = eval(build_file_contents, {'__builtins__': None},
220 except SyntaxError, e:
221 e.filename = build_file_path
224 gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
227 if not isinstance(build_file_data, dict):
228 raise GypError("%s does not evaluate to a dictionary." % build_file_path)
230 data[build_file_path] = build_file_data
231 aux_data[build_file_path] = {}
233 # Scan for includes and merge them in.
234 if ('skip_includes' not in build_file_data or
235 not build_file_data['skip_includes']):
238 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
239 aux_data, variables, includes, check)
241 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
242 aux_data, variables, None, check)
244 gyp.common.ExceptionAppend(e,
245 'while reading includes of ' + build_file_path)
248 return build_file_data
251 def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
252 variables, includes, check):
# Merge every file named by |includes| plus subdict['includes'] into
# |subdict|, record each in aux_data[subdict_path]['included'], then
# recurse into nested dicts/lists. NOTE(review): the 'includes_list = []'
# initialization (~line 254) is elided from this listing.
255 includes_list.extend(includes)
256 if 'includes' in subdict:
257 for include in subdict['includes']:
258 # "include" is specified relative to subdict_path, so compute the real
259 # path to include by appending the provided "include" to the directory
260 # in which subdict_path resides.
262 os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
263 includes_list.append(relative_include)
264 # Unhook the includes list, it's no longer needed.
265 del subdict['includes']
267 # Merge in the included files.
268 for include in includes_list:
269 if not 'included' in aux_data[subdict_path]:
270 aux_data[subdict_path]['included'] = []
271 aux_data[subdict_path]['included'].append(include)
273 gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
276 LoadOneBuildFile(include, data, aux_data, variables, None,
278 subdict_path, include)
280 # Recurse into subdictionaries.
281 for k, v in subdict.iteritems():
282 if v.__class__ == dict:
283 LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
285 elif v.__class__ == list:
286 LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
290 # This recurses into lists so that it can look for dicts.
291 def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
# NOTE(review): the parameter tail and the 'for item in sublist:' loop
# header (~lines 292-293) are elided from this listing.
294 if item.__class__ == dict:
295 LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
296 variables, None, check)
297 elif item.__class__ == list:
298 LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
301 # Processes toolsets in all the targets. This recurses into condition entries
302 # since they can contain toolsets as well.
303 def ProcessToolsetsInDict(data):
304 if 'targets' in data:
305 target_list = data['targets']
# NOTE(review): 'new_target_list = []' (~line 306) is elided here.
307 for target in target_list:
308 # If this target already has an explicit 'toolset', and no 'toolsets'
309 # list, don't modify it further.
310 if 'toolset' in target and 'toolsets' not in target:
311 new_target_list.append(target)
# NOTE(review): the 'continue' after this append (~line 312) is elided.
313 if multiple_toolsets:
314 toolsets = target.get('toolsets', ['target'])
# When multiple toolsets are unsupported, everything builds for 'target'.
316 toolsets = ['target']
317 # Make sure this 'toolsets' definition is only processed once.
318 if 'toolsets' in target:
319 del target['toolsets']
320 if len(toolsets) > 0:
321 # Optimization: only do copies if more than one toolset is specified.
322 for build in toolsets[1:]:
323 new_target = copy.deepcopy(target)
324 new_target['toolset'] = build
325 new_target_list.append(new_target)
326 target['toolset'] = toolsets[0]
327 new_target_list.append(target)
328 data['targets'] = new_target_list
329 if 'conditions' in data:
330 for condition in data['conditions']:
331 if isinstance(condition, list):
# condition[0] is the expression; each remaining entry may be a dict
# that itself contains targets/toolsets, so recurse into all of them.
332 for condition_dict in condition[1:]:
333 ProcessToolsetsInDict(condition_dict)
336 # TODO(mark): I don't love this name. It just means that it's going to load
337 # a build file that contains targets and is expected to provide a targets dict
338 # that contains the targets...
339 def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
340 depth, check, load_dependencies):
# Load one target-bearing build file: set DEPTH, load/evaluate the file,
# expand toolsets and early variables/conditions, merge target_defaults
# into each target, and collect (optionally loading) its dependencies.
# Returns (build_file_path, dependencies). NOTE(review): many scaffolding
# lines (if/else headers, try/except, initializations) are elided from
# this listing.
341 # If depth is set, predefine the DEPTH variable to be a relative path from
342 # this build file's directory to the directory identified by depth.
344 # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
345 # temporary measure. This should really be addressed by keeping all paths
346 # in POSIX until actual project generation.
347 d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
349 variables['DEPTH'] = '.'
351 variables['DEPTH'] = d.replace('\\', '/')
# Already-loaded target build files are not reprocessed (the early return
# around ~line 354 is elided).
353 if build_file_path in data['target_build_files']:
356 data['target_build_files'].add(build_file_path)
358 gyp.DebugOutput(gyp.DEBUG_INCLUDES,
359 "Loading Target Build File '%s'", build_file_path)
361 build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
362 includes, True, check)
364 # Store DEPTH for later use in generators.
365 build_file_data['_DEPTH'] = depth
367 # Set up the included_files key indicating which .gyp files contributed to
369 if 'included_files' in build_file_data:
370 raise GypError(build_file_path + ' must not contain included_files key')
372 included = GetIncludedBuildFiles(build_file_path, aux_data)
373 build_file_data['included_files'] = []
374 for included_file in included:
375 # included_file is relative to the current directory, but it needs to
376 # be made relative to build_file_path's directory.
377 included_relative = \
378 gyp.common.RelativePath(included_file,
379 os.path.dirname(build_file_path))
380 build_file_data['included_files'].append(included_relative)
382 # Do a first round of toolsets expansion so that conditions can be defined
384 ProcessToolsetsInDict(build_file_data)
386 # Apply "pre"/"early" variable expansions and condition evaluations.
387 ProcessVariablesAndConditionsInDict(
388 build_file_data, PHASE_EARLY, variables, build_file_path)
390 # Since some toolsets might have been defined conditionally, perform
391 # a second round of toolsets expansion now.
392 ProcessToolsetsInDict(build_file_data)
394 # Look at each project's target_defaults dict, and merge settings into
396 if 'target_defaults' in build_file_data:
397 if 'targets' not in build_file_data:
398 raise GypError("Unable to find targets in build file %s" %
402 while index < len(build_file_data['targets']):
403 # This procedure needs to give the impression that target_defaults is
404 # used as defaults, and the individual targets inherit from that.
405 # The individual targets need to be merged into the defaults. Make
406 # a deep copy of the defaults for each target, merge the target dict
407 # as found in the input file into that copy, and then hook up the
408 # copy with the target-specific data merged into it as the replacement
410 old_target_dict = build_file_data['targets'][index]
411 new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
412 MergeDicts(new_target_dict, old_target_dict,
413 build_file_path, build_file_path)
414 build_file_data['targets'][index] = new_target_dict
# target_defaults has been folded into every target; drop the original.
418 del build_file_data['target_defaults']
420 # Look for dependencies. This means that dependency resolution occurs
421 # after "pre" conditionals and variable expansion, but before "post" -
422 # in other words, you can't put a "dependencies" section inside a "post"
423 # conditional within a target.
426 if 'targets' in build_file_data:
427 for target_dict in build_file_data['targets']:
428 if 'dependencies' not in target_dict:
430 for dependency in target_dict['dependencies']:
# NOTE(review): the accumulator append wrapping this ResolveTarget call
# (~line 431) is elided.
432 gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
434 if load_dependencies:
435 for dependency in dependencies:
437 LoadTargetBuildFile(dependency, data, aux_data, variables,
438 includes, depth, check, load_dependencies)
440 gyp.common.ExceptionAppend(
441 e, 'while loading dependencies of %s' % build_file_path)
444 return (build_file_path, dependencies)
447 def CallLoadTargetBuildFile(global_flags,
448 build_file_path, data,
450 includes, depth, check,
451 generator_input_info):
452 """Wrapper around LoadTargetBuildFile for parallel processing.
454 This wrapper is used when LoadTargetBuildFile is executed in
# (Docstring tail elided.) Runs in a multiprocessing worker: replays the
# parent's globals, loads one build file non-recursively, and returns
# only the data/aux_data entries this call added.
# Ignore SIGINT in the worker so Ctrl-C is handled by the main process.
459 signal.signal(signal.SIGINT, signal.SIG_IGN)
461 # Apply globals so that the worker process behaves the same.
462 for key, value in global_flags.iteritems():
463 globals()[key] = value
465 # Save the keys so we can return data that changed.
466 data_keys = set(data)
467 aux_data_keys = set(aux_data)
469 SetGeneratorGlobals(generator_input_info)
# load_dependencies=False: the main process schedules dependencies itself.
470 result = LoadTargetBuildFile(build_file_path, data,
472 includes, depth, check, False)
476 (build_file_path, dependencies) = result
# Only keys added by this call are sent back (minus the shared
# 'target_build_files' set, which the main process maintains).
480 if key == 'target_build_files':
482 if key not in data_keys:
483 data_out[key] = data[key]
486 if key not in aux_data_keys:
487 aux_data_out[key] = aux_data[key]
489 # This gets serialized and sent back to the main process via a pipe.
490 # It's handled in LoadTargetBuildFileCallback.
491 return (build_file_path,
496 sys.stderr.write("gyp: %s\n" % e)
499 print >>sys.stderr, 'Exception:', e
500 print >>sys.stderr, traceback.format_exc()
# Presumably raised in the main process when a worker reported a failure
# (see ParallelState.error); the class body is elided in this listing.
504 class ParallelProcessingError(Exception):
508 class ParallelState(object):
509 """Class to keep track of state when processing input files in parallel.
511 If build files are loaded in parallel, use this to keep track of
512 state during farming out and processing parallel jobs. It's stored
513 in a global so that the callback function can have access to it.
# NOTE(review): the __init__ header and several attribute assignments
# (pool, data, aux_data, pending, error) are elided from this listing.
517 # The multiprocessing pool.
519 # The condition variable used to protect this object and notify
520 # the main loop when there might be more data to process.
521 self.condition = None
522 # The "data" dict that was passed to LoadTargetBuildFileParallel
524 # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
526 # The number of parallel calls outstanding; decremented when a response
529 # The set of all build files that have been scheduled, so we don't
530 # schedule the same one twice.
531 self.scheduled = set()
532 # A list of dependency build file paths that haven't been scheduled yet.
533 self.dependencies = []
534 # Flag to indicate if there was an error in a child process.
537 def LoadTargetBuildFileCallback(self, result):
538 """Handle the results of running LoadTargetBuildFile in another process.
# Runs in the main process under self.condition; merges the worker's
# results and queues any newly discovered dependencies. NOTE(review):
# the error branch (result is None -> set self.error) is elided.
540 self.condition.acquire()
543 self.condition.notify()
544 self.condition.release()
546 (build_file_path0, data0, aux_data0, dependencies0) = result
547 self.data['target_build_files'].add(build_file_path0)
549 self.data[key] = data0[key]
550 for key in aux_data0:
551 self.aux_data[key] = aux_data0[key]
552 for new_dependency in dependencies0:
553 if new_dependency not in self.scheduled:
554 self.scheduled.add(new_dependency)
555 self.dependencies.append(new_dependency)
# Wake the main loop: there may be new dependencies to farm out.
557 self.condition.notify()
558 self.condition.release()
561 def LoadTargetBuildFilesParallel(build_files, data, aux_data,
562 variables, includes, depth, check,
563 generator_input_info):
# Farm LoadTargetBuildFile calls out to a multiprocessing.Pool, feeding
# newly discovered dependencies back into the work queue until everything
# has been loaded. NOTE(review): several lines (try/except headers,
# data_in/aux_data_in setup, the global_flags dict opening) are elided
# from this listing.
564 parallel_state = ParallelState()
565 parallel_state.condition = threading.Condition()
566 # Make copies of the build_files argument that we can modify while working.
567 parallel_state.dependencies = list(build_files)
568 parallel_state.scheduled = set(build_files)
569 parallel_state.pending = 0
570 parallel_state.data = data
571 parallel_state.aux_data = aux_data
573 parallel_state.condition.acquire()
575 while parallel_state.dependencies or parallel_state.pending:
576 if parallel_state.error:
578 if not parallel_state.dependencies:
# Nothing to schedule yet; wait for a worker callback to notify us.
579 parallel_state.condition.wait()
582 dependency = parallel_state.dependencies.pop()
584 parallel_state.pending += 1
586 data_in['target_build_files'] = data['target_build_files']
# Module-level state the worker must replay to behave like this process.
589 'path_sections': globals()['path_sections'],
590 'non_configuration_keys': globals()['non_configuration_keys'],
591 'multiple_toolsets': globals()['multiple_toolsets']}
# The pool is created lazily on first use.
593 if not parallel_state.pool:
594 parallel_state.pool = multiprocessing.Pool(8)
595 parallel_state.pool.apply_async(
596 CallLoadTargetBuildFile,
597 args = (global_flags, dependency,
598 data_in, aux_data_in,
599 variables, includes, depth, check, generator_input_info),
600 callback = parallel_state.LoadTargetBuildFileCallback)
601 except KeyboardInterrupt, e:
602 parallel_state.pool.terminate()
605 parallel_state.condition.release()
607 parallel_state.pool.close()
608 parallel_state.pool.join()
609 parallel_state.pool = None
# NOTE(review): the body of this error branch (presumably raising
# ParallelProcessingError) is elided.
611 if parallel_state.error:
614 # Look for the bracket that matches the first bracket seen in a
615 # string, and return the start and end as a tuple. For example, if
616 # the input is something like "<(foo <(bar)) blah", then it would
617 # return (1, 13), indicating the entire string except for the leading
618 # "<" and trailing " blah".
# Opening brackets, and a map from each closer back to its opener.
619 LBRACKETS= set('{[(')
620 BRACKETS = {'}': '{', ']': '[', ')': '('}
621 def FindEnclosingBracketGroup(input_str):
# NOTE(review): the initializations (stack, start) and the push/mismatch
# branches (~lines 622-634) are elided from this listing; only the
# skeleton of the bracket-matching scan survives below.
624 for index, char in enumerate(input_str):
625 if char in LBRACKETS:
629 elif char in BRACKETS:
# A closer must match the most recent opener, or the string is malformed.
632 if stack.pop() != BRACKETS[char]:
635 return (start, index + 1)
canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')


def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string: no leading
  zeros, no '-0', no leading '+'.
  """
  if not isinstance(string, str):
    return False
  return canonical_int_re.match(string)
650 # This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
651 # "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
652 # In the last case, the inner "<()" is captured in match['content'].
# Named groups: 'replace' = the whole token, 'type' = the leading symbol
# plus any !/@/| modifiers, 'command_string' = optional interpreter name,
# 'is_array' = '[' when the content is a literal list, 'content' = the
# variable name or command text.
653 early_variable_re = re.compile(
654 '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
655 '(?P<command_string>[-a-zA-Z0-9_.]+)?'
656 '\((?P<is_array>\s*\[?)'
657 '(?P<content>.*?)(\]?)\))')
659 # This matches the same as early_variable_re, but with '>' instead of '<'.
660 late_variable_re = re.compile(
661 '(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
662 '(?P<command_string>[-a-zA-Z0-9_.]+)?'
663 '\((?P<is_array>\s*\[?)'
664 '(?P<content>.*?)(\]?)\))')
666 # This matches the same as early_variable_re, but with '^' instead of '<'.
667 latelate_variable_re = re.compile(
668 '(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
669 '(?P<command_string>[-a-zA-Z0-9_.]+)?'
670 '\((?P<is_array>\s*\[?)'
671 '(?P<content>.*?)(\]?)\))')
673 # Global cache of results from running commands so they don't have to be run
675 cached_command_results = {}
def FixupPlatformCommand(cmd):
  """Apply platform-specific workarounds to a <!() command.

  On Windows there is no 'cat', so a command that starts with 'cat ' is
  rewritten to use 'type '. |cmd| may be either a command string or an
  argv-style list; the same object (possibly rewritten) is returned.

  Note: the listing this was reviewed from had elided the 'else' branch
  and the final return; this is the reconstructed complete function.
  """
  if sys.platform == 'win32':
    if type(cmd) == list:
      # Only the program name (first element) can be 'cat'.
      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
    else:
      cmd = re.sub('^cat ', 'type ', cmd)
  return cmd
692 def ExpandVariables(input, phase, variables, build_file):
# Expand <(...)/>(...)/^(...) references (plus their !, @, and | variants)
# in |input|, returning a string, int, or list. NOTE(review): this is a
# partially elided listing -- many scaffolding lines (else branches,
# try/except headers, early returns) are missing; comments below annotate
# only what is visible.
693 # Look for the pattern that gets expanded into variables
694 if phase == PHASE_EARLY:
695 variable_re = early_variable_re
696 expansion_symbol = '<'
697 elif phase == PHASE_LATE:
698 variable_re = late_variable_re
699 expansion_symbol = '>'
700 elif phase == PHASE_LATELATE:
701 variable_re = latelate_variable_re
702 expansion_symbol = '^'
# Canonical-integer strings are returned as ints immediately.
706 input_str = str(input)
707 if IsStrCanonicalInt(input_str):
708 return int(input_str)
710 # Do a quick scan to determine if an expensive regex search is warranted.
711 if expansion_symbol not in input_str:
714 # Get the entire list of matches as a list of MatchObject instances.
715 # (using findall here would return strings instead of MatchObjects).
716 matches = list(variable_re.finditer(input_str))
721 # Reverse the list of matches so that replacements are done right-to-left.
722 # That ensures that earlier replacements won't mess up the string in a
723 # way that causes later calls to find the earlier substituted text instead
724 # of what's intended for replacement.
726 for match_group in matches:
727 match = match_group.groupdict()
728 gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
729 # match['replace'] is the substring to look for, match['type']
730 # is the character code for the replacement type (< > <! >! <| >| <@
731 # >@ <!@ >!@), match['is_array'] contains a '[' for command
732 # arrays, and match['content'] is the name of the variable (< >)
733 # or command to run (<! >!). match['command_string'] is an optional
734 # command string. Currently, only 'pymod_do_main' is supported.
736 # run_command is true if a ! variant is used.
737 run_command = '!' in match['type']
738 command_string = match['command_string']
740 # file_list is true if a | variant is used.
741 file_list = '|' in match['type']
743 # Capture these now so we can adjust them later.
744 replace_start = match_group.start('replace')
745 replace_end = match_group.end('replace')
747 # Find the ending paren, and re-evaluate the contained string.
748 (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
750 # Adjust the replacement range to match the entire command
751 # found by FindEnclosingBracketGroup (since the variable_re
752 # probably doesn't match the entire command if it contained
754 replace_end = replace_start + c_end
756 # Find the "real" replacement, matching the appropriate closing
757 # paren, and adjust the replacement start and end.
758 replacement = input_str[replace_start:replace_end]
760 # Figure out what the contents of the variable parens are.
761 contents_start = replace_start + c_start + 1
762 contents_end = replace_end - 1
763 contents = input_str[contents_start:contents_end]
765 # Do filter substitution now for <|().
766 # Admittedly, this is different than the evaluation order in other
767 # contexts. However, since filtration has no chance to run on <|(),
768 # this seems like the only obvious way to give them access to filters.
770 processed_variables = copy.deepcopy(variables)
771 ProcessListFiltersInDict(contents, processed_variables)
772 # Recurse to expand variables in the contents
773 contents = ExpandVariables(contents, phase,
774 processed_variables, build_file)
776 # Recurse to expand variables in the contents
777 contents = ExpandVariables(contents, phase, variables, build_file)
779 # Strip off leading/trailing whitespace so that variable matches are
780 # simpler below (and because they are rarely needed).
781 contents = contents.strip()
783 # expand_to_list is true if an @ variant is used. In that case,
784 # the expansion should result in a list. Note that the caller
785 # is to be expecting a list in return, and not all callers do
786 # because not all are working in list context. Also, for list
787 # expansions, there can be no other text besides the variable
788 # expansion in the input string.
789 expand_to_list = '@' in match['type'] and input_str == replacement
791 if run_command or file_list:
792 # Find the build file's directory, so commands can be run or file lists
793 # generated relative to it.
794 build_file_dir = os.path.dirname(build_file)
795 if build_file_dir == '' and not file_list:
796 # If build_file is just a leaf filename indicating a file in the
797 # current directory, build_file_dir might be an empty string. Set
798 # it to None to signal to subprocess.Popen that it should run the
799 # command in the current directory.
800 build_file_dir = None
802 # Support <|(listfile.txt ...) which generates a file
803 # containing items from a gyp list, generated at gyp time.
804 # This works around actions/rules which have more inputs than will
805 # fit on the command line.
807 if type(contents) == list:
808 contents_list = contents
810 contents_list = contents.split(' ')
811 replacement = contents_list[0]
812 if os.path.isabs(replacement):
813 raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
# Without generator filelist paths, the list file lands next to the
# build file; otherwise it goes under the qualified output directory.
815 if not generator_filelist_paths:
816 path = os.path.join(build_file_dir, replacement)
818 if os.path.isabs(build_file_dir):
819 toplevel = generator_filelist_paths['toplevel']
820 rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
822 rel_build_file_dir = build_file_dir
823 qualified_out_dir = generator_filelist_paths['qualified_out_dir']
824 path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
825 if not os.path.isdir(os.path.dirname(path)):
826 os.makedirs(os.path.dirname(path))
828 replacement = gyp.common.RelativePath(path, build_file_dir)
# WriteOnDiff avoids touching the file (and its timestamp) when the
# contents have not changed.
829 f = gyp.common.WriteOnDiff(path)
830 for i in contents_list[1:]:
# A '[' in is_array means the content is a literal Python list; eval it.
836 if match['is_array']:
837 contents = eval(contents)
840 # Check for a cached value to avoid executing commands, or generating
841 # file lists more than once.
842 # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
843 # possible that the command being invoked depends on the current
844 # directory. For that case the syntax needs to be extended so that the
845 # directory is also used in cache_key (it becomes a tuple).
846 # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
847 # someone could author a set of GYP files where each time the command
848 # is invoked it produces different output by design. When the need
849 # arises, the syntax should be extended to support no caching off a
850 # command's output so it is run every time.
851 cache_key = str(contents)
852 cached_value = cached_command_results.get(cache_key, None)
853 if cached_value is None:
854 gyp.DebugOutput(gyp.DEBUG_VARIABLES,
855 "Executing command '%s' in directory '%s'",
856 contents, build_file_dir)
860 if command_string == 'pymod_do_main':
861 # <!pymod_do_main(modulename param eters) loads |modulename| as a
862 # python module and then calls that module's DoMain() function,
863 # passing ["param", "eters"] as a single list argument. For modules
864 # that don't load quickly, this can be faster than
865 # <!(python modulename param eters). Do this in |build_file_dir|.
866 oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
867 if build_file_dir: # build_file_dir may be None (see above).
868 os.chdir(build_file_dir)
871 parsed_contents = shlex.split(contents)
873 py_module = __import__(parsed_contents[0])
874 except ImportError as e:
875 raise GypError("Error importing pymod_do_main"
876 "module (%s): %s" % (parsed_contents[0], e))
877 replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
880 assert replacement != None
882 raise GypError("Unknown command string '%s' in '%s'." %
883 (command_string, contents))
885 # Fix up command with platform specific workarounds.
886 contents = FixupPlatformCommand(contents)
887 p = subprocess.Popen(contents, shell=use_shell,
888 stdout=subprocess.PIPE,
889 stderr=subprocess.PIPE,
890 stdin=subprocess.PIPE,
893 p_stdout, p_stderr = p.communicate('')
# Non-zero exit status OR any stderr output is treated as failure.
895 if p.wait() != 0 or p_stderr:
896 sys.stderr.write(p_stderr)
897 # Simulate check_call behavior, since check_call only exists
898 # in python 2.5 and later.
899 raise GypError("Call to '%s' returned exit status %d." %
900 (contents, p.returncode))
901 replacement = p_stdout.rstrip()
903 cached_command_results[cache_key] = replacement
905 gyp.DebugOutput(gyp.DEBUG_VARIABLES,
906 "Had cache value for command '%s' in directory '%s'",
907 contents,build_file_dir)
908 replacement = cached_value
# Plain variable reference: look it up in |variables|.
911 if not contents in variables:
912 if contents[-1] in ['!', '/']:
913 # In order to allow cross-compiles (nacl) to happen more naturally,
914 # we will allow references to >(sources/) etc. to resolve to
915 # and empty list if undefined. This allows actions to:
924 raise GypError('Undefined variable ' + contents +
927 replacement = variables[contents]
929 if isinstance(replacement, list):
930 for item in replacement:
931 if (not contents[-1] == '/' and
932 not isinstance(item, str) and not isinstance(item, int)):
933 raise GypError('Variable ' + contents +
934 ' must expand to a string or list of strings; ' +
936 item.__class__.__name__)
937 # Run through the list and handle variable expansions in it. Since
938 # the list is guaranteed not to contain dicts, this won't do anything
939 # with conditions sections.
940 ProcessVariablesAndConditionsInList(replacement, phase, variables,
942 elif not isinstance(replacement, str) and \
943 not isinstance(replacement, int):
944 raise GypError('Variable ' + contents +
945 ' must expand to a string or list of strings; ' +
946 'found a ' + replacement.__class__.__name__)
949 # Expanding in list context. It's guaranteed that there's only one
950 # replacement to do in |input_str| and that it's this replacement. See
952 if isinstance(replacement, list):
953 # If it's already a list, make a copy.
954 output = replacement[:]
956 # Split it the same way sh would split arguments.
957 output = shlex.split(str(replacement))
959 # Expanding in string context.
960 encoded_replacement = ''
961 if isinstance(replacement, list):
962 # When expanding a list into string context, turn the list items
963 # into a string in a way that will work with a subprocess call.
965 # TODO(mark): This isn't completely correct. This should
966 # call a generator-provided function that observes the
967 # proper list-to-argument quoting rules on a specific
968 # platform instead of just calling the POSIX encoding
970 encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
972 encoded_replacement = replacement
# Splice the replacement back into the working string.
974 output = output[:replace_start] + str(encoded_replacement) + \
976 # Prepare for the next match iteration.
979 # Look for more matches now that we've replaced some, to deal with
980 # expanding local variables (variables defined in the same
981 # variables block as this one).
982 gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
983 if isinstance(output, list):
984 if output and isinstance(output[0], list):
985 # Leave output alone if it's a list of lists.
986 # We don't want such lists to be stringified.
992 ExpandVariables(item, phase, variables, build_file))
995 output = ExpandVariables(output, phase, variables, build_file)
997 # Convert all strings that are canonically-represented integers into integers.
998 if isinstance(output, list):
999 for index in xrange(0, len(output)):
1000 if IsStrCanonicalInt(output[index]):
1001 output[index] = int(output[index])
1002 elif IsStrCanonicalInt(output):
1003 output = int(output)
def ProcessConditionsInDict(the_dict, phase, variables, build_file):
  """Processes a 'conditions' or 'target_conditions' section in the_dict.

  Which key is processed depends on phase:
    early -> conditions
    late -> target_conditions
    latelate -> no conditions

  Each item in a conditions list consists of cond_expr, a string expression
  evaluated as the condition, and true_dict, a dict that will be merged into
  the_dict if cond_expr evaluates to true.  Optionally, a third item,
  false_dict, may be present.  false_dict is merged into the_dict if
  cond_expr evaluates to false.

  Any dict merged into the_dict will be recursively processed for nested
  conditionals and other expansions, also according to phase, immediately
  prior to being merged.
  """
  if phase == PHASE_EARLY:
    conditions_key = 'conditions'
  elif phase == PHASE_LATE:
    conditions_key = 'target_conditions'
  elif phase == PHASE_LATELATE:
    # The latelate phase performs no condition processing at all.
    return
  else:
    assert False

  if not conditions_key in the_dict:
    return

  conditions_list = the_dict[conditions_key]
  # Unhook the conditions list, it's no longer needed.
  del the_dict[conditions_key]

  for condition in conditions_list:
    if not isinstance(condition, list):
      raise GypError(conditions_key + ' must be a list')
    if len(condition) != 2 and len(condition) != 3:
      # It's possible that condition[0] won't work in which case this
      # attempt will raise its own IndexError.  That's probably fine.
      raise GypError(conditions_key + ' ' + condition[0] +
                     ' must be length 2 or 3, not ' + str(len(condition)))

    [cond_expr, true_dict] = condition[0:2]
    false_dict = None
    if len(condition) == 3:
      false_dict = condition[2]

    # Do expansions on the condition itself.  Since the condition can
    # naturally contain variable references without needing to resort to GYP
    # expansion syntax, this is of dubious value for variables, but someone
    # might want to use a command expansion directly inside a condition.
    cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
                                         build_file)
    if not isinstance(cond_expr_expanded, str) and \
       not isinstance(cond_expr_expanded, int):
      # Bug fix: this message previously referenced the undefined name
      # "expanded", which would have raised NameError instead of ValueError.
      raise ValueError(
          'Variable expansion in this context permits str and int ' +
          'only, found ' + cond_expr_expanded.__class__.__name__)

    try:
      ast_code = compile(cond_expr_expanded, '<string>', 'eval')

      # Evaluate in a restricted environment: no builtins, only the GYP
      # variables dict is visible to the condition expression.
      if eval(ast_code, {'__builtins__': None}, variables):
        merge_dict = true_dict
      else:
        merge_dict = false_dict
    except SyntaxError as e:
      syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
                                 'at character %d.' %
                                 (str(e.args[0]), e.text, build_file, e.offset),
                                 e.filename, e.lineno, e.offset, e.text)
      raise syntax_error
    except NameError as e:
      gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
                                 (cond_expr_expanded, build_file))
      raise

    if merge_dict is not None:
      # Expand variables and nested conditionals in the merge_dict before
      # merging it into the current dict.
      ProcessVariablesAndConditionsInDict(merge_dict, phase,
                                          variables, build_file)

      MergeDicts(the_dict, merge_dict, build_file, build_file)
def LoadAutomaticVariablesFromDict(variables, the_dict):
  """Adds automatic variables from the_dict to |variables| in place.

  Any key in the_dict whose value is a plain string, int, or list becomes an
  automatic variable.  The variable name is the key name with a "_" character
  prepended.  Existing entries with the same name are overwritten.
  """
  # .items() instead of the Python 2-only .iteritems(), and a tuple
  # isinstance check instead of three chained ones.
  for key, value in the_dict.items():
    if isinstance(value, (str, int, list)):
      variables['_' + key] = value
def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
  """Loads entries from the_dict's "variables" dict into |variables|.

  Any keys in the_dict's "variables" dict, if it has one, becomes a
  variable.  The variable name is the key name in the "variables" dict.
  Variables that end with the % character are set only if they are unset in
  the variables dict.  the_dict_key is the name of the key that accesses
  the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
  (it could be a list or it could be parentless because it is a root dict),
  the_dict_key will be None.
  """
  for key, value in the_dict.get('variables', {}).items():
    # Only plain str/int/list values can become variables.
    if not isinstance(value, (str, int, list)):
      continue

    if key.endswith('%'):
      variable_name = key[:-1]
      if variable_name in variables:
        # If the variable is already set, don't set it.
        continue
      # Bug fix: compare with == instead of "is"; identity comparison of a
      # string literal relies on interning and is implementation-dependent.
      if the_dict_key == 'variables' and variable_name in the_dict:
        # If the variable is set without a % in the_dict, and the_dict is a
        # variables dict (making |variables| a variables sub-dict of a
        # variables dict), use the_dict's definition.
        value = the_dict[variable_name]
    else:
      variable_name = key

    variables[variable_name] = value
def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
                                        build_file, the_dict_key=None):
  """Handle all variable and command expansion and conditional evaluation.

  This function is the public entry point for all variable expansions and
  conditional evaluations.  The variables_in dictionary will not be modified
  by this function; a copy is made and used instead.
  """

  # Make a copy of the variables_in dict that can be modified during the
  # loading of automatics and the loading of the variables dict.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)

  if 'variables' in the_dict:
    # Make sure all the local variables are added to the variables
    # list before we process them so that you can reference one
    # variable from another.  They will be fully expanded by recursion
    # in ExpandVariables.
    for key, value in the_dict['variables'].items():
      variables[key] = value

    # Handle the associated variables dict first, so that any variable
    # references within can be resolved prior to using them as variables.
    # Pass a copy of the variables dict to avoid having it be tainted.
    # Otherwise, it would have extra automatics added for everything that
    # should just be an ordinary variable in this scope.
    ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
                                        variables, build_file, 'variables')

  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  for key, value in the_dict.items():
    # Skip "variables", which was already processed if present.
    if key != 'variables' and isinstance(value, str):
      expanded = ExpandVariables(value, phase, variables, build_file)
      if not isinstance(expanded, str) and not isinstance(expanded, int):
        raise ValueError(
            'Variable expansion in this context permits str and int ' +
            'only, found ' + expanded.__class__.__name__ + ' for ' + key)
      the_dict[key] = expanded

  # Variable expansion may have resulted in changes to automatics.  Reload.
  # TODO(mark): Optimization: only reload if no changes were made.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Process conditions in this dict.  This is done after variable expansion
  # so that conditions may take advantage of expanded variables.  For example,
  # if the_dict contains:
  #   {'type':       '<(library_type)',
  #    'conditions': [['_type=="static_library"', { ... }]]},
  # _type, as used in the condition, will only be set to the value of
  # library_type if variable expansion is performed before condition
  # processing.  However, condition processing should occur prior to recursion
  # so that variables (both automatic and "variables" dict type) may be
  # adjusted by conditions sections, merged into the_dict, and have the
  # intended impact on contained dicts.
  #
  # This arrangement means that a "conditions" section containing a
  # "variables" section will only have those variables effective in subdicts,
  # not in the_dict.  The workaround is to put a "conditions" section within
  # a "variables" section.  For example:
  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will not result in "IS_MAC" being appended to the "defines" list in the
  # current scope but would result in it being appended to the "defines" list
  # within "my_subdict".  By comparison:
  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
  #    'defines':    ['<(define)'],
  #    'my_subdict': {'defines': ['<(define)']}},
  # will append "IS_MAC" to both "defines" lists.

  # Evaluate conditions sections, allowing variable expansions within them
  # as well as nested conditionals.  This will process a 'conditions' or
  # 'target_conditions' section, perform appropriate merging and recursive
  # conditional and variable processing, and then remove the conditions
  # section from the_dict if it is present.
  ProcessConditionsInDict(the_dict, phase, variables, build_file)

  # Conditional processing may have resulted in changes to automatics or the
  # variables dict.  Reload.
  variables = variables_in.copy()
  LoadAutomaticVariablesFromDict(variables, the_dict)
  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

  # Recurse into child dicts, or process child lists which may result in
  # further recursion into descendant dicts.
  for key, value in the_dict.items():
    # Skip "variables" and string values, which were already processed if
    # present.
    if key == 'variables' or isinstance(value, str):
      continue
    if isinstance(value, dict):
      # Pass a copy of the variables dict so that subdicts can't influence
      # this dict's variables.
      ProcessVariablesAndConditionsInDict(value, phase, variables,
                                          build_file, key)
    elif isinstance(value, list):
      # The list itself can't influence the variables dict, and
      # ProcessVariablesAndConditionsInList will make copies of the variables
      # dict if it needs to pass it to something that can influence it.  No
      # copy is necessary here.
      ProcessVariablesAndConditionsInList(value, phase, variables,
                                          build_file)
    elif not isinstance(value, int):
      raise TypeError('Unknown type ' + value.__class__.__name__ +
                      ' for ' + key)
def ProcessVariablesAndConditionsInList(the_list, phase, variables,
                                        build_file):
  """Expands variables and processes conditions in each item of the_list.

  Dict items recurse into ProcessVariablesAndConditionsInDict, list items
  recurse into this function, and string items are expanded in place.  A
  string expansion that yields a list is spliced into the_list at that
  position.
  """
  # Iterate using an index so that new values can be assigned into the_list.
  index = 0
  while index < len(the_list):
    item = the_list[index]
    if isinstance(item, dict):
      # Make a copy of the variables dict so that it won't influence anything
      # outside of its own scope.
      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
    elif isinstance(item, list):
      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
    elif isinstance(item, str):
      expanded = ExpandVariables(item, phase, variables, build_file)
      if isinstance(expanded, str) or isinstance(expanded, int):
        the_list[index] = expanded
      elif isinstance(expanded, list):
        # Splice the expanded list into place.
        the_list[index:index+1] = expanded
        index += len(expanded)

        # index now identifies the next item to examine.  Continue right now
        # without falling into the index increment below.
        continue
      else:
        # Bug fix: index must be converted with str() before concatenation;
        # the original concatenated an int, so the raise itself TypeError'd.
        raise ValueError(
            'Variable expansion in this context permits strings and ' +
            'lists only, found ' + expanded.__class__.__name__ + ' at ' +
            str(index))
    elif not isinstance(item, int):
      # Bug fix: same int-concatenation problem as above.
      raise TypeError('Unknown type ' + item.__class__.__name__ +
                      ' at index ' + str(index))
    index = index + 1
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| is a dict mapping loaded build files by pathname relative to the
  current directory.  Values in |data| are build file contents.  For each
  |data| value with a "targets" key, the value of the "targets" key is taken
  as a list containing target dicts.  Each target's fully-qualified name is
  constructed from the pathname of the build file (|data| key) and its
  "target_name" property.  These fully-qualified names are used as the keys
  in the returned dict.  These keys provide access to the target dicts,
  the dicts in the "targets" lists.

  Raises:
    GypError: if two targets resolve to the same fully-qualified name.
  """
  targets = {}
  for build_file in data['target_build_files']:
    for target in data[build_file].get('targets', []):
      target_name = gyp.common.QualifiedTarget(build_file,
                                               target['target_name'],
                                               target['toolset'])
      if target_name in targets:
        raise GypError('Duplicate target definitions for ' + target_name)
      targets[target_name] = target

  return targets
def QualifyDependencies(targets):
  """Make dependency links fully-qualified relative to the current directory.

  |targets| is a dict mapping fully-qualified target names to their target
  dicts.  For each target in this dict, keys known to contain dependency
  links are examined, and any dependencies referenced will be rewritten
  so that they are fully-qualified and relative to the current directory.
  All rewritten dependencies are suitable for use as keys to |targets| or a
  similar dict.
  """
  # Dependency sections may also appear with '!' (exclusion) and '/' (regex)
  # list-operation suffixes; qualify those lists too.
  all_dependency_sections = [dep + op
                             for dep in dependency_sections
                             for op in ('', '!', '/')]

  for target, target_dict in targets.items():
    target_build_file = gyp.common.BuildFile(target)
    toolset = target_dict['toolset']
    for dependency_key in all_dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      # Iterate by index so that qualified names can be written back in place.
      for index in range(len(dependencies)):
        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
            target_build_file, dependencies[index], toolset)
        if not multiple_toolsets:
          # Ignore toolset specification in the dependency if it is specified.
          dep_toolset = toolset
        dependency = gyp.common.QualifiedTarget(dep_file,
                                                dep_target, dep_toolset)
        dependencies[index] = dependency

        # Make sure anything appearing in a list other than "dependencies"
        # also appears in the "dependencies" list.
        if dependency_key != 'dependencies' and \
           dependency not in target_dict['dependencies']:
          raise GypError('Found ' + dependency + ' in ' + dependency_key +
                         ' of ' + target + ', but not in dependencies')
def ExpandWildcardDependencies(targets, data):
  """Expands dependencies specified as build_file:*.

  For each target in |targets|, examines sections containing links to other
  targets.  If any such section contains a link of the form build_file:*, it
  is taken as a wildcard link, and is expanded to list each target in
  build_file.  The |data| dict provides access to build file dicts.

  Any target that does not wish to be included by wildcard can provide an
  optional "suppress_wildcard" key in its target dict.  When present and
  true, a wildcard dependency link will not include such targets.

  All dependency names, including the keys to |targets| and the values in each
  dependency list, must be qualified when this function is called.
  """
  for target, target_dict in targets.items():
    toolset = target_dict['toolset']
    target_build_file = gyp.common.BuildFile(target)
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])

      # Loop this way instead of "for dependency in" or "for index in xrange"
      # because the dependencies list will be modified within the loop body.
      index = 0
      while index < len(dependencies):
        (dependency_build_file, dependency_target, dependency_toolset) = \
            gyp.common.ParseQualifiedTarget(dependencies[index])
        if dependency_target != '*' and dependency_toolset != '*':
          # Not a wildcard.  Keep it moving.
          index = index + 1
          continue

        if dependency_build_file == target_build_file:
          # It's an error for a target to depend on all other targets in
          # the same file, because a target cannot depend on itself.
          raise GypError('Found wildcard in ' + dependency_key + ' of ' +
                         target + ' referring to same build file')

        # Take the wildcard out and adjust the index so that the next
        # dependency in the list will be processed the next time through the
        # loop.
        del dependencies[index]
        index = index - 1

        # Loop through the targets in the other build file, adding them to
        # this target's list of dependencies in place of the removed
        # wildcard.
        dependency_target_dicts = data[dependency_build_file]['targets']
        for dependency_target_dict in dependency_target_dicts:
          if int(dependency_target_dict.get('suppress_wildcard', False)):
            # Suppressed targets are never matched by wildcards.
            continue
          dependency_target_name = dependency_target_dict['target_name']
          if (dependency_target != '*' and
              dependency_target != dependency_target_name):
            # Target name doesn't match the (non-wildcard) name requested.
            continue
          dependency_target_toolset = dependency_target_dict['toolset']
          if (dependency_toolset != '*' and
              dependency_toolset != dependency_target_toolset):
            # Toolset doesn't match the (non-wildcard) toolset requested.
            continue
          dependency = gyp.common.QualifiedTarget(dependency_build_file,
                                                  dependency_target_name,
                                                  dependency_target_toolset)
          # Insert after the position being examined so the expansion is
          # visited on subsequent iterations.
          index = index + 1
          dependencies.insert(index, dependency)

        index = index + 1
1413 """Removes duplicate elements from l, keeping the first element."""
1415 return [seen.setdefault(e, e) for e in l if e not in seen]
def RemoveDuplicateDependencies(targets):
  """Makes sure every dependency appears only once in all targets'
  dependency lists."""
  for target_dict in targets.values():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if dependencies:
        target_dict[dependency_key] = Unify(dependencies)
def Filter(l, item):
  """Returns a copy of l with every occurrence of item removed."""
  # A plain comprehension suffices; the res.setdefault dict used previously
  # (copied from Unify) served no purpose for filtering.
  return [e for e in l if e != item]
def RemoveSelfDependencies(targets):
  """Remove self dependencies from targets that have the
  prune_self_dependency variable set."""
  for target_name, target_dict in targets.items():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if dependencies:
        for t in dependencies:
          if t == target_name:
            # Only prune the self-reference when the target opts in via the
            # prune_self_dependency variable.
            if targets[t].get('variables', {}).get('prune_self_dependency', 0):
              target_dict[dependency_key] = Filter(dependencies, target_name)
class DependencyGraphNode(object):
  """
  Attributes:
    ref: A reference to an object that this DependencyGraphNode represents.
    dependencies: List of DependencyGraphNodes on which this one depends.
    dependents: List of DependencyGraphNodes that depend on this one.
  """

  class CircularException(GypError):
    """Raised when a cycle is detected in the dependency graph."""

  def __init__(self, ref):
    # NOTE(review): a "self.ref = ref" assignment is not visible in this copy
    # of the source, although the methods below read self.ref — confirm
    # against the original file.
    self.dependencies = []
    self.dependents = []

    # NOTE(review): orphan line — the enclosing "def __repr__(self):" header
    # is not visible in this copy.
    return '<DependencyGraphNode: %r>' % self.ref

  def FlattenToList(self):
    """Topologically sorts the graph, returning refs as a flat list."""
    # flat_list is the sorted list of dependencies - actually, the list items
    # are the "ref" attributes of DependencyGraphNodes.  Every target will
    # appear in flat_list after all of its dependencies, and before all of
    # its dependents.
    # NOTE(review): the "flat_list = []" initialization is not visible in
    # this copy, but flat_list is appended to below.

    # in_degree_zeros is the list of DependencyGraphNodes that have no
    # dependencies not in flat_list.  Initially, it is a copy of the children
    # of this node, because when the graph was built, nodes with no
    # dependencies were made implicit dependents of the root node.
    in_degree_zeros = set(self.dependents[:])

    while in_degree_zeros:
      # Nodes in in_degree_zeros have no dependencies not in flat_list, so
      # they can be appended to flat_list.  Take these nodes out of
      # in_degree_zeros as work progresses, so that the next node to process
      # from the list can always be accessed at a consistent position.
      node = in_degree_zeros.pop()
      flat_list.append(node.ref)

      # Look at dependents of the node just added to flat_list.  Some of them
      # may now belong in in_degree_zeros.
      for node_dependent in node.dependents:
        is_in_degree_zero = True
        for node_dependent_dependency in node_dependent.dependencies:
          if not node_dependent_dependency.ref in flat_list:
            # The dependent has one or more dependencies not in flat_list.
            # There will be more chances to add it to flat_list when examining
            # it again as a dependent of those other dependencies, provided
            # that there are no cycles.
            is_in_degree_zero = False
            # NOTE(review): a "break" normally follows here; it is not
            # visible in this copy.

        if is_in_degree_zero:
          # All of the dependent's dependencies are already in flat_list.  Add
          # it to in_degree_zeros where it will be processed in a future
          # iteration of the outer loop.
          in_degree_zeros.add(node_dependent)

    # NOTE(review): the "return flat_list" statement is not visible in this
    # copy.

  def FindCycles(self, path=None):
    """
    Returns a list of cycles in the graph, where each cycle is its own list.
    """
    # NOTE(review): this method body is heavily truncated in this copy — the
    # initializations of "path" and "results" and the branch that detects a
    # completed cycle are not visible.  The remaining lines are kept as
    # found.
    for node in self.dependents:
    results.append(tuple(cycle))
    results.extend(node.FindCycles([node] + path))

    return list(set(results))

  def DirectDependencies(self, dependencies=None):
    """Returns a list of just direct dependencies."""
    if dependencies == None:
      # NOTE(review): "dependencies = []" is not visible in this copy.

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref != None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)

    # NOTE(review): the "return dependencies" statement is not visible in
    # this copy.

  def _AddImportedDependencies(self, targets, dependencies=None):
    """Given a list of direct dependencies, adds indirect dependencies that
    other dependencies have declared to export their settings.

    This method does not operate on self.  Rather, it operates on the list
    of dependencies in the |dependencies| argument.  For each dependency in
    that list, if any declares that it exports the settings of one of its
    own dependencies, those dependencies whose settings are "passed through"
    are added to the list.  As new items are added to the list, they too will
    be processed, so it is possible to import settings through multiple levels

    This method is not terribly useful on its own, it depends on being
    "primed" with a list of direct dependencies such as one provided by
    DirectDependencies.  DirectAndImportedDependencies is intended to be the
    """
    if dependencies == None:
      # NOTE(review): the "dependencies = []" and "index = 0"
      # initializations are not visible in this copy.

    while index < len(dependencies):
      dependency = dependencies[index]
      dependency_dict = targets[dependency]
      # Add any dependencies whose settings should be imported to the list
      # if not already present.  Newly-added items will be checked for
      # their own imports when the list iteration reaches them.
      # Rather than simply appending new items, insert them after the
      # dependency that exported them.  This is done to more closely match
      # the depth-first method used by DeepDependencies.
      # NOTE(review): the "add_index = 1" initialization is not visible in
      # this copy.
      for imported_dependency in \
          dependency_dict.get('export_dependent_settings', []):
        if imported_dependency not in dependencies:
          dependencies.insert(index + add_index, imported_dependency)
          add_index = add_index + 1

    # NOTE(review): the "index = index + 1" step and the final
    # "return dependencies" are not visible in this copy.

  def DirectAndImportedDependencies(self, targets, dependencies=None):
    """Returns a list of a target's direct dependencies and all indirect
    dependencies that a dependency has advertised settings should be exported
    through the dependency for.
    """
    dependencies = self.DirectDependencies(dependencies)
    return self._AddImportedDependencies(targets, dependencies)

  def DeepDependencies(self, dependencies=None):
    """Returns a list of all of a target's dependencies, recursively."""
    if dependencies == None:
      # NOTE(review): "dependencies = []" is not visible in this copy.

    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref != None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)
        dependency.DeepDependencies(dependencies)

    # NOTE(review): the "return dependencies" statement is not visible in
    # this copy.

  def _LinkDependenciesInternal(self, targets, include_shared_libraries,
                                dependencies=None, initial=True):
    """Returns a list of dependency targets that are linked into this target.

    This function has a split personality, depending on the setting of
    |initial|.  Outside callers should always leave |initial| at its default
    setting.

    When adding a target to the list of dependencies, this function will
    recurse into itself with |initial| set to False, to collect dependencies
    that are linked into the linkable target for which the list is being built.

    If |include_shared_libraries| is False, the resulting dependencies will not
    include shared_library targets that are linked into this target.
    """
    if dependencies == None:
      # NOTE(review): "dependencies = []" is not visible in this copy.

    # Check for None, corresponding to the root node.
    if self.ref == None:
      # NOTE(review): an early "return dependencies" is not visible here.

    # It's kind of sucky that |targets| has to be passed into this function,
    # but that's presently the easiest way to access the target dicts so that
    # this function can find target types.

    if 'target_name' not in targets[self.ref]:
      raise GypError("Missing 'target_name' field in target.")

    if 'type' not in targets[self.ref]:
      raise GypError("Missing 'type' field in target %s" %
                     targets[self.ref]['target_name'])

    target_type = targets[self.ref]['type']

    is_linkable = target_type in linkable_types

    if initial and not is_linkable:
      # If this is the first target being examined and it's not linkable,
      # return an empty list of link dependencies, because the link
      # dependencies are intended to apply to the target itself (initial is
      # True) and this target won't be linked.
      # NOTE(review): a "return dependencies" is not visible here.

    # Don't traverse 'none' targets if explicitly excluded.
    if (target_type == 'none' and
        not targets[self.ref].get('dependencies_traverse', True)):
      if self.ref not in dependencies:
        dependencies.append(self.ref)
      # NOTE(review): a "return dependencies" is not visible here.

    # Executables and loadable modules are already fully and finally linked.
    # Nothing else can be a link dependency of them, there can only be
    # dependencies in the sense that a dependent target might run an
    # executable or load the loadable_module.
    if not initial and target_type in ('executable', 'loadable_module'):
      # NOTE(review): a "return dependencies" is not visible here.

    # Shared libraries are already fully linked.  They should only be included
    # in |dependencies| when adjusting static library dependencies (in order to
    # link against the shared_library's import lib), but should not be included
    # in |dependencies| when propagating link_settings.
    # The |include_shared_libraries| flag controls which of these two cases we
    # handle.
    if (not initial and target_type == 'shared_library' and
        not include_shared_libraries):
      # NOTE(review): a "return dependencies" is not visible here.

    # The target is linkable, add it to the list of link dependencies.
    if self.ref not in dependencies:
      dependencies.append(self.ref)
      if initial or not is_linkable:
        # If this is a subsequent target and it's linkable, don't look any
        # further for linkable dependencies, as they'll already be linked into
        # this target linkable.  Always look at dependencies of the initial
        # target, and always look at dependencies of non-linkables.
        for dependency in self.dependencies:
          dependency._LinkDependenciesInternal(targets,
                                               include_shared_libraries,
                                               dependencies, False)

    # NOTE(review): the final "return dependencies" is not visible in this
    # copy.

  def DependenciesForLinkSettings(self, targets):
    """
    Returns a list of dependency targets whose link_settings should be merged
    """
    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
    # link_settings are propagated. So for now, we will allow it, unless the
    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
    # False. Once chrome is fixed, we can remove this flag.
    include_shared_libraries = \
        targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
    return self._LinkDependenciesInternal(targets, include_shared_libraries)

  def DependenciesToLinkAgainst(self, targets):
    """
    Returns a list of dependency targets that are linked into this target.
    """
    return self._LinkDependenciesInternal(targets, True)
def BuildDependencyList(targets):
  """Builds the target-level dependency graph and topologically sorts it.

  Returns a two-element list: a dict mapping target names to their
  DependencyGraphNodes, and a flat list of targets sorted so that every
  target appears after all of its dependencies.

  Raises:
    GypError: if a dependency refers to an unknown target.
    DependencyGraphNode.CircularException: if the graph contains a cycle.
  """
  # Create a DependencyGraphNode for each target.  Put it into a dict for
  # easy access.
  dependency_nodes = {}
  for target in targets:
    if target not in dependency_nodes:
      dependency_nodes[target] = DependencyGraphNode(target)

  # Set up the dependency links.  Targets that have no dependencies are
  # treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for target, spec in targets.items():
    target_node = dependency_nodes[target]
    dependencies = spec.get('dependencies')
    if not dependencies:
      target_node.dependencies = [root_node]
      root_node.dependents.append(target_node)
    else:
      for dependency in dependencies:
        dependency_node = dependency_nodes.get(dependency)
        if not dependency_node:
          raise GypError("Dependency '%s' not found while "
                         "trying to load target %s" % (dependency, target))
        target_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(target_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).  If you need to figure out what's wrong, look for elements of
  # targets that are not in flat_list.
  if len(flat_list) != len(targets):
    raise DependencyGraphNode.CircularException(
        'Some targets not reachable, cycle in dependency graph detected: ' +
        ' '.join(set(flat_list) ^ set(targets)))

  return [dependency_nodes, flat_list]
def VerifyNoGYPFileCircularDependencies(targets):
  """Raises CircularException if the .gyp-file dependency graph has a cycle.

  NOTE(review): this copy of the function appears truncated — the "try:"
  that should enclose the BuildFile call (matching the ExceptionAppend
  below), a "continue" for the same-file case, the "for node in cycle:"
  loop header, and the "bad_files"/"cycles" list initializations are not
  visible.  Code lines below are kept exactly as found.
  """
  # Create a DependencyGraphNode for each gyp file containing a target.  Put
  # it into a dict for easy access.
  dependency_nodes = {}
  for target in targets.iterkeys():
    build_file = gyp.common.BuildFile(target)
    if not build_file in dependency_nodes:
      dependency_nodes[build_file] = DependencyGraphNode(build_file)

  # Set up the dependency links.
  for target, spec in targets.iteritems():
    build_file = gyp.common.BuildFile(target)
    build_file_node = dependency_nodes[build_file]
    target_dependencies = spec.get('dependencies', [])
    for dependency in target_dependencies:
      # Resolve which .gyp file defines this dependency.
      dependency_build_file = gyp.common.BuildFile(dependency)
      # NOTE(review): "e" is unbound here — the enclosing try/except that
      # binds it is not visible in this copy.
      gyp.common.ExceptionAppend(
          e, 'while computing dependencies of .gyp file %s' % build_file)

      if dependency_build_file == build_file:
        # A .gyp file is allowed to refer back to itself.
      dependency_node = dependency_nodes.get(dependency_build_file)
      if not dependency_node:
        raise GypError("Dependancy '%s' not found" % dependency_build_file)
      if dependency_node not in build_file_node.dependencies:
        build_file_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(build_file_node)

  # Files that have no dependencies are treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for build_file_node in dependency_nodes.itervalues():
    if len(build_file_node.dependencies) == 0:
      build_file_node.dependencies.append(root_node)
      root_node.dependents.append(build_file_node)

  flat_list = root_node.FlattenToList()

  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).
  if len(flat_list) != len(dependency_nodes):
    # Collect the files that never made it into the sorted list.
    for file in dependency_nodes.iterkeys():
      if not file in flat_list:
        bad_files.append(file)
    # Strip the longest common path prefix to keep cycle reports readable.
    common_path_prefix = os.path.commonprefix(dependency_nodes)
    for cycle in root_node.FindCycles():
      simplified_paths = []
      assert(node.ref.startswith(common_path_prefix))
      simplified_paths.append(node.ref[len(common_path_prefix):])
      cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
    raise DependencyGraphNode.CircularException, \
        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
  """Merges a dependent-settings section from dependencies into dependents.

  key should be one of all_dependent_settings, direct_dependent_settings,
  or link_settings.  For every target in |flat_list|, the |key| dict of each
  relevant dependency is merged into the target's own dict.

  Raises:
    GypError: if key is not one of the three supported section names.
  """
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)

    # Each settings flavor propagates along a different dependency closure.
    if key == 'all_dependent_settings':
      dependencies = dependency_nodes[target].DeepDependencies()
    elif key == 'direct_dependent_settings':
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
    elif key == 'link_settings':
      dependencies = \
          dependency_nodes[target].DependenciesForLinkSettings(targets)
    else:
      raise GypError("DoDependentSettings doesn't know how to determine "
                     'dependencies for ' + key)

    for dependency in dependencies:
      dependency_dict = targets[dependency]
      if not key in dependency_dict:
        # Nothing for this dependency to contribute.
        continue
      dependency_build_file = gyp.common.BuildFile(dependency)
      MergeDicts(target_dict, dependency_dict[key],
                 build_file, dependency_build_file)
1839 def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
1841 # Recompute target "dependencies" properties. For each static library
1842 # target, remove "dependencies" entries referring to other static libraries,
1843 # unless the dependency has the "hard_dependency" attribute set. For each
1844 # linkable target, add a "dependencies" entry referring to all of the
1845 # target's computed list of link dependencies (including static libraries
1846 # if no such entry is already present.
1847 for target in flat_list:
1848 target_dict = targets[target]
1849 target_type = target_dict['type']
1851 if target_type == 'static_library':
1852 if not 'dependencies' in target_dict:
1855 target_dict['dependencies_original'] = target_dict.get(
1856 'dependencies', [])[:]
1858 # A static library should not depend on another static library unless
1859 # the dependency relationship is "hard," which should only be done when
1860 # a dependent relies on some side effect other than just the build
1861 # product, like a rule or action output. Further, if a target has a
1862 # non-hard dependency, but that dependency exports a hard dependency,
1863 # the non-hard dependency can safely be removed, but the exported hard
1864 # dependency must be added to the target to keep the same dependency
1867 dependency_nodes[target].DirectAndImportedDependencies(targets)
1869 while index < len(dependencies):
1870 dependency = dependencies[index]
1871 dependency_dict = targets[dependency]
1873 # Remove every non-hard static library dependency and remove every
1874 # non-static library dependency that isn't a direct dependency.
1875 if (dependency_dict['type'] == 'static_library' and \
1876 not dependency_dict.get('hard_dependency', False)) or \
1877 (dependency_dict['type'] != 'static_library' and \
1878 not dependency in target_dict['dependencies']):
1879 # Take the dependency out of the list, and don't increment index
1880 # because the next dependency to analyze will shift into the index
1881 # formerly occupied by the one being removed.
1882 del dependencies[index]
1886 # Update the dependencies. If the dependencies list is empty, it's not
1887 # needed, so unhook it.
1888 if len(dependencies) > 0:
1889 target_dict['dependencies'] = dependencies
1891 del target_dict['dependencies']
1893 elif target_type in linkable_types:
1894 # Get a list of dependency targets that should be linked into this
1895 # target. Add them to the dependencies list if they're not already
1898 link_dependencies = \
1899 dependency_nodes[target].DependenciesToLinkAgainst(targets)
1900 for dependency in link_dependencies:
1901 if dependency == target:
1903 if not 'dependencies' in target_dict:
1904 target_dict['dependencies'] = []
1905 if not dependency in target_dict['dependencies']:
1906 target_dict['dependencies'].append(dependency)
1907 # Sort the dependencies list in the order from dependents to dependencies.
1908 # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
1909 # Note: flat_list is already sorted in the order from dependencies to
1911 if sort_dependencies and 'dependencies' in target_dict:
1912 target_dict['dependencies'] = [dep for dep in reversed(flat_list)
1913 if dep in target_dict['dependencies']]
# Initialize this here to speed up MakePathRelative.
exception_re = re.compile(r'''["']?[-/$<>^]''')


def MakePathRelative(to_file, fro_file, item):
  # If item is a relative path, it's relative to the build file dict that it's
  # coming from.  Fix it up to make it relative to the build file dict that
  # it's going into.
  # Exception: any |item| that begins with these special characters is
  # returned without modification.
  #   /   Used when a path is already absolute (shortcut optimization;
  #       such paths would be returned as absolute anyway)
  #   $   Used for build environment variables
  #   -   Used for some build environment flags (such as -lapr-1 in a
  #       "libraries" section)
  #   <   Used for our own variable and command expansions (see ExpandVariables)
  #   >   Used for our own variable and command expansions (see ExpandVariables)
  #   ^   Used for our own variable and command expansions (see ExpandVariables)
  #   "/' Used when a value is quoted.  If these are present, then we
  #       check the second character instead.
  if to_file == fro_file or exception_re.match(item):
    return item

  # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
  # temporary measure.  This should really be addressed by keeping all paths
  # in POSIX until actual project generation.
  prefix = gyp.common.RelativePath(os.path.dirname(fro_file),
                                   os.path.dirname(to_file))
  ret = os.path.normpath(os.path.join(prefix, item)).replace('\\', '/')
  if item[-1] == '/':
    # Preserve a trailing slash that normpath would otherwise strip.
    ret += '/'
  return ret
def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  """Merges list |fro| into list |to|, in place.

  Arguments:
    to: list receiving the merge; modified in place.
    fro: list whose items are merged into |to|.
    to_file: build file path that |to| belongs to.
    fro_file: build file path that |fro| came from.
    is_paths: if true, string/int items are treated as paths and fixed up
        relative to |to_file| via MakePathRelative.
    append: if true, new items are appended to |to|; if false, they are
        prepended (preserving their order in |fro|).

  Raises TypeError if |fro| contains an item of an unsupported type.
  """
  # Python documentation recommends objects which do not support hash
  # set this value to None.  Python library objects follow this rule.
  is_hashable = lambda val: val.__hash__

  # If x is hashable, returns whether x is in s.  Else returns whether x is
  # in l.
  def is_in_set_or_list(x, s, l):
    if is_hashable(x):
      return x in s
    return x in l

  prepend_index = 0

  # Make membership testing of hashables in |to| (in particular, strings)
  # faster.
  hashable_to_set = set(x for x in to if is_hashable(x))
  for item in fro:
    singleton = False
    if isinstance(item, str) or isinstance(item, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item

      if not isinstance(item, str) or not item.startswith('-'):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif isinstance(item, dict):
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif isinstance(item, list):
      # Recurse, making a copy of the list.  If the list contains any
      # descendant dicts, path fixing will occur.  Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      # Function-call raise form: the Python 2-only "raise X, y" statement
      # is a syntax error under Python 3.
      raise TypeError(
          'Attempt to merge list item of unsupported type ' +
          item.__class__.__name__)

    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
        to.append(to_item)
        if is_hashable(to_item):
          hashable_to_set.add(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend.  This ensures that
      # the item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)

      # Don't just insert everything at index 0.  That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      if is_hashable(to_item):
        hashable_to_set.add(to_item)
      prepend_index = prepend_index + 1
def MergeDicts(to, fro, to_file, fro_file):
  """Merges dict |fro| into dict |to|, in place, making copies as needed.

  Arguments:
    to: dict receiving the merge; modified in place.
    fro: dict whose entries are merged into |to|.
    to_file: build file path that |to| belongs to.
    fro_file: build file path that |fro| came from.

  List-valued keys may carry a merge-policy suffix ("=", "+", "?"); see the
  comment in the list branch below.  Raises TypeError on incompatible value
  types and GypError on incompatible list policies.
  """
  # I wanted to name the parameter "from" but it's a Python keyword...
  # items() (not iteritems) keeps this block Python 3-compatible; behavior
  # is identical, and nothing below mutates |fro| during iteration.
  for k, v in fro.items():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't
    # give copy semantics.  Something else may want to merge from the |fro|
    # dict later, and having the same dict ref pointed to twice in the tree
    # isn't what anyone wants considering that the dicts may subsequently be
    # modified.
    if k in to:
      bad_merge = False
      if isinstance(v, str) or isinstance(v, int):
        if not (isinstance(to[k], str) or isinstance(to[k], int)):
          bad_merge = True
      elif v.__class__ != to[k].__class__:
        bad_merge = True

      if bad_merge:
        # Function-call raise form; "raise X, y" is a Python 3 syntax error.
        raise TypeError(
            'Attempt to merge dict value of type ' + v.__class__.__name__ +
            ' into incompatible type ' + to[k].__class__.__name__ +
            ' for key ' + k)
    if isinstance(v, str) or isinstance(v, int):
      # Overwrite the existing value, if any.  Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif isinstance(v, dict):
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif isinstance(v, list):
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has          ...the to-list will have this action
      # this character appended:...  applied when receiving the from-list:
      #                           =  replace
      #                           +  prepend
      #                           ?  set, only if to-list does not yet exist
      #                      (none)  append
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
      ext = k[-1]
      append = True
      if ext == '=':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '?']
        to[list_base] = []
      elif ext == '+':
        list_base = k[:-1]
        lists_incompatible = [list_base + '=', list_base + '?']
        append = False
      elif ext == '?':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
      else:
        list_base = k
        lists_incompatible = [list_base + '=', list_base + '?']

      # Some combinations of merge policies appearing together are
      # meaningless.  It's stupid to replace and append simultaneously, for
      # example.  Append and prepend are the only policies that can coexist.
      for list_incompatible in lists_incompatible:
        if list_incompatible in fro:
          raise GypError('Incompatible list policies ' + k + ' and ' +
                         list_incompatible)

      if list_base in to:
        if ext == '?':
          # If the key ends in "?", the list will only be merged if it
          # doesn't already exist.
          continue
        elif not isinstance(to[list_base], list):
          # This may not have been checked above if merging in a list with an
          # extension character.
          raise TypeError(
              'Attempt to merge dict value of type ' + v.__class__.__name__ +
              ' into incompatible type ' + to[list_base].__class__.__name__ +
              ' for key ' + list_base + '(' + k + ')')
      else:
        to[list_base] = []

      # Call MergeLists, which will make copies of objects that require it.
      # MergeLists can recurse back into MergeDicts, although this will be
      # to make copies of dicts (with paths fixed), there will be no
      # subsequent dict "merging" once entering a list because lists are
      # always replaced, appended to, or prepended to.
      is_paths = IsPathSection(list_base)
      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
    else:
      raise TypeError(
          'Attempt to merge dict value of unsupported type ' +
          v.__class__.__name__ + ' for key ' + k)
def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  """Recursively merge |configuration| and its inherit_from ancestors into
  |new_configuration_dict|."""
  # Skip if previously visited; inherit_from chains may revisit an ancestor.
  if configuration in visited:
    return

  # Look at this configuration.
  configuration_dict = target_dict['configurations'][configuration]

  # Merge each parent first, so this configuration's own values are applied
  # on top of everything it inherits.
  for parent in configuration_dict.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])

  # Merge this configuration itself into the accumulated result.
  MergeDicts(new_configuration_dict, configuration_dict,
             build_file, build_file)

  # An inherited 'abstract' flag must not mark the merged result abstract.
  if 'abstract' in new_configuration_dict:
    del new_configuration_dict['abstract']
def SetUpConfigurations(target, target_dict):
  """Expand target-level settings into each concrete configuration dict.

  Each non-abstract configuration receives a deep copy of the target's
  configuration-eligible settings merged (via inherit_from) with its own.
  Abstract configurations, and target keys that moved into configurations,
  are then removed.  Raises GypError if a configuration contains a key from
  invalid_configuration_keys.
  """
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /).  Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']

  build_file = gyp.common.BuildFile(target)

  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    # items() replaces the Python 2-only iterkeys()-based scan; behavior is
    # identical.
    concrete = [i for (i, config) in target_dict['configurations'].items()
                if not config.get('abstract')]
    target_dict['default_configuration'] = sorted(concrete)[0]

  # Iterate over a snapshot of the keys: configuration values are replaced
  # inside the loop, and Python 3 forbids mutating a dict under a live view.
  for configuration in list(target_dict['configurations'].keys()):
    old_configuration_dict = target_dict['configurations'][configuration]
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = copy.deepcopy(target_dict)

    # Take out the bits that don't belong in a "configurations" section.
    # Since configuration setup is done before conditional, exclude, and
    # rules processing, be careful with handling of the suffix characters
    # used in those phases.
    delete_keys = []
    for key in new_configuration_dict:
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if key_base in non_configuration_keys:
        delete_keys.append(key)
    for key in delete_keys:
      del new_configuration_dict[key]

    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])

    # Put the new result back into the target dict as a configuration.
    target_dict['configurations'][configuration] = new_configuration_dict

  # Now drop all the abstract ones.  Iterate a snapshot again because keys
  # are deleted during the loop (a Python 3 RuntimeError otherwise).
  for configuration in list(target_dict['configurations'].keys()):
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]

  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]

  # Check the configurations to see if they contain invalid keys.
  for configuration in target_dict['configurations'].keys():
    configuration_dict = target_dict['configurations'][configuration]
    for key in configuration_dict.keys():
      if key in invalid_configuration_keys:
        raise GypError('%s not allowed in the %s configuration, found in '
                       'target %s' % (key, configuration, target))
def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!".  Every item in such a list is removed from the associated
  main list, which in this example, would be "sources".  Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc,
  and _win.cc.  The second filter then includes all files ending in _mac.cc
  that are now or were once in the "sources" list.  Items matching an
  "exclude" filter are subject to the same processing as would occur if they
  were listed by name in an exclusion list (ending in "!").  Items matching
  an "include" filter are brought back into the main list if previously
  excluded by an exclusion list or exclusion regex filter.  Subsequent
  matching "exclude" patterns can still cause items to be excluded after
  matching an "include".
  """

  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists.  Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # then into |lists|.  This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.
  lists = []
  del_lists = []
  for key, value in the_dict.items():
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue

    if not isinstance(value, list):
      # Function-call raise form; "raise X, y" is a Python 3 syntax error.
      raise ValueError(name + ' key ' + key + ' must be list, not ' +
                       value.__class__.__name__)

    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no
      # corresponding "sources" list.  Since there's nothing for it to
      # operate on, queue up the "sources!" list for deletion now.
      del_lists.append(key)
      continue

    if not isinstance(the_dict[list_key], list):
      value = the_dict[list_key]
      raise ValueError(name + ' key ' + list_key +
                       ' must be list, not ' +
                       value.__class__.__name__ + ' when applying ' +
                       {'!': 'exclusion', '/': 'regex'}[operation])

    if not list_key in lists:
      lists.append(list_key)

  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]

  for list_key in lists:
    the_list = the_dict[list_key]

    # Initialize the list_actions list, which is parallel to the_list.  Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied.  Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1.  Includes and
    # excludes override previous actions.  All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))

    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in range(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0

      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]

    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)

        if action == 'exclude':
          # This item matches an exclude regex, so set its value to 0
          # (exclude).
          action_value = 0
        elif action == 'include':
          # This item matches an include regex, so set its value to 1
          # (include).
          action_value = 1
        else:
          # This is an action that doesn't make any sense.
          raise ValueError('Unrecognized action ' + action + ' in ' + name +
                           ' key ' + regex_key)

        for index in range(0, len(the_list)):
          list_item = the_list[index]
          if list_actions[index] == action_value:
            # Even if the regex matches, nothing will change so continue
            # (regex searches are expensive).
            continue
          if pattern_re.search(list_item):
            # Regular expression match.
            list_actions[index] = action_value

      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]

    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded").  The exclude_key list is input and it was
    # already processed and deleted; the excluded_key list is output and
    # it's about to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise GypError(name + ' key ' + excluded_key +
                     ' must not be present prior '
                     ' to applying exclusion/regex filters for ' + list_key)

    excluded_list = []

    # Go backwards through the list_actions list so that as items are
    # deleted, the indices of items that haven't been seen yet don't shift.
    # That means that things need to be prepended to excluded_list to
    # maintain them in the same order that they existed in the_list.
    for index in range(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude).  Keep anything with action
        # 1 (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]

    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list

  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.items():
    if isinstance(value, dict):
      ProcessListFiltersInDict(key, value)
    elif isinstance(value, list):
      ProcessListFiltersInList(key, value)


def ProcessListFiltersInList(name, the_list):
  """Recurse into |the_list|, applying list filters to any dicts it holds."""
  for item in the_list:
    if isinstance(item, dict):
      ProcessListFiltersInDict(name, item)
    elif isinstance(item, list):
      ProcessListFiltersInList(name, item)
def ValidateTargetType(target, target_dict):
  """Ensures the 'type' field on the target is one of the known types.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec.

  Raises an exception on error.
  """
  VALID_TARGET_TYPES = ('executable', 'loadable_module',
                        'static_library', 'shared_library',
                        'none')
  target_type = target_dict.get('type', None)
  if target_type not in VALID_TARGET_TYPES:
    raise GypError("Target %s has an invalid target type '%s'.  "
                   "Must be one of %s." %
                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
  # standalone_static_library only makes sense on static_library targets.
  standalone = target_dict.get('standalone_static_library', 0)
  if standalone and target_type != 'static_library':
    raise GypError('Target %s has type %s but standalone_static_library flag is'
                   ' only valid for static_library type.' % (target,
                                                             target_type))
def ValidateSourcesInTarget(target, target_dict, build_file):
  """Fail when a library target lists two compiled files sharing a basename;
  some build systems (e.g. MSVC08) cannot handle that."""
  # TODO: Check if MSVC allows this for loadable_module targets.
  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
    return

  # Group compiled sources by their basename without the extension.
  compiled_exts = ('.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S')
  basenames = {}
  for source in target_dict.get('sources', []):
    name, ext = os.path.splitext(source)
    if ext not in compiled_exts:
      continue
    basenames.setdefault(os.path.basename(name), []).append(source)

  error = ''
  for basename, files in basenames.items():
    if len(files) > 1:
      error += '  %s: %s\n' % (basename, ' '.join(files))

  if error:
    print('static library %s has several files with the same basename:\n' %
          target + error + 'Some build systems, e.g. MSVC08, '
          'cannot handle that.')
    raise GypError('Duplicate basenames in sources section, see list above')
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.
  """
  # Track which rule names and extensions have already been claimed so a
  # duplicate can be reported against the earlier rule.
  seen_rule_names = {}
  seen_extensions = {}

  for rule in target_dict.get('rules', []):
    # Make sure that there's no conflict among rule names and extensions.
    rule_name = rule['rule_name']
    if rule_name in seen_rule_names:
      raise GypError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    seen_rule_names[rule_name] = rule

    rule_extension = rule['extension']
    if rule_extension.startswith('.'):
      rule_extension = rule_extension[1:]
    if rule_extension in seen_extensions:
      raise GypError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (rule_extension, target,
                      seen_extensions[rule_extension]['rule_name'],
                      rule_name))
    seen_extensions[rule_extension] = rule

    # rule_sources is an output of this function; it must not be present in
    # the input.  It's going to be created below if needed.
    if 'rule_sources' in rule:
      raise GypError(
          'rule_sources must not exist in input, target %s rule %s' %
          (target, rule_name))

    # Collect every source (from 'sources' plus any generator-specified
    # extra keys) whose extension matches this rule.
    matching_sources = []
    for source_key in ['sources'] + list(extra_sources_for_rules):
      for source in target_dict.get(source_key, []):
        (_, source_extension) = os.path.splitext(source)
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == rule_extension:
          matching_sources.append(source)

    if matching_sources:
      rule['rule_sources'] = matching_sources
def ValidateRunAsInTarget(target, target_dict, build_file):
  """Validate the optional 'run_as' section of a target spec."""
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  if not run_as:
    # 'run_as' is optional; nothing to validate when it's absent.
    return
  # Every message below identifies the same target/file pair.
  context = (target_name, build_file)
  if not isinstance(run_as, dict):
    raise GypError("The 'run_as' in target %s from file %s should be a "
                   "dictionary." % context)
  action = run_as.get('action')
  if not action:
    raise GypError("The 'run_as' in target %s from file %s must have an "
                   "'action' section." % context)
  if not isinstance(action, list):
    raise GypError("The 'action' for 'run_as' in target %s from file %s "
                   "must be a list." % context)
  working_directory = run_as.get('working_directory')
  if working_directory and not isinstance(working_directory, str):
    raise GypError("The 'working_directory' for 'run_as' in target %s "
                   "in file %s should be a string." % context)
  environment = run_as.get('environment')
  if environment and not isinstance(environment, dict):
    raise GypError("The 'environment' for 'run_as' in target %s "
                   "in file %s should be a dictionary." % context)
def ValidateActionsInTarget(target, target_dict, build_file):
  '''Validates the inputs to the actions in a target.'''
  target_name = target_dict.get('target_name')
  for action in target_dict.get('actions', []):
    # Every action must be named.
    if not action.get('action_name'):
      raise GypError("Anonymous action in target %s.  "
                     "An action must have an 'action_name' field." %
                     target_name)
    # An 'inputs' list must be present, even if it is empty.
    if action.get('inputs', None) is None:
      raise GypError('Action in target %s has no inputs.' % target_name)
    # The command itself, when present, must not start with an empty word.
    command = action.get('action')
    if command and not command[0]:
      raise GypError("Empty action as command in target %s." % target_name)
def TurnIntIntoStrInDict(the_dict):
  """Given dict the_dict, recursively converts all integers into strings.
  """
  # Iterate over a snapshot (list) of the items: the loop reinserts integer
  # keys as strings and deletes the old keys, and mutating a dict while
  # iterating a live items() view raises RuntimeError on Python 3.  (Under
  # Python 2, items() already returned a list, so behavior is unchanged.)
  for k, v in list(the_dict.items()):
    if isinstance(v, int):
      v = str(v)
      the_dict[k] = v
    elif isinstance(v, dict):
      TurnIntIntoStrInDict(v)
    elif isinstance(v, list):
      TurnIntIntoStrInList(v)

    # Convert integer keys too, replacing the old key with its string form.
    if isinstance(k, int):
      the_dict[str(k)] = v
      del the_dict[k]
def TurnIntIntoStrInList(the_list):
  """Given list the_list, recursively converts all integers into strings.
  """
  # enumerate replaces the Python 2-only xrange index loop; assigning by
  # index preserves the in-place mutation semantics.
  for index, item in enumerate(the_list):
    if isinstance(item, int):
      the_list[index] = str(item)
    elif isinstance(item, dict):
      TurnIntIntoStrInDict(item)
    elif isinstance(item, list):
      TurnIntIntoStrInList(item)
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
                         data):
  """Return only the targets that are deep dependencies of |root_targets|."""
  # Resolve each requested root into fully qualified target names.
  qualified_root_targets = []
  for root in root_targets:
    root = root.strip()
    qualified = gyp.common.FindQualifiedTargets(root, flat_list)
    if not qualified:
      raise GypError("Could not find target %s" % root)
    qualified_root_targets.extend(qualified)

  # Keep every root plus everything a root transitively depends on.
  wanted_targets = {}
  for root in qualified_root_targets:
    wanted_targets[root] = targets[root]
    for dependency in dependency_nodes[root].DeepDependencies():
      wanted_targets[dependency] = targets[dependency]

  wanted_flat_list = [t for t in flat_list if t in wanted_targets]

  # Prune unwanted targets from each build_file's data dict.
  for build_file in data['target_build_files']:
    if not 'targets' in data[build_file]:
      continue
    data[build_file]['targets'] = [
        t for t in data[build_file]['targets']
        if gyp.common.QualifiedTarget(
            build_file, t['target_name'], t['toolset']) in wanted_targets]

  return wanted_targets, wanted_flat_list
def VerifyNoCollidingTargets(targets):
  """Verify that no two targets in the same directory share the same name.

  Arguments:
    targets: A list of targets in the form 'path/to/file.gyp:target_name'.

  Raises GypError when two targets in one directory have the same name.
  """
  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
  used = {}
  for target in targets:
    # Separate out 'path/to/file.gyp' and 'target_name' from
    # 'path/to/file.gyp:target_name'.
    path, name = target.rsplit(':', 1)
    # Separate out 'path/to' and 'file.gyp' from 'path/to/file.gyp'.
    # Named gyp_file (not gyp) so the local doesn't shadow the imported
    # gyp module.
    subdir, gyp_file = os.path.split(path)
    # Use '.' for the current directory '', so that the error messages make
    # more sense.
    if not subdir:
      subdir = '.'
    # Prepare a key like 'path/to:target_name'.
    key = subdir + ':' + name
    if key in used:
      # Complain if this target is already used.
      raise GypError('Duplicate target name "%s" in directory "%s" used both '
                     'in "%s" and "%s".' % (name, subdir, gyp_file, used[key]))
    used[key] = gyp_file
def SetGeneratorGlobals(generator_input_info):
  """Seed module-level settings from the generator's input info."""
  # Set up path_sections and non_configuration_keys with the default data
  # plus the generator-specific data.
  global path_sections, non_configuration_keys
  global multiple_toolsets, generator_filelist_paths

  path_sections = base_path_sections[:]
  path_sections.extend(generator_input_info['path_sections'])

  non_configuration_keys = base_non_configuration_keys[:]
  non_configuration_keys.extend(
      generator_input_info['non_configuration_keys'])

  multiple_toolsets = \
      generator_input_info['generator_supports_multiple_toolsets']
  generator_filelist_paths = generator_input_info['generator_filelist_paths']
2666 def Load(build_files, variables, includes, depth, generator_input_info, check,
2667 circular_check, parallel, root_targets):
2668 SetGeneratorGlobals(generator_input_info)
2669 # A generator can have other lists (in addition to sources) be processed
2671 extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
# NOTE(review): This excerpt is the tail of gyp's Load() function; the
# enclosing `def Load(...)` header is above the visible range.  Each line
# carries a stale embedded line number (2673, 2674, ...) from a numbered
# listing, and several statements appear to have been elided from this
# excerpt (see the inline NOTE(review) comments below) -- confirm against
# the full file before editing.  The code itself is Python 2 only.
2673 # Load build files. This loads every target-containing build file into
2674 # the |data| dictionary such that the keys to |data| are build file names,
2675 # and the values are the entire build file contents after "early" or "pre"
2676 # processing has been done and includes have been resolved.
2677 # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
2678 # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
2679 # track of the keys corresponding to "target" files.
2680 data = {'target_build_files': set()}
2682 # Normalize paths everywhere. This is important because paths will be
2683 # used as keys to the data dict and for references between input files.
2684 build_files = set(map(os.path.normpath, build_files))
# NOTE(review): both the parallel and the serial loading paths appear
# below; presumably a conditional (e.g. `if parallel: ... else:`) selecting
# between them was elided from this excerpt -- confirm.
2686 LoadTargetBuildFilesParallel(build_files, data, aux_data,
2687 variables, includes, depth, check,
2688 generator_input_info)
2690 for build_file in build_files:
# NOTE(review): the `try:` opening the except-clause below is not visible
# in this excerpt -- presumably elided.
2692 LoadTargetBuildFile(build_file, data, aux_data,
2693 variables, includes, depth, check, True)
2694 except Exception, e:  # Python 2 syntax; Python 3 requires `except Exception as e:`.
2695 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
2698 # Build a dict to access each target's subdict by qualified name.
2699 targets = BuildTargetsDict(data)
2701 # Fully qualify all dependency links.
2702 QualifyDependencies(targets)
2704 # Remove self-dependencies from targets that have 'prune_self_dependencies'
2706 RemoveSelfDependencies(targets)
2708 # Expand dependencies specified as build_file:*.
2709 ExpandWildcardDependencies(targets, data)
2711 # Apply exclude (!) and regex (/) list filters only for dependency_sections.
2712 for target_name, target_dict in targets.iteritems():  # Python 2 only; iteritems() was removed in Python 3.
2714 for key_base in dependency_sections:
2715 for op in ('', '!', '/'):
# NOTE(review): `key` and `tmp_dict` are used below but never assigned in
# this excerpt; presumably `tmp_dict = {}` and `key = key_base + op` were
# elided -- confirm against the full file.
2717 if key in target_dict:
2718 tmp_dict[key] = target_dict[key]
2719 del target_dict[key]
2720 ProcessListFiltersInDict(target_name, tmp_dict)
2721 # Write the results back to |target_dict|.
2722 for key in tmp_dict:
2723 target_dict[key] = tmp_dict[key]
2725 # Make sure every dependency appears at most once.
2726 RemoveDuplicateDependencies(targets)
2729 # Make sure that any targets in a.gyp don't contain dependencies in other
2730 # .gyp files that further depend on a.gyp.
2731 VerifyNoGYPFileCircularDependencies(targets)
2733 [dependency_nodes, flat_list] = BuildDependencyList(targets)
# NOTE(review): pruning only makes sense when |root_targets| was supplied;
# presumably an `if root_targets:` guard was elided here -- confirm.
2736 # Remove, from |targets| and |flat_list|, the targets that are not deep
2737 # dependencies of the targets specified in |root_targets|.
2738 targets, flat_list = PruneUnwantedTargets(
2739 targets, flat_list, dependency_nodes, root_targets, data)
2741 # Check that no two targets in the same directory have the same name.
2742 VerifyNoCollidingTargets(flat_list)
2744 # Handle dependent settings of various types.
2745 for settings_type in ['all_dependent_settings',
2746 'direct_dependent_settings',
# NOTE(review): a third list element (likely 'link_settings') appears to
# be elided from this excerpt -- confirm against the full file.
2748 DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
2750 # Take out the dependent settings now that they've been published to all
2751 # of the targets that require them.
2752 for target in flat_list:
2753 if settings_type in targets[target]:
2754 del targets[target][settings_type]
2756 # Make sure static libraries don't declare dependencies on other static
2757 # libraries, but that linkables depend on all unlinked static libraries
2758 # that they need so that their link steps will be correct.
2759 gii = generator_input_info
2760 if gii['generator_wants_static_library_dependencies_adjusted']:
2761 AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
2762 gii['generator_wants_sorted_dependencies'])
2764 # Apply "post"/"late"/"target" variable expansions and condition evaluations.
2765 for target in flat_list:
2766 target_dict = targets[target]
2767 build_file = gyp.common.BuildFile(target)
2768 ProcessVariablesAndConditionsInDict(
2769 target_dict, PHASE_LATE, variables, build_file)
2771 # Move everything that can go into a "configurations" section into one.
2772 for target in flat_list:
2773 target_dict = targets[target]
2774 SetUpConfigurations(target, target_dict)
2776 # Apply exclude (!) and regex (/) list filters.
2777 for target in flat_list:
2778 target_dict = targets[target]
2779 ProcessListFiltersInDict(target, target_dict)
2781 # Apply "latelate" variable expansions and condition evaluations.
2782 for target in flat_list:
2783 target_dict = targets[target]
2784 build_file = gyp.common.BuildFile(target)
2785 ProcessVariablesAndConditionsInDict(
2786 target_dict, PHASE_LATELATE, variables, build_file)
2788 # Make sure that the rules make sense, and build up rule_sources lists as
2789 # needed. Not all generators will need to use the rule_sources lists, but
2790 # some may, and it seems best to build the list in a common spot.
2791 # Also validate actions and run_as elements in targets.
2792 for target in flat_list:
2793 target_dict = targets[target]
2794 build_file = gyp.common.BuildFile(target)
2795 ValidateTargetType(target, target_dict)
2796 # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
2797 # scalesystemdependent_arm_additions.c or similar.
2798 if 'arm' not in variables.get('target_arch', ''):
2799 ValidateSourcesInTarget(target, target_dict, build_file)
# NOTE(review): `extra_sources_for_rules` is not defined anywhere in this
# excerpt; presumably it is built earlier in Load() from the generator
# input info -- confirm.
2800 ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
2801 ValidateRunAsInTarget(target, target_dict, build_file)
2802 ValidateActionsInTarget(target, target_dict, build_file)
2804 # Generators might not expect ints. Turn them into strs.
2805 TurnIntIntoStrInDict(data)
2807 # TODO(mark): Return |data| for now because the generator needs a list of
2808 # build files that came in. In the future, maybe it should just accept
2809 # a list, and not the whole data dict.
2810 return [flat_list, targets, data]