# Copyright 2001 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple web server for browsing dependency graph data.

This script is inlined into the final executable and spawned by
it when needed.
"""
from __future__ import print_function

import argparse
import os
import socket
import subprocess
import sys
import webbrowser
from collections import namedtuple

try:
    # Python 3.
    import http.server as httpserver
    from urllib.request import unquote
except ImportError:
    # Python 2 fallback.
    import BaseHTTPServer as httpserver
    from urllib2 import unquote
41 Node = namedtuple('Node', ['inputs', 'rule', 'target', 'outputs'])
# Ideally we'd allow you to navigate to a build edge or a build node,
# with appropriate views for each. But there's no way to *name* a build
# edge so we can only display nodes.
#
# For a given node, it has at most one input edge, which has n
# different inputs. This becomes node.inputs. (We leave out the
# outputs of the input edge due to what follows.) The node can have
# multiple dependent output edges. Rather than attempting to display
# those, they are summarized by taking the union of all their outputs.
#
# This means there's no single view that shows you all inputs and outputs
# of an edge. But I think it's less confusing than alternatives.
56 def match_strip(line, prefix):
57 if not line.startswith(prefix):
59 return (True, line[len(prefix):])
62 lines = iter(text.split('\n'))
70 target = next(lines)[:-1] # strip trailing colon
73 (match, rule) = match_strip(line, ' input: ')
75 (match, line) = match_strip(next(lines), ' ')
78 (match, line) = match_strip(line, '| ')
81 (match, line) = match_strip(line, '|| ')
84 inputs.append((line, type))
85 (match, line) = match_strip(next(lines), ' ')
87 match, _ = match_strip(line, ' outputs:')
89 (match, line) = match_strip(next(lines), ' ')
92 (match, line) = match_strip(next(lines), ' ')
96 return Node(inputs, rule, target, outputs)
98 def create_page(body):
99 return '''<!DOCTYPE html>
117 font-family: WebKitHack, monospace;
121 -webkit-columns: auto 2;
126 def generate_html(node):
127 document = ['<h1><tt>%s</tt></h1>' % node.target]
130 document.append('<h2>target is built using rule <tt>%s</tt> of</h2>' %
132 if len(node.inputs) > 0:
133 document.append('<div class=filelist>')
134 for input, type in sorted(node.inputs):
137 extra = ' (%s)' % type
138 document.append('<tt><a href="?%s">%s</a>%s</tt><br>' %
139 (input, input, extra))
140 document.append('</div>')
143 document.append('<h2>dependent edges build:</h2>')
144 document.append('<div class=filelist>')
145 for output in sorted(node.outputs):
146 document.append('<tt><a href="?%s">%s</a></tt><br>' %
148 document.append('</div>')
150 return '\n'.join(document)
152 def ninja_dump(target):
153 cmd = [args.ninja_command, '-f', args.f, '-t', 'query', target]
154 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
155 universal_newlines=True)
156 return proc.communicate() + (proc.returncode,)
158 class RequestHandler(httpserver.BaseHTTPRequestHandler):
160 assert self.path[0] == '/'
161 target = unquote(self.path[1:])
164 self.send_response(302)
165 self.send_header('Location', '?' + args.initial_target)
169 if not target.startswith('?'):
170 self.send_response(404)
175 ninja_output, ninja_error, exit_code = ninja_dump(target)
177 page_body = generate_html(parse(ninja_output.strip()))
179 # Relay ninja's error message.
180 page_body = '<h1><tt>%s</tt></h1>' % ninja_error
182 self.send_response(200)
184 self.wfile.write(create_page(page_body).encode('utf-8'))
186 def log_message(self, format, *args):
187 pass # Swallow console spam.
189 parser = argparse.ArgumentParser(prog='ninja -t browse')
190 parser.add_argument('--port', '-p', default=8000, type=int,
191 help='Port number to use (default %(default)d)')
192 parser.add_argument('--no-browser', action='store_true',
193 help='Do not open a webbrowser on startup.')
195 parser.add_argument('--ninja-command', default='ninja',
196 help='Path to ninja binary (default %(default)s)')
197 parser.add_argument('-f', default='build.ninja',
198 help='Path to build.ninja file (default %(default)s)')
199 parser.add_argument('initial_target', default='all', nargs='?',
200 help='Initial target to show (default %(default)s)')
202 args = parser.parse_args()
204 httpd = httpserver.HTTPServer(('',port), RequestHandler)
206 hostname = socket.gethostname()
207 print('Web server running on %s:%d, ctl-C to abort...' % (hostname,port) )
208 print('Web server pid %d' % os.getpid(), file=sys.stderr )
209 if not args.no_browser:
210 webbrowser.open_new('http://%s:%s' % (hostname, port) )
211 httpd.serve_forever()
212 except KeyboardInterrupt:
214 pass # Swallow console spam.