3 # Copyright 2012 Google Inc. All Rights Reserved.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
# Modified by Linus Nielsen Feltzing for inclusion in the libcurl test suite.
30 This is a test server to test the libcurl pipelining functionality.
It is a modified version of Google's HTTP pipelining test server. More
32 information can be found here:
34 http://dev.chromium.org/developers/design-documents/network-stack/http-pipelining
36 Source code can be found here:
38 http://code.google.com/p/http-pipelining-test/
MAX_REQUEST_SIZE = 1024 # bytes. A request larger than this aborts parsing.
MIN_POLL_TIME = 0.01 # seconds. Minimum time to poll, in order to prevent
                     # excessive looping because Python refuses to poll for
                     # very small timeouts.
SEND_BUFFER_TIME = 0.5 # seconds. How long to accumulate queued requests
                       # before converting them into response bytes.
TIMEOUT = 30 # seconds. Per-connection inactivity timeout.
class Error(Exception):
  """Base class for every exception this server raises."""
  pass


class RequestTooLargeError(Error):
  """Raised when an incoming request exceeds MAX_REQUEST_SIZE."""
  pass


class ServeIndexError(Error):
  """Raised when the client asks for the index page."""
  pass


class UnexpectedMethodError(Error):
  """Raised on any HTTP method other than GET."""
  pass
class RequestParser(object):
  """Parses an input buffer looking for HTTP GET requests.

  The parser is incremental: feed it freshly received socket data through
  ParseAdditionalData() and it returns whatever complete requests have been
  accumulated so far.
  """

  # Parser states.
  LOOKING_FOR_GET = 1
  READING_HEADERS = 2

  # Raw strings so the backslash escapes reach the regex engine untouched.
  HEADER_RE = re.compile(r'([^:]+):(.*)\n')
  REQUEST_RE = re.compile(r'([^ ]+) ([^ ]+) HTTP/(\d+)\.(\d+)\n')

  def __init__(self):
    """Initializer."""
    self._buffer = ""
    self._pending_headers = {}
    self._pending_request = ""
    self._state = self.LOOKING_FOR_GET
    self._were_all_requests_http_1_1 = True
    self._valid_requests = []

  def ParseAdditionalData(self, data):
    """Finds HTTP requests in |data|.

    Args:
      data: (String) Newly received input data from the socket.

    Returns:
      (List of Tuples)
        (String) The request path.
        (Map of String to String) The header name and value.

    Raises:
      RequestTooLargeError: If the request exceeds MAX_REQUEST_SIZE.
      UnexpectedMethodError: On a non-GET method.
      Error: On a programming error.
    """
    # Log the raw input for the test harness.  Logging is best-effort:
    # a missing log/ directory must not abort request parsing.
    try:
      logfile = open('log/server.input', 'a')
      try:
        logfile.write(data)
      finally:
        logfile.close()
    except IOError:
      pass
    # Normalize CRLF to LF so the regexes only have to deal with '\n'.
    self._buffer += data.replace('\r', '')
    should_continue_parsing = True
    while should_continue_parsing:
      if self._state == self.LOOKING_FOR_GET:
        should_continue_parsing = self._DoLookForGet()
      elif self._state == self.READING_HEADERS:
        should_continue_parsing = self._DoReadHeader()
      else:
        # str() because _state is an int; concatenating it raw would raise
        # TypeError instead of the intended Error.
        raise Error('Unexpected state: ' + str(self._state))
    if len(self._buffer) > MAX_REQUEST_SIZE:
      raise RequestTooLargeError(
          'Request is at least %d bytes' % len(self._buffer))
    # Hand all completed requests to the caller and start accumulating anew.
    valid_requests = self._valid_requests
    self._valid_requests = []
    return valid_requests

  @property
  def were_all_requests_http_1_1(self):
    # True while every request line seen so far claimed HTTP/1.1.
    return self._were_all_requests_http_1_1

  def _DoLookForGet(self):
    """Tries to parse an HTTP request line.

    Returns:
      (Boolean) True if a request was found.

    Raises:
      UnexpectedMethodError: On a non-GET method.
      ServeIndexError: If the index page was requested.
    """
    m = self.REQUEST_RE.match(self._buffer)
    if not m:
      return False
    method, path, http_major, http_minor = m.groups()

    if method != 'GET':
      raise UnexpectedMethodError('Unexpected method: ' + method)
    if path in ['/', '/index.htm', '/index.html']:
      raise ServeIndexError()

    if http_major != '1' or http_minor != '1':
      self._were_all_requests_http_1_1 = False

    self._pending_request = path
    self._buffer = self._buffer[m.end():]
    self._state = self.READING_HEADERS
    return True

  def _DoReadHeader(self):
    """Tries to parse one HTTP header or the blank line ending a request.

    Returns:
      (Boolean) True if it found the end of the request or a HTTP header.
    """
    if self._buffer.startswith('\n'):
      # Blank line: the pending request is complete.
      self._buffer = self._buffer[1:]
      self._state = self.LOOKING_FOR_GET
      self._valid_requests.append((self._pending_request,
                                   self._pending_headers))
      self._pending_headers = {}
      self._pending_request = ""
      return True

    m = self.HEADER_RE.match(self._buffer)
    if not m:
      return False

    header = m.group(1).lower()
    value = m.group(2).strip().lower()
    # The first occurrence of a header wins; duplicates are ignored.
    if header not in self._pending_headers:
      self._pending_headers[header] = value
    self._buffer = self._buffer[m.end():]
    return True
class ResponseBuilder(object):
  """Builds HTTP responses for a list of accumulated requests."""

  def __init__(self):
    """Initializer."""
    self._max_pipeline_depth = 0    # deepest request pipeline seen so far
    self._requested_paths = []      # queued (path, headers) tuples
    self._processed_end = False     # True once a terminating response is built
    self._were_all_requests_http_1_1 = True

  def QueueRequests(self, requested_paths, were_all_requests_http_1_1):
    """Adds requests to the queue of requests.

    Args:
      requested_paths: (List of Tuples) (path, headers) per parsed request.
      were_all_requests_http_1_1: (Boolean) False if any request so far used
          a version other than HTTP/1.1.
    """
    self._requested_paths.extend(requested_paths)
    self._were_all_requests_http_1_1 = were_all_requests_http_1_1

  def Chunkify(self, data, chunksize):
    """Divides a string into HTTP chunked-transfer-coding chunks.

    Each chunk is at most |chunksize| bytes, prefixed with its own size in
    lowercase hex and terminated by CRLF.
    """
    chunks = []
    for i in range(0, len(data), chunksize):
      piece = data[i:i + chunksize]
      # Use the real piece length, not |chunksize|: the final piece can be
      # shorter, and a wrong chunk-size header corrupts the chunked stream.
      chunks.append('%x\r\n%s\r\n' % (len(piece), piece))
    return chunks

  def BuildResponses(self):
    """Converts the queue of requests into responses.

    Returns:
      (String) Buffer containing all of the responses.
    """
    result = ""
    self._max_pipeline_depth = max(self._max_pipeline_depth,
                                   len(self._requested_paths))
    for path, headers in self._requested_paths:
      if path == '/verifiedserver':
        # Standard curl test-suite liveness probe.
        body = "WE ROOLZ: {}\r\n".format(os.getpid())
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: {}'.format(len(body)),
                       'Cache-Control: no-store'], body)

      elif path == '/alphabet.txt':
        body = 'abcdefghijklmnopqrstuvwxyz'
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 26',
                       'Cache-Control: no-store'], body)

      elif path == '/reverse.txt':
        body = 'zyxwvutsrqponmlkjihgfedcba'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: no-store'], body)

      elif path == '/chunked.txt':
        body = ('7\r\nchunked\r\n'
                '8\r\nencoding\r\n'
                '2\r\nis\r\n'
                '3\r\nfun\r\n'
                '0\r\n\r\n')
        result += self._BuildResponse(
            '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
            body)

      elif path == '/cached.txt':
        body = 'azbycxdwevfugthsirjqkplomn'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60'], body)

      elif path == '/connection_close.txt':
        body = 'azbycxdwevfugthsirjqkplomn'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60',
                       'Connection: close'], body)
        # Connection: close terminates the exchange.
        self._processed_end = True

      elif path == '/1k.txt':
        # 64 * 16 bytes = 1024 bytes.
        body = '0123456789abcdef' * 64
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 1024',
                       'Cache-Control: max-age=60'], body)

      elif path == '/10k.txt':
        # 640 * 16 bytes = 10240 bytes.
        body = '0123456789abcdef' * 640
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 10240',
                       'Cache-Control: max-age=60'], body)

      elif path == '/100k.txt':
        # 6400 * 16 bytes = 102400 bytes.
        body = '0123456789abcdef' * 6400
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 102400',
                       'Cache-Control: max-age=60'],
            body)

      elif path == '/100k_chunked.txt':
        payload = '0123456789abcdef' * 6400
        body = self.Chunkify(payload, 20480)
        body.append('0\r\n\r\n')
        body = ''.join(body)

        result += self._BuildResponse(
            '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
            body)

      elif path == '/stats.txt':
        results = {
            'max_pipeline_depth': self._max_pipeline_depth,
            'were_all_requests_http_1_1': int(self._were_all_requests_http_1_1),
        }
        body = ','.join(['%s:%s' % (k, v) for k, v in results.items()])
        result += self._BuildResponse(
            '200 OK',
            ['Content-Length: %s' % len(body), 'Cache-Control: no-store'], body)
        # Stats are the final answer of a test run.
        self._processed_end = True

      else:
        result += self._BuildResponse('404 Not Found',
                                      ['Content-Length: 7'], 'Go away')
      if self._processed_end:
        # A terminating response was built; ignore any further queued paths.
        break
    self._requested_paths = []
    return result

  def WriteError(self, status, error):
    """Returns an HTTP response for the specified error.

    Args:
      status: (String) Response code and description (e.g. "404 Not Found")
      error: (String) Response body.

    Returns:
      (String) Text of HTTP response.
    """
    return self._BuildResponse(
        status, ['Connection: close', 'Content-Type: text/plain'], error)

  @property
  def processed_end(self):
    # True once a terminating response (stats or Connection: close) was built.
    return self._processed_end

  def _BuildResponse(self, status, headers, body):
    """Builds an HTTP response.

    Args:
      status: (String) Response code and description (e.g. "200 OK")
      headers: (List of Strings) Headers (e.g. "Connection: close")
      body: (String) Response body.

    Returns:
      (String) Text of HTTP response.
    """
    return ('HTTP/1.1 %s\r\n'
            '%s\r\n'
            '\r\n'
            '%s' % (status, '\r\n'.join(headers), body))
class PipelineRequestHandler(SocketServer.BaseRequestHandler):
  """Called on an incoming TCP connection."""

  def _GetTimeUntilTimeout(self):
    # Seconds remaining before the whole connection times out.
    return self._start_time + TIMEOUT - time.time()

  def _GetTimeUntilNextSend(self):
    # Seconds until accumulated responses should be flushed.  While nothing
    # is queued there is no flush deadline, so wait the full timeout.
    if not self._last_queued_time:
      return TIMEOUT
    return self._last_queued_time + SEND_BUFFER_TIME - time.time()

  def handle(self):
    """Services one connection: parse pipelined requests, batch responses."""
    self._request_parser = RequestParser()
    self._response_builder = ResponseBuilder()
    self._last_queued_time = 0
    self._num_queued = 0
    self._num_written = 0
    self._send_buffer = ""
    self._start_time = time.time()
    try:
      # Keep serving until a terminating response has been built AND the
      # send buffer has fully drained.
      while not self._response_builder.processed_end or self._send_buffer:

        time_left = self._GetTimeUntilTimeout()
        time_until_next_send = self._GetTimeUntilNextSend()
        max_poll_time = min(time_left, time_until_next_send) + MIN_POLL_TIME

        rlist, wlist, xlist = [], [], []
        fileno = self.request.fileno()
        if max_poll_time > 0:
          rlist.append(fileno)
          # Only poll for writability while there is something to send.
          if self._send_buffer:
            wlist.append(fileno)
          rlist, wlist, xlist = select.select(rlist, wlist, xlist,
                                              max_poll_time)

        if self._GetTimeUntilTimeout() <= 0:
          # Connection-level timeout expired; give up on this client.
          return

        if self._GetTimeUntilNextSend() <= 0:
          # Flush deadline reached: turn everything queued into bytes.
          self._send_buffer += self._response_builder.BuildResponses()
          self._num_written = self._num_queued
          self._last_queued_time = 0

        if fileno in rlist:
          self.request.setblocking(False)
          new_data = self.request.recv(MAX_REQUEST_SIZE)
          self.request.setblocking(True)
          if not new_data:
            # Peer closed the connection.
            return
          new_requests = self._request_parser.ParseAdditionalData(new_data)
          self._response_builder.QueueRequests(
              new_requests, self._request_parser.were_all_requests_http_1_1)
          self._num_queued += len(new_requests)
          self._last_queued_time = time.time()
        elif fileno in wlist:
          # Socket writable: send at most 4K per iteration.
          num_bytes_sent = self.request.send(self._send_buffer[0:4096])
          self._send_buffer = self._send_buffer[num_bytes_sent:]

    except RequestTooLargeError as e:
      self.request.send(self._response_builder.WriteError(
          '413 Request Entity Too Large', e))
    except UnexpectedMethodError as e:
      self.request.send(self._response_builder.WriteError(
          '405 Method Not Allowed', e))
    except ServeIndexError:
      # The index page was requested: answer with the info message.
      self.request.send(self._response_builder.WriteError(
          '200 OK', INFO_MESSAGE))
    except Exception as e:
      # Last-resort guard so one bad connection cannot kill the thread.
      print(e)
class PipelineServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
  """Threaded TCP server: one PipelineRequestHandler thread per connection."""
  pass
if __name__ == '__main__':
  # Command-line interface shared by the curl test-suite servers.
  parser = argparse.ArgumentParser()
  parser.add_argument("--port", action="store", default=0,
                      type=int, help="port to listen on")
  parser.add_argument("--verbose", action="store", default=0,
                      type=int, help="verbose output")
  parser.add_argument("--pidfile", action="store", default=0,
                      help="file name for the PID")
  parser.add_argument("--logfile", action="store", default=0,
                      help="file name for the log")
  parser.add_argument("--srcdir", action="store", default=0,
                      help="test directory")
  parser.add_argument("--id", action="store", default=0,
                      help="server ID")
  parser.add_argument("--ipv4", action="store_true", default=0,
                      help="IPv4 flag")
  args = parser.parse_args()

  if args.pidfile:
    # Record our PID so the test harness can locate and stop this server.
    # 'with' guarantees the handle is closed even if the write fails.
    with open(args.pidfile, 'w') as f:
      f.write('{}'.format(os.getpid()))

  server = PipelineServer(('0.0.0.0', args.port), PipelineRequestHandler)
  server.allow_reuse_address = True
  server.serve_forever()