3 # Copyright 2012 Google Inc. All Rights Reserved.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
17 # Modified by Linus Nielsen Feltzing for inclusion in the libcurl test
import argparse
import os
import re
import select
import time

try:
    import socketserver  # Python 3
except ImportError:
    import SocketServer as socketserver  # Python 2
33 This is a test server to test the libcurl pipelining functionality.
It is a modified version of Google's HTTP pipelining test server. More
35 information can be found here:
37 http://dev.chromium.org/developers/design-documents/network-stack/http-pipelining
39 Source code can be found here:
41 http://code.google.com/p/http-pipelining-test/
# Tunable server constants.
MAX_REQUEST_SIZE = 1024 # bytes; recv() size and the cap on buffered unparsed data
MIN_POLL_TIME = 0.01 # seconds. Minimum time to poll, in order to prevent
                     # excessive looping because Python refuses to poll for
                     # very small timeout values
SEND_BUFFER_TIME = 0.5 # seconds; delay before queued requests are turned into responses
TIMEOUT = 30 # seconds; per-connection deadline measured from accept time
class Error(Exception):
  """Base class for all exceptions raised by this test server."""
  pass


class RequestTooLargeError(Error):
  """Raised when the buffered request data exceeds MAX_REQUEST_SIZE."""
  pass


class ServeIndexError(Error):
  """Raised when the client asks for the index page ('/', '/index.htm[l])."""
  pass


class UnexpectedMethodError(Error):
  """Raised on any request method other than GET."""
  pass
67 class RequestParser(object):
68 """Parses an input buffer looking for HTTP GET requests."""
75 HEADER_RE = re.compile('([^:]+):(.*)\n')
76 REQUEST_RE = re.compile('([^ ]+) ([^ ]+) HTTP/(\d+)\.(\d+)\n')
81 self._pending_headers = {}
82 self._pending_request = ""
83 self._state = self.LOOKING_FOR_GET
84 self._were_all_requests_http_1_1 = True
85 self._valid_requests = []
87 def ParseAdditionalData(self, data):
88 """Finds HTTP requests in |data|.
91 data: (String) Newly received input data from the socket.
95 (String) The request path.
96 (Map of String to String) The header name and value.
99 RequestTooLargeError: If the request exceeds MAX_REQUEST_SIZE.
100 UnexpectedMethodError: On a non-GET method.
101 Error: On a programming error.
103 logfile = open('log/server.input', 'a')
106 self._buffer += data.replace('\r', '')
107 should_continue_parsing = True
108 while should_continue_parsing:
109 if self._state == self.LOOKING_FOR_GET:
110 should_continue_parsing = self._DoLookForGet()
111 elif self._state == self.READING_HEADERS:
112 should_continue_parsing = self._DoReadHeader()
114 raise Error('Unexpected state: ' + self._state)
115 if len(self._buffer) > MAX_REQUEST_SIZE:
116 raise RequestTooLargeError(
117 'Request is at least %d bytes' % len(self._buffer))
118 valid_requests = self._valid_requests
119 self._valid_requests = []
120 return valid_requests
123 def were_all_requests_http_1_1(self):
124 return self._were_all_requests_http_1_1
126 def _DoLookForGet(self):
127 """Tries to parse an HTTTP request line.
130 (Boolean) True if a request was found.
133 UnexpectedMethodError: On a non-GET method.
135 m = self.REQUEST_RE.match(self._buffer)
138 method, path, http_major, http_minor = m.groups()
141 raise UnexpectedMethodError('Unexpected method: ' + method)
142 if path in ['/', '/index.htm', '/index.html']:
143 raise ServeIndexError()
145 if http_major != '1' or http_minor != '1':
146 self._were_all_requests_http_1_1 = False
150 self._pending_request = path
151 self._buffer = self._buffer[m.end():]
152 self._state = self.READING_HEADERS
155 def _DoReadHeader(self):
156 """Tries to parse a HTTP header.
159 (Boolean) True if it found the end of the request or a HTTP header.
161 if self._buffer.startswith('\n'):
162 self._buffer = self._buffer[1:]
163 self._state = self.LOOKING_FOR_GET
164 self._valid_requests.append((self._pending_request,
165 self._pending_headers))
166 self._pending_headers = {}
167 self._pending_request = ""
170 m = self.HEADER_RE.match(self._buffer)
174 header = m.group(1).lower()
175 value = m.group(2).strip().lower()
176 if header not in self._pending_headers:
177 self._pending_headers[header] = value
178 self._buffer = self._buffer[m.end():]
class ResponseBuilder(object):
  """Builds HTTP responses for a list of accumulated requests."""

  def __init__(self):
    """Initializer."""
    self._max_pipeline_depth = 0
    self._requested_paths = []
    self._processed_end = False
    self._were_all_requests_http_1_1 = True

  def QueueRequests(self, requested_paths, were_all_requests_http_1_1):
    """Adds requests to the queue of requests.

    Args:
      requested_paths: (List of Strings) Requested paths.
      were_all_requests_http_1_1: (Boolean) True if every request parsed so
          far used HTTP/1.1.
    """
    self._requested_paths.extend(requested_paths)
    self._were_all_requests_http_1_1 = were_all_requests_http_1_1

  def Chunkify(self, data, chunksize):
    """Divides a string into HTTP chunked-transfer-coding chunks.

    Args:
      data: (String) Payload to split.
      chunksize: (Integer) Maximum size of each chunk.

    Returns:
      (List of Strings) Chunks, each prefixed with its hex size and framed
      with CRLF. The final chunk may be shorter than |chunksize|; its size
      prefix reflects its actual length (previously it was mislabeled as
      |chunksize|, which only worked for exact multiples).
    """
    chunks = []
    for i in range(0, len(data), chunksize):
      chunk = data[i:i + chunksize]
      chunks.append(hex(len(chunk))[2:] + "\r\n" + chunk + "\r\n")
    return chunks

  def BuildResponses(self):
    """Converts the queue of requests into responses.

    Returns:
      (String) Buffer containing all of the responses.
    """
    result = ""
    self._max_pipeline_depth = max(self._max_pipeline_depth,
                                   len(self._requested_paths))
    for path, headers in self._requested_paths:
      if path == '/verifiedserver':
        body = "WE ROOLZ: {}\r\n".format(os.getpid())
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: {}'.format(len(body)),
                       'Cache-Control: no-store'], body)

      elif path == '/alphabet.txt':
        body = 'abcdefghijklmnopqrstuvwxyz'
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 26',
                       'Cache-Control: no-store'], body)

      elif path == '/reverse.txt':
        body = 'zyxwvutsrqponmlkjihgfedcba'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: no-store'], body)

      elif path == '/chunked.txt':
        # Pre-framed chunked payload: "chunked" (7) + "encoding" (8) +
        # "is" (2) + "fun" (3) + terminating zero chunk.
        body = ('7\r\nchunked\r\n'
                '8\r\nencoding\r\n'
                '2\r\nis\r\n'
                '3\r\nfun\r\n'
                '0\r\n\r\n')
        result += self._BuildResponse(
            '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
            body)

      elif path == '/cached.txt':
        body = 'azbycxdwevfugthsirjqkplomn'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60'], body)

      elif path == '/connection_close.txt':
        body = 'azbycxdwevfugthsirjqkplomn'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60', 'Connection: close'], body)
        self._processed_end = True

      elif path == '/1k.txt':
        body = '0123456789abcdef' * 64
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 1024',
                       'Cache-Control: max-age=60'], body)

      elif path == '/10k.txt':
        body = '0123456789abcdef' * 640
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 10240',
                       'Cache-Control: max-age=60'], body)

      elif path == '/100k.txt':
        body = '0123456789abcdef' * 6400
        result += self._BuildResponse(
            '200 OK',
            ['Server: Apache',
             'Content-Length: 102400',
             'Cache-Control: max-age=60'],
            body)

      elif path == '/100k_chunked.txt':
        chunks = self.Chunkify('0123456789abcdef' * 6400, 20480)
        chunks.append('0\r\n\r\n')
        # Join the chunk list into one string: interpolating the list itself
        # through '%s' would emit its Python repr(), not a valid HTTP body.
        body = ''.join(chunks)
        result += self._BuildResponse(
            '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'], body)

      elif path == '/stats.txt':
        results = {
            'max_pipeline_depth': self._max_pipeline_depth,
            'were_all_requests_http_1_1': int(self._were_all_requests_http_1_1),
        }
        body = ','.join(['%s:%s' % (k, v) for k, v in results.items()])
        result += self._BuildResponse(
            '200 OK',
            ['Content-Length: %s' % len(body), 'Cache-Control: no-store'], body)
        self._processed_end = True

      else:
        result += self._BuildResponse('404 Not Found', ['Content-Length: 7'], 'Go away')
      if self._processed_end:
        break

    self._requested_paths = []
    return result

  def WriteError(self, status, error):
    """Returns an HTTP response for the specified error.

    Args:
      status: (String) Response code and description (e.g. "404 Not Found")
      error: (String) Response body text.

    Returns:
      (String) Text of HTTP response.
    """
    return self._BuildResponse(
        status, ['Connection: close', 'Content-Type: text/plain'], error)

  @property
  def processed_end(self):
    # (Boolean) True once a session-ending request has been answered.
    return self._processed_end

  def _BuildResponse(self, status, headers, body):
    """Builds an HTTP response.

    Args:
      status: (String) Response code and description (e.g. "200 OK")
      headers: (List of Strings) Headers (e.g. "Connection: close")
      body: (String) Response body.

    Returns:
      (String) Text of HTTP response.
    """
    return ('HTTP/1.1 %s\r\n'
            '%s\r\n'
            '\r\n'
            '%s' % (status, '\r\n'.join(headers), body))
class PipelineRequestHandler(socketserver.BaseRequestHandler):
  """Called on an incoming TCP connection."""

  def _GetTimeUntilTimeout(self):
    # Seconds left before the per-connection TIMEOUT deadline.
    return self._start_time + TIMEOUT - time.time()

  def _GetTimeUntilNextSend(self):
    # Until a request has been queued there is no pending send deadline.
    if not self._last_queued_time:
      return TIMEOUT
    return self._last_queued_time + SEND_BUFFER_TIME - time.time()

  def handle(self):
    """Services the connection until timeout or a session-ending request."""
    self._request_parser = RequestParser()
    self._response_builder = ResponseBuilder()
    self._last_queued_time = 0
    self._num_queued = 0
    self._num_written = 0
    self._send_buffer = ""
    self._start_time = time.time()
    try:
      # Keep going while the session is open or unsent data remains.
      while not self._response_builder.processed_end or self._send_buffer:

        time_left = self._GetTimeUntilTimeout()
        time_until_next_send = self._GetTimeUntilNextSend()
        max_poll_time = min(time_left, time_until_next_send) + MIN_POLL_TIME

        rlist, wlist, xlist = [], [], []
        fileno = self.request.fileno()
        if max_poll_time > 0:
          # Always poll for readability; for writability only when there is
          # buffered response data to flush.
          rlist.append(fileno)
          if self._send_buffer:
            wlist.append(fileno)
          rlist, wlist, xlist = select.select(rlist, wlist, xlist, max_poll_time)

        if self._GetTimeUntilTimeout() <= 0:
          return

        if self._GetTimeUntilNextSend() <= 0:
          # The send delay elapsed: build responses for everything queued.
          self._send_buffer += self._response_builder.BuildResponses()
          self._num_written = self._num_queued
          self._last_queued_time = 0

        if fileno in rlist:
          self.request.setblocking(False)
          new_data = self.request.recv(MAX_REQUEST_SIZE)
          self.request.setblocking(True)
          if not new_data:
            # Peer closed the connection.
            return
          new_requests = self._request_parser.ParseAdditionalData(new_data)
          self._response_builder.QueueRequests(
              new_requests, self._request_parser.were_all_requests_http_1_1)
          self._num_queued += len(new_requests)
          self._last_queued_time = time.time()
        elif fileno in wlist:
          # Flush at most 4 KiB per iteration of buffered response data.
          num_bytes_sent = self.request.send(self._send_buffer[0:4096])
          self._send_buffer = self._send_buffer[num_bytes_sent:]

    except RequestTooLargeError as e:
      self.request.send(self._response_builder.WriteError(
          '413 Request Entity Too Large', e))
    except UnexpectedMethodError as e:
      self.request.send(self._response_builder.WriteError(
          '405 Method Not Allowed', e))
    except ServeIndexError:
      self.request.send(self._response_builder.WriteError(
          '200 OK', INFO_MESSAGE))
    except Exception as e:
      # Catch-all boundary so one broken connection cannot kill the server
      # thread; log rather than swallow silently.
      # NOTE(review): reconstructed handler — original body was truncated here.
      print('Unexpected exception: %s' % e)
class PipelineServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
  """TCP server that handles each incoming connection in its own thread."""
  pass
if __name__ == "__main__":
  # Command-line interface matching the curl test harness conventions.
  parser = argparse.ArgumentParser()
  parser.add_argument("--port", action="store", default=0,
                      type=int, help="port to listen on")
  parser.add_argument("--verbose", action="store", default=0,
                      type=int, help="verbose output")
  parser.add_argument("--pidfile", action="store", default=0,
                      help="file name for the PID")
  parser.add_argument("--logfile", action="store", default=0,
                      help="file name for the log")
  parser.add_argument("--srcdir", action="store", default=0,
                      help="test directory")
  parser.add_argument("--id", action="store", default=0,
                      help="server ID")
  parser.add_argument("--ipv4", action="store_true", default=0,
                      help="use IPv4 only")
  args = parser.parse_args()

  if args.pidfile:
    # Context manager ensures the PID file is flushed and closed before the
    # harness reads it.
    with open(args.pidfile, 'w') as f:
      f.write('{}'.format(os.getpid()))

  server = PipelineServer(('0.0.0.0', args.port), PipelineRequestHandler)
  server.allow_reuse_address = True
  server.serve_forever()