try:
    import socketserver
except ImportError:
    import SocketServer as socketserver
import argparse
import re
import select
import socket
import time
import pprint
import os

INFO_MESSAGE = '''
This is a test server to test the libcurl pipelining functionality.
It is a modified version of Google's HTTP pipelining test server. More
information can be found here:

https://dev.chromium.org/developers/design-documents/network-stack/http-pipelining

Source code can be found here:

https://code.google.com/archive/p/http-pipelining-test/
'''
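
# Limits for a single client connection: requests that grow past
# MAX_REQUEST_SIZE bytes are rejected, MIN_POLL_TIME is a small slack added to
# every select() timeout, queued requests are answered once SEND_BUFFER_TIME
# seconds have passed since the last one arrived (so pipelined requests get
# batched into one write), and a connection is dropped TIMEOUT seconds after
# it was accepted.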
MAX_REQUEST_SIZE = 1024
MIN_POLL_TIME = 0.01
SEND_BUFFER_TIME = 0.5
TIMEOUT = 30


class Error(Exception):
    pass


class RequestTooLargeError(Error):
    pass


class ServeIndexError(Error):
    pass


class UnexpectedMethodError(Error):
    pass


class RequestParser(object):
    """Parses an input buffer looking for HTTP GET requests."""

    LOOKING_FOR_GET = 1
    READING_HEADERS = 2

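    # The parser is a two-state machine: in LOOKING_FOR_GET it tries to match
    # a request line with REQUEST_RE; in READING_HEADERS it consumes one
    # header per HEADER_RE match until a blank line completes the request.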
    HEADER_RE = re.compile(r'([^:]+):(.*)\n')
    REQUEST_RE = re.compile(r'([^ ]+) ([^ ]+) HTTP/(\d+)\.(\d+)\n')

    def __init__(self):
        """Initializer."""
        self._buffer = ""
        self._pending_headers = {}
        self._pending_request = ""
        self._state = self.LOOKING_FOR_GET
        self._were_all_requests_http_1_1 = True
        self._valid_requests = []

    def ParseAdditionalData(self, data):
        """Finds HTTP requests in |data|.

        Args:
          data: (String) Newly received input data from the socket.

        Returns:
          (List of Tuples)
            (String) The request path.
            (Map of String to String) The header name and value.

        Raises:
          RequestTooLargeError: If the request exceeds MAX_REQUEST_SIZE.
          UnexpectedMethodError: On a non-GET method.
          ServeIndexError: If an index page ('/', '/index.htm', '/index.html')
            is requested.
          Error: On a programming error.
        """
        logfile = open('log/server.input', 'a')
        logfile.write(data)
        logfile.close()
        self._buffer += data.replace('\r', '')
        should_continue_parsing = True
        while should_continue_parsing:
            if self._state == self.LOOKING_FOR_GET:
                should_continue_parsing = self._DoLookForGet()
            elif self._state == self.READING_HEADERS:
                should_continue_parsing = self._DoReadHeader()
            else:
                raise Error('Unexpected state: %d' % self._state)
            if len(self._buffer) > MAX_REQUEST_SIZE:
                raise RequestTooLargeError(
                    'Request is at least %d bytes' % len(self._buffer))
        valid_requests = self._valid_requests
        self._valid_requests = []
        return valid_requests

    @property
    def were_all_requests_http_1_1(self):
        return self._were_all_requests_http_1_1

    def _DoLookForGet(self):
        """Tries to parse an HTTP request line.

        Returns:
          (Boolean) True if a request was found.

        Raises:
          UnexpectedMethodError: On a non-GET method.
          ServeIndexError: If an index page is requested.
        """
        m = self.REQUEST_RE.match(self._buffer)
        if not m:
            return False
        method, path, http_major, http_minor = m.groups()

        if method != 'GET':
            raise UnexpectedMethodError('Unexpected method: ' + method)
        if path in ['/', '/index.htm', '/index.html']:
            raise ServeIndexError()

        if http_major != '1' or http_minor != '1':
            self._were_all_requests_http_1_1 = False

        self._pending_request = path
        self._buffer = self._buffer[m.end():]
        self._state = self.READING_HEADERS
        return True

    def _DoReadHeader(self):
        """Tries to parse an HTTP header.

        Returns:
          (Boolean) True if it found the end of the request or an HTTP header.
        """
        if self._buffer.startswith('\n'):
            self._buffer = self._buffer[1:]
            self._state = self.LOOKING_FOR_GET
            self._valid_requests.append(
                (self._pending_request, self._pending_headers))
            self._pending_headers = {}
            self._pending_request = ""
            return True

        m = self.HEADER_RE.match(self._buffer)
        if not m:
            return False

        header = m.group(1).lower()
        value = m.group(2).strip().lower()
        if header not in self._pending_headers:
            self._pending_headers[header] = value
        self._buffer = self._buffer[m.end():]
        return True


class ResponseBuilder(object):
    """Builds HTTP responses for a list of accumulated requests."""

    def __init__(self):
        """Initializer."""
        self._max_pipeline_depth = 0
        self._requested_paths = []
        self._processed_end = False
        self._were_all_requests_http_1_1 = True

    def QueueRequests(self, requested_paths, were_all_requests_http_1_1):
        """Adds requests to the queue of requests.

        Args:
          requested_paths: (List of Tuples) Request path and header map pairs,
            as returned by RequestParser.ParseAdditionalData().
          were_all_requests_http_1_1: (Boolean) False if any request parsed so
            far used an HTTP version other than 1.1.
        """
        self._requested_paths.extend(requested_paths)
        self._were_all_requests_http_1_1 = were_all_requests_http_1_1

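    # For example (hypothetical data, not one of the bodies served below):
    #   Chunkify('abcdefghij', 4) -> ['4\r\nabcd\r\n', '4\r\nefgh\r\n', '2\r\nij\r\n']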
    def Chunkify(self, data, chunksize):
        """Divides a string into HTTP chunked-encoding chunks of at most
        |chunksize| bytes, each prefixed with its hexadecimal size."""
        chunks = []
        for i in range(0, len(data), chunksize):
            chunk = data[i:i + chunksize]
            chunks.append('%x\r\n%s\r\n' % (len(chunk), chunk))
        return chunks

    def BuildResponses(self):
        """Converts the queue of requests into responses.

        Returns:
          (String) Buffer containing all of the responses.
        """
        result = ""
        self._max_pipeline_depth = max(
            self._max_pipeline_depth, len(self._requested_paths))
        for path, headers in self._requested_paths:
            if path == '/verifiedserver':
                body = "WE ROOLZ: {}\r\n".format(os.getpid())
                result += self._BuildResponse(
                    '200 OK',
                    ['Server: Apache',
                     'Content-Length: {}'.format(len(body)),
                     'Cache-Control: no-store'],
                    body)

            elif path == '/alphabet.txt':
                body = 'abcdefghijklmnopqrstuvwxyz'
                result += self._BuildResponse(
                    '200 OK',
                    ['Server: Apache',
                     'Content-Length: 26',
                     'Cache-Control: no-store'],
                    body)

            elif path == '/reverse.txt':
                body = 'zyxwvutsrqponmlkjihgfedcba'
                result += self._BuildResponse(
                    '200 OK',
                    ['Content-Length: 26', 'Cache-Control: no-store'],
                    body)

            elif path == '/chunked.txt':
                body = ('7\r\nchunked\r\n'
                        '8\r\nencoding\r\n'
                        '2\r\nis\r\n'
                        '3\r\nfun\r\n'
                        '0\r\n\r\n')
                result += self._BuildResponse(
                    '200 OK',
                    ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
                    body)

            elif path == '/cached.txt':
                body = 'azbycxdwevfugthsirjqkplomn'
                result += self._BuildResponse(
                    '200 OK',
                    ['Content-Length: 26', 'Cache-Control: max-age=60'],
                    body)

            elif path == '/connection_close.txt':
                body = 'azbycxdwevfugthsirjqkplomn'
                result += self._BuildResponse(
                    '200 OK',
                    ['Content-Length: 26',
                     'Cache-Control: max-age=60',
                     'Connection: close'],
                    body)
                self._processed_end = True

            elif path == '/1k.txt':
                body = '0123456789abcdef' * 64
                result += self._BuildResponse(
                    '200 OK',
                    ['Server: Apache',
                     'Content-Length: 1024',
                     'Cache-Control: max-age=60'],
                    body)

            elif path == '/10k.txt':
                body = '0123456789abcdef' * 640
                result += self._BuildResponse(
                    '200 OK',
                    ['Server: Apache',
                     'Content-Length: 10240',
                     'Cache-Control: max-age=60'],
                    body)

            elif path == '/100k.txt':
                body = '0123456789abcdef' * 6400
                result += self._BuildResponse(
                    '200 OK',
                    ['Server: Apache',
                     'Content-Length: 102400',
                     'Cache-Control: max-age=60'],
                    body)

            elif path == '/100k_chunked.txt':
                body = self.Chunkify('0123456789abcdef' * 6400, 20480)
                body.append('0\r\n\r\n')
                body = ''.join(body)

                result += self._BuildResponse(
                    '200 OK',
                    ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
                    body)

            elif path == '/stats.txt':
                results = {
                    'max_pipeline_depth': self._max_pipeline_depth,
                    'were_all_requests_http_1_1':
                        int(self._were_all_requests_http_1_1),
                }
                body = ','.join(['%s:%s' % (k, v) for k, v in results.items()])
                result += self._BuildResponse(
                    '200 OK',
                    ['Content-Length: %s' % len(body),
                     'Cache-Control: no-store'],
                    body)
                self._processed_end = True

            else:
                result += self._BuildResponse(
                    '404 Not Found', ['Content-Length: 7'], 'Go away')
            if self._processed_end:
                break
        self._requested_paths = []
        return result

    def WriteError(self, status, error):
        """Returns an HTTP response for the specified error.

        Args:
          status: (String) Response code and description (e.g. "404 Not Found")
          error: Text (or exception) used as the response body.

        Returns:
          (String) Text of HTTP response.
        """
        return self._BuildResponse(
            status, ['Connection: close', 'Content-Type: text/plain'], error)

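    # processed_end turns True once a response that ends the connection
    # (/connection_close.txt or /stats.txt) has been built; handle() keeps
    # serving the connection until it is True and the send buffer is empty.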
    @property
    def processed_end(self):
        return self._processed_end

    def _BuildResponse(self, status, headers, body):
        """Builds an HTTP response.

        Args:
          status: (String) Response code and description (e.g. "200 OK")
          headers: (List of Strings) Headers (e.g. "Connection: close")
          body: (String) Response body.

        Returns:
          (String) Text of HTTP response.
        """
        return ('HTTP/1.1 %s\r\n'
                '%s\r\n'
                '\r\n'
                '%s' % (status, '\r\n'.join(headers), body))


class PipelineRequestHandler(socketserver.BaseRequestHandler):
    """Called on an incoming TCP connection."""

    def _GetTimeUntilTimeout(self):
        return self._start_time + TIMEOUT - time.time()

    def _GetTimeUntilNextSend(self):
        if not self._last_queued_time:
            return TIMEOUT
        return self._last_queued_time + SEND_BUFFER_TIME - time.time()

    def handle(self):
        self._request_parser = RequestParser()
        self._response_builder = ResponseBuilder()
        self._last_queued_time = 0
        self._num_queued = 0
        self._num_written = 0
        self._send_buffer = ""
        self._start_time = time.time()
        try:
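            # Event loop: poll the socket with select(), queue any parsed
            # requests, flush the batched responses SEND_BUFFER_TIME seconds
            # after the last request arrived, and give up after TIMEOUT
            # seconds.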
            while not self._response_builder.processed_end or self._send_buffer:

                time_left = self._GetTimeUntilTimeout()
                time_until_next_send = self._GetTimeUntilNextSend()
                max_poll_time = min(time_left, time_until_next_send) + MIN_POLL_TIME

                rlist, wlist, xlist = [], [], []
                fileno = self.request.fileno()
                if max_poll_time > 0:
                    rlist.append(fileno)
                    if self._send_buffer:
                        wlist.append(fileno)
                    rlist, wlist, xlist = select.select(
                        rlist, wlist, xlist, max_poll_time)

                if self._GetTimeUntilTimeout() <= 0:
                    return

                if self._GetTimeUntilNextSend() <= 0:
                    self._send_buffer += self._response_builder.BuildResponses()
                    self._num_written = self._num_queued
                    self._last_queued_time = 0

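                # recv() returns bytes on Python 3 while the parser and the
                # send buffer work on str, so the data is decoded here and
                # encoded again just before it is written to the socket.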
                if fileno in rlist:
                    self.request.setblocking(False)
                    new_data = self.request.recv(MAX_REQUEST_SIZE)
                    self.request.setblocking(True)
                    if not new_data:
                        return
                    new_requests = self._request_parser.ParseAdditionalData(
                        new_data.decode('iso-8859-1'))
                    self._response_builder.QueueRequests(
                        new_requests,
                        self._request_parser.were_all_requests_http_1_1)
                    self._num_queued += len(new_requests)
                    self._last_queued_time = time.time()
                elif fileno in wlist:
                    num_bytes_sent = self.request.send(
                        self._send_buffer[0:4096].encode('iso-8859-1'))
                    self._send_buffer = self._send_buffer[num_bytes_sent:]
                    time.sleep(0.05)

        except RequestTooLargeError as e:
            self.request.send(self._response_builder.WriteError(
                '413 Request Entity Too Large', e).encode('iso-8859-1'))
            raise
        except UnexpectedMethodError as e:
            self.request.send(self._response_builder.WriteError(
                '405 Method Not Allowed', e).encode('iso-8859-1'))
            raise
        except ServeIndexError:
            self.request.send(self._response_builder.WriteError(
                '200 OK', INFO_MESSAGE).encode('iso-8859-1'))
        except Exception as e:
            print(e)
        self.request.close()


class PipelineServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    pass


parser = argparse.ArgumentParser()
parser.add_argument("--port", action="store", default=0,
                    type=int, help="port to listen on")
parser.add_argument("--verbose", action="store", default=0,
                    type=int, help="verbose output")
parser.add_argument("--pidfile", action="store", default=0,
                    help="file name for the PID")
parser.add_argument("--logfile", action="store", default=0,
                    help="file name for the log")
parser.add_argument("--srcdir", action="store", default=0,
                    help="test directory")
parser.add_argument("--id", action="store", default=0,
                    help="server ID")
parser.add_argument("--ipv4", action="store_true", default=0,
                    help="IPv4 flag")
args = parser.parse_args()

if args.pidfile:
    pid = os.getpid()
    with open(args.pidfile, 'w') as f:
        f.write('{}'.format(pid))

# allow_reuse_address only takes effect if it is set before TCPServer binds
# the listening socket in __init__, so set it on the class, not the instance.
PipelineServer.allow_reuse_address = True
server = PipelineServer(('0.0.0.0', args.port), PipelineRequestHandler)
server.serve_forever()
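
# Example of running this server by hand (the curl test harness normally picks
# the port and pid file itself; the script file name below is an assumption):
#   mkdir -p log   # ParseAdditionalData appends raw input to log/server.input
#   python http_pipe.py --port 8090 --pidfile log/http_pipe.pid
# after which, for instance, 'curl http://localhost:8090/alphabet.txt' returns
# the 26-letter body and 'curl http://localhost:8090/stats.txt' reports the
# maximum pipeline depth seen on that connection.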