#!/usr/bin/env python
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
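
"""Unit tests for httpproxy.

These tests start a real httpproxy.HttpProxyServer with mocked-out archive
fetch, custom response handling, and rules, then drive it over httplib
connections.
"""
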
import httparchive
import httplib
import httpproxy
import threading
import unittest
import util


class MockCustomResponseHandler(object):
  def __init__(self, response):
    """Initializes the mock handler.

    Args:
      response: An instance of ArchivedHttpResponse that is returned for
          each request.
    """
    self._response = response

  def handle(self, request):
    del request  # Unused: the same canned response is returned every time.
    return self._response


class MockHttpArchiveFetch(object):
  def __init__(self):
    self.is_record_mode = False  # Replay mode, not record mode.

  def __call__(self, request):
    # Simulate a replay-mode fetch that finds no archived response; the
    # custom response handler supplies the response in these tests.
    return None


class MockHttpArchiveHandler(httpproxy.HttpArchiveHandler):
  def handle_one_request(self):
    # Handle the request normally, then count it for test assertions.
    httpproxy.HttpArchiveHandler.handle_one_request(self)
    HttpProxyTest.HANDLED_REQUEST_COUNT += 1


class MockRules(object):
  def Find(self, unused_rule_type_name):  # pylint: disable=unused-argument
    # Return a rule that does nothing for any request/response pair.
    return lambda unused_request, unused_response: None


class HttpProxyTest(unittest.TestCase):
  def setUp(self):
    # Track how far setup got so tearDown only cleans up what was created.
    self.has_proxy_server_bound_port = False
    self.has_proxy_server_started = False

  def set_up_proxy_server(self, response):
    """Creates a proxy server that replies to every request with `response`.

    Args:
      response: An instance of ArchivedHttpResponse that is returned for
          each request.
    """
    # Reset the request counter that MockHttpArchiveHandler increments.
    HttpProxyTest.HANDLED_REQUEST_COUNT = 0
    self.host = 'localhost'
    self.port = 8889
    custom_handlers = MockCustomResponseHandler(response)
    rules = MockRules()
    http_archive_fetch = MockHttpArchiveFetch()
    self.proxy_server = httpproxy.HttpProxyServer(
        http_archive_fetch, custom_handlers, rules,
        host=self.host, port=self.port)
    self.proxy_server.RequestHandlerClass = MockHttpArchiveHandler
    self.has_proxy_server_bound_port = True

  def tearDown(self):
    if self.has_proxy_server_started:
      self.proxy_server.shutdown()
    if self.has_proxy_server_bound_port:
      self.proxy_server.server_close()

  def serve_requests_forever(self):
    self.has_proxy_server_started = True
    self.proxy_server.serve_forever(poll_interval=0.01)

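  # Each test below follows the same pattern: build a canned
  # ArchivedHttpResponse, serve it from the proxy on a background thread,
  # drive the proxy with httplib requests, and assert on the
  # handled-request count (and, in the first two tests, thread counts).
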
  # Tests that handle_one_request does not leak threads and does not try to
  # re-handle connections that are finished.
  def test_handle_one_request_closes_connection(self):
    # By default, BaseHTTPServer treats all HTTP/1.1 requests as keep-alive;
    # intentionally use HTTP/1.0 (version=10) to prevent that behavior.
    response = httparchive.ArchivedHttpResponse(
        version=10, status=200, reason="OK",
        headers=[], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(target=self.serve_requests_forever)
    t.start()

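    # The server thread is already running, so it is counted in the baseline.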
    initial_thread_count = threading.activeCount()

    # Make a bunch of requests.
    request_count = 10
    for _ in range(request_count):
      conn = httplib.HTTPConnection(self.host, self.port, timeout=10)
      conn.request("GET", "/index.html")
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      conn.close()

    # Check that no threads were leaked.
    util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 2)

    self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

  # Tests that a keep-alive connection stays open across requests and is
  # served by a single handler thread.
  def test_keep_alive_header(self):
    response = httparchive.ArchivedHttpResponse(
        version=11, status=200, reason="OK",
        headers=[("Connection", "keep-alive")], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(target=self.serve_requests_forever)
    t.start()

    initial_thread_count = threading.activeCount()

    # Make a bunch of requests, each on its own keep-alive connection.
    request_count = 10
    connections = []
    for _ in range(request_count):
      conn = httplib.HTTPConnection(self.host, self.port, timeout=10)
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      connections.append(conn)
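
    # Each open keep-alive connection should pin exactly one handler thread
    # in the proxy; the thread-count assertion below relies on this.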

    # Repeat the same requests on the already-open connections.
    for conn in connections:
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")

    # Check that the right number of requests have been handled.
    self.assertEqual(2 * request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

    # Check that exactly "request_count" new threads are active.
    self.assertEqual(
        threading.activeCount(), initial_thread_count + request_count)

    for conn in connections:
      conn.close()

    util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 1)

  # Tests that opening 400 simultaneous connections does not cause httpproxy
  # to hit a process fd limit (the default limit is 256 fds).
  def test_max_fd(self):
    response = httparchive.ArchivedHttpResponse(
        version=11, status=200, reason="OK",
        headers=[("Connection", "keep-alive")], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(target=self.serve_requests_forever)
    t.start()

    # Open a bunch of simultaneous keep-alive connections.
    request_count = 400
    connections = []
    for _ in range(request_count):
      conn = httplib.HTTPConnection(self.host, self.port, timeout=10)
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      connections.append(conn)
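
    # All 400 sockets are now open simultaneously; if the proxy leaked a
    # file descriptor per request, it would exceed a 256-fd process limit.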

    # Check that the right number of requests have been handled.
    self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

    for conn in connections:
      conn.close()


if __name__ == '__main__':
  unittest.main()