#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Run build_server so that files needed by tests are copied to the local
# third_party directory. This must happen before the project imports below.
import build_server
build_server.main()

import json
import optparse
import os
import posixpath
import sys
import time
import unittest

from branch_utility import BranchUtility
from chroot_file_system import ChrootFileSystem
from extensions_paths import EXTENSIONS, PUBLIC_TEMPLATES
from fake_fetchers import ConfigureFakeFetchers
from handler import Handler
from link_error_detector import LinkErrorDetector, StringifyBrokenLinks
from local_file_system import LocalFileSystem
from local_renderer import LocalRenderer
from servlet import Request
from test_util import EnableLogging, DisableLogging, ChromiumPath

# Arguments set up if __main__ specifies them.
_EXPLICIT_TEST_FILES = None
_REBASE = False
_VERBOSE = False


def _ToPosixPath(os_path):
  '''Converts a native OS path to a forward-slash separated (POSIX) path.'''
  return os_path.replace(os.sep, '/')


def _GetPublicFiles():
  '''Gets all public files mapped to their contents.

  Returns:
    A dict mapping each file's '/'-joined path (relative to the public
    template root, with a leading '/') to that file's raw contents.
  '''
  public_path = ChromiumPath(PUBLIC_TEMPLATES)
  public_files = {}
  for path, dirs, files in os.walk(public_path, topdown=True):
    # Prune .svn metadata directories in place so os.walk skips them.
    dirs[:] = [d for d in dirs if d != '.svn']
    relative_posix_path = _ToPosixPath(path[len(public_path):])
    for filename in files:
      with open(os.path.join(path, filename), 'r') as f:
        public_files['/'.join((relative_posix_path, filename))] = f.read()
  return public_files


class IntegrationTest(unittest.TestCase):
  def setUp(self):
    # Route all network fetches through in-memory fakes.
    ConfigureFakeFetchers()

  @EnableLogging('info')
  def testCronAndPublicFiles(self):
    '''Runs cron then requests every public file. Cron needs to be run first
    because the public file requests are offline.
    '''
    # Skipped when specific files were requested on the command line;
    # testExplicitFiles covers that mode instead.
    if _EXPLICIT_TEST_FILES is not None:
      return

    print('Running cron...')
    start_time = time.time()
    try:
      response = Handler(Request.ForTest('/_cron')).Get()
      self.assertEqual(200, response.status)
      self.assertEqual('Success', response.content.ToString())
    finally:
      print('Took %s seconds' % (time.time() - start_time))

    print("Checking for broken links...")
    start_time = time.time()
    link_error_detector = LinkErrorDetector(
        # TODO(kalman): Use of ChrootFileSystem here indicates a hack. Fix.
        ChrootFileSystem(LocalFileSystem.Create(), EXTENSIONS),
        lambda path: Handler(Request.ForTest(path)).Get(),
        'templates/public',
        ('extensions/index.html', 'apps/about_apps.html'))

    broken_links = link_error_detector.GetBrokenLinks()
    if broken_links and _VERBOSE:
      print('The broken links are:')
      print(StringifyBrokenLinks(broken_links))

    broken_links_set = set(broken_links)

    known_broken_links_path = os.path.join(
        sys.path[0], 'known_broken_links.json')
    try:
      with open(known_broken_links_path, 'r') as f:
        # The JSON file converts tuples and sets into lists, and for this
        # set union/difference logic they need to be converted back.
        known_broken_links = set(tuple(item) for item in json.load(f))
    except IOError:
      # No baseline file yet; treat every broken link as newly broken.
      known_broken_links = set()

    newly_broken_links = broken_links_set - known_broken_links
    fixed_links = known_broken_links - broken_links_set

    if _REBASE:
      print('Rebasing broken links with %s newly broken and %s fixed links.' %
            (len(newly_broken_links), len(fixed_links)))
      with open(known_broken_links_path, 'w') as f:
        # Sort the list for a deterministic baseline file so rebases diff
        # cleanly; the reader above converts it back into a set anyway.
        json.dump(sorted(broken_links), f,
                  indent=2, separators=(',', ': '), sort_keys=True)
    else:
      if fixed_links or newly_broken_links:
        print('Found %s broken links, and some have changed. '
              'If this is acceptable or expected then run %s with the --rebase '
              'option.' % (len(broken_links), os.path.split(__file__)[-1]))
      elif broken_links:
        print('Found %s broken links, but there were no changes.' %
              len(broken_links))
      if fixed_links:
        print('%s broken links have been fixed:' % len(fixed_links))
        print(StringifyBrokenLinks(fixed_links))
      if newly_broken_links:
        print('There are %s new broken links:' % len(newly_broken_links))
        print(StringifyBrokenLinks(newly_broken_links))
        self.fail('See logging for details.')

    print('Took %s seconds.' % (time.time() - start_time))

    print('Searching for orphaned pages...')
    start_time = time.time()
    orphaned_pages = link_error_detector.GetOrphanedPages()
    if orphaned_pages:
      # TODO(jshumway): Test should fail when orphaned pages are detected.
      print('Warning: Found %d orphaned pages:' % len(orphaned_pages))
      for page in orphaned_pages:
        print(page)
    print('Took %s seconds.' % (time.time() - start_time))

    public_files = _GetPublicFiles()

    print('Rendering %s public files...' % len(public_files))
    start_time = time.time()
    try:
      # .items() rather than the Python-2-only .iteritems(), consistent with
      # the function-style print() calls used throughout this file.
      for path, content in public_files.items():
        if path.endswith('redirects.json'):
          continue

        def check_result(response):
          self.assertEqual(200, response.status,
              'Got %s when rendering %s' % (response.status, path))
          # This is reaaaaally rough since usually these will be tiny templates
          # that render large files. At least it'll catch zero-length responses.
          self.assertTrue(len(response.content) >= len(content),
              'Content was "%s" when rendering %s' % (response.content, path))

        check_result(Handler(Request.ForTest(path)).Get())

        # Make sure that leaving out the .html will temporarily redirect to the
        # path with the .html.
        if path.startswith(('apps/', 'extensions/')):
          redirect_result = Handler(
              Request.ForTest(posixpath.splitext(path)[0])).Get()
          self.assertEqual((path, False), redirect_result.GetRedirect())

        # Make sure including a channel will permanently redirect to the same
        # path without a channel.
        for channel in BranchUtility.GetAllChannelNames():
          redirect_result = Handler(
              Request.ForTest('%s/%s' % (channel, path))).Get()
          self.assertEqual((path, True), redirect_result.GetRedirect())

        # Samples are internationalized, test some locales.
        if path.endswith('/samples.html'):
          for lang in ['en-US', 'es', 'ar']:
            check_result(Handler(Request.ForTest(
                path,
                headers={'Accept-Language': '%s;q=0.8' % lang})).Get())
    finally:
      print('Took %s seconds' % (time.time() - start_time))

  # TODO(kalman): Move this test elsewhere, it's not an integration test.
  # Perhaps like "presubmit_tests" or something.
  def testExplicitFiles(self):
    '''Tests just the files in _EXPLICIT_TEST_FILES.
    '''
    if _EXPLICIT_TEST_FILES is None:
      return
    for filename in _EXPLICIT_TEST_FILES:
      print('Rendering %s...' % filename)
      start_time = time.time()
      try:
        response = LocalRenderer.Render(_ToPosixPath(filename))
        self.assertEqual(200, response.status)
        self.assertTrue(response.content != '')
      finally:
        print('Took %s seconds' % (time.time() - start_time))

    # TODO(jshumway): Check page for broken links (currently prohibited by the
    # time it takes to render the pages).

  @DisableLogging('warning')
  def testFileNotFound(self):
    response = Handler(Request.ForTest('/extensions/notfound.html')).Get()
    self.assertEqual(404, response.status)


if __name__ == '__main__':
  parser = optparse.OptionParser()
  parser.add_option('-a', '--all', action='store_true', default=False,
                    help='Render all pages, not just the one specified')
  parser.add_option('-r', '--rebase', action='store_true', default=False,
                    help='Rewrites the known_broken_links.json file with '
                         'the current set of broken links')
  parser.add_option('-v', '--verbose', action='store_true', default=False,
                    help='Show verbose output like currently broken links')
  (opts, args) = parser.parse_args()
  if not opts.all:
    _EXPLICIT_TEST_FILES = args
  _REBASE = opts.rebase
  _VERBOSE = opts.verbose
  # Kill sys.argv because we have our own flags.
  sys.argv = [sys.argv[0]]
  unittest.main()