# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""SiteCompare command to invoke the same page in two versions of a browser.

Does the easiest compatibility test: equality comparison between two different
versions of the same browser. Invoked with a series of command line options
that specify which URLs to check, which browser to use, where to store results,
etc.
"""

import os        # Functions for walking the directory tree
import tempfile  # Get a temporary directory to hold intermediates

import command_line
import drivers   # Functions for driving keyboard/mouse/windows, OS-specific
import operators # Functions that, given two bitmaps as input, produce
                 # output depending on the performance of an operation
import scrapers  # Functions that know how to capture a render from
                 # particular browsers


def CreateCommand(cmdline):
  """Inserts the command and arguments into a command line for parsing."""
  cmd = cmdline.AddCommand(
      ["compare2"],
      "Compares the output of two browsers on the same URL or list of URLs",
      ValidateCompare2,
      ExecuteCompare2)

  cmd.AddArgument(
      ["-b1", "--browser1"], "Full path to first browser's executable",
      type="readfile", metaname="PATH", required=True)
  cmd.AddArgument(
      ["-b2", "--browser2"], "Full path to second browser's executable",
      type="readfile", metaname="PATH", required=True)
  cmd.AddArgument(
      ["-b", "--browser"], "Which browser to use", type="string",
      default="chrome")
  cmd.AddArgument(
      ["-b1v", "--browser1ver"], "Version of first browser", metaname="VERSION")
  cmd.AddArgument(
      ["-b2v", "--browser2ver"], "Version of second browser", metaname="VERSION")
  cmd.AddArgument(
      ["-b1n", "--browser1name"], "Optional name for first browser (used in "
      "directory to hold intermediate files)", metaname="NAME")
  cmd.AddArgument(
      ["-b2n", "--browser2name"], "Optional name for second browser (used in "
      "directory to hold intermediate files)", metaname="NAME")
  cmd.AddArgument(
      ["-o", "--outdir"], "Directory to store scrape files", metaname="DIR")
  cmd.AddArgument(
      ["-u", "--url"], "URL to compare")
  cmd.AddArgument(
      ["-l", "--list"], "List of URLs to compare", type="readfile")
  cmd.AddMutualExclusion(["--url", "--list"])
  cmd.AddArgument(
      ["-s", "--startline"], "First line of URL list", type="int")
  cmd.AddArgument(
      ["-e", "--endline"], "Last line of URL list (exclusive)", type="int")
  cmd.AddArgument(
      ["-c", "--count"], "Number of lines of URL file to use", type="int")
  cmd.AddDependency("--startline", "--list")
  cmd.AddRequiredGroup(["--url", "--list"])
  cmd.AddDependency("--endline", "--list")
  cmd.AddDependency("--count", "--list")
  cmd.AddMutualExclusion(["--count", "--endline"])
  cmd.AddDependency("--count", "--startline")
  cmd.AddArgument(
      ["-t", "--timeout"], "Amount of time (seconds) to wait for browser to "
      "finish loading",
      type="int", default=60)
  cmd.AddArgument(
      ["-log", "--logfile"], "File to write output", type="string",
      required=True)
  cmd.AddArgument(
      ["-sz", "--size"], "Browser window size", default=(800, 600),
      type="coords")
  cmd.AddArgument(
      ["-m", "--maskdir"], "Path that holds masks to use for comparison")
  cmd.AddArgument(
      ["-d", "--diffdir"], "Path to hold the difference of comparisons that fail")


def ValidateCompare2(command):
  """Validate the arguments to compare2. Raises ParseError if failed."""
  executables = [".exe", ".com", ".bat"]
  if (os.path.splitext(command["--browser1"])[1].lower() not in executables or
      os.path.splitext(command["--browser2"])[1].lower() not in executables):
    raise command_line.ParseError("Browser filename must be an executable")


def ExecuteCompare2(command):
  """Executes the Compare2 command."""
  if command["--url"]:
    url_list = [command["--url"]]
  else:
    startline = command["--startline"]
    if command["--count"]:
      endline = startline + command["--count"]
    else:
      endline = command["--endline"]
    url_list = [url.strip() for url in
                open(command["--list"], "r").readlines()[startline:endline]]

  log_file = open(command["--logfile"], "w")

  outdir = command["--outdir"]
  if not outdir: outdir = tempfile.gettempdir()

  scrape_info_list = []

  class ScrapeInfo(object):
    """Helper class to hold information about a scrape."""
    __slots__ = ["browser_path", "scraper", "outdir", "result"]

  for index in xrange(1, 3):
    scrape_info = ScrapeInfo()
    scrape_info.browser_path = command["--browser%d" % index]
    scrape_info.scraper = scrapers.GetScraper(
        (command["--browser"], command["--browser%dver" % index]))

    if command["--browser%dname" % index]:
      scrape_info.outdir = os.path.join(outdir,
                                        command["--browser%dname" % index])
    else:
      scrape_info.outdir = os.path.join(outdir, str(index))

    drivers.windowing.PreparePath(scrape_info.outdir)
    scrape_info_list.append(scrape_info)

  compare = operators.GetOperator("equals_with_mask")

  for url in url_list:
    success = True

    for scrape_info in scrape_info_list:
      scrape_info.result = scrape_info.scraper.Scrape(
          [url], scrape_info.outdir, command["--size"], (0, 0),
          command["--timeout"], path=scrape_info.browser_path)

      if not scrape_info.result:
        scrape_info.result = "success"
      else:
        success = False

    result = "unknown"

    if success:
      result = "equal"

      file1 = drivers.windowing.URLtoFilename(
          url, scrape_info_list[0].outdir, ".bmp")
      file2 = drivers.windowing.URLtoFilename(
          url, scrape_info_list[1].outdir, ".bmp")

      comparison_result = compare.Compare(file1, file2,
                                          maskdir=command["--maskdir"])

      if comparison_result is not None:
        result = "not-equal"

        if command["--diffdir"]:
          comparison_result[1].save(
              drivers.windowing.URLtoFilename(url, command["--diffdir"], ".bmp"))

    # TODO(jhaas): maybe use the logging module rather than raw file writes
    log_file.write("%s %s %s %s\n" % (url,
                                      scrape_info_list[0].result,
                                      scrape_info_list[1].result,
                                      result))
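
# Example invocation (a minimal sketch, not part of the original module). It
# assumes this command is dispatched through a front-end script such as
# site_compare.py that calls CreateCommand() above; the entry-point name and
# the sample paths are assumptions, while every flag used below is defined in
# CreateCommand(). Note that --count depends on --startline, --logfile and the
# two browser paths are required, and exactly one of --url/--list must be given.
#
#   python site_compare.py compare2 \
#       --browser1 "C:\old\chrome.exe" --browser2 "C:\new\chrome.exe" \
#       --list urls.txt --startline 0 --count 50 \
#       --logfile compare2.log --outdir C:\scrapes --diffdir C:\diffs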