# Please keep this code python 2.4 compatible and stand alone.

import logging, os, shutil, sys, tempfile, time, urllib2
import subprocess, re
from distutils.version import LooseVersion

from autotest_lib.client.common_lib import autotemp, revision_control, utils

_READ_SIZE = 64*1024
_MAX_PACKAGE_SIZE = 100*1024*1024
_CHROMEOS_MIRROR = ('http://commondatastorage.googleapis.com/'
                    'chromeos-mirror/gentoo/distfiles/')


class Error(Exception):
    """Local exception to be raised by code in this file."""

class FetchError(Error):
    """Failed to fetch a package from any of its listed URLs."""


def _checksum_file(full_path):
    """@returns The hex checksum of a file given its pathname."""
    inputfile = open(full_path, 'rb')
    try:
        hex_sum = utils.hash('sha1', inputfile.read()).hexdigest()
    finally:
        inputfile.close()
    return hex_sum


def system(commandline):
    """Same as os.system(commandline) but logs the command first.

    @param commandline: commandline to be called.
    """
    logging.info(commandline)
    return os.system(commandline)


def find_top_of_autotest_tree():
    """@returns The full path to the top of the autotest directory tree."""
    dirname = os.path.dirname(__file__)
    autotest_dir = os.path.abspath(os.path.join(dirname, '..'))
    return autotest_dir


class ExternalPackage(object):
    """
    Defines an external package with URLs to fetch its sources from and
    a build_and_install() method to unpack it, build it and install it
    beneath our own autotest/site-packages directory.

    Base Class.  Subclass this to define packages.
    Note: Unless your subclass has a specific reason to, it should not
    re-install the package every time build_externals is invoked, as this
    happens periodically through the scheduler.  To avoid doing so, the
    is_needed() method needs to return an appropriate value.

    Attributes:
      @attribute urls - A tuple of URLs to try fetching the package from.
      @attribute local_filename - A local filename to use when saving the
              fetched package.
      @attribute dist_name - The name of the Python distribution.  For example,
              the package MySQLdb is included in the distribution named
              MySQL-python.  This is generally the PyPI name.  Defaults to the
              name part of the local_filename.
      @attribute hex_sum - The hex digest (currently SHA1) of this package
              to be used to verify its contents.
      @attribute module_name - The installed python module name to be used for
              a version check.  Defaults to the lower case class name with
              the word Package stripped off.
      @attribute extracted_package_path - The path to the package directory
              after extracting.
      @attribute version - The desired minimum package version.
      @attribute os_requirements - A dictionary mapping a tuple of pathnames on
              the OS distribution to a likely name of a package the user
              needs to install on their system in order to get this file.
              One of the files in the tuple must exist.
      @attribute name - Read only, the printable name of the package.
      @attribute subclasses - This class attribute holds a list of all defined
              subclasses.  It is constructed dynamically using the metaclass.
    """
    # Modules that are meant to be installed in the system directory, rather
    # than autotest/site-packages.  These modules should be skipped if they are
    # already installed in the system directory, which prevents an older
    # version of the module from being installed in the system directory.
    SYSTEM_MODULES = ['setuptools']

    subclasses = []
    urls = ()
    local_filename = None
    dist_name = None
    hex_sum = None
    module_name = None
    version = None
    os_requirements = None


    class __metaclass__(type):
        """Any time a subclass is defined, add it to our list."""
        def __init__(mcs, name, bases, dict):
            if name != 'ExternalPackage' and not name.startswith('_'):
                mcs.subclasses.append(mcs)


    def __init__(self):
        self.verified_package = ''
        if not self.module_name:
            self.module_name = self.name.lower()
        if not self.dist_name and self.local_filename:
            self.dist_name = self.local_filename[:self.local_filename.rindex('-')]
        self.installed_version = ''


    @property
    def extracted_package_path(self):
        """Return the package path after extracting.

        If the package has assigned its own extracted_package_path, use it.
        Otherwise derive it from local_filename by stripping the archive
        extension.
        """
        return self.local_filename[:-len(self._get_extension(
                self.local_filename))]


    @property
    def name(self):
        """Return the class name with any trailing 'Package' stripped off."""
        class_name = self.__class__.__name__
        if class_name.endswith('Package'):
            return class_name[:-len('Package')]
        return class_name


    def is_needed(self, install_dir):
        """
        Check to see if we need to reinstall a package.  This is contingent on:
        1. Module name: If the name of the module is different from the
           package, the class that installs it needs to specify a module_name
           string, so we can try importing the module.

        2. Installed version: If the module doesn't contain a __version__, the
           class that installs it needs to override the
           _get_installed_version_from_module method to return an appropriate
           version string.

        3. Version/Minimum version: The class that installs the package should
           contain a version string, and an optional minimum version string.

        4. install_dir: If the module exists in a different directory, e.g.,
           /usr/lib/python2.7/dist-packages/, the module will be forced to be
           installed in install_dir.

        @param install_dir: install directory.
        @returns True if self.module_name needs to be built and installed.
        """
        if not self.module_name or not self.version:
            logging.warning('version and module_name required for '
                            'is_needed() check to work.')
            return True
        try:
            module = __import__(self.module_name)
        except ImportError, e:
            logging.info("%s isn't present. Will install.", self.module_name)
            return True
        # Check if we're getting a module installed somewhere else,
        # e.g. on the system.
        if self.module_name not in self.SYSTEM_MODULES:
            if (hasattr(module, '__file__')
                and not module.__file__.startswith(install_dir)):
                path = module.__file__
            elif (hasattr(module, '__path__')
                  and module.__path__
                  and not module.__path__[0].startswith(install_dir)):
                path = module.__path__[0]
            else:
                logging.warning('module %s has no __file__ or __path__',
                                self.module_name)
                return True
            logging.info(
                    'Found %s installed in %s, installing our version in %s',
                    self.module_name, path, install_dir)
            return True
        self.installed_version = self._get_installed_version_from_module(module)
        if not self.installed_version:
            return True

        logging.info('imported %s version %s.', self.module_name,
                     self.installed_version)
        if hasattr(self, 'minimum_version'):
            return LooseVersion(self.minimum_version) > LooseVersion(
                    self.installed_version)
        else:
            return LooseVersion(self.version) > LooseVersion(
                    self.installed_version)


    def _get_installed_version_from_module(self, module):
        """Ask our module its version string and return it or '' if unknown."""
        try:
            return module.__version__
        except AttributeError:
            logging.error('could not get version from %s', module)
            return ''


    def _build_and_install(self, install_dir):
        """Subclasses MUST provide their own implementation."""
        raise NotImplementedError


    def _build_and_install_current_dir(self, install_dir):
        """
        Subclasses that use _build_and_install_from_package() MUST provide
        their own implementation of this method.
        """
        raise NotImplementedError


    def build_and_install(self, install_dir):
        """
        Builds and installs the package.  It must have been fetched already.

        @param install_dir - The package installation directory.  If it does
            not exist it will be created.
        """
        if not self.verified_package:
            raise Error('Must call fetch() first.  - %s' % self.name)
        self._check_os_requirements()
        return self._build_and_install(install_dir)


    def _check_os_requirements(self):
        if not self.os_requirements:
            return
        failed = False
        for file_names, package_name in self.os_requirements.iteritems():
            if not any(os.path.exists(file_name) for file_name in file_names):
                failed = True
                logging.error('Can\'t find %s, %s probably needs it.',
                              ' or '.join(file_names), self.name)
                logging.error('Perhaps you need to install something similar '
                              'to the %s package for your OS first.',
                              package_name)
        if failed:
            raise Error('Missing OS requirements for %s.  (see above)' %
                        self.name)
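
    # Illustrative note only (no tarball package in this file declares one):
    # a subclass that needs an OS-level file could hypothetically set
    #
    #   os_requirements = {('/usr/bin/mysql_config',): 'libmysqlclient-dev'}
    #
    # where the key is a tuple of candidate paths (at least one must exist)
    # and the value names the OS package that likely provides them, so that
    # _check_os_requirements() above can fail early with a useful hint.
    # _ExternalGitRepo near the end of this file uses this mechanism for git.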


    def _build_and_install_current_dir_setup_py(self, install_dir):
        """For use as a _build_and_install_current_dir implementation."""
        egg_path = self._build_egg_using_setup_py(setup_py='setup.py')
        if not egg_path:
            return False
        return self._install_from_egg(install_dir, egg_path)


    def _build_and_install_current_dir_setupegg_py(self, install_dir):
        """For use as a _build_and_install_current_dir implementation."""
        egg_path = self._build_egg_using_setup_py(setup_py='setupegg.py')
        if not egg_path:
            return False
        return self._install_from_egg(install_dir, egg_path)


    def _build_and_install_current_dir_noegg(self, install_dir):
        if not self._build_using_setup_py():
            return False
        return self._install_using_setup_py_and_rsync(install_dir)


    def _get_extension(self, package):
        """Get the archive extension of a package file."""
        valid_package_extensions = ['.tar.gz', '.tar.bz2', '.zip']
        extension = None

        for ext in valid_package_extensions:
            if package.endswith(ext):
                extension = ext
                break

        if not extension:
            raise Error('Unexpected package file extension on %s' % package)

        return extension


    def _build_and_install_from_package(self, install_dir):
        """
        This method may be used as a _build_and_install() implementation
        for subclasses if they implement _build_and_install_current_dir().

        Extracts the .tar.gz file, chdirs into the extracted directory
        (which is assumed to match the tar filename) and calls
        _build_and_install_current_dir from there.

        Afterwards, regardless of build success or failure, the extracted
        directory is cleaned up.

        @returns True on success, False otherwise.

        @raises OSError If the expected extraction directory does not exist.
        """
        self._extract_compressed_package()
        extension = self._get_extension(self.verified_package)
        os.chdir(os.path.dirname(self.verified_package))
        os.chdir(self.extracted_package_path)
        extracted_dir = os.getcwd()
        try:
            return self._build_and_install_current_dir(install_dir)
        finally:
            os.chdir(os.path.join(extracted_dir, '..'))
            shutil.rmtree(extracted_dir)


    def _extract_compressed_package(self):
        """Extract the fetched compressed .tar or .zip within its directory."""
        if not self.verified_package:
            raise Error('Package must have been fetched first.')
        os.chdir(os.path.dirname(self.verified_package))
        if self.verified_package.endswith('gz'):
            status = system("tar -xzf '%s'" % self.verified_package)
        elif self.verified_package.endswith('bz2'):
            status = system("tar -xjf '%s'" % self.verified_package)
        elif self.verified_package.endswith('zip'):
            status = system("unzip '%s'" % self.verified_package)
        else:
            raise Error('Unknown compression suffix on %s.' %
                        self.verified_package)
        if status:
            raise Error('tar failed with %s' % (status,))


    def _build_using_setup_py(self, setup_py='setup.py'):
        """
        Assuming the cwd is the extracted python package, execute a simple
        python setup.py build.

        @param setup_py - The name of the setup.py file to execute.

        @returns True on success, False otherwise.
        """
        if not os.path.exists(setup_py):
            raise Error('%s does not exist in %s' % (setup_py, os.getcwd()))
        status = system("'%s' %s build" % (sys.executable, setup_py))
        if status:
            logging.error('%s build failed.', self.name)
            return False
        return True


    def _build_egg_using_setup_py(self, setup_py='setup.py'):
        """
        Assuming the cwd is the extracted python package, execute a simple
        python setup.py bdist_egg.

        @param setup_py - The name of the setup.py file to execute.

        @returns The relative path to the resulting egg file or '' on failure.
        """
        if not os.path.exists(setup_py):
            raise Error('%s does not exist in %s' % (setup_py, os.getcwd()))
        egg_subdir = 'dist'
        if os.path.isdir(egg_subdir):
            shutil.rmtree(egg_subdir)
        status = system("'%s' %s bdist_egg" % (sys.executable, setup_py))
        if status:
            logging.error('bdist_egg of setuptools failed.')
            return ''
        # I've never seen a bdist_egg lay multiple .egg files.
        for filename in os.listdir(egg_subdir):
            if filename.endswith('.egg'):
                return os.path.join(egg_subdir, filename)
        # No egg was produced; treat it as a failure per the docstring.
        return ''


    def _install_from_egg(self, install_dir, egg_path):
        """
        Install a module from an egg file by unzipping the necessary parts
        into install_dir.

        @param install_dir - The installation directory.
        @param egg_path - The pathname of the egg file.
        """
        status = system("unzip -q -o -d '%s' '%s'" % (install_dir, egg_path))
        if status:
            logging.error('unzip of %s failed', egg_path)
            return False
        egg_info_dir = os.path.join(install_dir, 'EGG-INFO')
        if os.path.isdir(egg_info_dir):
            egg_info_new_path = self._get_egg_info_path(install_dir)
            if egg_info_new_path:
                if os.path.exists(egg_info_new_path):
                    shutil.rmtree(egg_info_new_path)
                os.rename(egg_info_dir, egg_info_new_path)
            else:
                shutil.rmtree(egg_info_dir)
        return True


    def _get_egg_info_path(self, install_dir):
        """Get the egg-info path for this package.

        Example path: install_dir/MySQL_python-1.2.3.egg-info
        """
        if self.dist_name:
            egg_info_name_part = self.dist_name.replace('-', '_')
            if self.version:
                egg_info_filename = '%s-%s.egg-info' % (egg_info_name_part,
                                                        self.version)
            else:
                egg_info_filename = '%s.egg-info' % (egg_info_name_part,)
            return os.path.join(install_dir, egg_info_filename)
        else:
            return None


    def _get_temp_dir(self):
        return tempfile.mkdtemp(dir='/var/tmp')


    def _site_packages_path(self, temp_dir):
        # This makes assumptions about what python setup.py install
        # does when given a prefix.  Is this always correct?
        python_xy = 'python%s' % sys.version[:3]
        return os.path.join(temp_dir, 'lib', python_xy, 'site-packages')


    def _rsync(self, temp_site_dir, install_dir):
        """Rsync contents."""
        status = system("rsync -r '%s/' '%s/'" %
                        (os.path.normpath(temp_site_dir),
                         os.path.normpath(install_dir)))
        if status:
            logging.error('%s rsync to install_dir failed.', self.name)
            return False
        return True


    def _install_using_setup_py_and_rsync(self, install_dir,
                                          setup_py='setup.py',
                                          temp_dir=None):
        """
        Assuming the cwd is the extracted python package, execute a simple:

          python setup.py install --prefix=BLA

        BLA will be a temporary directory that everything installed will
        be picked out of and rsynced to the appropriate place under
        install_dir afterwards.

        Afterwards, it deconstructs the extra lib/pythonX.Y/site-packages/
        directory tree that setuptools created and moves all installed
        site-packages directly up into install_dir itself.

        @param install_dir the directory for the install to happen under.
        @param setup_py - The name of the setup.py file to execute.

        @returns True on success, False otherwise.
        """
        if not os.path.exists(setup_py):
            raise Error('%s does not exist in %s' % (setup_py, os.getcwd()))

        if temp_dir is None:
            temp_dir = self._get_temp_dir()

        try:
            status = system("'%s' %s install --no-compile --prefix='%s'"
                            % (sys.executable, setup_py, temp_dir))
            if status:
                logging.error('%s install failed.', self.name)
                return False

            if os.path.isdir(os.path.join(temp_dir, 'lib')):
                # NOTE: This ignores anything outside of the lib/ dir that
                # was installed.
                temp_site_dir = self._site_packages_path(temp_dir)
            else:
                temp_site_dir = temp_dir

            return self._rsync(temp_site_dir, install_dir)
        finally:
            shutil.rmtree(temp_dir)


    def _build_using_make(self, install_dir):
        """Build the current package using configure/make.

        @returns True on success, False otherwise.
        """
        install_prefix = os.path.join(install_dir, 'usr', 'local')
        status = system('./configure --prefix=%s' % install_prefix)
        if status:
            logging.error('./configure failed for %s', self.name)
            return False
        status = system('make')
        if status:
            logging.error('make failed for %s', self.name)
            return False
        status = system('make check')
        if status:
            logging.error('make check failed for %s', self.name)
            return False
        return True


    def _install_using_make(self):
        """Install the current package using make install.

        Assumes the install path was set up while running ./configure (in
        _build_using_make()).

        @returns True on success, False otherwise.
        """
        status = system('make install')
        return status == 0
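
    # Illustrative sketch only (no package in this file currently does this):
    # a subclass shipping an autoconf-style source tree could combine the two
    # helpers above in its own _build_and_install(), roughly:
    #
    #   def _build_and_install(self, install_dir):
    #       if not self._build_using_make(install_dir):
    #           return False
    #       return self._install_using_make()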


    def fetch(self, dest_dir):
        """
        Fetch the package from one of its URLs and save it in dest_dir.

        If the package already exists in dest_dir and the checksum
        matches, this code will not fetch it again.

        Sets the 'verified_package' attribute with the destination pathname.

        @param dest_dir - The destination directory to save the local file.
            If it does not exist it will be created.

        @returns A boolean indicating if the package is now in dest_dir.
        @raises FetchError - When something unexpected happens.
        """
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        local_path = os.path.join(dest_dir, self.local_filename)

        # If the package exists, verify its checksum and be happy if it is
        # good.
        if os.path.exists(local_path):
            actual_hex_sum = _checksum_file(local_path)
            if self.hex_sum == actual_hex_sum:
                logging.info('Good checksum for existing %s package.',
                             self.name)
                self.verified_package = local_path
                return True
            logging.warning('Bad checksum for existing %s package.  '
                            'Re-downloading', self.name)
            os.rename(local_path, local_path + '.wrong-checksum')

        # Download the package from one of its urls, rejecting any if the
        # checksum does not match.
        for url in self.urls:
            logging.info('Fetching %s', url)
            try:
                url_file = urllib2.urlopen(url)
            except (urllib2.URLError, EnvironmentError):
                logging.warning('Could not fetch %s package from %s.',
                                self.name, url)
                continue

            data_length = int(url_file.info().get('Content-Length',
                                                  _MAX_PACKAGE_SIZE))
            if data_length <= 0 or data_length > _MAX_PACKAGE_SIZE:
                raise FetchError('%s from %s fails Content-Length %d '
                                 'sanity check.' % (self.name, url,
                                                    data_length))
            checksum = utils.hash('sha1')
            total_read = 0
            output = open(local_path, 'wb')
            try:
                while total_read < data_length:
                    data = url_file.read(_READ_SIZE)
                    if not data:
                        break
                    output.write(data)
                    checksum.update(data)
                    total_read += len(data)
            finally:
                output.close()
            if self.hex_sum != checksum.hexdigest():
                logging.warning('Bad checksum for %s fetched from %s.',
                                self.name, url)
                logging.warning('Got %s', checksum.hexdigest())
                logging.warning('Expected %s', self.hex_sum)
                os.unlink(local_path)
                continue
            logging.info('Good checksum.')
            self.verified_package = local_path
            return True
        else:
            return False
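

# Illustrative only: a new tarball-based package is typically declared by
# subclassing ExternalPackage and reusing the _build_and_install_* helpers
# above.  The sketch below is hypothetical (the name, version and hex_sum are
# placeholders, not a real package) and is kept commented out so that it is
# not registered in ExternalPackage.subclasses:
#
#   class ExamplePackage(ExternalPackage):
#       """example package."""
#       version = '1.0.0'
#       local_filename = 'example-%s.tar.gz' % version
#       urls = (_CHROMEOS_MIRROR + local_filename,)
#       hex_sum = '0000000000000000000000000000000000000000'  # placeholder
#       _build_and_install = ExternalPackage._build_and_install_from_package
#       _build_and_install_current_dir = (
#               ExternalPackage._build_and_install_current_dir_setup_py)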


# NOTE: This class definition must come -before- all other ExternalPackage
# classes that need to use this version of setuptools so that it is inserted
# into the ExternalPackage.subclasses list before them.
class SetuptoolsPackage(ExternalPackage):
    """setuptools package"""
    # For all known setuptools releases a string compare works for the
    # version string.  Hopefully they never release a 0.10.  (Their own
    # version comparison code would break if they did.)
    # Any system with setuptools > 18.0.1 is fine.  If none is installed, then
    # try to install the latest found on the upstream.
    minimum_version = '18.0.1'
    version = '18.0.1'
    urls = (_CHROMEOS_MIRROR + 'setuptools-%s.tar.gz' % (version,),)
    local_filename = 'setuptools-%s.tar.gz' % version
    hex_sum = 'ebc4fe81b7f6d61d923d9519f589903824044f52'

    SUDO_SLEEP_DELAY = 15


    def _build_and_install(self, install_dir):
        """Install setuptools on the system."""
        logging.info('NOTE: setuptools install does not use install_dir.')
        return self._build_and_install_from_package(install_dir)


    def _build_and_install_current_dir(self, install_dir):
        egg_path = self._build_egg_using_setup_py()
        if not egg_path:
            return False

        print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n'
        print 'About to run sudo to install setuptools', self.version
        print 'on your system for use by', sys.executable, '\n'
        print '!! ^C within', self.SUDO_SLEEP_DELAY, 'seconds to abort.\n'
        time.sleep(self.SUDO_SLEEP_DELAY)

        # Copy the egg to the local filesystem /var/tmp so that root can
        # access it properly (avoid NFS squashroot issues).
        temp_dir = self._get_temp_dir()
        try:
            shutil.copy(egg_path, temp_dir)
            egg_name = os.path.split(egg_path)[1]
            temp_egg = os.path.join(temp_dir, egg_name)
            p = subprocess.Popen(['sudo', '/bin/sh', temp_egg],
                                 stdout=subprocess.PIPE)
            regex = re.compile('Copying (.*?) to (.*?)\n')
            match = regex.search(p.communicate()[0])
            status = p.wait()

            if match:
                compiled = os.path.join(match.group(2), match.group(1))
                os.system("sudo chmod a+r '%s'" % compiled)
        finally:
            shutil.rmtree(temp_dir)

        if status:
            logging.error('install of setuptools from egg failed.')
            return False
        return True


class MySQLdbPackage(ExternalPackage):
    """mysql package, used in scheduler."""
    module_name = 'MySQLdb'
    version = '1.2.3'
    local_filename = 'MySQL-python-%s.tar.gz' % version
    urls = ('http://commondatastorage.googleapis.com/chromeos-mirror/gentoo/'
            'distfiles/%s' % local_filename,)
    hex_sum = '3511bb8c57c6016eeafa531d5c3ea4b548915e3c'

    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


    def _build_and_install(self, install_dir):
        if not os.path.exists('/usr/bin/mysql_config'):
            error_msg = '''\
You need to install /usr/bin/mysql_config.
On recent Debian based distros, run: \
sudo apt-get install libmariadbclient-dev-compat
On older Debian based distros, run: sudo apt-get install libmysqlclient15-dev
'''
            logging.error(error_msg)
            return False, error_msg
        return self._build_and_install_from_package(install_dir)


class DjangoPackage(ExternalPackage):
    """django package."""
    version = '1.5.1'
    local_filename = 'Django-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '0ab97b90c4c79636e56337f426f1e875faccbba1'

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_noegg)


    def _get_installed_version_from_module(self, module):
        try:
            return module.get_version().split()[0]
        except AttributeError:
            return '0.9.6'


class NumpyPackage(ExternalPackage):
    """numpy package, required by matplotlib."""
    version = '1.7.0'
    local_filename = 'numpy-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'ba328985f20390b0f969a5be2a6e1141d5752cf9'

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setupegg_py)


class JsonRPCLib(ExternalPackage):
    """jsonrpclib package"""
    version = '0.1.3'
    module_name = 'jsonrpclib'
    local_filename = '%s-%s.tar.gz' % (module_name, version)
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '431714ed19ab677f641ce5d678a6a95016f5c452'

    def _get_installed_version_from_module(self, module):
        # jsonrpclib doesn't contain a proper version
        return self.version

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_noegg)


class GwtPackage(ExternalPackage):
    """Fetch and extract a local copy of GWT used to build the frontend."""

    version = '2.3.0'
    local_filename = 'gwt-%s.zip' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'd51fce9166e6b31349659ffca89baf93e39bc84b'
    name = 'gwt'
    about_filename = 'about.txt'
    module_name = None  # Not a Python module.


    def is_needed(self, install_dir):
        gwt_dir = os.path.join(install_dir, self.name)
        about_file = os.path.join(install_dir, self.name, self.about_filename)

        if not os.path.exists(gwt_dir) or not os.path.exists(about_file):
            logging.info('gwt not installed for autotest')
            return True

        f = open(about_file, 'r')
        version_line = f.readline()
        f.close()

        match = re.match(r'Google Web Toolkit (.*)', version_line)
        if not match:
            logging.info('did not find gwt version')
            return True

        logging.info('found gwt version %s', match.group(1))
        return match.group(1) != self.version


    def _build_and_install(self, install_dir):
        os.chdir(install_dir)
        self._extract_compressed_package()
        extracted_dir = self.local_filename[:-len('.zip')]
        target_dir = os.path.join(install_dir, self.name)
        if os.path.exists(target_dir):
            shutil.rmtree(target_dir)
        os.rename(extracted_dir, target_dir)
        return True


class PyudevPackage(ExternalPackage):
    """
    pyudev module

    Used in unittests.
    """
    version = '0.16.1'
    url_filename = 'pyudev-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'b36bc5c553ce9b56d32a5e45063a2c88156771c0'

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class PyMoxPackage(ExternalPackage):
    """
    mox module

    Used in unittests.
    """
    module_name = 'mox'
    version = '0.5.3'
    # Note: url_filename does not match local_filename, because of
    # an uncontrolled fork at some point in time of mox versions.
    url_filename = 'mox-%s-autotest.tar.gz' % version
    local_filename = 'mox-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + url_filename,)
    hex_sum = '1c502d2c0a8aefbba2c7f385a83d33e7d822452a'

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_noegg)

    def _get_installed_version_from_module(self, module):
        # mox doesn't contain a proper version
        return self.version


class PySeleniumPackage(ExternalPackage):
    """
    selenium module

    Used in wifi_interop suite.
    """
    module_name = 'selenium'
    version = '2.37.2'
    url_filename = 'selenium-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '66946d5349e36d946daaad625c83c30c11609e36'

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class FaultHandlerPackage(ExternalPackage):
    """
    faulthandler module
    """
    module_name = 'faulthandler'
    version = '2.3'
    url_filename = '%s-%s.tar.gz' % (module_name, version)
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'efb30c068414fba9df892e48fcf86170cbf53589'

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_noegg)


class PsutilPackage(ExternalPackage):
    """
    psutil module
    """
    module_name = 'psutil'
    version = '2.1.1'
    url_filename = '%s-%s.tar.gz' % (module_name, version)
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '0c20a20ed316e69f2b0881530439213988229916'

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class ElasticSearchPackage(ExternalPackage):
    """elasticsearch-py package."""
    version = '1.6.0'
    url_filename = 'elasticsearch-%s.tar.gz' % version
    local_filename = url_filename
    urls = ('https://pypi.python.org/packages/source/e/elasticsearch/%s' %
            (url_filename),)
    hex_sum = '3e676c96f47935b1f52df82df3969564bd356b1c'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)

    def _get_installed_version_from_module(self, module):
        # Elastic's version is a tuple such as (1, 6, 0), which needs to be
        # converted to the string '1.6.0'.
        try:
            return '.'.join(str(i) for i in module.__version__)
        except:
            return self.version


class Urllib3Package(ExternalPackage):
    """urllib3 package (used by elasticsearch-py)."""
    version = '1.9'
    url_filename = 'urllib3-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '9522197efb2a2b49ce804de3a515f06d97b6602f'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class ImagingLibraryPackage(ExternalPackage):
    """Python Imaging Library (PIL)."""
    version = '1.1.7'
    url_filename = 'Imaging-%s.tar.gz' % version
    local_filename = url_filename
    urls = ('http://commondatastorage.googleapis.com/chromeos-mirror/gentoo/'
            'distfiles/%s' % url_filename,)
    hex_sum = '76c37504251171fda8da8e63ecb8bc42a69a5c81'

    def _build_and_install(self, install_dir):
        # The path of the zlib library might be different from the one PIL's
        # setup.py expects.  The following is a best-effort attempt to link
        # the library to a path PIL's setup.py will try.
        libz_possible_path = '/usr/lib/x86_64-linux-gnu/libz.so'
        libz_expected_path = '/usr/lib/libz.so'
        # TODO(crbug.com/957186): this sudo command fails if build_externals
        # is running in non-interactive mode, and requires a workaround when
        # running within a docker build process.  Remove this operation, or
        # remove this entire package.
        if (os.path.exists(libz_possible_path) and
            not os.path.exists(libz_expected_path)):
            utils.run('sudo ln -s %s %s' %
                      (libz_possible_path, libz_expected_path))
        return self._build_and_install_from_package(install_dir)

    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_noegg)


class AstroidPackage(ExternalPackage):
    """astroid package."""
    version = '1.5.3'
    url_filename = 'astroid-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'e654225ab5bd2788e5e246b156910990bf33cde6'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class LazyObjectProxyPackage(ExternalPackage):
    """lazy-object-proxy package (dependency for astroid)."""
    version = '1.3.1'
    url_filename = 'lazy-object-proxy-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '984828d8f672986ca926373986214d7057b772fb'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class SingleDispatchPackage(ExternalPackage):
    """singledispatch package (dependency for astroid)."""
    version = '3.4.0.3'
    url_filename = 'singledispatch-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'f93241b06754a612af8bb7aa208c4d1805637022'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class Enum34Package(ExternalPackage):
    """enum34 package (dependency for astroid)."""
    version = '1.1.6'
    url_filename = 'enum34-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '014ef5878333ff91099893d615192c8cd0b1525a'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class WraptPackage(ExternalPackage):
    """wrapt package (dependency for astroid)."""
    version = '1.10.10'
    url_filename = 'wrapt-%s.tar.gz' % version
    local_filename = url_filename
    #md5=97365e906afa8b431f266866ec4e2e18
    urls = ('https://pypi.python.org/packages/a3/bb/'
            '525e9de0a220060394f4aa34fdf6200853581803d92714ae41fc3556e7d7/%s' %
            (url_filename),)
    hex_sum = '6be4f1bb50db879863f4247692360eb830a3eb33'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_noegg)


class SixPackage(ExternalPackage):
    """six package (dependency for astroid)."""
    version = '1.10.0'
    url_filename = 'six-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '30d480d2e352e8e4c2aae042cf1bf33368ff0920'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class LruCachePackage(ExternalPackage):
    """backports.functools_lru_cache package (dependency for astroid)."""
    version = '1.4'
    url_filename = 'backports.functools_lru_cache-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '8a546e7887e961c2873c9b053f4e2cd2a96bd71d'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class LogilabCommonPackage(ExternalPackage):
    """logilab-common package."""
    version = '1.2.2'
    module_name = 'logilab'
    url_filename = 'logilab-common-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'ecad2d10c31dcf183c8bed87b6ec35e7ed397d27'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class PyLintPackage(ExternalPackage):
    """pylint package."""
    version = '1.7.2'
    url_filename = 'pylint-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '42d8b9394e5a485377ae128b01350f25d8b131e0'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class ConfigParserPackage(ExternalPackage):
    """configparser package (dependency for pylint)."""
    version = '3.5.0'
    url_filename = 'configparser-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '8ee6b29c6a11977c0e094da1d4f5f71e7e7ac78b'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class IsortPackage(ExternalPackage):
    """isort package (dependency for pylint)."""
    version = '4.2.15'
    url_filename = 'isort-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'acacc36e476b70e13e6fda812c193f4c3c187781'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class DateutilPackage(ExternalPackage):
    """python-dateutil package."""
    version = '2.6.1'
    local_filename = 'python-dateutil-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'db2ace298dee7e47fd720ed03eb790885347bf4e'

    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class Pytz(ExternalPackage):
    """Pytz package."""
    version = '2016.10'
    url_filename = 'pytz-%s.tar.gz' % version
    local_filename = url_filename
    #md5=cc9f16ba436efabdcef3c4d32ae4919c
    urls = ('https://pypi.python.org/packages/42/00/'
            '5c89fc6c9b305df84def61863528e899e9dccb196f8438f6cbe960758fc5/%s' %
            (url_filename),)
    hex_sum = '8d63f1e9b1ee862841b990a7d8ad1d4508d9f0be'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class Tzlocal(ExternalPackage):
    """Tzlocal package."""
    version = '1.3'
    url_filename = 'tzlocal-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '730e9d7112335865a1dcfabec69c8c3086be424f'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class PyYAMLPackage(ExternalPackage):
    """pyyaml package."""
    version = '3.12'
    local_filename = 'PyYAML-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'cb7fd3e58c129494ee86e41baedfec69eb7dafbe'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_noegg)


class GoogleAuthPackage(ExternalPackage):
    """Google Auth Client."""
    version = '1.6.3'
    local_filename = 'google-auth-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'a76f97686ebe42097d91e0996a72b26b54118f3b'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class CachetoolsPackage(ExternalPackage):
    """Cachetools package."""
    version = '3.1.1'
    local_filename = 'cachetools-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = 'd030bfdfa91b0b1188993f5e8d7da077308c1eaf'
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class GrpcioPackage(ExternalPackage):
    """grpcio package."""
    version = '1.26.0'
    hex_sum = "b9a61f855bf3656d9b8ac305bd1e52442e120c48"
    local_filename = 'grpcio-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class GrpcioToolsPackage(ExternalPackage):
    """grpcio-tools package."""
    version = '1.26.0'
    hex_sum = "298724d8704523c6ff443303e0c26fc1d54f9acb"
    local_filename = 'grpcio-tools-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)


class Protobuf(ExternalPackage):
    """protobuf package."""
    version = '3.11.2'
    hex_sum = "e1f3ffa028ece5a529149dd56a3d64aea4ae1b1a"
    local_filename = 'protobuf-%s.tar.gz' % version
    urls = (_CHROMEOS_MIRROR + local_filename,)
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)

    def _build_and_install(self, install_dir):
        """
        Like _build_and_install_from_package(), but builds from the "python"
        subdirectory of the extracted tree, which is where protobuf keeps its
        setup.py, and calls _build_and_install_current_dir from there.

        Afterwards, regardless of build success or failure, the directory it
        built from is cleaned up.

        @returns True on success, False otherwise.

        @raises OSError If the expected extraction directory does not exist.
        """
        self._extract_compressed_package()
        extension = self._get_extension(self.verified_package)
        os.chdir(os.path.dirname(self.verified_package))
        os.chdir(os.path.join(self.extracted_package_path, "python"))
        extracted_dir = os.getcwd()
        try:
            return self._build_and_install_current_dir(install_dir)
        finally:
            os.chdir(os.path.join(extracted_dir, '..'))
            shutil.rmtree(extracted_dir)


class _ExternalGitRepo(ExternalPackage):
    """
    Parent class for any package which needs to pull a git repo.

    This class inherits from ExternalPackage only so we can sync git
    repos through the build_externals script.  We do not reuse any of
    ExternalPackage's other methods.  Any package that needs a git repo
    should subclass this and override build_and_install or fetch as
    they see appropriate.
    """

    os_requirements = {('/usr/bin/git',): 'git-core'}

    # All the chromiumos projects used on the lab servers should have a 'prod'
    # branch used to track the software version deployed in prod.
    PROD_BRANCH = 'prod'
    MASTER_BRANCH = 'master'

    def is_needed(self, unused_install_dir):
        """Tell build_externals that we need to fetch."""
        # TODO(beeps): check if we're already up to date.
        return True


    def build_and_install(self, unused_install_dir):
        """
        Fall through method to install a package.

        Overridden in subclasses to pull a git repo.
        """
        raise NotImplementedError


    def fetch(self, unused_dest_dir):
        """Fallthrough method to fetch a package."""
        return True
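

# Illustrative note: build_externals (not part of this file) is assumed to
# drive tarball packages and git repos alike through the same interface,
# roughly:
#
#   for package_class in ExternalPackage.subclasses:
#       package = package_class()
#       if package.is_needed(install_dir):
#           package.fetch(fetched_packages_dir)
#           package.build_and_install(install_dir)
#
# (install_dir and fetched_packages_dir are placeholder names here), which is
# why the git-backed classes below only override is_needed(), fetch() and
# build_and_install().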


class HdctoolsRepo(_ExternalGitRepo):
    """Clones or updates the hdctools repo."""

    module_name = 'servo'
    temp_hdctools_dir = tempfile.mktemp(suffix='hdctools')
    _GIT_URL = ('https://chromium.googlesource.com/'
                'chromiumos/third_party/hdctools')

    def fetch(self, unused_dest_dir):
        """
        Fetch the repo to a temporary location.

        We use an intermediate temp directory to stage our
        installation because we only care about the servo package.
        If we can't get at the top commit hash after fetching,
        something is wrong.  This can happen when we've cloned/pulled
        an empty repo.  Not something we expect to do.

        @param unused_dest_dir: passed in because we inherit from
            ExternalPackage.

        @return: True if repo sync was successful.
        """
        git_repo = revision_control.GitRepo(
                self.temp_hdctools_dir,
                self._GIT_URL,
                None,
                abs_work_tree=self.temp_hdctools_dir)
        git_repo.reinit_repo_at(self.PROD_BRANCH)

        if git_repo.get_latest_commit_hash():
            return True
        return False


    def build_and_install(self, install_dir):
        """Reach into the hdctools repo and rsync only the servo directory."""

        servo_dir = os.path.join(self.temp_hdctools_dir, 'servo')
        if not os.path.exists(servo_dir):
            return False

        rv = self._rsync(servo_dir, os.path.join(install_dir, 'servo'))
        shutil.rmtree(self.temp_hdctools_dir)
        return rv


class ChromiteRepo(_ExternalGitRepo):
    """Clones or updates the chromite repo."""

    _GIT_URL = ('https://chromium.googlesource.com/chromiumos/chromite')

    def build_and_install(self, install_dir, master_branch=False):
        """
        Clone if the repo isn't initialized, pull clean bits if it is.

        Unlike its hdctools counterpart, the chromite repo clones master
        directly into site-packages.  It doesn't use an intermediate temp
        directory because it doesn't need installation.

        @param install_dir: destination directory for chromite installation.
        @param master_branch: if True, install the master branch.  Otherwise,
            install the prod branch.
        """
        init_branch = (self.MASTER_BRANCH if master_branch
                       else self.PROD_BRANCH)
        local_chromite_dir = os.path.join(install_dir, 'chromite')
        git_repo = revision_control.GitRepo(
                local_chromite_dir,
                self._GIT_URL,
                abs_work_tree=local_chromite_dir)
        git_repo.reinit_repo_at(init_branch)

        if git_repo.get_latest_commit_hash():
            return True
        return False


class SuiteSchedulerRepo(_ExternalGitRepo):
    """Clones or updates the suite_scheduler repo."""

    _GIT_URL = ('https://chromium.googlesource.com/chromiumos/'
                'infra/suite_scheduler')

    def build_and_install(self, install_dir):
        """
        Clone if the repo isn't initialized, pull clean bits if it is.

        @param install_dir: destination directory for suite_scheduler
            installation.
        """
        local_dir = os.path.join(install_dir, 'suite_scheduler')
        git_repo = revision_control.GitRepo(
                local_dir,
                self._GIT_URL,
                abs_work_tree=local_dir)
        git_repo.reinit_repo_at(self.MASTER_BRANCH)

        if git_repo.get_latest_commit_hash():
            return True
        return False


class BtsocketRepo(_ExternalGitRepo):
    """Clones or updates the btsocket repo."""

    _GIT_URL = ('https://chromium.googlesource.com/'
                'chromiumos/platform/btsocket')

    def fetch(self, unused_dest_dir):
        """
        Fetch the repo to a temporary location.

        We use an intermediate temp directory because we have to build an
        egg for installation.  If we can't get at the top commit hash after
        fetching, something is wrong.  This can happen when we've
        cloned/pulled an empty repo.  Not something we expect to do.

        @param unused_dest_dir: passed in because we inherit from
            ExternalPackage.

        @return: True if repo sync was successful.
        """
        self.temp_btsocket_dir = autotemp.tempdir(unique_id='btsocket')
        try:
            git_repo = revision_control.GitRepo(
                    self.temp_btsocket_dir.name,
                    self._GIT_URL,
                    None,
                    abs_work_tree=self.temp_btsocket_dir.name)
            git_repo.reinit_repo_at(self.PROD_BRANCH)

            if git_repo.get_latest_commit_hash():
                return True
        except:
            self.temp_btsocket_dir.clean()
            raise

        self.temp_btsocket_dir.clean()
        return False


    def build_and_install(self, install_dir):
        """
        Install the btsocket module using setup.py.

        @param install_dir: Target installation directory.

        @return: A boolean indicating success or failure.
        """
        work_dir = os.getcwd()
        try:
            os.chdir(self.temp_btsocket_dir.name)
            rv = self._build_and_install_current_dir_setup_py(install_dir)
        finally:
            os.chdir(work_dir)
            self.temp_btsocket_dir.clean()
        return rv


class SkylabInventoryRepo(_ExternalGitRepo):
    """Clones or updates the skylab_inventory repo."""

    _GIT_URL = ('https://chromium.googlesource.com/chromiumos/infra/'
                'skylab_inventory')

    # TODO(nxia): create a prod branch for skylab_inventory.
    def build_and_install(self, install_dir):
        """
        @param install_dir: destination directory for skylab_inventory
            installation.
        """
        local_skylab_dir = os.path.join(install_dir, 'infra_skylab_inventory')
        git_repo = revision_control.GitRepo(
                local_skylab_dir,
                self._GIT_URL,
                abs_work_tree=local_skylab_dir)
        git_repo.reinit_repo_at(self.MASTER_BRANCH)

        # The top-level __init__.py for skylab is at venv/skylab_inventory.
        source = os.path.join(local_skylab_dir, 'venv', 'skylab_inventory')
        link_name = os.path.join(install_dir, 'skylab_inventory')

        if (os.path.exists(link_name) and
            os.path.realpath(link_name) != os.path.realpath(source)):
            os.remove(link_name)

        if not os.path.exists(link_name):
            os.symlink(source, link_name)

        if git_repo.get_latest_commit_hash():
            return True
        return False