#!/usr/bin/python3
#
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

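"""Pushes an Android (ARC) system image to a ChromeOS test device over ssh.

See the epilog of _parse_args() at the bottom of this file for example
invocations.
"""
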
from __future__ import print_function

import argparse
import atexit
import getpass
import hashlib
import itertools
import logging
import os
import re
import shutil
import string
import subprocess
import sys
import tempfile
import time
import xml.etree.ElementTree as ElementTree
import zipfile

import lib.build_artifact_fetcher
import lib.util


_SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))

_EXPECTED_TARGET_PRODUCTS = {
    '^x86': ('cheets_x86', 'cheets_x86_64', 'bertha_x86', 'bertha_x86_64'),
    '^arm': ('cheets_arm', 'bertha_arm',),
    '^aarch64$': ('cheets_arm', 'bertha_arm',),
}
_ANDROID_ROOT = '/opt/google/containers/android'
_ANDROID_ROOT_STATEFUL = os.path.join('/usr/local',
                                      os.path.relpath(_ANDROID_ROOT, '/'))
_CONTAINER_INSTANCE_ROOT_WILDCARD = '/run/containers/android*'
_CONTAINER_ROOT = os.path.join(_ANDROID_ROOT, 'rootfs', 'root')
_RSYNC_COMMAND = ['rsync', '--inplace', '-v', '--progress']
_SCP_COMMAND = ['scp']

_BUILD_FILENAME = string.Template('${product}-img-${build_id}.zip')
_BUILD_TARGET = string.Template('${product}-${build_variant}')
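# For example (values illustrative),
#   _BUILD_FILENAME.substitute(product='cheets_arm', build_id='123456')
# yields 'cheets_arm-img-123456.zip'.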

_CHROMEOS_ARC_ANDROID_SDK_VERSION = 'CHROMEOS_ARC_ANDROID_SDK_VERSION='

_GENERIC_DEVICE = 'generic_%(arch)s_%(product)s'
_RO_BUILD_TYPE = 'ro.build.type='
_RO_BUILD_VERSION_SDK = 'ro.build.version.sdk='
_RO_PRODUCT_DEVICE = 'ro.product.device='

_ANDROID_REL_KEY_SIGNATURE_SUBSTRING = (
    '55b390dd7fdb9418631895d5f759f30112687ff621410c069308a')
_APK_KEY_DEBUG = 'debug-key'
_APK_KEY_RELEASE = 'release-key'
_APK_KEY_UNKNOWN = 'unknown'
_GMS_CORE_PACKAGE_NAME = 'com.google.android.gms'

_ANDROID_SDK_MAPPING = {
    23: "MNC (API 23)",
    24: "NYC (API 24)",
    25: "NYC_MR1 (API 25)",
    26: "OC (API 26)",
}

# Bytes per mebibyte (MiB).
_MB = 1024**2


class RemoteProxy(object):
  """Proxy class to run command line on the remote test device."""

  def __init__(self, remote, dryrun):
    self._remote = remote
    self._dryrun = dryrun
    self._sync_command = (
        _RSYNC_COMMAND if self._has_rsync_on_remote_device() else _SCP_COMMAND)

  def check_call(self, remote_command):
    """Runs |remote_command| on the remote test device via ssh."""
    command = self.get_ssh_commandline(remote_command)
    lib.util.check_call(dryrun=self._dryrun, *command)

  def check_output(self, remote_command):
    """Runs |remote_command| on the remote test device via ssh, and returns
       its output."""
    command = self.get_ssh_commandline(remote_command)
    return lib.util.check_output(dryrun=self._dryrun, *command)

  def sync(self, file_list, dest_dir):
    """Copies |file_list| to the |dest_dir| on the remote test device."""
    target = 'root@%s:%s' % (self._remote, dest_dir)
    command = self._sync_command + file_list + [target]
    lib.util.check_call(dryrun=self._dryrun, *command)

  def read(self, src_path):
    """Gets the contents of |src_path| from the remote test device."""
    return self.check_output('/bin/cat %s' % src_path)

  def write(self, contents, dest_path):
    """Writes |contents| into |dest_path| on the remote test device."""
    with tempfile.NamedTemporaryFile(mode='w') as f:
      f.write(contents)
      # Flush so that the full contents are on disk before scp reads the file.
      f.flush()
      self.push(f.name, dest_path)

  def push(self, source_path, dest_path):
    """Pushes |source_path| on the host, to |dest_path| on the remote test
       device.

    Args:
        source_path: Host file path to be pushed.
        dest_path: Path to the destination location on the remote test device.
    """
    target = 'root@%s:%s' % (self._remote, dest_path)
    command = _SCP_COMMAND + [source_path, target]
    lib.util.check_call(dryrun=self._dryrun, *command)

  def pull(self, source_path, dest_path):
    """Pulls |source_path| from the remote test device, to |dest_path| on the
       host.

    Args:
        source_path: Remote test device file path to be pulled.
        dest_path: Path to the destination location on the host.
    """
    target = 'root@%s:%s' % (self._remote, source_path)
    command = _SCP_COMMAND + [target, dest_path]
    return lib.util.check_call(dryrun=self._dryrun, *command)

  def get_ssh_commandline(self, remote_command):
    return ['ssh', 'root@' + self._remote, remote_command]

  def _has_rsync_on_remote_device(self):
    command = self.get_ssh_commandline('which rsync')
    logging.debug('Calling: %s', lib.util.get_command_str(command))
    # Always return true for --dryrun.
    return self._dryrun or subprocess.call(command) == 0

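# Illustrative RemoteProxy usage (host name is hypothetical):
#
#   proxy = RemoteProxy('test-device.local', dryrun=False)
#   arch = proxy.check_output('uname -m')
#   proxy.push('/tmp/sepolicy', '/etc/selinux/arc/policy/policy.30')
#   proxy.sync(['system.raw.img'], '/opt/google/containers/android')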

class TemporaryDirectory(object):
  """A context object that has a temporary directory with the same lifetime."""

  def __init__(self):
    self.name = None

  def __enter__(self):
    self.name = tempfile.mkdtemp()
    return self

  def __exit__(self, exception_type, exception_value, traceback):
    lib.util.check_call('rm', '-rf', self.name, sudo=True)


class MountWrapper(object):
  """A context object that mounts an image during the lifetime."""

  def __init__(self, image_path, mountpoint):
    self._image_path = image_path
    self._mountpoint = mountpoint

  def __enter__(self):
    lib.util.check_call('/bin/mount', '-o', 'loop', self._image_path,
                        self._mountpoint, sudo=True)
    return self

  def __exit__(self, exception_type, exception_value, traceback):
    try:
      lib.util.check_call('/bin/umount', self._mountpoint, sudo=True)
    except Exception:
      if not exception_type:
        raise
      # Instead of propagating this exception, just log it so that the
      # exception raised in the with-body (if any) is not masked.
      logging.exception('Failed to umount ' + self._mountpoint)


class Simg2img(object):
  """Wrapper class of simg2img"""

  def __init__(self, simg2img_path, dryrun):
    self._path = simg2img_path
    self._dryrun = dryrun

  def convert(self, src, dest):
    """Converts a sparse image to a raw image using the simg2img tool.

    If |dryrun| is set, the command is not executed.
    """
    lib.util.check_call(self._path, src, dest, dryrun=self._dryrun)


def _verify_machine_arch(remote_proxy, target_product, dryrun):
  """Verifies the image being pushed is built for the target architecture.

  Args:
      remote_proxy: RemoteProxy instance for the remote test device.
      target_product: Target product name of the image being pushed. This is
          usually set by the "lunch" command. E.g. "cheets_x86" or "cheets_arm".
      dryrun: If set, this function assumes the machine architectures match.

  Raises:
      AssertionError: If the image being pushed does not match the remote test
          device's architecture.
  """
  if dryrun:
    logging.debug('Pretending machine architectures match')
    return
  remote_arch = remote_proxy.check_output('uname -m')
  for arch_pattern, expected_set in _EXPECTED_TARGET_PRODUCTS.items():
    if re.search(arch_pattern, remote_arch):
      expected = itertools.chain.from_iterable(
          (expected, 'aosp_' + expected, 'sdk_google_' + expected)
          for expected in expected_set)
      assert target_product in expected, (
          ('Architecture mismatch: Deploying \'%s\' to \'%s\' seems incorrect.'
           % (target_product, remote_arch)))
      return
  logging.warning('Unknown remote machine type \'%s\'. Skipping '
                  'architecture sanity check.', remote_arch)


def _convert_images(simg2img, out, product, push_vendor_image,
                    shift_ugids, mksquashfs_path, unsquashfs_path,
                    shift_uid_py_path, dryrun):
  """Converts the images being pushed to the raw images.

  Returns:
      A tuple of (large_file_list, file_list). Each list consists of paths of
      converted files.
  """
  result = []
  result_large = []

  system_raw_img = os.path.join(out, 'system.raw.img')
  simg2img.convert(os.path.join(out, 'system.img'), system_raw_img)
  file_contexts_path = None
  if not dryrun:
    with ContainerImageEditor(mksquashfs_path, unsquashfs_path, system_raw_img,
                              '/', out) as e:
      file_contexts_path = e.file_contexts_path
      if 'x86' in product:
        logging.debug('Creating \'system/lib/arm\' dir and houdini symlinks in '
                      'the system image')
        # Create system/lib/arm dir
        dir_name = os.path.join(e.tmp_dir_name, 'system/lib/arm')
        logging.debug('Creating directory: %s', dir_name)
        lib.util.check_call('mkdir', '-p', dir_name, sudo=True)
        # Create a symlink: system/bin/houdini --> /vendor/bin/houdini
        lib.util.check_call('ln', '-sf', '/vendor/bin/houdini',
                            os.path.join(e.tmp_dir_name, 'system/bin/houdini'),
                            sudo=True)
        # Create a symlink: system/lib/libhoudini.so --> /vendor/lib/libhoudini.so
        lib.util.check_call('ln', '-sf', '/vendor/lib/libhoudini.so',
                            os.path.join(e.tmp_dir_name,
                                         'system/lib/libhoudini.so'), sudo=True)
      # TODO(b/65117245): This needs to be part of the build.
      if shift_ugids:
        # Shift the UIDs/GIDs.
        lib.util.check_call(shift_uid_py_path, e.tmp_dir_name, sudo=True)

  result_large.append(system_raw_img)

  if push_vendor_image:
    vendor_raw_img = os.path.join(out, 'vendor.raw.img')
    simg2img.convert(os.path.join(out, 'vendor.img'), vendor_raw_img)
    # TODO(b/65117245): This needs to be part of the build.
    if shift_ugids and not dryrun:
      with ContainerImageEditor(mksquashfs_path, unsquashfs_path,
                                vendor_raw_img, 'vendor', out,
                                file_contexts_path=file_contexts_path) as e:
        # Shift the UIDs/GIDs.
        lib.util.check_call(shift_uid_py_path, e.tmp_dir_name,
                            sudo=True)
    result.append(vendor_raw_img)

  return (result_large, result)


def _update_build_fingerprint(remote_proxy, build_fingerprint):
  """Updates CHROMEOS_ARC_VERSION in /etc/lsb-release.

  Args:
      remote_proxy: RemoteProxy instance connected to the test device.
      build_fingerprint: The version code which should be embedded into
          /etc/lsb-release.
  """
  if not build_fingerprint:
    logging.warning(
        'Skipping version update. ARC version will be reported incorrectly')
    return

  # Replace the ARC version on disk with what we're pushing there.
  logging.info('Updating CHROMEOS_ARC_VERSION...')
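  # With a hypothetical fingerprint like 'cheets_arm-userdebug-1234567', the
  # remote command below expands to roughly:
  #   /bin/sed -i \
  #     "s/^\(CHROMEOS_ARC_VERSION=\).*/\1cheets_arm-userdebug-1234567/" \
  #     /etc/lsb-release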
  remote_proxy.check_call(' '.join([
      '/bin/sed', '-i',
      # Note: we assume build_fingerprint does not contain any char which
      # needs to be escaped.
      r'"s/^\(CHROMEOS_ARC_VERSION=\).*/\1%(_BUILD_FINGERPRINT)s/"',
      '/etc/lsb-release'
  ]) % {'_BUILD_FINGERPRINT': build_fingerprint})


def _get_remote_device_android_sdk_version(remote_proxy, dryrun):
  """ Returns the Android SDK version on the remote device.

  Args:
      remote_proxy: RemoteProxy instance for the remote test device.
      dryrun: If set, this function assumes Android SDK version is 1.
  """
  if dryrun:
    logging.debug('Pretending target device\'s Android SDK version is 1')
    return 1
  try:
    line = remote_proxy.check_output(
        'grep ^%s /etc/lsb-release' % _CHROMEOS_ARC_ANDROID_SDK_VERSION).strip()
  except subprocess.CalledProcessError:
    logging.exception('Failed to inspect /etc/lsb-release remotely')
    return None

  if not line.startswith(_CHROMEOS_ARC_ANDROID_SDK_VERSION):
    logging.warning('Failed to find the correct string format.\n'
                    'Expected format: "%s"\nActual string: "%s"',
                    _CHROMEOS_ARC_ANDROID_SDK_VERSION, line)
    return None

  android_sdk_version = int(
      line[len(_CHROMEOS_ARC_ANDROID_SDK_VERSION):].strip())
  logging.debug('Target device\'s Android SDK version: %d', android_sdk_version)
  return android_sdk_version


def _verify_android_sdk_version(remote_proxy, provider, dryrun):
  """Verifies that the Android SDK versions of the image being pushed and the
  test device are the same.

  Args:
      remote_proxy: RemoteProxy instance for the remote test device.
      provider: Android image provider.
      dryrun: If set, this function assumes Android SDK versions match.

  Raises:
      AssertionError: If the Android SDK version of the image being pushed does
          not match the Android SDK version on the remote test device.
  """
  if dryrun:
    logging.debug('Pretending Android SDK versions match')
    return
  logging.debug('New image\'s Android SDK version: %d',
                provider.get_build_version_sdk())

  device_android_sdk_version = _get_remote_device_android_sdk_version(
      remote_proxy, dryrun)

  if device_android_sdk_version is None:
    if not boolean_prompt('Unable to determine the target device\'s Android '
                          'SDK version. Continue?', default=False):
      sys.exit(1)
  else:
    assert device_android_sdk_version == provider.get_build_version_sdk(), (
        'Android SDK versions do not match. The target device has {}, while '
        'the new image is {}'.format(
            _android_sdk_version_to_string(device_android_sdk_version),
            _android_sdk_version_to_string(provider.get_build_version_sdk())))


def _android_sdk_version_to_string(android_sdk_version):
  """Converts |android_sdk_version| to a human-readable string.

  Args:
    android_sdk_version: The Android SDK version number (an integer).
  """
  return _ANDROID_SDK_MAPPING.get(
      android_sdk_version,
      'Unknown SDK Version (API {})'.format(android_sdk_version))

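# For example, _android_sdk_version_to_string(25) returns 'NYC_MR1 (API 25)',
# and an unmapped value such as 99 returns 'Unknown SDK Version (API 99)'.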

def _get_selinux_file_contexts_contents(out):
  """Returns the final contents of the SELinux file_contexts file."""
  contents = []
  for filename in ('file_contexts', 'plat_file_contexts',
                   'nonplat_file_contexts'):
    path = os.path.join(out, 'root', filename)
    # Some files are always expected to be missing due to not being present in
    # the branch.
    if not os.path.exists(path):
      logging.debug('Skipping %s since it is missing', path)
      continue
    with open(path, 'r') as f:
      contents.append(f.read())
  return '\n'.join(contents)


def _is_selinux_file_contexts_updated(remote_proxy, out, dryrun):
  """Returns True if SELinux file_contexts is updated."""
  if dryrun:
    logging.debug('Pretending file_contexts is not updated in dryrun mode')
    return False
  remote_file_contexts_sha1, _ = remote_proxy.check_output(
      'sha1sum /etc/selinux/arc/contexts/files/android_file_contexts').split()
  file_contexts_contents = _get_selinux_file_contexts_contents(out)
  # hashlib expects bytes, not str.
  host_file_contexts_sha1 = hashlib.sha1(
      file_contexts_contents.encode('utf-8')).hexdigest()
  return remote_file_contexts_sha1 != host_file_contexts_sha1


def _update_selinux_file_contexts(remote_proxy, out):
  """Updates the selinux file_contexts file."""
  android_file_contexts_contents = _get_selinux_file_contexts_contents(out)
  remote_proxy.write(android_file_contexts_contents,
                     '/etc/selinux/arc/contexts/files/android_file_contexts')
  file_contexts_contents = []
  for line in android_file_contexts_contents.split('\n'):
    line = line.strip()
    if not line or line.startswith('#'):
      continue
    file_contexts_contents.append(
        '%s/rootfs/root%s' % (_ANDROID_ROOT, line))
  remote_file_contexts_path = '/etc/selinux/arc/contexts/files/file_contexts'
  remote_file_contexts_contents = remote_proxy.read(remote_file_contexts_path)
  try:
    # This string comes from
    # private-overlays/project-cheets-private/chromeos-base/\
    #    android-container-<VERSION>/files/chromeos_file_contexts
    header_idx = remote_file_contexts_contents.index(
        '# Chrome OS file contexts')
  except ValueError:
    # The header was missing. Will concat the whole file.
    logging.warning('Could not find Chrome OS header in %s. '
                    'Will use the whole file', remote_file_contexts_path)
    header_idx = 0
  file_contexts_contents.append(remote_file_contexts_contents[header_idx:])
  remote_proxy.write('\n'.join(file_contexts_contents),
                     remote_file_contexts_path)


def _is_selinux_policy_updated(remote_proxy, out, dryrun):
  """Returns True if SELinux policy is updated."""
  if dryrun:
    logging.debug('Pretending sepolicy is not updated in dryrun mode')
    return False
  remote_sepolicy_sha1, _ = remote_proxy.check_output(
      'sha1sum /etc/selinux/arc/policy/policy.30').split()
  with open(os.path.join(out, 'root', 'sepolicy'), 'rb') as f:
    host_sepolicy_sha1 = hashlib.sha1(f.read()).hexdigest()
  return remote_sepolicy_sha1 != host_sepolicy_sha1


def _update_selinux_policy(remote_proxy, out):
  """Updates the selinux policy file."""
  remote_proxy.push(os.path.join(out, 'root', 'sepolicy'),
                    '/etc/selinux/arc/policy/policy.30')


def _remount_rootfs_as_writable(remote_proxy):
  """Remounts root file system to make it writable."""
  remote_proxy.check_call('mount -o remount,rw /')


def _get_free_space(remote_proxy):
  """Gets the number of free bytes in the root partition."""
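  # The size of the existing system.raw.img is added back to what df reports
  # because the push replaces that image in place.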
  return int(remote_proxy.check_output(
      'echo $(( '
      '    $(df --output=avail --local --block-size 1 / | tail -n1) + '
      '    $(du --bytes /opt/google/containers/android/system.raw.img | '
      '      awk \'{print $1}\') '
      '))'))


def boolean_prompt(prompt, default=True, true_value='yes', false_value='no',
                   prolog=None):
  """Helper function for processing boolean choice prompts.

  Args:
    prompt: The question to present to the user.
    default: Boolean to return if the user just presses enter.
    true_value: The text to display that represents a True returned.
    false_value: The text to display that represents a False returned.
    prolog: The text to display before prompt.

  Returns:
    True or False.
  """
  true_value, false_value = true_value.lower(), false_value.lower()
  true_text, false_text = true_value, false_value
  if true_value == false_value:
    raise ValueError('true_value and false_value must differ: got %r'
                     % true_value)

  if default:
    true_text = true_text[0].upper() + true_text[1:]
  else:
    false_text = false_text[0].upper() + false_text[1:]

  prompt = ('\n%s (%s/%s)? ' % (prompt, true_text, false_text))

  if prolog:
    prompt = ('\n%s\n%s' % (prolog, prompt))

  while True:
    try:
      response = input(prompt).lower()
    except EOFError:
      # If the user hits CTRL+D, or stdin is disabled, use the default.
      print(file=sys.stderr)
      response = None
    except KeyboardInterrupt:
      # If the user hits CTRL+C, just exit the process.
      print(file=sys.stderr)
      print('CTRL+C detected; exiting', file=sys.stderr)
      raise

    if not response:
      return default
    if true_value.startswith(response):
      if not false_value.startswith(response):
        return True
      # common prefix between the two...
    elif false_value.startswith(response):
      return False


def _disable_rootfs_verification(force, remote_proxy):
  make_dev_ssd_path = \
      '/usr/libexec/debugd/helpers/dev_features_rootfs_verification'
  make_dev_ssd_command = remote_proxy.get_ssh_commandline(make_dev_ssd_path)
  logging.info('Detected that the device has rootfs verification enabled.')
  logging.info('This script can remove the rootfs verification using `%s`, '
               'which requires that the device is rebooted afterwards.',
               lib.util.get_command_str(make_dev_ssd_command))
  if not force:
    logging.info('Automatically remove rootfs verification and skip this '
                 'prompt by specifying --force.')
    if not boolean_prompt('Remove rootfs verification?', default=False):
      return False
  remote_proxy.check_call(make_dev_ssd_path)
  reboot_time = time.time()
  remote_proxy.check_call('reboot')
  logging.debug('Waiting up to 10 seconds for the machine to reboot')
  for _ in range(10):
    time.sleep(1)
    try:
      device_boot_time = remote_proxy.check_output('grep btime /proc/stat | ' +
                                                   'cut -d" " -f2')
      if int(device_boot_time) >= reboot_time:
        return True
    except subprocess.CalledProcessError:
      pass
  logging.error('Failed to detect whether the device had successfully rebooted')
  return False


def _stop_ui(remote_proxy):
  remote_proxy.check_call('\n'.join([
      # Unmount the container root/vendor and root if necessary. This also stops
      # UI.
      'stop arc-system-mount || true',
  ]))


class ImageUpdateMode(object):
  """Context object to manage remote host writable status."""

  def __init__(self, remote_proxy, is_selinux_policy_updated, push_to_stateful,
               clobber_data, force):
    self._remote_proxy = remote_proxy
    self._is_selinux_policy_updated = is_selinux_policy_updated
    self._push_to_stateful = push_to_stateful
    self._clobber_data = clobber_data
    self._force = force

  def __enter__(self):
    logging.info('Setting up ChromeOS device to image-writable...')

    if self._clobber_data:
      self._remote_proxy.check_call(
          'if [ -e %(ANDROID_ROOT_WILDCARD)s/root/data ]; then'
          '  kill -9 `cat %(ANDROID_ROOT_WILDCARD)s/container.pid`;'
          '  find %(ANDROID_ROOT_WILDCARD)s/root/data'
          '       %(ANDROID_ROOT_WILDCARD)s/root/cache -mindepth 1 -delete;'
          'fi' % {'ANDROID_ROOT_WILDCARD': _CONTAINER_INSTANCE_ROOT_WILDCARD})

    _stop_ui(self._remote_proxy)
    try:
      _remount_rootfs_as_writable(self._remote_proxy)
    except subprocess.CalledProcessError:
      if not _disable_rootfs_verification(self._force, self._remote_proxy):
        raise
      _stop_ui(self._remote_proxy)
      # Try to remount rootfs as writable. Bail out if it fails this time.
      _remount_rootfs_as_writable(self._remote_proxy)
    self._remote_proxy.check_call('\n'.join([
        # Delete the image file if it is a symlink.
        'if [ -L %(_ANDROID_ROOT)s/system.raw.img ]; then'
        '  rm %(_ANDROID_ROOT)s/system.raw.img;'
        'fi',
    ]) % {'_ANDROID_ROOT': _ANDROID_ROOT})
    if self._push_to_stateful:
      self._remote_proxy.check_call('\n'.join([
          # Create the destination directory in the stateful partition.
          'mkdir -p %(_ANDROID_ROOT_STATEFUL)s',
      ]) % {'_ANDROID_ROOT_STATEFUL': _ANDROID_ROOT_STATEFUL})

  def __exit__(self, exc_type, exc_value, traceback):
    if self._push_to_stateful:
      # Push the image to _ANDROID_ROOT_STATEFUL instead of _ANDROID_ROOT.
      # Create a symlink so that arc-system-mount can handle it.
      self._remote_proxy.check_call('\n'.join([
          'ln -sf %(_ANDROID_ROOT_STATEFUL)s/system.raw.img '
          '  %(_ANDROID_ROOT)s/system.raw.img',
      ]) % {'_ANDROID_ROOT': _ANDROID_ROOT,
            '_ANDROID_ROOT_STATEFUL': _ANDROID_ROOT_STATEFUL})

    if self._is_selinux_policy_updated:
      logging.info('*** SELinux policy updated. ***')
    else:
      logging.info('*** SELinux policy is not updated. Restarting ui. ***')
      try:
        self._remote_proxy.check_call('\n'.join([
            # Make the whole invocation fail if any individual command does.
            'set -e',

            # Remount the root file system to readonly.
            'mount -o remount,ro /',

            # Restart UI.
            'start ui',

            # Mount the updated {system,vendor}.raw.img. This will also trigger
            # android-ureadahead once it's done and should remove the packfile.
            'start arc-system-mount',
        ]))
        return
      except Exception:
        # The above commands are just an optimization to avoid having to reboot
        # every single time an image is pushed, which saves 6-10s. If any of
        # them fail, the only safe thing to do is reboot the device.
        logging.exception('Failed to cleanly restart ui, fall back to reboot')

    logging.info('*** Reboot required. ***')
    try:
      self._remote_proxy.check_call('reboot')
    except Exception:
      if exc_type is None:
        raise
      # If the body block of a with statement also raises an error, here we
      # just log the exception, so that the main exception will be propagated to
      # the caller properly.
      logging.exception('Failed to reboot the device')


class PreserveTimestamps(object):
  """Context object to modify a file but preserve the original timestamp."""

  def __init__(self, path):
    self.path = path
    self._original_timestamp = None

  def __enter__(self):
    # Save the original timestamp
    self._original_timestamp = os.stat(self.path)
    return self

  def __exit__(self, exception_type, exception_value, traceback):
    # Apply the original timestamp
    os.utime(self.path, (self._original_timestamp.st_atime,
                         self._original_timestamp.st_mtime))

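# Illustrative PreserveTimestamps usage (path and property are hypothetical):
#
#   with PreserveTimestamps('/path/to/build.prop') as f:
#     lib.util.check_call('/bin/sed', '-i', r'$aro.foo=bar', f.path)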

def _download_artifact(out_dir, build_id, product, build_variant):
  mapping = dict(build_id=build_id, product=product)
  uploaded_filename = _BUILD_FILENAME.substitute(mapping)
  filename = os.path.join(out_dir, uploaded_filename)
  fetcher = lib.build_artifact_fetcher.BuildArtifactFetcher(
      lib.util.get_product_arch(product), build_variant, build_id)
  fetcher.fetch(uploaded_filename, filename)
  return filename


def _extract_files(unsquashfs_path, out_dir, system_raw_img, paths):
  with TemporaryDirectory() as tmp_dir:
    if lib.util.get_image_type(system_raw_img) == 'squashfs':
      lib.util.check_call(
          unsquashfs_path, '-d', tmp_dir.name, '-no-progress', '-f',
          system_raw_img, '-no-xattrs', *[path[0] for path in paths], sudo=True)
      _extract_files_helper(tmp_dir.name, out_dir, paths)
    else:
      with MountWrapper(system_raw_img, tmp_dir.name):
        _extract_files_helper(tmp_dir.name, out_dir, paths)


def _extract_files_helper(source_root, destination_root, paths):
  for source, destination in paths:
    source = os.path.join(source_root, source)
    # Some files are always expected to be missing due to not being
    # present in the branch.
    if not os.path.exists(source):
      logging.debug('Skipping %s since it is missing', source)
      continue
    destination = os.path.join(destination_root, destination)
    if not os.path.exists(os.path.dirname(destination)):
      os.makedirs(os.path.dirname(destination))
    # 'sudo' is needed due to b/65117245. Android P shifts ugids of extracted
    # files.
    lib.util.check_call('cp', source, destination, sudo=True)
    lib.util.check_call('chown', getpass.getuser(), destination, sudo=True)


def _extract_artifact(simg2img, unsquashfs_path, out_dir, filename):
  with zipfile.ZipFile(filename, 'r') as z:
    z.extract('system.img', out_dir)
    z.extract('vendor.img', out_dir)
  # Note that the same simg2img conversion is performed again for system.img
  # later, but the extra run is acceptable (<2s).  If this is important, we
  # could try to change the program flow.
  simg2img.convert(os.path.join(out_dir, 'system.img'),
                   os.path.join(out_dir, 'system.raw.img'))
  _extract_files(unsquashfs_path, out_dir,
                 os.path.join(out_dir, 'system.raw.img'),
                 [('sepolicy', 'root/sepolicy'),
                  ('file_contexts', 'root/file_contexts'),
                  ('plat_file_contexts', 'root/plat_file_contexts'),
                  ('nonplat_file_contexts', 'root/nonplat_file_contexts'),
                  ('system/build.prop', 'build.prop')])


def _make_tempdir_deleted_on_exit():
  d = tempfile.mkdtemp()
  atexit.register(shutil.rmtree, d, ignore_errors=True)
  return d


def _detect_cert_inconsistency(force, remote_proxy, new_variant, dryrun):
  """Prompts to delete /data based on the detected situation (best effort).

  Detection is only accurate for the active session, so it won't fix other
  profiles.

  As GMS apps are signed with a different key in user and non-user builds,
  the container won't run correctly if the old key has been registered in
  /data.
  """
  if dryrun:
    return False

  # Get current build variant on device.
  cmd = 'grep %s %s' % (_RO_BUILD_TYPE,
                        os.path.join(_CONTAINER_ROOT, 'system/build.prop'))
  try:
    line = remote_proxy.check_output(cmd).strip()
  except subprocess.CalledProcessError:
    # Catch any error to avoid blocking the push.
    logging.exception('Failed to inspect build property remotely')
    return False
  device_variant = line[len(_RO_BUILD_TYPE):]

  device_apk_key = _APK_KEY_UNKNOWN
  try:
    device_apk_key = _get_remote_device_apk_key(remote_proxy)
  except Exception as e:
    logging.warning('There was an error getting the remote device APK '
                    'key signature %s. Assuming APK key signature is '
                    '\'unknown\'', e)

  logging.debug('device apk key: %s; build variant: %s -> %s', device_apk_key,
                device_variant, new_variant)

  # GMS signature in /data is inconsistent with the new build.
  is_inconsistent = (
      (device_apk_key == _APK_KEY_RELEASE and new_variant != 'user') or
      (device_apk_key == _APK_KEY_DEBUG and new_variant == 'user'))

  if is_inconsistent:
    new_apk_key = _APK_KEY_RELEASE if new_variant == 'user' else _APK_KEY_DEBUG
    logging.info('Detected apk signature change (%s -> %s[%s]) on current user.'
                 % (device_apk_key, new_apk_key, new_variant))
    if force:
      logging.info('Deleting /data and /cache.')
      return True
    logging.info('Automatically delete and skip this prompt by specifying '
                 '--force.')
    return boolean_prompt('Delete /data and /cache?', default=True)

  # Switching from/to user build.
  if (device_variant == 'user') != (new_variant == 'user'):
    logging.warning('\n\n** You are switching build variant (%s -> %s).  If '
                    'you have ever run with the old image, make sure to wipe '
                    'out /data first before starting the container. **\n',
                    device_variant, new_variant)
  return False


def _get_remote_device_apk_key(remote_proxy):
  """Retrieves the APK key signature of the remote test device.

  Args:
      remote_proxy: RemoteProxy instance for the remote test device.
  """
  remote_packages_xml = os.path.join(_CONTAINER_INSTANCE_ROOT_WILDCARD,
                                     'root/data/system/packages.xml')
  with TemporaryDirectory() as tmp_dir:
    host_packages_xml = os.path.join(tmp_dir.name, 'packages.xml')
    remote_proxy.pull(remote_packages_xml, host_packages_xml)
    return _get_apk_key_from_xml(host_packages_xml)


def _get_apk_key_from_xml(xml_file):
  """Parses |xml_file| to determine the APK key signature.

  Args:
      xml_file: The XML file to parse.
  """
  if not os.path.exists(xml_file):
    logging.warning('XML file doesn\'t exist: %s' % xml_file)
    return _APK_KEY_UNKNOWN

  root = ElementTree.parse(xml_file).getroot()
  gms_core_elements = root.findall('package[@name=\'%s\']'
                                   % _GMS_CORE_PACKAGE_NAME)
  assert len(gms_core_elements) == 1, ('Invalid number of GmsCore package '
                                       'elements. Expected: 1 Actual: %d'
                                       % len(gms_core_elements))
  gms_core_element = gms_core_elements[0]
  sigs_element = gms_core_element.find('sigs')
  assert sigs_element is not None, (
      'Unable to find the |sigs| tag under the GmsCore package tag.')
  sigs_count_attribute = int(sigs_element.get('count'))
  assert sigs_count_attribute == 1, ('Invalid signature count. Expected: 1 '
                                     'Actual: %d' % sigs_count_attribute)
  cert_element = sigs_element.find('cert')
  gms_core_cert_index = int(cert_element.get('index', -1))
  logging.debug('GmsCore cert index: %d' % gms_core_cert_index)
  if gms_core_cert_index == -1:
    logging.warning('Invalid cert index (%d)' % gms_core_cert_index)
    return _APK_KEY_UNKNOWN

  cert_key = cert_element.get('key')
  if cert_key:
    return _get_android_key_type_from_cert_key(cert_key)

  # The GmsCore package's |cert| element contains the cert index, but not the
  # cert key. Find the matching cert key by index.
  for cert_element in root.findall('package/sigs/cert'):
    cert_index = int(cert_element.get('index'))
    cert_key = cert_element.get('key')
    if cert_key and cert_index == gms_core_cert_index:
      return _get_android_key_type_from_cert_key(cert_key)
  logging.warning('Unable to find a cert key matching index %d',
                  gms_core_cert_index)
  return _APK_KEY_UNKNOWN


def _get_android_key_type_from_cert_key(cert_key):
  """Returns |_APK_KEY_RELEASE| if |cert_key| contains the Android release key
     signature substring, otherwise it returns |_APK_KEY_DEBUG|."""
  if _ANDROID_REL_KEY_SIGNATURE_SUBSTRING in cert_key:
    return _APK_KEY_RELEASE
  else:
    return _APK_KEY_DEBUG


def _find_build_property(line, build_property_name):
  """Returns the value that matches |build_property_name| in |line|."""
  if line.startswith(build_property_name):
    return line[len(build_property_name):].strip()
  return None

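# For example, _find_build_property('ro.build.type=userdebug\n',
# _RO_BUILD_TYPE) returns 'userdebug'; a non-matching line returns None.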

class ContainerImageEditor(object):
  """A context object that allows edits to the Android container image"""

  def __init__(self, mksquashfs_path, unsquashfs_path, image_path, mount_point,
               out_dir, file_contexts_path=None):
    self._mksquashfs_path = mksquashfs_path
    self._unsquashfs_path = unsquashfs_path
    self._image_path = image_path
    self._mount_point = mount_point
    self._out_dir = out_dir
    self.file_contexts_path = file_contexts_path
    # Since we shift UIDs/GIDs of all extracted files there, we are going to
    # need sudo permission to remove the temporary directory. shutil doesn't
    # have the ability to run as sudo, so not using TemporaryDirectory() here.
    self.tmp_dir_name = tempfile.mkdtemp()

  def __enter__(self):
    self._is_squashfs = lib.util.get_image_type(self._image_path) == 'squashfs'
    logging.debug('Is the file system squashfs? %s', self._is_squashfs)

    if self._is_squashfs:
      # Extract the files.
      lib.util.check_call(self._unsquashfs_path, '-force', '-d',
                          self.tmp_dir_name, self._image_path, sudo=True)

      self._update_file_context_path()
      if not os.path.exists(self.file_contexts_path):
        raise EnvironmentError('"%s" not found.' % self.file_contexts_path)
    else:
      self._mount_wrapper = MountWrapper(self._image_path, self.tmp_dir_name)
      self._mount_wrapper.__enter__()
      self._update_file_context_path()

    return self

  def __exit__(self, exception_type, exception_value, traceback):
    try:
      if self._is_squashfs:
        # Re-compress the files back to raw.img.
        lib.util.check_call(
            self._mksquashfs_path, self.tmp_dir_name, self._image_path,
            '-no-progress', '-comp', 'gzip', '-no-exports', '-noappend',
            '-mount-point', self._mount_point, '-product-out', self._out_dir,
            '-context-file', self.file_contexts_path, '-no-recovery',
            '-no-fragments', '-no-duplicates', '-b', '131072', '-t', '0',
            sudo=True)
      else:
        self._mount_wrapper.__exit__(exception_type, exception_value, traceback)
    finally:
      lib.util.check_call('rm', '-rf', self.tmp_dir_name, sudo=True)

  def _update_file_context_path(self):
    if self.file_contexts_path:
      logging.debug('file_contexts_path is already set')
      return
    self.file_contexts_path = os.path.join(_make_tempdir_deleted_on_exit(),
                                           'file_contexts')
    file_contexts_path = os.path.join(self.tmp_dir_name, 'file_contexts')
    if os.path.exists(file_contexts_path):
      logging.debug('Found file_contexts in image')
      lib.util.check_call('cp', file_contexts_path, self.file_contexts_path)
      return
    plat_file_contexts_path = os.path.join(self.tmp_dir_name,
                                           'plat_file_contexts')
    nonplat_file_contexts_path = os.path.join(self.tmp_dir_name,
                                              'nonplat_file_contexts')
    if (os.path.exists(plat_file_contexts_path) and
       os.path.exists(nonplat_file_contexts_path)):
      logging.debug('Combining \'plat_file_contexts\' and '
                    '\'nonplat_file_contexts\' files')
      with TemporaryDirectory() as tmp_dir:
        file_contexts_path = os.path.join(tmp_dir.name, 'file_contexts')
        with open(file_contexts_path, 'w') as outfile:
          if os.path.exists(plat_file_contexts_path):
            logging.debug('Writing plat_file_contexts to %s',
                          file_contexts_path)
            with open(plat_file_contexts_path) as infile:
              for line in infile:
                outfile.write(line)
          if os.path.exists(nonplat_file_contexts_path):
            logging.debug('Writing nonplat_file_contexts to %s',
                          file_contexts_path)
            with open(nonplat_file_contexts_path) as infile:
              for line in infile:
                outfile.write(line)
        if not os.path.exists(file_contexts_path):
          raise EnvironmentError('%s not found.' % file_contexts_path)
        lib.util.check_call('cp', file_contexts_path, self.file_contexts_path)
      return
    raise EnvironmentError('Unable to find file_contexts or '
                           '[non]plat_file_contexts in the image')


class BaseProvider(object):
  """Base class of image provider.

  Subclass should provide a directory with images in it.
  """

  def __init__(self):
    self._build_variant = None
    self._build_version_sdk = None

  def prepare(self):
    """Subclass should prepare the image in its implementation.

    Subclass must return the (image directory, product, fingerprint) tuple.
    Product is a string like "cheets_arm".  Fingerprint is the string that
    will be written as the CHROMEOS_ARC_VERSION value in /etc/lsb-release.
    """
    raise NotImplementedError()

  def get_build_variant(self):
    """ Returns the extracted build variant."""
    return self._build_variant

  def get_build_version_sdk(self):
    """ Returns the extracted Android SDK version."""
    return self._build_version_sdk

  def read_build_prop_file(self, build_prop_file, remove_file=True):
    """ Reads the specified build property file, and extracts the
    "ro.build.type" and "ro.build.version.sdk" properties. This method
    optionally deletes |build_prop_file| when done.

    Args:
        build_prop_file: The fully qualified path to the build.prop file.
        remove_file: Removes the |build_prop_file| when done. (default=True)
    """
    logging.debug('Reading build prop file: %s', build_prop_file)
    with open(build_prop_file, 'r') as f:
      for line in f:
        if self._build_version_sdk is None:
          value = _find_build_property(line, _RO_BUILD_VERSION_SDK)
          if value is not None:
            self._build_version_sdk = int(value)
        if self._build_variant is None:
          value = _find_build_property(line, _RO_BUILD_TYPE)
          if value is not None:
            self._build_variant = value
        if self._build_variant and self._build_version_sdk:
          break
    if remove_file:
      logging.info('Deleting prop file: %s...', build_prop_file)
      os.remove(build_prop_file)


class PrebuiltProvider(BaseProvider):
  """A provider that provides a prebuilt image from the Android builder."""

  def __init__(self, product, build_variant, build_id, simg2img,
               unsquashfs_path):
    super(PrebuiltProvider, self).__init__()
    self._product = product
    self._build_variant = build_variant
    self._build_id = build_id
    self._simg2img = simg2img
    self._unsquashfs_path = unsquashfs_path

  def prepare(self):
    fingerprint = '_'.join([self._product, self._build_variant, self._build_id])

    out_dir = _make_tempdir_deleted_on_exit()
    filename = _download_artifact(out_dir, self._build_id, self._product,
                                  self._build_variant)
    _extract_artifact(self._simg2img, self._unsquashfs_path, out_dir, filename)

    build_prop_file = os.path.join(out_dir, 'build.prop')
    self.read_build_prop_file(build_prop_file)
    return out_dir, self._product, fingerprint


class LocalPrebuiltProvider(BaseProvider):
  """A provider that provides a prebuilt image from a local file."""

  def __init__(self, prebuilt_file, simg2img, unsquashfs_path):
    super(LocalPrebuiltProvider, self).__init__()
    self._prebuilt_file = prebuilt_file
    self._simg2img = simg2img
    self._unsquashfs_path = unsquashfs_path

  def prepare(self):
    out_dir = _make_tempdir_deleted_on_exit()
    logging.debug('Extracting artifacts')
    _extract_artifact(self._simg2img, self._unsquashfs_path, out_dir,
                      self._prebuilt_file)

    build_prop_file = os.path.join(out_dir, 'build.prop')
    self.read_build_prop_file(build_prop_file)
    if self._build_variant is None:
      self._build_variant = 'user'  # default to non-eng

    m = re.match(r'((?:bertha|cheets)_\w+)-img-P?\d+\.zip',
                 os.path.basename(self._prebuilt_file))
    if not m:
      sys.exit('Unrecognized file name of prebuilt image archive.')
    product = m.group(1)

    fingerprint = os.path.splitext(os.path.basename(self._prebuilt_file))[0]
    return out_dir, product, fingerprint


class LocalBuildProvider(BaseProvider):
  """A provider that provides a locally built image."""

  def __init__(self, build_fingerprint, skip_build_prop_update,
               added_build_properties, dryrun):
    super(LocalBuildProvider, self).__init__()
    self._build_fingerprint = build_fingerprint
    self._skip_build_prop_update = skip_build_prop_update
    self._added_build_properties = added_build_properties
    self._dryrun = dryrun
    expected_env = ('TARGET_BUILD_VARIANT', 'TARGET_PRODUCT', 'OUT')
    if not all(var in os.environ for var in expected_env):
      sys.exit('Did you run lunch?')
    self._build_variant = os.environ.get('TARGET_BUILD_VARIANT')
    self._target_product = os.environ.get('TARGET_PRODUCT')
    self._out_dir = os.environ.get('OUT')

    # Create the generic device name by extracting the architecture type
    # from the target product.
    generic_device = _GENERIC_DEVICE % dict(
        arch=lib.util.get_product_arch(self._target_product),
        product=lib.util.get_product_name(self._target_product))
    new_prop_value = '%s%s' % (_RO_PRODUCT_DEVICE, generic_device)
    if new_prop_value not in self._added_build_properties:
      self._added_build_properties.append(new_prop_value)

  def prepare(self):
    # Use build fingerprint if set. Otherwise, read it from the text file.
    build_fingerprint = self._build_fingerprint
    if not build_fingerprint:
      fingerprint_filepath = os.path.join(self._out_dir,
                                          'build_fingerprint.txt')
      if os.path.isfile(fingerprint_filepath):
        with open(fingerprint_filepath) as f:
          build_fingerprint = f.read().strip().replace('/', '_')

    # Find the absolute path of build.prop.
    build_prop_file = os.path.join(self._out_dir, 'system/build.prop')
    if not self._skip_build_prop_update:
      self._update_local_build_prop_file(build_prop_file)
    self.read_build_prop_file(build_prop_file, False)
    return self._out_dir, self._target_product, build_fingerprint

  def _update_local_build_prop_file(self, build_prop_file):
    """Updates build.prop of the local prebuilt image."""

    if not build_prop_file:
      logging.warning('Skipping. build_prop_file was not specified.')
      return

    file_updated = False
    for prop in self._added_build_properties:
      key_and_value = prop.split('=')
      # Check whether the property is already in build.prop.
      try:
        current_prop_value = lib.util.check_output('grep',
                                                   '%s=' % (key_and_value[0]),
                                                   build_prop_file).strip()
      except subprocess.CalledProcessError:
        # grep did not find a match
        current_prop_value = None

      if not current_prop_value:
        file_updated = True
        logging.info('Appending "%s" to build.prop...', prop)

        with PreserveTimestamps(build_prop_file) as f:
          lib.util.check_call(
              '/bin/sed', '-i',
              r'$a%s' % prop,
              f.path)
      elif prop != current_prop_value:
        file_updated = True
        logging.info('Setting "%s" to "%s" in build.prop...',
                     key_and_value[0], key_and_value[1])
        with PreserveTimestamps(build_prop_file) as f:
          # Make the changes to build.prop
          lib.util.check_call(
              '/bin/sed', '-i',
              r's/^\(%(_KEY)s=\).*/\1%(_VALUE)s/'
              % {'_KEY': key_and_value[0], '_VALUE': key_and_value[1]},
              f.path)

    if not file_updated:
      logging.info('build.prop does not need to be updated.')
      return

    logging.info('Recreating the system image with the updated build.prop ' +
                 'file...')
    system_dir = os.path.join(self._out_dir, 'system')
    system_image_info_file = os.path.join(
        self._out_dir,
        'obj/PACKAGING/systemimage_intermediates/system_image_info.txt')
    system_image_file = os.path.join(self._out_dir, 'system.img')
    if self._dryrun:
      return
    with PreserveTimestamps(system_image_file) as f:
      # Recreate system.img
      lib.util.check_call(
          './build/tools/releasetools/build_image.py',
          system_dir,
          system_image_info_file,
          f.path,
          system_dir)


class NullProvider(BaseProvider):
  """ Provider used for dry runs """

  def __init__(self):
    super(NullProvider, self).__init__()
    self._build_variant = 'user'
    self._build_version_sdk = 1

  def prepare(self):
    return ('<dir>', '<product>', '<fingerprint>')


def _parse_prebuilt(param):
  m = re.search(
      r'^((?:bertha|cheets)_(?:arm|x86|x86_64))/(user|userdebug|eng)/(P?\d+)$',
      param)
  if not m:
    sys.exit('Invalid format of --use-prebuilt')
  return m.group(1), m.group(2), m.group(3)

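# For example, _parse_prebuilt('cheets_arm/eng/123456') returns
# ('cheets_arm', 'eng', '123456'); any other format exits with an error.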

def _default_mksquashfs_path():
  # Automatically resolve mksquashfs path if possible.
  android_host_out_path = os.environ.get('ANDROID_HOST_OUT')
  if android_host_out_path:
    path = os.path.join(android_host_out_path, 'bin', 'mksquashfs')
    if os.path.isfile(path):
      return path
  path = os.path.join(_SCRIPT_DIR, 'mksquashfs')
  if os.path.isfile(path):
    return path
  return None


def _default_unsquashfs_path():
  # Automatically resolve unsquashfs path if possible.
  _UNSQUASHFS_PATH = '/usr/bin/unsquashfs'
  if os.path.exists(_UNSQUASHFS_PATH):
    return _UNSQUASHFS_PATH
  return None


def _default_shift_uid_py_path():
  # Automatically resolve shift_uid.py path if possible.
  path = os.path.join(_SCRIPT_DIR, 'shift_uid.py')
  if os.path.isfile(path):
    return path
  _SHIFT_UID_PATH = os.path.join(
      lib.util.find_repo_root(),
      'vendor/google_devices/bertha/scripts/shift_uid.py')
  if os.path.exists(_SHIFT_UID_PATH):
    return _SHIFT_UID_PATH
  return None


def _default_simg2img_path():
  # Automatically resolve simg2img path if possible.
  dirs_to_find = []
  if 'ANDROID_HOST_OUT' in os.environ:
    dirs_to_find.append(os.path.join(os.environ.get('ANDROID_HOST_OUT'), 'bin'))
  dirs_to_find.append(_SCRIPT_DIR)
  if 'PATH' in os.environ:
    dirs_to_find += os.environ['PATH'].split(os.pathsep)

  for dir_path in dirs_to_find:
    path = os.path.join(dir_path, 'simg2img')
    if os.path.isfile(path) and os.access(path, os.X_OK):
      return path
  return None


def _resolve_args(args):
  if not args.simg2img_path:
    sys.exit('Cannot determine the path of simg2img')
  if not args.mksquashfs_path:
    sys.exit('Cannot determine the path of mksquashfs')
  if not args.unsquashfs_path:
    sys.exit('Cannot determine the path of unsquashfs. You may need to '
             'install it with sudo apt install squashfs-tools.')


def _parse_args():
  """Parses the arguments."""
  parser = argparse.ArgumentParser(
      formatter_class=argparse.RawDescriptionHelpFormatter,
      description='Push image to Chromebook',
      epilog="""Examples:

To push from local build
$ %(prog)s <remote>

To push from Android build prebuilt
$ %(prog)s --use-prebuilt cheets_arm/eng/123456 <remote>

To push from local prebuilt
$ %(prog)s --use-prebuilt-file path/to/cheets_arm-img-123456.zip <remote>
""")
  parser.add_argument(
      '--push-vendor-image', action='store_true', help='Push vendor image')
  parser.add_argument(
      '--use-prebuilt', metavar='PRODUCT/BUILD_VARIANT/BUILD_ID',
      type=_parse_prebuilt,
      help='Push prebuilt image instead.  Example value: cheets_arm/eng/123456')
  parser.add_argument(
      '--use-prebuilt-file', dest='prebuilt_file', metavar='<path>',
      help='The downloaded image path')
  parser.add_argument(
      '--build-fingerprint', default=os.environ.get('BUILD_FINGERPRINT'),
      help='If set, embed this fingerprint into /etc/lsb-release as the '
      'CHROMEOS_ARC_VERSION value.')
  parser.add_argument(
      '--dryrun', action='store_true',
      help='Do not execute subprocesses.')
  parser.add_argument(
      '--loglevel', default='INFO',
      choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'),
      help='Logging level.')
  parser.add_argument(
      '--simg2img-path', default=_default_simg2img_path(),
      help='Executable path of simg2img')
  parser.add_argument(
      '--mksquashfs-path', default=_default_mksquashfs_path(),
      help='Executable path of mksquashfs')
  parser.add_argument(
      '--unsquashfs-path', default=_default_unsquashfs_path(),
      help='Executable path of unsquashfs')
  parser.add_argument(
      '--shift-uid-py-path', default=_default_shift_uid_py_path(),
      help='Executable path of shift_uid.py')
  parser.add_argument(
      '--shift-ugids', action='store_true', help='Shift UIDs/GIDs recursively')
  parser.add_argument(
      '--force', action='store_true',
      help=('Skip all prompts (e.g., for disabling rootfs verification).  '
            'This may result in the target machine being rebooted.'))
  parser.add_argument(
      '--try-clobber-data', action='store_true',
      help='If currently logged in, also clobber /data and /cache')
  parser.add_argument(
      '--skip_build_prop_update', action='store_true',
      help=('Do not change ro.product.device to "generic_cheets" for local '
            'builds.'))
  parser.add_argument(
      '--push-to-stateful-partition', action='store_true',
      help=('Place the system.raw.img on the stateful partition instead of /. '
            'This is always used for builds that do not fit on /.'))
  parser.add_argument(
      '--add-build-property', action='append', default=[],
      dest='added_build_properties',
      help=('Update build.prop with the given property, e.g. '
            'some.property=true. If the property already exists, its value is '
            'updated; otherwise the property is appended to the end.'))
  parser.add_argument(
      '--enable-assistant-prop', action='store_true',
      help=('Update build.prop with ro.opa.eligible_device=true; '
            'this is required to run Assistant on ChromeOS.'))
  parser.add_argument(
      'remote',
      help=('The target test device. This is passed to the ssh command, so an '
            'IP address or a name registered in your .ssh/config file is '
            'accepted.'))
  args = parser.parse_args()

  _resolve_args(args)
  return args


def main():
  # Set up arguments.
  args = _parse_args()
  logging.basicConfig(level=getattr(logging, args.loglevel))

  if args.enable_assistant_prop:
    args.added_build_properties.append('ro.opa.eligible_device=true')

  simg2img = Simg2img(args.simg2img_path, args.dryrun)

  # Prepare the local source.  A provider is responsible for returning a
  # directory that contains the necessary files to push.  It also needs to
  # return metadata like the product (e.g. cheets_arm) and a build fingerprint.
  if args.dryrun:
    provider = NullProvider()
  elif args.use_prebuilt:
    product, build_variant, build_id = args.use_prebuilt
    provider = PrebuiltProvider(product, build_variant, build_id, simg2img,
                                args.unsquashfs_path)
  elif args.prebuilt_file:
    provider = LocalPrebuiltProvider(args.prebuilt_file, simg2img,
                                     args.unsquashfs_path)
  else:
    provider = LocalBuildProvider(args.build_fingerprint,
                                  args.skip_build_prop_update,
                                  args.added_build_properties,
                                  args.dryrun)

  # Actually prepare the files to push.
  out, product, fingerprint = provider.prepare()

  # Update the image.
  remote_proxy = RemoteProxy(args.remote, args.dryrun)
  _verify_android_sdk_version(remote_proxy, provider, args.dryrun)
  _verify_machine_arch(remote_proxy, product, args.dryrun)

  if args.try_clobber_data:
    clobber_data = True
  else:
    clobber_data = _detect_cert_inconsistency(
        args.force, remote_proxy, provider.get_build_variant(), args.dryrun)

  logging.info('Converting images to raw images...')
  (large_image_list, image_list) = _convert_images(
      simg2img, out, product, args.push_vendor_image, args.shift_ugids,
      args.mksquashfs_path, args.unsquashfs_path, args.shift_uid_py_path,
      args.dryrun)

  is_selinux_policy_updated = _is_selinux_policy_updated(remote_proxy, out,
                                                         args.dryrun)
  is_selinux_file_contexts_updated = _is_selinux_file_contexts_updated(
      remote_proxy, out, args.dryrun)
  total_bytes = sum(os.stat(filename).st_size for filename in large_image_list)
  free_bytes = _get_free_space(remote_proxy)
  push_to_stateful = (args.push_to_stateful_partition or
                      total_bytes >= free_bytes)

  if not args.push_to_stateful_partition and push_to_stateful:
    logging.info('Pushing image to stateful partition '
                 'since it does not fit on / (%.2f MiB, %.2f free MiB).',
                 float(total_bytes) / _MB, float(free_bytes) / _MB)

  with ImageUpdateMode(remote_proxy, is_selinux_policy_updated,
                       push_to_stateful, clobber_data, args.force):
    is_debuggable = 'user' != provider.get_build_variant()
    try:
      remote_proxy.check_call(' '.join([
          '/bin/sed', '-i',
          r'"s/^\(export ANDROID_DEBUGGABLE=\).*/\1%(_IS_DEBUGGABLE)d/"',
          '/etc/init/arc-setup-env'
      ]) % {'_IS_DEBUGGABLE': is_debuggable})
      # Unconditionally disable font sharing so that 'adb sync' will always
      # work. Disabling the feature is safe because locally built system
      # images always have all fonts. Images from ab/ also have all fonts.
      remote_proxy.check_call(' '.join([
          '/bin/sed', '-i',
          r'"s/^\(export SHARE_FONTS=\).*/\1%(_SHARE_FONTS)d/"',
          '/etc/init/arc-setup-env'
      ]) % {'_SHARE_FONTS': False})
    except Exception:
      # The device is old and doesn't have arc-setup-env. Fall back to the
      # older method.
      # TODO(yusukes): Remove the fallback code.
      remote_proxy.check_call(' '.join([
          '/bin/sed', '-i',
          r'"s/^\(env ANDROID_DEBUGGABLE=\).*/\1%(_IS_DEBUGGABLE)d/"',
          '/etc/init/arc-setup.conf'
      ]) % {'_IS_DEBUGGABLE': is_debuggable})
      remote_proxy.check_call(' '.join([
          '/bin/sed', '-i',
          r'"s/^\(env SHARE_FONTS=\).*/\1%(_SHARE_FONTS)d/"',
          '/etc/init/arc-system-mount.conf'
      ]) % {'_SHARE_FONTS': False})

    logging.info('Syncing image files to ChromeOS...')
    if large_image_list:
      remote_proxy.sync(large_image_list,
                        _ANDROID_ROOT_STATEFUL if push_to_stateful else
                        _ANDROID_ROOT)
    if image_list:
      remote_proxy.sync(image_list, _ANDROID_ROOT)
    _update_build_fingerprint(remote_proxy, fingerprint)
    if is_selinux_file_contexts_updated:
      _update_selinux_file_contexts(remote_proxy, out)
    if is_selinux_policy_updated:
      _update_selinux_policy(remote_proxy, out)


if __name__ == '__main__':
  main()