# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import logging
import os

from py_utils import cloud_storage
from dependency_manager import archive_info
from dependency_manager import cloud_storage_info
from dependency_manager import dependency_info
from dependency_manager import exceptions
from dependency_manager import local_path_info
from dependency_manager import uploader


class BaseConfig(object):
  """A basic config class for use with the DependencyManager.

  Initiated with a json file in the following format:

  { "config_type": "BaseConfig",
    "dependencies": {
      "dep_name1": {
        "cloud_storage_base_folder": "base_folder1",
        "cloud_storage_bucket": "bucket1",
        "file_info": {
          "platform1": {
            "cloud_storage_hash": "hash_for_platform1",
            "download_path": "download_path111",
            "version_in_cs": "1.11.1.11.",
            "local_paths": ["local_path1110", "local_path1111"]
          },
          "platform2": {
            "cloud_storage_hash": "hash_for_platform2",
            "download_path": "download_path2",
            "local_paths": ["local_path20", "local_path21"]
          },
          ...
        }
      },
      "dependency_name_2": {
        ...
      },
      ...
    }
  }

  Required fields: "dependencies" and "config_type".
  Note that config_type must be "BaseConfig"

  Assumptions:
    "cloud_storage_base_folder" is a top level folder in the given
      "cloud_storage_bucket" where all of the dependency files are stored
      at "dependency_name"_"cloud_storage_hash".

    "download_path" and all paths in "local_paths" are relative to the
      config file's location.

    All or none of the following cloud storage related fields must be
      included in each platform dictionary:
      "cloud_storage_hash", "download_path", "cs_remote_path"

    "version_in_cs" is an optional cloud storage field, but is dependent
      on the above cloud storage related fields.

  Also note that platform names are often of the form os_architecture.
  Ex: "win_AMD64"

  More information on the fields can be found in dependencies_info.py
  """

  def __init__(self, file_path, writable=False):
    """ Initialize a BaseConfig for the DependencyManager.

    Args:
      writable: False: This config will be used to lookup information.
                True: This config will be used to update information.

      file_path: Path to a file containing a json dictionary in the expected
                 json format for this config class. Base format expected:

                 { "config_type": config_type,
                   "dependencies": dependencies_dict }

                 config_type: must match the return value of GetConfigType.
                 dependencies: A dictionary with the information needed to
                     create dependency_info instances for the given
                     dependencies.

                 See dependency_info.py for more information.

    Raises:
      ValueError: If |file_path| is falsy, or the file's config_type does not
          match GetConfigType().
      EmptyConfigError: If the file is missing (read-only mode) or empty.
    """
    self._config_path = file_path
    self._writable = writable
    # Upload jobs queued by AddCloudStorageDependencyUpdateJob and flushed by
    # ExecuteUpdateJobs.
    self._pending_uploads = []
    if not self._config_path:
      raise ValueError('Must supply config file path.')
    if not os.path.exists(self._config_path):
      if not writable:
        raise exceptions.EmptyConfigError(file_path)
      # Writable mode may start from scratch: create an empty config on disk
      # so later _IsDirty() comparisons have a baseline to diff against.
      self._config_data = {}
      self._WriteConfigToFile(self._config_path, dependencies=self._config_data)
    else:
      with open(file_path, 'r') as f:
        config_data = json.load(f)
      if not config_data:
        raise exceptions.EmptyConfigError(file_path)
      config_type = config_data.pop('config_type', None)
      if config_type != self.GetConfigType():
        raise ValueError(
            'Supplied config_type (%s) is not the expected type (%s) in file '
            '%s' % (config_type, self.GetConfigType(), file_path))
      self._config_data = config_data.get('dependencies', {})

  def IterDependencyInfo(self):
    """ Yields a DependencyInfo for each dependency/platform pair.

    Raises:
      ReadWriteError: If called when the config is writable.
      ConfigError: If a platform has cloud storage info but no hash.
      ValueError: If any of the dependencies contain partial information for
          downloading from cloud_storage. (See dependency_info.py)
    """
    if self._writable:
      raise exceptions.ReadWriteError(
          'Trying to read dependency info from a writable config. File for '
          'config: %s' % self._config_path)
    # All paths in the config are relative to the config file's directory.
    base_path = os.path.dirname(self._config_path)
    for dependency in self._config_data:
      dependency_dict = self._config_data.get(dependency)
      platforms_dict = dependency_dict.get('file_info', {})
      for platform in platforms_dict:
        platform_info = platforms_dict.get(platform)

        local_info = None
        local_paths = platform_info.get('local_paths', [])
        if local_paths:
          paths = []
          for path in local_paths:
            path = self._FormatPath(path)
            paths.append(os.path.abspath(os.path.join(base_path, path)))
          local_info = local_path_info.LocalPathInfo(paths)

        cs_info = None
        cs_bucket = dependency_dict.get('cloud_storage_bucket')
        cs_base_folder = dependency_dict.get('cloud_storage_base_folder', '')
        download_path = platform_info.get('download_path')
        if download_path:
          download_path = self._FormatPath(download_path)
          download_path = os.path.abspath(
              os.path.join(base_path, download_path))

          cs_hash = platform_info.get('cloud_storage_hash')
          if not cs_hash:
            # Fixed: the message was previously passed logging-style
            # (message, arg, arg), which left the %s placeholders
            # unformatted in the raised exception.
            raise exceptions.ConfigError(
                'Dependency %s has cloud storage info on platform %s, but is '
                'missing a cloud storage hash.' % (dependency, platform))
          cs_remote_path = self._CloudStorageRemotePath(
              dependency, cs_hash, cs_base_folder)
          version_in_cs = platform_info.get('version_in_cs')

          zip_info = None
          path_within_archive = platform_info.get('path_within_archive')
          if path_within_archive:
            unzip_path = os.path.abspath(
                os.path.join(os.path.dirname(download_path),
                             '%s_%s_%s' % (dependency, platform, cs_hash)))
            # Glob matching any previous (stale) unzip dirs for this
            # dependency/platform, i.e. any 40-hex-char hash suffix.
            stale_unzip_path_glob = os.path.abspath(
                os.path.join(os.path.dirname(download_path),
                             '%s_%s_%s' % (dependency, platform,
                                           '[0-9a-f]' * 40)))
            zip_info = archive_info.ArchiveInfo(
                download_path, unzip_path, path_within_archive,
                stale_unzip_path_glob)

          cs_info = cloud_storage_info.CloudStorageInfo(
              cs_bucket, cs_hash, download_path, cs_remote_path,
              version_in_cs=version_in_cs, archive_info=zip_info)

        dep_info = dependency_info.DependencyInfo(
            dependency, platform, self._config_path,
            local_path_info=local_info, cloud_storage_info=cs_info)
        yield dep_info

  @classmethod
  def GetConfigType(cls):
    """Return the config_type string this class expects in its json file."""
    return 'BaseConfig'

  @property
  def config_path(self):
    return self._config_path

  def AddCloudStorageDependencyUpdateJob(
      self, dependency, platform, dependency_path, version=None,
      execute_job=True):
    """Update the file downloaded from cloud storage for a dependency/platform.

    Upload a new file to cloud storage for the given dependency and platform
    pair and update the cloud storage hash and the version for the given pair.

    Example usage:
      The following should update the default platform for 'dep_name':
          UpdateCloudStorageDependency('dep_name', 'default', 'path/to/file')

      The following should update both the mac and win platforms for
      'dep_name', or neither if either update fails:
          UpdateCloudStorageDependency(
              'dep_name', 'mac_x86_64', 'path/to/mac/file', execute_job=False)
          UpdateCloudStorageDependency(
              'dep_name', 'win_AMD64', 'path/to/win/file', execute_job=False)
          ExecuteUpdateJobs()

    Args:
      dependency: The dependency to update.
      platform: The platform to update the dependency info for.
      dependency_path: Path to the new dependency to be used.
      version: Version of the updated dependency, for checking future updates
          against.
      execute_job: True if the config should be written to disk and the file
          should be uploaded to cloud storage after the update. False if
          multiple updates should be performed atomically. Must call
          ExecuteUpdateJobs after all non-executed jobs are added to complete
          the update.

    Raises:
      ReadWriteError: If the config was not initialized as writable, or if
          |execute_job| is True but the config has update jobs still pending
          execution.
      ValueError: If no information exists in the config for |dependency| on
          |platform|.
    """
    self._ValidateIsConfigUpdatable(
        execute_job=execute_job, dependency=dependency, platform=platform)
    cs_hash = cloud_storage.CalculateHash(dependency_path)
    if version:
      self._SetPlatformData(dependency, platform, 'version_in_cs', version)
    self._SetPlatformData(dependency, platform, 'cloud_storage_hash', cs_hash)

    cs_base_folder = self._GetPlatformData(
        dependency, platform, 'cloud_storage_base_folder')
    cs_bucket = self._GetPlatformData(
        dependency, platform, 'cloud_storage_bucket')
    cs_remote_path = self._CloudStorageRemotePath(
        dependency, cs_hash, cs_base_folder)
    self._pending_uploads.append(uploader.CloudStorageUploader(
        cs_bucket, cs_remote_path, dependency_path))
    if execute_job:
      self.ExecuteUpdateJobs()

  def ExecuteUpdateJobs(self, force=False):
    """Write all config changes to the config_path specified in __init__.

    Upload all files pending upload and then write the updated config to
    file. Attempt to remove all uploaded files on failure.

    Args:
      force: True if files should be uploaded to cloud storage even if a
          file already exists in the upload location.

    Returns:
      True: if the config was dirty and the upload succeeded.
      False: if the config was not dirty.

    Raises:
      CloudStorageUploadConflictError: If |force| is False and the potential
          upload location of a file already exists.
      CloudStorageError: If copying an existing file to the backup location
          or uploading a new file fails.
    """
    self._ValidateIsConfigUpdatable()
    if not self._IsDirty():
      logging.info('ExecuteUpdateJobs called on clean config')
      return False
    if not self._pending_uploads:
      logging.debug('No files needing upload.')
    else:
      try:
        for item_pending_upload in self._pending_uploads:
          item_pending_upload.Upload(force)
        self._WriteConfigToFile(self._config_path, self._config_data)
        self._pending_uploads = []
      except:
        # Attempt to rollback the update in any instance of failure, even user
        # interrupt via Ctrl+C; but don't consume the exception. The bare
        # except is intentional so BaseException (e.g. KeyboardInterrupt) also
        # triggers the rollback. Roll back in reverse upload order.
        logging.error('Update failed, attempting to roll it back.')
        for upload_item in reversed(self._pending_uploads):
          upload_item.Rollback()
        raise
    return True

  def GetVersion(self, dependency, platform):
    """Return the Version information for the given dependency."""
    return self._GetPlatformData(
        dependency, platform, data_type='version_in_cs')

  def _IsDirty(self):
    """Return True if in-memory data differs from what is on disk."""
    with open(self._config_path, 'r') as fstream:
      curr_config_data = json.load(fstream)
    curr_config_data = curr_config_data.get('dependencies', {})
    return self._config_data != curr_config_data

  def _SetPlatformData(self, dependency, platform, data_type, data):
    """Set one value for |dependency|/|platform|, validating both exist.

    Bucket and base-folder values live at the dependency level; everything
    else is stored per-platform under 'file_info'.
    """
    self._ValidateIsConfigWritable()
    dependency_dict = self._config_data.get(dependency, {})
    platform_dict = dependency_dict.get('file_info', {}).get(platform)
    if not platform_dict:
      raise ValueError('No platform data for platform %s on dependency %s' %
                       (platform, dependency))
    if (data_type == 'cloud_storage_bucket' or
        data_type == 'cloud_storage_base_folder'):
      self._config_data[dependency][data_type] = data
    else:
      self._config_data[dependency]['file_info'][platform][data_type] = data

  def _GetPlatformData(self, dependency, platform, data_type=None):
    """Return one value (or the whole platform dict if |data_type| is None).

    Raises:
      ValueError: If |dependency| or |platform| is not in the config.
    """
    dependency_dict = self._config_data.get(dependency, {})
    if not dependency_dict:
      raise ValueError('Dependency %s is not in config.' % dependency)
    platform_dict = dependency_dict.get('file_info', {}).get(platform)
    if not platform_dict:
      raise ValueError('No platform data for platform %s on dependency %s' %
                       (platform, dependency))
    if data_type:
      # Bucket and base-folder are stored at the dependency level, not in the
      # per-platform dict.
      if (data_type == 'cloud_storage_bucket' or
          data_type == 'cloud_storage_base_folder'):
        return dependency_dict.get(data_type)
      return platform_dict.get(data_type)
    return platform_dict

  def _ValidateIsConfigUpdatable(
      self, execute_job=False, dependency=None, platform=None):
    """Raise unless the config is writable, clean (if executing), and knows
    about |dependency|/|platform| (when given)."""
    self._ValidateIsConfigWritable()
    if self._IsDirty() and execute_job:
      # Fixed: the implicitly-concatenated literals were missing a space
      # ('without' + 'using' rendered as "withoutusing").
      raise exceptions.ReadWriteError(
          'A change has already been made to this config. Either call without '
          'using the execute_job option or first call ExecuteUpdateJobs().')
    if dependency and not self._config_data.get(dependency):
      raise ValueError('Cannot update information because dependency %s does '
                       'not exist.' % dependency)
    if platform and not self._GetPlatformData(dependency, platform):
      raise ValueError('No dependency info is available for the given '
                       'dependency: %s' % dependency)

  def _ValidateIsConfigWritable(self):
    if not self._writable:
      raise exceptions.ReadWriteError(
          'Trying to update the information from a read-only config. '
          'File for config: %s' % self._config_path)

  @staticmethod
  def _CloudStorageRemotePath(dependency, cs_hash, cs_base_folder):
    """Return '<base_folder>/<dep>_<hash>' (no folder prefix if empty)."""
    cs_remote_file = '%s_%s' % (dependency, cs_hash)
    cs_remote_path = cs_remote_file if not cs_base_folder else (
        '%s/%s' % (cs_base_folder, cs_remote_file))
    return cs_remote_path

  @classmethod
  def _FormatPath(cls, file_path):
    """ Format |file_path| for the current file system.

    We may be downloading files for another platform, so paths must be
    downloadable on the current system.
    """
    if not file_path:
      return file_path
    if os.path.sep != '\\':
      return file_path.replace('\\', os.path.sep)
    elif os.path.sep != '/':
      return file_path.replace('/', os.path.sep)
    return file_path

  @classmethod
  def _WriteConfigToFile(cls, file_path, dependencies=None):
    """Serialize |dependencies| (with config_type) as json to |file_path|.

    Creates the parent directory if needed. Returns the dict written.
    """
    json_dict = cls._GetJsonDict(dependencies)
    file_dir = os.path.dirname(file_path)
    if not os.path.exists(file_dir):
      os.makedirs(file_dir)
    with open(file_path, 'w') as outfile:
      json.dump(
          json_dict, outfile, indent=2, sort_keys=True, separators=(',', ': '))
    return json_dict

  @classmethod
  def _GetJsonDict(cls, dependencies=None):
    """Wrap |dependencies| in the on-disk config envelope."""
    dependencies = dependencies or {}
    json_dict = {'config_type': cls.GetConfigType(),
                 'dependencies': dependencies}
    return json_dict