1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "chrome/browser/extensions/updater/extension_downloader.h"
6
7 #include <utility>
8
9 #include "base/bind.h"
10 #include "base/command_line.h"
11 #include "base/files/file_path.h"
12 #include "base/location.h"
13 #include "base/logging.h"
14 #include "base/metrics/histogram.h"
15 #include "base/metrics/sparse_histogram.h"
16 #include "base/stl_util.h"
17 #include "base/strings/string_number_conversions.h"
18 #include "base/strings/string_util.h"
19 #include "base/strings/stringprintf.h"
20 #include "base/time/time.h"
21 #include "base/version.h"
22 #include "chrome/browser/chrome_notification_types.h"
23 #include "chrome/browser/extensions/updater/extension_cache.h"
24 #include "chrome/browser/extensions/updater/request_queue_impl.h"
25 #include "chrome/common/chrome_switches.h"
26 #include "chrome/common/chrome_version_info.h"
27 #include "chrome/common/extensions/manifest_url_handler.h"
28 #include "content/public/browser/browser_thread.h"
29 #include "content/public/browser/notification_details.h"
30 #include "content/public/browser/notification_service.h"
31 #include "extensions/browser/updater/safe_manifest_parser.h"
32 #include "extensions/common/extension_urls.h"
33 #include "google_apis/gaia/identity_provider.h"
34 #include "net/base/backoff_entry.h"
35 #include "net/base/load_flags.h"
36 #include "net/base/net_errors.h"
37 #include "net/http/http_request_headers.h"
38 #include "net/http/http_status_code.h"
39 #include "net/url_request/url_fetcher.h"
40 #include "net/url_request/url_request_context_getter.h"
41 #include "net/url_request/url_request_status.h"
42
43 using base::Time;
44 using base::TimeDelta;
45 using content::BrowserThread;
46
47 namespace extensions {
48
49 const char ExtensionDownloader::kBlacklistAppID[] = "com.google.crx.blacklist";
50
51 namespace {
52
53 const net::BackoffEntry::Policy kDefaultBackoffPolicy = {
54 // Number of initial errors (in sequence) to ignore before applying
55 // exponential back-off rules.
56 0,
57
58 // Initial delay for exponential back-off in ms.
59 2000,
60
61 // Factor by which the waiting time will be multiplied.
62 2,
63
64 // Fuzzing percentage. ex: 10% will spread requests randomly
65 // between 90%-100% of the calculated time.
66 0.1,
67
68 // Maximum amount of time we are willing to delay our request in ms.
69 -1,
70
71 // Time to keep an entry from being discarded even when it
72 // has no significant state, -1 to never discard.
73 -1,
74
75 // Don't use initial delay unless the last request was an error.
76 false,
77 };
78
// Query parameter used to cycle through the signed-in users when a protected
// webstore download fails with the current credentials.
const char kAuthUserQueryKey[] = "authuser";

// Hard limits on credential retries.
const int kMaxAuthUserValue = 10;
const int kMaxOAuth2Attempts = 3;

// Install-source values reported back to the update server.
const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
const char kDefaultInstallSource[] = "";

const char kGoogleDotCom[] = "google.com";
const char kTokenServiceConsumerId[] = "extension_downloader";
const char kWebstoreOAuth2Scope[] =
    "https://www.googleapis.com/auth/chromewebstore.readonly";
91
// Records a retry-count sample for |url|, split into separate histograms for
// google.com and non-google.com update URLs.
#define RETRY_HISTOGRAM(name, retry_count, url)                           \
  if ((url).DomainIs(kGoogleDotCom)) {                                    \
    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
                                retry_count,                              \
                                1,                                        \
                                kMaxRetries,                              \
                                kMaxRetries + 1);                         \
  } else {                                                                \
    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl",  \
                                retry_count,                              \
                                1,                                        \
                                kMaxRetries,                              \
                                kMaxRetries + 1);                         \
  }
106
ShouldRetryRequest(const net::URLRequestStatus & status,int response_code)107 bool ShouldRetryRequest(const net::URLRequestStatus& status,
108 int response_code) {
109 // Retry if the response code is a server error, or the request failed because
110 // of network errors as opposed to file errors.
111 return ((response_code >= 500 && status.is_success()) ||
112 status.status() == net::URLRequestStatus::FAILED);
113 }
114
115 // This parses and updates a URL query such that the value of the |authuser|
116 // query parameter is incremented by 1. If parameter was not present in the URL,
117 // it will be added with a value of 1. All other query keys and values are
118 // preserved as-is. Returns |false| if the user index exceeds a hard-coded
119 // maximum.
IncrementAuthUserIndex(GURL * url)120 bool IncrementAuthUserIndex(GURL* url) {
121 int user_index = 0;
122 std::string old_query = url->query();
123 std::vector<std::string> new_query_parts;
124 url::Component query(0, old_query.length());
125 url::Component key, value;
126 while (url::ExtractQueryKeyValue(old_query.c_str(), &query, &key, &value)) {
127 std::string key_string = old_query.substr(key.begin, key.len);
128 std::string value_string = old_query.substr(value.begin, value.len);
129 if (key_string == kAuthUserQueryKey) {
130 base::StringToInt(value_string, &user_index);
131 } else {
132 new_query_parts.push_back(base::StringPrintf(
133 "%s=%s", key_string.c_str(), value_string.c_str()));
134 }
135 }
136 if (user_index >= kMaxAuthUserValue)
137 return false;
138 new_query_parts.push_back(
139 base::StringPrintf("%s=%d", kAuthUserQueryKey, user_index + 1));
140 std::string new_query_string = JoinString(new_query_parts, '&');
141 url::Component new_query(0, new_query_string.size());
142 url::Replacements<char> replacements;
143 replacements.SetQuery(new_query_string.c_str(), new_query);
144 *url = url->ReplaceComponents(replacements);
145 return true;
146 }
147
148 } // namespace
149
UpdateDetails(const std::string & id,const Version & version)150 UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
151 : id(id), version(version) {}
152
~UpdateDetails()153 UpdateDetails::~UpdateDetails() {}
154
ExtensionFetch()155 ExtensionDownloader::ExtensionFetch::ExtensionFetch()
156 : url(), credentials(CREDENTIALS_NONE) {
157 }
158
ExtensionFetch(const std::string & id,const GURL & url,const std::string & package_hash,const std::string & version,const std::set<int> & request_ids)159 ExtensionDownloader::ExtensionFetch::ExtensionFetch(
160 const std::string& id,
161 const GURL& url,
162 const std::string& package_hash,
163 const std::string& version,
164 const std::set<int>& request_ids)
165 : id(id),
166 url(url),
167 package_hash(package_hash),
168 version(version),
169 request_ids(request_ids),
170 credentials(CREDENTIALS_NONE),
171 oauth2_attempt_count(0) {
172 }
173
~ExtensionFetch()174 ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
175
ExtensionDownloader(ExtensionDownloaderDelegate * delegate,net::URLRequestContextGetter * request_context)176 ExtensionDownloader::ExtensionDownloader(
177 ExtensionDownloaderDelegate* delegate,
178 net::URLRequestContextGetter* request_context)
179 : OAuth2TokenService::Consumer(kTokenServiceConsumerId),
180 delegate_(delegate),
181 request_context_(request_context),
182 manifests_queue_(&kDefaultBackoffPolicy,
183 base::Bind(&ExtensionDownloader::CreateManifestFetcher,
184 base::Unretained(this))),
185 extensions_queue_(&kDefaultBackoffPolicy,
186 base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
187 base::Unretained(this))),
188 extension_cache_(NULL),
189 enable_extra_update_metrics_(false),
190 weak_ptr_factory_(this) {
191 DCHECK(delegate_);
192 DCHECK(request_context_.get());
193 }
194
~ExtensionDownloader()195 ExtensionDownloader::~ExtensionDownloader() {}
196
AddExtension(const Extension & extension,int request_id)197 bool ExtensionDownloader::AddExtension(const Extension& extension,
198 int request_id) {
199 // Skip extensions with empty update URLs converted from user
200 // scripts.
201 if (extension.converted_from_user_script() &&
202 ManifestURL::GetUpdateURL(&extension).is_empty()) {
203 return false;
204 }
205
206 // If the extension updates itself from the gallery, ignore any update URL
207 // data. At the moment there is no extra data that an extension can
208 // communicate to the the gallery update servers.
209 std::string update_url_data;
210 if (!ManifestURL::UpdatesFromGallery(&extension))
211 update_url_data = delegate_->GetUpdateUrlData(extension.id());
212
213 std::string install_source;
214 bool force_update = delegate_->ShouldForceUpdate(extension.id(),
215 &install_source);
216 return AddExtensionData(extension.id(),
217 *extension.version(),
218 extension.GetType(),
219 ManifestURL::GetUpdateURL(&extension),
220 update_url_data,
221 request_id,
222 force_update,
223 install_source);
224 }
225
AddPendingExtension(const std::string & id,const GURL & update_url,int request_id)226 bool ExtensionDownloader::AddPendingExtension(const std::string& id,
227 const GURL& update_url,
228 int request_id) {
229 // Use a zero version to ensure that a pending extension will always
230 // be updated, and thus installed (assuming all extensions have
231 // non-zero versions).
232 Version version("0.0.0.0");
233 DCHECK(version.IsValid());
234
235 return AddExtensionData(id,
236 version,
237 Manifest::TYPE_UNKNOWN,
238 update_url,
239 std::string(),
240 request_id,
241 false,
242 std::string());
243 }
244
StartAllPending(ExtensionCache * cache)245 void ExtensionDownloader::StartAllPending(ExtensionCache* cache) {
246 if (cache) {
247 extension_cache_ = cache;
248 extension_cache_->Start(base::Bind(
249 &ExtensionDownloader::DoStartAllPending,
250 weak_ptr_factory_.GetWeakPtr()));
251 } else {
252 DoStartAllPending();
253 }
254 }
255
DoStartAllPending()256 void ExtensionDownloader::DoStartAllPending() {
257 ReportStats();
258 url_stats_ = URLStats();
259
260 for (FetchMap::iterator it = fetches_preparing_.begin();
261 it != fetches_preparing_.end(); ++it) {
262 std::vector<linked_ptr<ManifestFetchData> >& list = it->second;
263 for (size_t i = 0; i < list.size(); ++i) {
264 StartUpdateCheck(scoped_ptr<ManifestFetchData>(list[i].release()));
265 }
266 }
267 fetches_preparing_.clear();
268 }
269
StartBlacklistUpdate(const std::string & version,const ManifestFetchData::PingData & ping_data,int request_id)270 void ExtensionDownloader::StartBlacklistUpdate(
271 const std::string& version,
272 const ManifestFetchData::PingData& ping_data,
273 int request_id) {
274 // Note: it is very important that we use the https version of the update
275 // url here to avoid DNS hijacking of the blacklist, which is not validated
276 // by a public key signature like .crx files are.
277 scoped_ptr<ManifestFetchData> blacklist_fetch(CreateManifestFetchData(
278 extension_urls::GetWebstoreUpdateUrl(), request_id));
279 DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
280 blacklist_fetch->AddExtension(kBlacklistAppID,
281 version,
282 &ping_data,
283 std::string(),
284 kDefaultInstallSource,
285 false);
286 StartUpdateCheck(blacklist_fetch.Pass());
287 }
288
SetWebstoreIdentityProvider(scoped_ptr<IdentityProvider> identity_provider)289 void ExtensionDownloader::SetWebstoreIdentityProvider(
290 scoped_ptr<IdentityProvider> identity_provider) {
291 identity_provider_.swap(identity_provider);
292 }
293
AddExtensionData(const std::string & id,const Version & version,Manifest::Type extension_type,const GURL & extension_update_url,const std::string & update_url_data,int request_id,bool force_update,const std::string & install_source_override)294 bool ExtensionDownloader::AddExtensionData(
295 const std::string& id,
296 const Version& version,
297 Manifest::Type extension_type,
298 const GURL& extension_update_url,
299 const std::string& update_url_data,
300 int request_id,
301 bool force_update,
302 const std::string& install_source_override) {
303 GURL update_url(extension_update_url);
304 // Skip extensions with non-empty invalid update URLs.
305 if (!update_url.is_empty() && !update_url.is_valid()) {
306 LOG(WARNING) << "Extension " << id << " has invalid update url "
307 << update_url;
308 return false;
309 }
310
311 // Make sure we use SSL for store-hosted extensions.
312 if (extension_urls::IsWebstoreUpdateUrl(update_url) &&
313 !update_url.SchemeIsSecure())
314 update_url = extension_urls::GetWebstoreUpdateUrl();
315
316 // Skip extensions with empty IDs.
317 if (id.empty()) {
318 LOG(WARNING) << "Found extension with empty ID";
319 return false;
320 }
321
322 if (update_url.DomainIs(kGoogleDotCom)) {
323 url_stats_.google_url_count++;
324 } else if (update_url.is_empty()) {
325 url_stats_.no_url_count++;
326 // Fill in default update URL.
327 update_url = extension_urls::GetWebstoreUpdateUrl();
328 } else {
329 url_stats_.other_url_count++;
330 }
331
332 switch (extension_type) {
333 case Manifest::TYPE_THEME:
334 ++url_stats_.theme_count;
335 break;
336 case Manifest::TYPE_EXTENSION:
337 case Manifest::TYPE_USER_SCRIPT:
338 ++url_stats_.extension_count;
339 break;
340 case Manifest::TYPE_HOSTED_APP:
341 case Manifest::TYPE_LEGACY_PACKAGED_APP:
342 ++url_stats_.app_count;
343 break;
344 case Manifest::TYPE_PLATFORM_APP:
345 ++url_stats_.platform_app_count;
346 break;
347 case Manifest::TYPE_UNKNOWN:
348 default:
349 ++url_stats_.pending_count;
350 break;
351 }
352
353 std::vector<GURL> update_urls;
354 update_urls.push_back(update_url);
355 // If metrics are enabled, also add to ManifestFetchData for the
356 // webstore update URL.
357 if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
358 enable_extra_update_metrics_) {
359 update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
360 }
361
362 for (size_t i = 0; i < update_urls.size(); ++i) {
363 DCHECK(!update_urls[i].is_empty());
364 DCHECK(update_urls[i].is_valid());
365
366 std::string install_source = i == 0 ?
367 kDefaultInstallSource : kNotFromWebstoreInstallSource;
368 if (!install_source_override.empty()) {
369 install_source = install_source_override;
370 }
371
372 ManifestFetchData::PingData ping_data;
373 ManifestFetchData::PingData* optional_ping_data = NULL;
374 if (delegate_->GetPingDataForExtension(id, &ping_data))
375 optional_ping_data = &ping_data;
376
377 // Find or create a ManifestFetchData to add this extension to.
378 bool added = false;
379 FetchMap::iterator existing_iter = fetches_preparing_.find(
380 std::make_pair(request_id, update_urls[i]));
381 if (existing_iter != fetches_preparing_.end() &&
382 !existing_iter->second.empty()) {
383 // Try to add to the ManifestFetchData at the end of the list.
384 ManifestFetchData* existing_fetch = existing_iter->second.back().get();
385 if (existing_fetch->AddExtension(id, version.GetString(),
386 optional_ping_data, update_url_data,
387 install_source,
388 force_update)) {
389 added = true;
390 }
391 }
392 if (!added) {
393 // Otherwise add a new element to the list, if the list doesn't exist or
394 // if its last element is already full.
395 linked_ptr<ManifestFetchData> fetch(
396 CreateManifestFetchData(update_urls[i], request_id));
397 fetches_preparing_[std::make_pair(request_id, update_urls[i])].
398 push_back(fetch);
399 added = fetch->AddExtension(id, version.GetString(),
400 optional_ping_data,
401 update_url_data,
402 install_source,
403 force_update);
404 DCHECK(added);
405 }
406 }
407
408 return true;
409 }
410
ReportStats() const411 void ExtensionDownloader::ReportStats() const {
412 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
413 url_stats_.extension_count);
414 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
415 url_stats_.theme_count);
416 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
417 url_stats_.app_count);
418 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
419 url_stats_.platform_app_count);
420 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
421 url_stats_.pending_count);
422 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
423 url_stats_.google_url_count);
424 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
425 url_stats_.other_url_count);
426 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
427 url_stats_.no_url_count);
428 }
429
StartUpdateCheck(scoped_ptr<ManifestFetchData> fetch_data)430 void ExtensionDownloader::StartUpdateCheck(
431 scoped_ptr<ManifestFetchData> fetch_data) {
432 const std::set<std::string>& id_set(fetch_data->extension_ids());
433
434 if (CommandLine::ForCurrentProcess()->HasSwitch(
435 switches::kDisableBackgroundNetworking)) {
436 NotifyExtensionsDownloadFailed(id_set,
437 fetch_data->request_ids(),
438 ExtensionDownloaderDelegate::DISABLED);
439 return;
440 }
441
442 RequestQueue<ManifestFetchData>::iterator i;
443 for (i = manifests_queue_.begin(); i != manifests_queue_.end(); ++i) {
444 if (fetch_data->full_url() == i->full_url()) {
445 // This url is already scheduled to be fetched.
446 i->Merge(*fetch_data);
447 return;
448 }
449 }
450
451 if (manifests_queue_.active_request() &&
452 manifests_queue_.active_request()->full_url() == fetch_data->full_url()) {
453 manifests_queue_.active_request()->Merge(*fetch_data);
454 } else {
455 UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
456 fetch_data->full_url().possibly_invalid_spec().length());
457
458 manifests_queue_.ScheduleRequest(fetch_data.Pass());
459 }
460 }
461
CreateManifestFetcher()462 void ExtensionDownloader::CreateManifestFetcher() {
463 if (VLOG_IS_ON(2)) {
464 std::vector<std::string> id_vector(
465 manifests_queue_.active_request()->extension_ids().begin(),
466 manifests_queue_.active_request()->extension_ids().end());
467 std::string id_list = JoinString(id_vector, ',');
468 VLOG(2) << "Fetching " << manifests_queue_.active_request()->full_url()
469 << " for " << id_list;
470 }
471
472 manifest_fetcher_.reset(net::URLFetcher::Create(
473 kManifestFetcherId, manifests_queue_.active_request()->full_url(),
474 net::URLFetcher::GET, this));
475 manifest_fetcher_->SetRequestContext(request_context_.get());
476 manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
477 net::LOAD_DO_NOT_SAVE_COOKIES |
478 net::LOAD_DISABLE_CACHE);
479 // Update checks can be interrupted if a network change is detected; this is
480 // common for the retail mode AppPack on ChromeOS. Retrying once should be
481 // enough to recover in those cases; let the fetcher retry up to 3 times
482 // just in case. http://crosbug.com/130602
483 manifest_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
484 manifest_fetcher_->Start();
485 }
486
OnURLFetchComplete(const net::URLFetcher * source)487 void ExtensionDownloader::OnURLFetchComplete(
488 const net::URLFetcher* source) {
489 VLOG(2) << source->GetResponseCode() << " " << source->GetURL();
490
491 if (source == manifest_fetcher_.get()) {
492 std::string data;
493 source->GetResponseAsString(&data);
494 OnManifestFetchComplete(source->GetURL(),
495 source->GetStatus(),
496 source->GetResponseCode(),
497 source->GetBackoffDelay(),
498 data);
499 } else if (source == extension_fetcher_.get()) {
500 OnCRXFetchComplete(source,
501 source->GetURL(),
502 source->GetStatus(),
503 source->GetResponseCode(),
504 source->GetBackoffDelay());
505 } else {
506 NOTREACHED();
507 }
508 }
509
OnManifestFetchComplete(const GURL & url,const net::URLRequestStatus & status,int response_code,const base::TimeDelta & backoff_delay,const std::string & data)510 void ExtensionDownloader::OnManifestFetchComplete(
511 const GURL& url,
512 const net::URLRequestStatus& status,
513 int response_code,
514 const base::TimeDelta& backoff_delay,
515 const std::string& data) {
516 // We want to try parsing the manifest, and if it indicates updates are
517 // available, we want to fire off requests to fetch those updates.
518 if (status.status() == net::URLRequestStatus::SUCCESS &&
519 (response_code == 200 || (url.SchemeIsFile() && data.length() > 0))) {
520 RETRY_HISTOGRAM("ManifestFetchSuccess",
521 manifests_queue_.active_request_failure_count(), url);
522 VLOG(2) << "beginning manifest parse for " << url;
523 scoped_refptr<SafeManifestParser> safe_parser(
524 new SafeManifestParser(
525 data,
526 manifests_queue_.reset_active_request().release(),
527 base::Bind(&ExtensionDownloader::HandleManifestResults,
528 weak_ptr_factory_.GetWeakPtr())));
529 safe_parser->Start();
530 } else {
531 VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
532 << "' response code:" << response_code;
533 if (ShouldRetryRequest(status, response_code) &&
534 manifests_queue_.active_request_failure_count() < kMaxRetries) {
535 manifests_queue_.RetryRequest(backoff_delay);
536 } else {
537 RETRY_HISTOGRAM("ManifestFetchFailure",
538 manifests_queue_.active_request_failure_count(), url);
539 NotifyExtensionsDownloadFailed(
540 manifests_queue_.active_request()->extension_ids(),
541 manifests_queue_.active_request()->request_ids(),
542 ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED);
543 }
544 }
545 manifest_fetcher_.reset();
546 manifests_queue_.reset_active_request();
547
548 // If we have any pending manifest requests, fire off the next one.
549 manifests_queue_.StartNextRequest();
550 }
551
HandleManifestResults(const ManifestFetchData & fetch_data,const UpdateManifest::Results * results)552 void ExtensionDownloader::HandleManifestResults(
553 const ManifestFetchData& fetch_data,
554 const UpdateManifest::Results* results) {
555 // Keep a list of extensions that will not be updated, so that the |delegate_|
556 // can be notified once we're done here.
557 std::set<std::string> not_updated(fetch_data.extension_ids());
558
559 if (!results) {
560 NotifyExtensionsDownloadFailed(
561 not_updated,
562 fetch_data.request_ids(),
563 ExtensionDownloaderDelegate::MANIFEST_INVALID);
564 return;
565 }
566
567 // Examine the parsed manifest and kick off fetches of any new crx files.
568 std::vector<int> updates;
569 DetermineUpdates(fetch_data, *results, &updates);
570 for (size_t i = 0; i < updates.size(); i++) {
571 const UpdateManifest::Result* update = &(results->list.at(updates[i]));
572 const std::string& id = update->extension_id;
573 not_updated.erase(id);
574
575 GURL crx_url = update->crx_url;
576 if (id != kBlacklistAppID) {
577 NotifyUpdateFound(update->extension_id, update->version);
578 } else {
579 // The URL of the blacklist file is returned by the server and we need to
580 // be sure that we continue to be able to reliably detect whether a URL
581 // references a blacklist file.
582 DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url)) << crx_url;
583
584 // Force https (crbug.com/129587).
585 if (!crx_url.SchemeIsSecure()) {
586 url::Replacements<char> replacements;
587 std::string scheme("https");
588 replacements.SetScheme(scheme.c_str(),
589 url::Component(0, scheme.size()));
590 crx_url = crx_url.ReplaceComponents(replacements);
591 }
592 }
593 scoped_ptr<ExtensionFetch> fetch(new ExtensionFetch(
594 update->extension_id, crx_url, update->package_hash,
595 update->version, fetch_data.request_ids()));
596 FetchUpdatedExtension(fetch.Pass());
597 }
598
599 // If the manifest response included a <daystart> element, we want to save
600 // that value for any extensions which had sent a ping in the request.
601 if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
602 results->daystart_elapsed_seconds >= 0) {
603 Time day_start =
604 Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);
605
606 const std::set<std::string>& extension_ids = fetch_data.extension_ids();
607 std::set<std::string>::const_iterator i;
608 for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
609 const std::string& id = *i;
610 ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
611 result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
612 result.day_start = day_start;
613 }
614 }
615
616 NotifyExtensionsDownloadFailed(
617 not_updated,
618 fetch_data.request_ids(),
619 ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
620 }
621
DetermineUpdates(const ManifestFetchData & fetch_data,const UpdateManifest::Results & possible_updates,std::vector<int> * result)622 void ExtensionDownloader::DetermineUpdates(
623 const ManifestFetchData& fetch_data,
624 const UpdateManifest::Results& possible_updates,
625 std::vector<int>* result) {
626 // This will only be valid if one of possible_updates specifies
627 // browser_min_version.
628 Version browser_version;
629
630 for (size_t i = 0; i < possible_updates.list.size(); i++) {
631 const UpdateManifest::Result* update = &possible_updates.list[i];
632 const std::string& id = update->extension_id;
633
634 if (!fetch_data.Includes(id)) {
635 VLOG(2) << "Ignoring " << id << " from this manifest";
636 continue;
637 }
638
639 if (VLOG_IS_ON(2)) {
640 if (update->version.empty())
641 VLOG(2) << "manifest indicates " << id << " has no update";
642 else
643 VLOG(2) << "manifest indicates " << id
644 << " latest version is '" << update->version << "'";
645 }
646
647 if (!delegate_->IsExtensionPending(id)) {
648 // If we're not installing pending extension, and the update
649 // version is the same or older than what's already installed,
650 // we don't want it.
651 std::string version;
652 if (!delegate_->GetExtensionExistingVersion(id, &version)) {
653 VLOG(2) << id << " is not installed";
654 continue;
655 }
656
657 VLOG(2) << id << " is at '" << version << "'";
658
659 // We should skip the version check if update was forced.
660 if (!fetch_data.DidForceUpdate(id)) {
661 Version existing_version(version);
662 Version update_version(update->version);
663 if (!update_version.IsValid() ||
664 update_version.CompareTo(existing_version) <= 0) {
665 continue;
666 }
667 }
668 }
669
670 // If the update specifies a browser minimum version, do we qualify?
671 if (update->browser_min_version.length() > 0) {
672 // First determine the browser version if we haven't already.
673 if (!browser_version.IsValid()) {
674 chrome::VersionInfo version_info;
675 if (version_info.is_valid())
676 browser_version = Version(version_info.Version());
677 }
678 Version browser_min_version(update->browser_min_version);
679 if (browser_version.IsValid() && browser_min_version.IsValid() &&
680 browser_min_version.CompareTo(browser_version) > 0) {
681 // TODO(asargent) - We may want this to show up in the extensions UI
682 // eventually. (http://crbug.com/12547).
683 LOG(WARNING) << "Updated version of extension " << id
684 << " available, but requires chrome version "
685 << update->browser_min_version;
686 continue;
687 }
688 }
689 VLOG(2) << "will try to update " << id;
690 result->push_back(i);
691 }
692 }
693
694 // Begins (or queues up) download of an updated extension.
FetchUpdatedExtension(scoped_ptr<ExtensionFetch> fetch_data)695 void ExtensionDownloader::FetchUpdatedExtension(
696 scoped_ptr<ExtensionFetch> fetch_data) {
697 if (!fetch_data->url.is_valid()) {
698 // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
699 LOG(ERROR) << "Invalid URL: '" << fetch_data->url.possibly_invalid_spec()
700 << "' for extension " << fetch_data->id;
701 return;
702 }
703
704 for (RequestQueue<ExtensionFetch>::iterator iter =
705 extensions_queue_.begin();
706 iter != extensions_queue_.end(); ++iter) {
707 if (iter->id == fetch_data->id || iter->url == fetch_data->url) {
708 iter->request_ids.insert(fetch_data->request_ids.begin(),
709 fetch_data->request_ids.end());
710 return; // already scheduled
711 }
712 }
713
714 if (extensions_queue_.active_request() &&
715 extensions_queue_.active_request()->url == fetch_data->url) {
716 extensions_queue_.active_request()->request_ids.insert(
717 fetch_data->request_ids.begin(), fetch_data->request_ids.end());
718 } else {
719 std::string version;
720 if (extension_cache_ &&
721 extension_cache_->GetExtension(fetch_data->id, NULL, &version) &&
722 version == fetch_data->version) {
723 base::FilePath crx_path;
724 // Now get .crx file path and mark extension as used.
725 extension_cache_->GetExtension(fetch_data->id, &crx_path, &version);
726 NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, false);
727 } else {
728 extensions_queue_.ScheduleRequest(fetch_data.Pass());
729 }
730 }
731 }
732
NotifyDelegateDownloadFinished(scoped_ptr<ExtensionFetch> fetch_data,const base::FilePath & crx_path,bool file_ownership_passed)733 void ExtensionDownloader::NotifyDelegateDownloadFinished(
734 scoped_ptr<ExtensionFetch> fetch_data,
735 const base::FilePath& crx_path,
736 bool file_ownership_passed) {
737 delegate_->OnExtensionDownloadFinished(fetch_data->id, crx_path,
738 file_ownership_passed, fetch_data->url, fetch_data->version,
739 ping_results_[fetch_data->id], fetch_data->request_ids);
740 ping_results_.erase(fetch_data->id);
741 }
742
CreateExtensionFetcher()743 void ExtensionDownloader::CreateExtensionFetcher() {
744 const ExtensionFetch* fetch = extensions_queue_.active_request();
745 extension_fetcher_.reset(net::URLFetcher::Create(
746 kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
747 extension_fetcher_->SetRequestContext(request_context_.get());
748 extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
749
750 int load_flags = net::LOAD_DISABLE_CACHE;
751 bool is_secure = fetch->url.SchemeIsSecure();
752 if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_secure) {
753 load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
754 net::LOAD_DO_NOT_SAVE_COOKIES;
755 }
756 extension_fetcher_->SetLoadFlags(load_flags);
757
758 // Download CRX files to a temp file. The blacklist is small and will be
759 // processed in memory, so it is fetched into a string.
760 if (fetch->id != kBlacklistAppID) {
761 extension_fetcher_->SaveResponseToTemporaryFile(
762 BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
763 }
764
765 if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
766 is_secure) {
767 if (access_token_.empty()) {
768 // We should try OAuth2, but we have no token cached. This
769 // ExtensionFetcher will be started once the token fetch is complete,
770 // in either OnTokenFetchSuccess or OnTokenFetchFailure.
771 DCHECK(identity_provider_.get());
772 OAuth2TokenService::ScopeSet webstore_scopes;
773 webstore_scopes.insert(kWebstoreOAuth2Scope);
774 access_token_request_ =
775 identity_provider_->GetTokenService()->StartRequest(
776 identity_provider_->GetActiveAccountId(),
777 webstore_scopes,
778 this);
779 return;
780 }
781 extension_fetcher_->AddExtraRequestHeader(
782 base::StringPrintf("%s: Bearer %s",
783 net::HttpRequestHeaders::kAuthorization,
784 access_token_.c_str()));
785 }
786
787 VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
788 extension_fetcher_->Start();
789 }
790
// URLFetcher callback for a CRX download. On success, hands the downloaded
// file to the extension cache (when one is configured) or directly to the
// delegate. On auth-related failures, rotates credentials and retries; on
// other failures, retries up to kMaxRetries before reporting CRX_FETCH_FAILED.
// Always resets the fetcher and kicks off the next queued download at the end.
void ExtensionDownloader::OnCRXFetchComplete(
    const net::URLFetcher* source,
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay) {
  ExtensionFetch& active_request = *extensions_queue_.active_request();
  const std::string& id = active_request.id;
  // file:// fetches carry no meaningful HTTP response code, so any successful
  // status is accepted for them.
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || url.SchemeIsFile())) {
    RETRY_HISTOGRAM("CrxFetchSuccess",
                    extensions_queue_.active_request_failure_count(), url);
    base::FilePath crx_path;
    // Take ownership of the file at |crx_path|.
    CHECK(source->GetResponseAsFilePath(true, &crx_path));
    // Pop the request off the queue now; |fetch_data| keeps it alive until the
    // delegate has been notified (possibly asynchronously via the cache).
    scoped_ptr<ExtensionFetch> fetch_data =
        extensions_queue_.reset_active_request();
    if (extension_cache_) {
      const std::string& version = fetch_data->version;
      extension_cache_->PutExtension(id, crx_path, version,
          base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished,
                     weak_ptr_factory_.GetWeakPtr(),
                     base::Passed(&fetch_data)));
    } else {
      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
    }
  } else if (IterateFetchCredentialsAfterFailure(
                 &active_request,
                 status,
                 response_code)) {
    // The failure looked auth-related and the request was updated with new
    // credentials; retry with backoff rather than giving up.
    extensions_queue_.RetryRequest(backoff_delay);
  } else {
    const std::set<int>& request_ids = active_request.request_ids;
    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
    VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        extensions_queue_.active_request_failure_count() < kMaxRetries) {
      extensions_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("CrxFetchFailure",
                      extensions_queue_.active_request_failure_count(), url);
      // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
      UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status.error());
      delegate_->OnExtensionDownloadFailed(
          id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
    }
    // The ping result has been consumed (or the request will be retried with
    // a fresh one); drop this request's bookkeeping either way.
    ping_results_.erase(id);
    extensions_queue_.reset_active_request();
  }

  extension_fetcher_.reset();

  // If there are any pending downloads left, start the next one.
  extensions_queue_.StartNextRequest();
}
847
NotifyExtensionsDownloadFailed(const std::set<std::string> & extension_ids,const std::set<int> & request_ids,ExtensionDownloaderDelegate::Error error)848 void ExtensionDownloader::NotifyExtensionsDownloadFailed(
849 const std::set<std::string>& extension_ids,
850 const std::set<int>& request_ids,
851 ExtensionDownloaderDelegate::Error error) {
852 for (std::set<std::string>::const_iterator it = extension_ids.begin();
853 it != extension_ids.end(); ++it) {
854 const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[*it];
855 delegate_->OnExtensionDownloadFailed(*it, error, ping, request_ids);
856 ping_results_.erase(*it);
857 }
858 }
859
NotifyUpdateFound(const std::string & id,const std::string & version)860 void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
861 const std::string& version) {
862 UpdateDetails updateInfo(id, Version(version));
863 content::NotificationService::current()->Notify(
864 extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
865 content::NotificationService::AllBrowserContextsAndSources(),
866 content::Details<UpdateDetails>(&updateInfo));
867 }
868
IterateFetchCredentialsAfterFailure(ExtensionFetch * fetch,const net::URLRequestStatus & status,int response_code)869 bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
870 ExtensionFetch* fetch,
871 const net::URLRequestStatus& status,
872 int response_code) {
873 bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
874 (status.status() == net::URLRequestStatus::SUCCESS &&
875 (response_code == net::HTTP_UNAUTHORIZED ||
876 response_code == net::HTTP_FORBIDDEN));
877 if (!auth_failure) {
878 return false;
879 }
880 // Here we decide what to do next if the server refused to authorize this
881 // fetch.
882 switch (fetch->credentials) {
883 case ExtensionFetch::CREDENTIALS_NONE:
884 if (fetch->url.DomainIs(kGoogleDotCom) && identity_provider_) {
885 fetch->credentials = ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN;
886 } else {
887 fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
888 }
889 return true;
890 case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN:
891 fetch->oauth2_attempt_count++;
892 // OAuth2 may fail due to an expired access token, in which case we
893 // should invalidate the token and try again.
894 if (response_code == net::HTTP_UNAUTHORIZED &&
895 fetch->oauth2_attempt_count <= kMaxOAuth2Attempts) {
896 DCHECK(identity_provider_.get());
897 OAuth2TokenService::ScopeSet webstore_scopes;
898 webstore_scopes.insert(kWebstoreOAuth2Scope);
899 identity_provider_->GetTokenService()->InvalidateToken(
900 identity_provider_->GetActiveAccountId(),
901 webstore_scopes,
902 access_token_);
903 access_token_.clear();
904 return true;
905 }
906 // Either there is no Gaia identity available, the active identity
907 // doesn't have access to this resource, or the server keeps returning
908 // 401s and we've retried too many times. Fall back on cookies.
909 if (access_token_.empty() ||
910 response_code == net::HTTP_FORBIDDEN ||
911 fetch->oauth2_attempt_count > kMaxOAuth2Attempts) {
912 fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
913 return true;
914 }
915 // Something else is wrong. Time to give up.
916 return false;
917 case ExtensionFetch::CREDENTIALS_COOKIES:
918 if (response_code == net::HTTP_FORBIDDEN) {
919 // Try the next session identity, up to some maximum.
920 return IncrementAuthUserIndex(&fetch->url);
921 }
922 return false;
923 default:
924 NOTREACHED();
925 }
926 NOTREACHED();
927 return false;
928 }
929
OnGetTokenSuccess(const OAuth2TokenService::Request * request,const std::string & access_token,const base::Time & expiration_time)930 void ExtensionDownloader::OnGetTokenSuccess(
931 const OAuth2TokenService::Request* request,
932 const std::string& access_token,
933 const base::Time& expiration_time) {
934 access_token_ = access_token;
935 extension_fetcher_->AddExtraRequestHeader(
936 base::StringPrintf("%s: Bearer %s",
937 net::HttpRequestHeaders::kAuthorization,
938 access_token_.c_str()));
939 extension_fetcher_->Start();
940 }
941
OnGetTokenFailure(const OAuth2TokenService::Request * request,const GoogleServiceAuthError & error)942 void ExtensionDownloader::OnGetTokenFailure(
943 const OAuth2TokenService::Request* request,
944 const GoogleServiceAuthError& error) {
945 // If we fail to get an access token, kick the pending fetch and let it fall
946 // back on cookies.
947 extension_fetcher_->Start();
948 }
949
CreateManifestFetchData(const GURL & update_url,int request_id)950 ManifestFetchData* ExtensionDownloader::CreateManifestFetchData(
951 const GURL& update_url,
952 int request_id) {
953 ManifestFetchData::PingMode ping_mode = ManifestFetchData::NO_PING;
954 if (update_url.DomainIs(ping_enabled_domain_.c_str())) {
955 if (enable_extra_update_metrics_) {
956 ping_mode = ManifestFetchData::PING_WITH_METRICS;
957 } else {
958 ping_mode = ManifestFetchData::PING;
959 }
960 }
961 return new ManifestFetchData(
962 update_url, request_id, brand_code_, manifest_query_params_, ping_mode);
963 }
964
965 } // namespace extensions
966