• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "chrome/browser/extensions/updater/extension_downloader.h"
6 
7 #include <utility>
8 
9 #include "base/bind.h"
10 #include "base/command_line.h"
11 #include "base/files/file_path.h"
12 #include "base/location.h"
13 #include "base/logging.h"
14 #include "base/metrics/histogram.h"
15 #include "base/metrics/sparse_histogram.h"
16 #include "base/stl_util.h"
17 #include "base/strings/string_number_conversions.h"
18 #include "base/strings/string_util.h"
19 #include "base/strings/stringprintf.h"
20 #include "base/time/time.h"
21 #include "base/version.h"
22 #include "chrome/browser/chrome_notification_types.h"
23 #include "chrome/browser/extensions/updater/extension_cache.h"
24 #include "chrome/browser/extensions/updater/request_queue_impl.h"
25 #include "chrome/browser/extensions/updater/safe_manifest_parser.h"
26 #include "chrome/browser/metrics/chrome_metrics_service_accessor.h"
27 #include "chrome/common/chrome_switches.h"
28 #include "chrome/common/chrome_version_info.h"
29 #include "chrome/common/extensions/extension_constants.h"
30 #include "chrome/common/extensions/manifest_url_handler.h"
31 #include "content/public/browser/browser_thread.h"
32 #include "content/public/browser/notification_details.h"
33 #include "content/public/browser/notification_service.h"
34 #include "net/base/backoff_entry.h"
35 #include "net/base/load_flags.h"
36 #include "net/base/net_errors.h"
37 #include "net/url_request/url_fetcher.h"
38 #include "net/url_request/url_request_context_getter.h"
39 #include "net/url_request/url_request_status.h"
40 
41 using base::Time;
42 using base::TimeDelta;
43 using content::BrowserThread;
44 
45 namespace extensions {
46 
// Pseudo-app ID under which the blacklist payload is requested from the
// webstore update servers (see StartBlacklistUpdate()).
const char ExtensionDownloader::kBlacklistAppID[] = "com.google.crx.blacklist";
48 
49 namespace {
50 
// Backoff policy shared by the manifest and extension (.crx) request queues:
// 2s initial delay, doubling per failure with 10% fuzz, no upper bound, and
// entries that are never discarded.
const net::BackoffEntry::Policy kDefaultBackoffPolicy = {
  // Number of initial errors (in sequence) to ignore before applying
  // exponential back-off rules.
  0,

  // Initial delay for exponential back-off in ms.
  2000,

  // Factor by which the waiting time will be multiplied.
  2,

  // Fuzzing percentage. ex: 10% will spread requests randomly
  // between 90%-100% of the calculated time.
  0.1,

  // Maximum amount of time we are willing to delay our request in ms.
  -1,

  // Time to keep an entry from being discarded even when it
  // has no significant state, -1 to never discard.
  -1,

  // Don't use initial delay unless the last request was an error.
  false,
};
76 
// Query-string key whose value selects which signed-in profile user a
// protected download request is made as (see IncrementAuthUserIndex()).
const char kAuthUserQueryKey[] = "authuser";

// Highest |authuser| index IncrementAuthUserIndex() will produce before
// giving up.
const int kMaxAuthUserValue = 10;

// Install-source strings attached to manifest fetch requests; the
// "notfromwebstore" value marks the metrics-only duplicate fetch added in
// AddExtensionData().
const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
const char kDefaultInstallSource[] = "";

// Records a retry-count histogram, split into separate buckets for
// google.com-hosted and third-party update URLs.
#define RETRY_HISTOGRAM(name, retry_count, url) \
    if ((url).DomainIs("google.com")) { \
      UMA_HISTOGRAM_CUSTOM_COUNTS( \
          "Extensions." name "RetryCountGoogleUrl", retry_count, 1, \
          kMaxRetries, kMaxRetries+1); \
    } else { \
      UMA_HISTOGRAM_CUSTOM_COUNTS( \
          "Extensions." name "RetryCountOtherUrl", retry_count, 1, \
          kMaxRetries, kMaxRetries+1); \
    }
94 
ShouldRetryRequest(const net::URLRequestStatus & status,int response_code)95 bool ShouldRetryRequest(const net::URLRequestStatus& status,
96                         int response_code) {
97   // Retry if the response code is a server error, or the request failed because
98   // of network errors as opposed to file errors.
99   return ((response_code >= 500 && status.is_success()) ||
100           status.status() == net::URLRequestStatus::FAILED);
101 }
102 
ShouldRetryRequestWithCookies(const net::URLRequestStatus & status,int response_code,bool included_cookies)103 bool ShouldRetryRequestWithCookies(const net::URLRequestStatus& status,
104                                    int response_code,
105                                    bool included_cookies) {
106   if (included_cookies)
107     return false;
108 
109   if (status.status() == net::URLRequestStatus::CANCELED)
110     return true;
111 
112   // Retry if a 401 or 403 is received.
113   return (status.status() == net::URLRequestStatus::SUCCESS &&
114           (response_code == 401 || response_code == 403));
115 }
116 
ShouldRetryRequestWithNextUser(const net::URLRequestStatus & status,int response_code,bool included_cookies)117 bool ShouldRetryRequestWithNextUser(const net::URLRequestStatus& status,
118                                     int response_code,
119                                     bool included_cookies) {
120   // Retry if a 403 is received in response to a request including cookies.
121   // Note that receiving a 401 in response to a request which included cookies
122   // should indicate that the |authuser| index was out of bounds for the profile
123   // and therefore Chrome should NOT retry with another index.
124   return (status.status() == net::URLRequestStatus::SUCCESS &&
125           response_code == 403 && included_cookies);
126 }
127 
128 // This parses and updates a URL query such that the value of the |authuser|
129 // query parameter is incremented by 1. If parameter was not present in the URL,
130 // it will be added with a value of 1. All other query keys and values are
131 // preserved as-is. Returns |false| if the user index exceeds a hard-coded
132 // maximum.
IncrementAuthUserIndex(GURL * url)133 bool IncrementAuthUserIndex(GURL* url) {
134   int user_index = 0;
135   std::string old_query = url->query();
136   std::vector<std::string> new_query_parts;
137   url::Component query(0, old_query.length());
138   url::Component key, value;
139   while (url::ExtractQueryKeyValue(old_query.c_str(), &query, &key, &value)) {
140     std::string key_string = old_query.substr(key.begin, key.len);
141     std::string value_string = old_query.substr(value.begin, value.len);
142     if (key_string == kAuthUserQueryKey) {
143       base::StringToInt(value_string, &user_index);
144     } else {
145       new_query_parts.push_back(base::StringPrintf(
146           "%s=%s", key_string.c_str(), value_string.c_str()));
147     }
148   }
149   if (user_index >= kMaxAuthUserValue)
150     return false;
151   new_query_parts.push_back(
152       base::StringPrintf("%s=%d", kAuthUserQueryKey, user_index + 1));
153   std::string new_query_string = JoinString(new_query_parts, '&');
154   url::Component new_query(0, new_query_string.size());
155   url::Replacements<char> replacements;
156   replacements.SetQuery(new_query_string.c_str(), new_query);
157   *url = url->ReplaceComponents(replacements);
158   return true;
159 }
160 
161 }  // namespace
162 
// Value carrier for update-found notifications: the extension |id| and the
// |version| the update server advertised.
UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
    : id(id), version(version) {}

UpdateDetails::~UpdateDetails() {}
167 
// Default fetch: empty URL; is_protected == false means the download is made
// without cookies (see CreateExtensionFetcher).
ExtensionDownloader::ExtensionFetch::ExtensionFetch()
    : url(), is_protected(false) {}

// A queued .crx download: the extension |id|, the |url| to fetch, the
// expected |package_hash| and |version|, and the set of update-check
// |request_ids| interested in the result.
ExtensionDownloader::ExtensionFetch::ExtensionFetch(
    const std::string& id,
    const GURL& url,
    const std::string& package_hash,
    const std::string& version,
    const std::set<int>& request_ids)
    : id(id), url(url), package_hash(package_hash), version(version),
      request_ids(request_ids), is_protected(false) {}

ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
181 
// Wires both request queues to their fetcher-factory callbacks. The
// base::Unretained(this) binds are safe because the queues are members of
// this object and cannot outlive it.
ExtensionDownloader::ExtensionDownloader(
    ExtensionDownloaderDelegate* delegate,
    net::URLRequestContextGetter* request_context)
    : delegate_(delegate),
      request_context_(request_context),
      weak_ptr_factory_(this),
      manifests_queue_(&kDefaultBackoffPolicy,
          base::Bind(&ExtensionDownloader::CreateManifestFetcher,
                     base::Unretained(this))),
      extensions_queue_(&kDefaultBackoffPolicy,
          base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
                     base::Unretained(this))),
      extension_cache_(NULL) {
  DCHECK(delegate_);
  DCHECK(request_context_);
}

ExtensionDownloader::~ExtensionDownloader() {}
200 
AddExtension(const Extension & extension,int request_id)201 bool ExtensionDownloader::AddExtension(const Extension& extension,
202                                        int request_id) {
203   // Skip extensions with empty update URLs converted from user
204   // scripts.
205   if (extension.converted_from_user_script() &&
206       ManifestURL::GetUpdateURL(&extension).is_empty()) {
207     return false;
208   }
209 
210   // If the extension updates itself from the gallery, ignore any update URL
211   // data.  At the moment there is no extra data that an extension can
212   // communicate to the the gallery update servers.
213   std::string update_url_data;
214   if (!ManifestURL::UpdatesFromGallery(&extension))
215     update_url_data = delegate_->GetUpdateUrlData(extension.id());
216 
217   return AddExtensionData(extension.id(), *extension.version(),
218                           extension.GetType(),
219                           ManifestURL::GetUpdateURL(&extension),
220                           update_url_data, request_id);
221 }
222 
AddPendingExtension(const std::string & id,const GURL & update_url,int request_id)223 bool ExtensionDownloader::AddPendingExtension(const std::string& id,
224                                               const GURL& update_url,
225                                               int request_id) {
226   // Use a zero version to ensure that a pending extension will always
227   // be updated, and thus installed (assuming all extensions have
228   // non-zero versions).
229   Version version("0.0.0.0");
230   DCHECK(version.IsValid());
231 
232   return AddExtensionData(id,
233                           version,
234                           Manifest::TYPE_UNKNOWN,
235                           update_url,
236                           std::string(),
237                           request_id);
238 }
239 
StartAllPending(ExtensionCache * cache)240 void ExtensionDownloader::StartAllPending(ExtensionCache* cache) {
241   if (cache) {
242     extension_cache_ = cache;
243     extension_cache_->Start(base::Bind(
244         &ExtensionDownloader::DoStartAllPending,
245         weak_ptr_factory_.GetWeakPtr()));
246   } else {
247     DoStartAllPending();
248   }
249 }
250 
DoStartAllPending()251 void ExtensionDownloader::DoStartAllPending() {
252   ReportStats();
253   url_stats_ = URLStats();
254 
255   for (FetchMap::iterator it = fetches_preparing_.begin();
256        it != fetches_preparing_.end(); ++it) {
257     std::vector<linked_ptr<ManifestFetchData> >& list = it->second;
258     for (size_t i = 0; i < list.size(); ++i) {
259       StartUpdateCheck(scoped_ptr<ManifestFetchData>(list[i].release()));
260     }
261   }
262   fetches_preparing_.clear();
263 }
264 
// Queues a manifest fetch for the blacklist pseudo-extension at the given
// |version|, always against the webstore update URL.
void ExtensionDownloader::StartBlacklistUpdate(
    const std::string& version,
    const ManifestFetchData::PingData& ping_data,
    int request_id) {
  // Note: it is very important that we use the https version of the update
  // url here to avoid DNS hijacking of the blacklist, which is not validated
  // by a public key signature like .crx files are.
  scoped_ptr<ManifestFetchData> blacklist_fetch(
      new ManifestFetchData(extension_urls::GetWebstoreUpdateUrl(),
                            request_id));
  DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
  blacklist_fetch->AddExtension(kBlacklistAppID,
                                version,
                                &ping_data,
                                std::string(),
                                kDefaultInstallSource);
  StartUpdateCheck(blacklist_fetch.Pass());
}
283 
// Registers one extension (|id| at |version|) for an update check against
// |extension_update_url|, batching it into a pending ManifestFetchData keyed
// by (request_id, update URL). Also accumulates the UMA counters reported by
// ReportStats(). Returns false when the extension cannot be checked (invalid
// update URL or empty ID).
bool ExtensionDownloader::AddExtensionData(const std::string& id,
                                           const Version& version,
                                           Manifest::Type extension_type,
                                           const GURL& extension_update_url,
                                           const std::string& update_url_data,
                                           int request_id) {
  GURL update_url(extension_update_url);
  // Skip extensions with non-empty invalid update URLs.
  if (!update_url.is_empty() && !update_url.is_valid()) {
    LOG(WARNING) << "Extension " << id << " has invalid update url "
                 << update_url;
    return false;
  }

  // Make sure we use SSL for store-hosted extensions.
  if (extension_urls::IsWebstoreUpdateUrl(update_url) &&
      !update_url.SchemeIsSecure())
    update_url = extension_urls::GetWebstoreUpdateUrl();

  // Skip extensions with empty IDs.
  if (id.empty()) {
    LOG(WARNING) << "Found extension with empty ID";
    return false;
  }

  // Bucket the update URL for the stats reported by ReportStats().
  if (update_url.DomainIs("google.com")) {
    url_stats_.google_url_count++;
  } else if (update_url.is_empty()) {
    url_stats_.no_url_count++;
    // Fill in default update URL.
    update_url = extension_urls::GetWebstoreUpdateUrl();
  } else {
    url_stats_.other_url_count++;
  }

  switch (extension_type) {
    case Manifest::TYPE_THEME:
      ++url_stats_.theme_count;
      break;
    case Manifest::TYPE_EXTENSION:
    case Manifest::TYPE_USER_SCRIPT:
      ++url_stats_.extension_count;
      break;
    case Manifest::TYPE_HOSTED_APP:
    case Manifest::TYPE_LEGACY_PACKAGED_APP:
      ++url_stats_.app_count;
      break;
    case Manifest::TYPE_PLATFORM_APP:
      ++url_stats_.platform_app_count;
      break;
    case Manifest::TYPE_UNKNOWN:
    default:
      ++url_stats_.pending_count;
      break;
  }

  std::vector<GURL> update_urls;
  update_urls.push_back(update_url);
  // If UMA is enabled, also add to ManifestFetchData for the
  // webstore update URL.
  if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
      ChromeMetricsServiceAccessor::IsMetricsReportingEnabled()) {
    update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
  }

  for (size_t i = 0; i < update_urls.size(); ++i) {
    DCHECK(!update_urls[i].is_empty());
    DCHECK(update_urls[i].is_valid());

    // Index 0 is the extension's own update URL; any second entry is the
    // metrics-only webstore fetch and is tagged "notfromwebstore".
    std::string install_source = i == 0 ?
        kDefaultInstallSource : kNotFromWebstoreInstallSource;

    ManifestFetchData::PingData ping_data;
    ManifestFetchData::PingData* optional_ping_data = NULL;
    if (delegate_->GetPingDataForExtension(id, &ping_data))
      optional_ping_data = &ping_data;

    // Find or create a ManifestFetchData to add this extension to.
    bool added = false;
    FetchMap::iterator existing_iter = fetches_preparing_.find(
        std::make_pair(request_id, update_urls[i]));
    if (existing_iter != fetches_preparing_.end() &&
        !existing_iter->second.empty()) {
      // Try to add to the ManifestFetchData at the end of the list.
      ManifestFetchData* existing_fetch = existing_iter->second.back().get();
      if (existing_fetch->AddExtension(id, version.GetString(),
                                       optional_ping_data, update_url_data,
                                       install_source)) {
        added = true;
      }
    }
    if (!added) {
      // Otherwise add a new element to the list, if the list doesn't exist or
      // if its last element is already full.
      linked_ptr<ManifestFetchData> fetch(
          new ManifestFetchData(update_urls[i], request_id));
      fetches_preparing_[std::make_pair(request_id, update_urls[i])].
          push_back(fetch);
      // Adding to a brand-new ManifestFetchData must always succeed.
      added = fetch->AddExtension(id, version.GetString(),
                                  optional_ping_data,
                                  update_url_data,
                                  install_source);
      DCHECK(added);
    }
  }

  return true;
}
392 
// Emits one UMA count histogram per URLStats bucket accumulated by
// AddExtensionData() (counters are reset afterwards in DoStartAllPending()).
void ExtensionDownloader::ReportStats() const {
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
                           url_stats_.extension_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
                           url_stats_.theme_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
                           url_stats_.app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
                           url_stats_.platform_app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
                           url_stats_.pending_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
                           url_stats_.google_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
                           url_stats_.other_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
                           url_stats_.no_url_count);
}
411 
StartUpdateCheck(scoped_ptr<ManifestFetchData> fetch_data)412 void ExtensionDownloader::StartUpdateCheck(
413     scoped_ptr<ManifestFetchData> fetch_data) {
414   const std::set<std::string>& id_set(fetch_data->extension_ids());
415 
416   if (CommandLine::ForCurrentProcess()->HasSwitch(
417       switches::kDisableBackgroundNetworking)) {
418     NotifyExtensionsDownloadFailed(id_set,
419                                    fetch_data->request_ids(),
420                                    ExtensionDownloaderDelegate::DISABLED);
421     return;
422   }
423 
424   RequestQueue<ManifestFetchData>::iterator i;
425   for (i = manifests_queue_.begin(); i != manifests_queue_.end(); ++i) {
426     if (fetch_data->full_url() == i->full_url()) {
427       // This url is already scheduled to be fetched.
428       i->Merge(*fetch_data);
429       return;
430     }
431   }
432 
433   if (manifests_queue_.active_request() &&
434       manifests_queue_.active_request()->full_url() == fetch_data->full_url()) {
435     manifests_queue_.active_request()->Merge(*fetch_data);
436   } else {
437     UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
438         fetch_data->full_url().possibly_invalid_spec().length());
439 
440     manifests_queue_.ScheduleRequest(fetch_data.Pass());
441   }
442 }
443 
// Factory callback invoked by |manifests_queue_| to start its active
// request: builds and starts a cookie-less, cache-bypassing GET for the
// manifest URL.
void ExtensionDownloader::CreateManifestFetcher() {
  if (VLOG_IS_ON(2)) {
    std::vector<std::string> id_vector(
        manifests_queue_.active_request()->extension_ids().begin(),
        manifests_queue_.active_request()->extension_ids().end());
    std::string id_list = JoinString(id_vector, ',');
    VLOG(2) << "Fetching " << manifests_queue_.active_request()->full_url()
            << " for " << id_list;
  }

  manifest_fetcher_.reset(net::URLFetcher::Create(
      kManifestFetcherId, manifests_queue_.active_request()->full_url(),
      net::URLFetcher::GET, this));
  manifest_fetcher_->SetRequestContext(request_context_);
  manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
                                  net::LOAD_DO_NOT_SAVE_COOKIES |
                                  net::LOAD_DISABLE_CACHE);
  // Update checks can be interrupted if a network change is detected; this is
  // common for the retail mode AppPack on ChromeOS. Retrying once should be
  // enough to recover in those cases; let the fetcher retry up to 3 times
  // just in case. http://crosbug.com/130602
  manifest_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
  manifest_fetcher_->Start();
}
468 
OnURLFetchComplete(const net::URLFetcher * source)469 void ExtensionDownloader::OnURLFetchComplete(
470     const net::URLFetcher* source) {
471   VLOG(2) << source->GetResponseCode() << " " << source->GetURL();
472 
473   if (source == manifest_fetcher_.get()) {
474     std::string data;
475     source->GetResponseAsString(&data);
476     OnManifestFetchComplete(source->GetURL(),
477                             source->GetStatus(),
478                             source->GetResponseCode(),
479                             source->GetBackoffDelay(),
480                             data);
481   } else if (source == extension_fetcher_.get()) {
482     OnCRXFetchComplete(source,
483                        source->GetURL(),
484                        source->GetStatus(),
485                        source->GetResponseCode(),
486                        source->GetBackoffDelay());
487   } else {
488     NOTREACHED();
489   }
490 }
491 
// Handles completion of an update-manifest fetch: on success, hands the
// response off to SafeManifestParser, which calls back into
// HandleManifestResults; on failure, either retries (bounded by kMaxRetries)
// or reports MANIFEST_FETCH_FAILED for every extension in the request.
void ExtensionDownloader::OnManifestFetchComplete(
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay,
    const std::string& data) {
  // We want to try parsing the manifest, and if it indicates updates are
  // available, we want to fire off requests to fetch those updates.
  // file:// URLs carry no HTTP status, so any non-empty response counts.
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || (url.SchemeIsFile() && data.length() > 0))) {
    RETRY_HISTOGRAM("ManifestFetchSuccess",
                    manifests_queue_.active_request_failure_count(), url);
    VLOG(2) << "beginning manifest parse for " << url;
    // Ownership of the active request transfers to the parser's callback.
    scoped_refptr<SafeManifestParser> safe_parser(
        new SafeManifestParser(
            data,
            manifests_queue_.reset_active_request().release(),
            base::Bind(&ExtensionDownloader::HandleManifestResults,
                       weak_ptr_factory_.GetWeakPtr())));
    safe_parser->Start();
  } else {
    VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        manifests_queue_.active_request_failure_count() < kMaxRetries) {
      manifests_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("ManifestFetchFailure",
                      manifests_queue_.active_request_failure_count(), url);
      NotifyExtensionsDownloadFailed(
          manifests_queue_.active_request()->extension_ids(),
          manifests_queue_.active_request()->request_ids(),
          ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED);
    }
  }
  manifest_fetcher_.reset();
  manifests_queue_.reset_active_request();

  // If we have any pending manifest requests, fire off the next one.
  manifests_queue_.StartNextRequest();
}
533 
// Callback from SafeManifestParser with the parsed manifest (|results| is
// NULL when parsing failed). Kicks off .crx fetches for every entry that
// DetermineUpdates() selects, records <daystart> ping results, and notifies
// the delegate about every extension that got no update.
void ExtensionDownloader::HandleManifestResults(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results* results) {
  // Keep a list of extensions that will not be updated, so that the |delegate_|
  // can be notified once we're done here.
  std::set<std::string> not_updated(fetch_data.extension_ids());

  if (!results) {
    NotifyExtensionsDownloadFailed(
        not_updated,
        fetch_data.request_ids(),
        ExtensionDownloaderDelegate::MANIFEST_INVALID);
    return;
  }

  // Examine the parsed manifest and kick off fetches of any new crx files.
  std::vector<int> updates;
  DetermineUpdates(fetch_data, *results, &updates);
  for (size_t i = 0; i < updates.size(); i++) {
    const UpdateManifest::Result* update = &(results->list.at(updates[i]));
    const std::string& id = update->extension_id;
    not_updated.erase(id);

    GURL crx_url = update->crx_url;
    if (id != kBlacklistAppID) {
      NotifyUpdateFound(update->extension_id, update->version);
    } else {
      // The URL of the blacklist file is returned by the server and we need to
      // be sure that we continue to be able to reliably detect whether a URL
      // references a blacklist file.
      DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url)) << crx_url;

      // Force https (crbug.com/129587).
      if (!crx_url.SchemeIsSecure()) {
        url::Replacements<char> replacements;
        std::string scheme("https");
        replacements.SetScheme(scheme.c_str(),
                               url::Component(0, scheme.size()));
        crx_url = crx_url.ReplaceComponents(replacements);
      }
    }
    scoped_ptr<ExtensionFetch> fetch(new ExtensionFetch(
        update->extension_id, crx_url, update->package_hash,
        update->version, fetch_data.request_ids()));
    FetchUpdatedExtension(fetch.Pass());
  }

  // If the manifest response included a <daystart> element, we want to save
  // that value for any extensions which had sent a ping in the request.
  if (fetch_data.base_url().DomainIs("google.com") &&
      results->daystart_elapsed_seconds >= 0) {
    Time day_start =
        Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);

    const std::set<std::string>& extension_ids = fetch_data.extension_ids();
    std::set<std::string>::const_iterator i;
    for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
      const std::string& id = *i;
      ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
      result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
      result.day_start = day_start;
    }
  }

  // Everything left in |not_updated| had no update available.
  NotifyExtensionsDownloadFailed(
      not_updated,
      fetch_data.request_ids(),
      ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
}
603 
// Selects which entries of |possible_updates| are real updates for the
// extensions requested in |fetch_data|, appending their indices to |result|.
// An entry qualifies when it belongs to this fetch, carries a version newer
// than the installed one (pending extensions always qualify), and its
// browser_min_version — if present — is satisfied by this browser.
void ExtensionDownloader::DetermineUpdates(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results& possible_updates,
    std::vector<int>* result) {
  // This will only be valid if one of possible_updates specifies
  // browser_min_version.
  Version browser_version;

  for (size_t i = 0; i < possible_updates.list.size(); i++) {
    const UpdateManifest::Result* update = &possible_updates.list[i];
    const std::string& id = update->extension_id;

    if (!fetch_data.Includes(id)) {
      VLOG(2) << "Ignoring " << id << " from this manifest";
      continue;
    }

    if (VLOG_IS_ON(2)) {
      if (update->version.empty())
        VLOG(2) << "manifest indicates " << id << " has no update";
      else
        VLOG(2) << "manifest indicates " << id
                << " latest version is '" << update->version << "'";
    }

    if (!delegate_->IsExtensionPending(id)) {
      // If we're not installing pending extension, and the update
      // version is the same or older than what's already installed,
      // we don't want it.
      std::string version;
      if (!delegate_->GetExtensionExistingVersion(id, &version)) {
        VLOG(2) << id << " is not installed";
        continue;
      }

      VLOG(2) << id << " is at '" << version << "'";

      Version existing_version(version);
      Version update_version(update->version);

      // Skip malformed or not-strictly-newer offered versions.
      if (!update_version.IsValid() ||
          update_version.CompareTo(existing_version) <= 0) {
        continue;
      }
    }

    // If the update specifies a browser minimum version, do we qualify?
    if (update->browser_min_version.length() > 0) {
      // First determine the browser version if we haven't already.
      if (!browser_version.IsValid()) {
        chrome::VersionInfo version_info;
        if (version_info.is_valid())
          browser_version = Version(version_info.Version());
      }
      Version browser_min_version(update->browser_min_version);
      if (browser_version.IsValid() && browser_min_version.IsValid() &&
          browser_min_version.CompareTo(browser_version) > 0) {
        // TODO(asargent) - We may want this to show up in the extensions UI
        // eventually. (http://crbug.com/12547).
        LOG(WARNING) << "Updated version of extension " << id
                     << " available, but requires chrome version "
                     << update->browser_min_version;
        continue;
      }
    }
    VLOG(2) << "will try to update " << id;
    result->push_back(i);
  }
}
673 
// Begins (or queues up) download of an updated extension. Duplicate requests
// for the same extension or URL are merged into the queued/active fetch; a
// matching entry in |extension_cache_| short-circuits the download entirely.
void ExtensionDownloader::FetchUpdatedExtension(
    scoped_ptr<ExtensionFetch> fetch_data) {
  if (!fetch_data->url.is_valid()) {
    // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
    LOG(ERROR) << "Invalid URL: '" << fetch_data->url.possibly_invalid_spec()
               << "' for extension " << fetch_data->id;
    return;
  }

  for (RequestQueue<ExtensionFetch>::iterator iter =
           extensions_queue_.begin();
       iter != extensions_queue_.end(); ++iter) {
    if (iter->id == fetch_data->id || iter->url == fetch_data->url) {
      // Fold this request's IDs into the already-scheduled fetch.
      iter->request_ids.insert(fetch_data->request_ids.begin(),
                               fetch_data->request_ids.end());
      return;  // already scheduled
    }
  }

  if (extensions_queue_.active_request() &&
      extensions_queue_.active_request()->url == fetch_data->url) {
    extensions_queue_.active_request()->request_ids.insert(
        fetch_data->request_ids.begin(), fetch_data->request_ids.end());
  } else {
    std::string version;
    if (extension_cache_ &&
        extension_cache_->GetExtension(fetch_data->id, NULL, &version) &&
        version == fetch_data->version) {
      base::FilePath crx_path;
      // Now get .crx file path and mark extension as used.
      extension_cache_->GetExtension(fetch_data->id, &crx_path, &version);
      // Cache keeps ownership of the file (file_ownership_passed == false).
      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, false);
    } else {
      extensions_queue_.ScheduleRequest(fetch_data.Pass());
    }
  }
}
712 
NotifyDelegateDownloadFinished(scoped_ptr<ExtensionFetch> fetch_data,const base::FilePath & crx_path,bool file_ownership_passed)713 void ExtensionDownloader::NotifyDelegateDownloadFinished(
714     scoped_ptr<ExtensionFetch> fetch_data,
715     const base::FilePath& crx_path,
716     bool file_ownership_passed) {
717   delegate_->OnExtensionDownloadFinished(fetch_data->id, crx_path,
718       file_ownership_passed, fetch_data->url, fetch_data->version,
719       ping_results_[fetch_data->id], fetch_data->request_ids);
720   ping_results_.erase(fetch_data->id);
721 }
722 
CreateExtensionFetcher()723 void ExtensionDownloader::CreateExtensionFetcher() {
724   const ExtensionFetch* fetch = extensions_queue_.active_request();
725   int load_flags = net::LOAD_DISABLE_CACHE;
726   if (!fetch->is_protected || !fetch->url.SchemeIs("https")) {
727       load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
728                     net::LOAD_DO_NOT_SAVE_COOKIES;
729   }
730   extension_fetcher_.reset(net::URLFetcher::Create(
731       kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
732   extension_fetcher_->SetRequestContext(request_context_);
733   extension_fetcher_->SetLoadFlags(load_flags);
734   extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
735   // Download CRX files to a temp file. The blacklist is small and will be
736   // processed in memory, so it is fetched into a string.
737   if (fetch->id != kBlacklistAppID) {
738     extension_fetcher_->SaveResponseToTemporaryFile(
739         BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
740   }
741 
742   VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
743 
744   extension_fetcher_->Start();
745 }
746 
// Handles completion of a CRX download started by CreateExtensionFetcher().
// Depending on |status|/|response_code|, the result is either delivered to
// the delegate (possibly via the cache), re-queued for retry, or reported
// as a failure. In all cases the fetcher is destroyed and the next pending
// request, if any, is started.
void ExtensionDownloader::OnCRXFetchComplete(
    const net::URLFetcher* source,
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay) {
  const std::string& id = extensions_queue_.active_request()->id;
  // file:// loads do not produce an HTTP 200, so a successful status alone
  // counts as success for file URLs.
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || url.SchemeIsFile())) {
    RETRY_HISTOGRAM("CrxFetchSuccess",
                    extensions_queue_.active_request_failure_count(), url);
    base::FilePath crx_path;
    // Take ownership of the file at |crx_path|.
    CHECK(source->GetResponseAsFilePath(true, &crx_path));
    scoped_ptr<ExtensionFetch> fetch_data =
        extensions_queue_.reset_active_request();
    if (extension_cache_) {
      const std::string& version = fetch_data->version;
      // Store the CRX in the cache; the bound callback notifies the
      // delegate once the cache has put the file in place.
      extension_cache_->PutExtension(id, crx_path, version,
          base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished,
                     weak_ptr_factory_.GetWeakPtr(),
                     base::Passed(&fetch_data)));
    } else {
      // No cache: hand the temp file straight to the delegate, which takes
      // ownership of it (file_ownership_passed == true).
      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
    }
  } else if (ShouldRetryRequestWithCookies(
                 status,
                 response_code,
                 extensions_queue_.active_request()->is_protected)) {
    // Requeue the fetch with |is_protected| set, enabling cookies.
    extensions_queue_.active_request()->is_protected = true;
    extensions_queue_.RetryRequest(backoff_delay);
  } else if (ShouldRetryRequestWithNextUser(
                 status,
                 response_code,
                 extensions_queue_.active_request()->is_protected) &&
             IncrementAuthUserIndex(&extensions_queue_.active_request()->url)) {
    // Retry the same URL as the next multi-login user (see
    // IncrementAuthUserIndex, which rewrites the URL in place).
    extensions_queue_.RetryRequest(backoff_delay);
  } else {
    const std::set<int>& request_ids =
        extensions_queue_.active_request()->request_ids;
    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
    VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        extensions_queue_.active_request_failure_count() < kMaxRetries) {
      extensions_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("CrxFetchFailure",
                      extensions_queue_.active_request_failure_count(), url);
      // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
      UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status.error());
      delegate_->OnExtensionDownloadFailed(
          id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
    }
    // Done with this attempt's ping result either way.
    ping_results_.erase(id);
    extensions_queue_.reset_active_request();
  }

  // The fetcher is single-use; release it before starting the next request.
  extension_fetcher_.reset();

  // If there are any pending downloads left, start the next one.
  extensions_queue_.StartNextRequest();
}
811 
NotifyExtensionsDownloadFailed(const std::set<std::string> & extension_ids,const std::set<int> & request_ids,ExtensionDownloaderDelegate::Error error)812 void ExtensionDownloader::NotifyExtensionsDownloadFailed(
813     const std::set<std::string>& extension_ids,
814     const std::set<int>& request_ids,
815     ExtensionDownloaderDelegate::Error error) {
816   for (std::set<std::string>::const_iterator it = extension_ids.begin();
817        it != extension_ids.end(); ++it) {
818     const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[*it];
819     delegate_->OnExtensionDownloadFailed(*it, error, ping, request_ids);
820     ping_results_.erase(*it);
821   }
822 }
823 
NotifyUpdateFound(const std::string & id,const std::string & version)824 void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
825                                             const std::string& version) {
826   UpdateDetails updateInfo(id, Version(version));
827   content::NotificationService::current()->Notify(
828       chrome::NOTIFICATION_EXTENSION_UPDATE_FOUND,
829       content::NotificationService::AllBrowserContextsAndSources(),
830       content::Details<UpdateDetails>(&updateInfo));
831 }
832 
833 }  // namespace extensions
834