src/chrome/browser/extensions/updater/extension_downloader.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/extensions/updater/extension_downloader.h"

#include <utility>

#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/metrics/histogram.h"
#include "base/metrics/sparse_histogram.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/time/time.h"
#include "base/version.h"
#include "chrome/browser/chrome_notification_types.h"
#include "chrome/browser/extensions/updater/extension_cache.h"
#include "chrome/browser/extensions/updater/request_queue_impl.h"
#include "chrome/browser/extensions/updater/safe_manifest_parser.h"
#include "chrome/browser/metrics/chrome_metrics_service_accessor.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/chrome_version_info.h"
#include "chrome/common/extensions/extension_constants.h"
#include "chrome/common/extensions/manifest_url_handler.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_details.h"
#include "content/public/browser/notification_service.h"
#include "google_apis/gaia/identity_provider.h"
#include "net/base/backoff_entry.h"
#include "net/base/load_flags.h"
#include "net/base/net_errors.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_status_code.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_request_context_getter.h"
#include "net/url_request/url_request_status.h"

using base::Time;
using base::TimeDelta;
using content::BrowserThread;

namespace extensions {

const char ExtensionDownloader::kBlacklistAppID[] = "com.google.crx.blacklist";

namespace {

const net::BackoffEntry::Policy kDefaultBackoffPolicy = {
  // Number of initial errors (in sequence) to ignore before applying
  // exponential back-off rules.
  0,

  // Initial delay for exponential back-off in ms.
  2000,

  // Factor by which the waiting time will be multiplied.
  2,

  // Fuzzing percentage. ex: 10% will spread requests randomly
  // between 90%-100% of the calculated time.
  0.1,

  // Maximum amount of time we are willing to delay our request in ms.
  -1,

  // Time to keep an entry from being discarded even when it
  // has no significant state, -1 to never discard.
  -1,

  // Don't use initial delay unless the last request was an error.
  false,
};

const char kAuthUserQueryKey[] = "authuser";

const int kMaxAuthUserValue = 10;
const int kMaxOAuth2Attempts = 3;

const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
const char kDefaultInstallSource[] = "";

const char kGoogleDotCom[] = "google.com";
const char kTokenServiceConsumerId[] = "extension_downloader";
const char kWebstoreOAuth2Scope[] =
    "https://www.googleapis.com/auth/chromewebstore.readonly";

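// Records |retry_count| for a completed fetch in a UMA histogram, using
// separate histograms for google.com update URLs and all other URLs.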
#define RETRY_HISTOGRAM(name, retry_count, url)                           \
  if ((url).DomainIs(kGoogleDotCom)) {                                    \
    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
                                retry_count,                              \
                                1,                                        \
                                kMaxRetries,                              \
                                kMaxRetries + 1);                         \
  } else {                                                                \
    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl",  \
                                retry_count,                              \
                                1,                                        \
                                kMaxRetries,                              \
                                kMaxRetries + 1);                         \
  }

bool ShouldRetryRequest(const net::URLRequestStatus& status,
                        int response_code) {
  // Retry if the response code is a server error, or the request failed
  // because of network errors as opposed to file errors.
  return ((response_code >= 500 && status.is_success()) ||
          status.status() == net::URLRequestStatus::FAILED);
}

// Parses and updates a URL query such that the value of the |authuser|
// query parameter is incremented by 1. If the parameter was not present in
// the URL, it is added with a value of 1. All other query keys and values
// are preserved as-is. Returns |false| if the user index exceeds a
// hard-coded maximum.
bool IncrementAuthUserIndex(GURL* url) {
  int user_index = 0;
  std::string old_query = url->query();
  std::vector<std::string> new_query_parts;
  url::Component query(0, old_query.length());
  url::Component key, value;
  while (url::ExtractQueryKeyValue(old_query.c_str(), &query, &key, &value)) {
    std::string key_string = old_query.substr(key.begin, key.len);
    std::string value_string = old_query.substr(value.begin, value.len);
    if (key_string == kAuthUserQueryKey) {
      base::StringToInt(value_string, &user_index);
    } else {
      new_query_parts.push_back(base::StringPrintf(
          "%s=%s", key_string.c_str(), value_string.c_str()));
    }
  }
  if (user_index >= kMaxAuthUserValue)
    return false;
  new_query_parts.push_back(
      base::StringPrintf("%s=%d", kAuthUserQueryKey, user_index + 1));
  std::string new_query_string = JoinString(new_query_parts, '&');
  url::Component new_query(0, new_query_string.size());
  url::Replacements<char> replacements;
  replacements.SetQuery(new_query_string.c_str(), new_query);
  *url = url->ReplaceComponents(replacements);
  return true;
}

}  // namespace

UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
    : id(id), version(version) {}

UpdateDetails::~UpdateDetails() {}

ExtensionDownloader::ExtensionFetch::ExtensionFetch()
    : url(), credentials(CREDENTIALS_NONE) {
}

ExtensionDownloader::ExtensionFetch::ExtensionFetch(
    const std::string& id,
    const GURL& url,
    const std::string& package_hash,
    const std::string& version,
    const std::set<int>& request_ids)
    : id(id),
      url(url),
      package_hash(package_hash),
      version(version),
      request_ids(request_ids),
      credentials(CREDENTIALS_NONE),
      oauth2_attempt_count(0) {
}

ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}

ExtensionDownloader::ExtensionDownloader(
    ExtensionDownloaderDelegate* delegate,
    net::URLRequestContextGetter* request_context)
    : OAuth2TokenService::Consumer(kTokenServiceConsumerId),
      delegate_(delegate),
      request_context_(request_context),
      weak_ptr_factory_(this),
      manifests_queue_(&kDefaultBackoffPolicy,
                       base::Bind(&ExtensionDownloader::CreateManifestFetcher,
                                  base::Unretained(this))),
      extensions_queue_(&kDefaultBackoffPolicy,
                        base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
                                   base::Unretained(this))),
      extension_cache_(NULL) {
  DCHECK(delegate_);
  DCHECK(request_context_);
}

ExtensionDownloader::~ExtensionDownloader() {}

bool ExtensionDownloader::AddExtension(const Extension& extension,
                                       int request_id) {
  // Skip extensions with empty update URLs converted from user
  // scripts.
  if (extension.converted_from_user_script() &&
      ManifestURL::GetUpdateURL(&extension).is_empty()) {
    return false;
  }

  // If the extension updates itself from the gallery, ignore any update URL
  // data. At the moment there is no extra data that an extension can
  // communicate to the gallery update servers.
  std::string update_url_data;
  if (!ManifestURL::UpdatesFromGallery(&extension))
    update_url_data = delegate_->GetUpdateUrlData(extension.id());

  return AddExtensionData(extension.id(), *extension.version(),
                          extension.GetType(),
                          ManifestURL::GetUpdateURL(&extension),
                          update_url_data, request_id);
}

bool ExtensionDownloader::AddPendingExtension(const std::string& id,
                                              const GURL& update_url,
                                              int request_id) {
  // Use a zero version to ensure that a pending extension will always
  // be updated, and thus installed (assuming all extensions have
  // non-zero versions).
  Version version("0.0.0.0");
  DCHECK(version.IsValid());

  return AddExtensionData(id,
                          version,
                          Manifest::TYPE_UNKNOWN,
                          update_url,
                          std::string(),
                          request_id);
}

void ExtensionDownloader::StartAllPending(ExtensionCache* cache) {
  if (cache) {
    extension_cache_ = cache;
    extension_cache_->Start(base::Bind(
        &ExtensionDownloader::DoStartAllPending,
        weak_ptr_factory_.GetWeakPtr()));
  } else {
    DoStartAllPending();
  }
}

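// Flushes every ManifestFetchData accumulated in |fetches_preparing_| into the
// manifest request queue, reporting and then resetting the per-check URL stats.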
void ExtensionDownloader::DoStartAllPending() {
  ReportStats();
  url_stats_ = URLStats();

  for (FetchMap::iterator it = fetches_preparing_.begin();
       it != fetches_preparing_.end(); ++it) {
    std::vector<linked_ptr<ManifestFetchData> >& list = it->second;
    for (size_t i = 0; i < list.size(); ++i) {
      StartUpdateCheck(scoped_ptr<ManifestFetchData>(list[i].release()));
    }
  }
  fetches_preparing_.clear();
}

void ExtensionDownloader::StartBlacklistUpdate(
    const std::string& version,
    const ManifestFetchData::PingData& ping_data,
    int request_id) {
  // Note: it is very important that we use the https version of the update
  // url here to avoid DNS hijacking of the blacklist, which is not validated
  // by a public key signature like .crx files are.
  scoped_ptr<ManifestFetchData> blacklist_fetch(
      new ManifestFetchData(extension_urls::GetWebstoreUpdateUrl(),
                            request_id));
  DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
  blacklist_fetch->AddExtension(kBlacklistAppID,
                                version,
                                &ping_data,
                                std::string(),
                                kDefaultInstallSource);
  StartUpdateCheck(blacklist_fetch.Pass());
}

void ExtensionDownloader::SetWebstoreIdentityProvider(
    scoped_ptr<IdentityProvider> identity_provider) {
  identity_provider_.swap(identity_provider);
}

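// Adds |id| to a new or existing ManifestFetchData keyed on (request_id,
// update URL), falling back to the webstore update URL when the extension
// has no update URL of its own.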
bool ExtensionDownloader::AddExtensionData(const std::string& id,
                                           const Version& version,
                                           Manifest::Type extension_type,
                                           const GURL& extension_update_url,
                                           const std::string& update_url_data,
                                           int request_id) {
  GURL update_url(extension_update_url);
  // Skip extensions with non-empty invalid update URLs.
  if (!update_url.is_empty() && !update_url.is_valid()) {
    LOG(WARNING) << "Extension " << id << " has invalid update url "
                 << update_url;
    return false;
  }

  // Make sure we use SSL for store-hosted extensions.
  if (extension_urls::IsWebstoreUpdateUrl(update_url) &&
      !update_url.SchemeIsSecure())
    update_url = extension_urls::GetWebstoreUpdateUrl();

  // Skip extensions with empty IDs.
  if (id.empty()) {
    LOG(WARNING) << "Found extension with empty ID";
    return false;
  }

  if (update_url.DomainIs(kGoogleDotCom)) {
    url_stats_.google_url_count++;
  } else if (update_url.is_empty()) {
    url_stats_.no_url_count++;
    // Fill in default update URL.
    update_url = extension_urls::GetWebstoreUpdateUrl();
  } else {
    url_stats_.other_url_count++;
  }

  switch (extension_type) {
    case Manifest::TYPE_THEME:
      ++url_stats_.theme_count;
      break;
    case Manifest::TYPE_EXTENSION:
    case Manifest::TYPE_USER_SCRIPT:
      ++url_stats_.extension_count;
      break;
    case Manifest::TYPE_HOSTED_APP:
    case Manifest::TYPE_LEGACY_PACKAGED_APP:
      ++url_stats_.app_count;
      break;
    case Manifest::TYPE_PLATFORM_APP:
      ++url_stats_.platform_app_count;
      break;
    case Manifest::TYPE_UNKNOWN:
    default:
      ++url_stats_.pending_count;
      break;
  }

  std::vector<GURL> update_urls;
  update_urls.push_back(update_url);
  // If UMA is enabled, also add this extension to a ManifestFetchData for the
  // webstore update URL.
  if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
      ChromeMetricsServiceAccessor::IsMetricsReportingEnabled()) {
    update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
  }

  for (size_t i = 0; i < update_urls.size(); ++i) {
    DCHECK(!update_urls[i].is_empty());
    DCHECK(update_urls[i].is_valid());

    std::string install_source = i == 0 ?
        kDefaultInstallSource : kNotFromWebstoreInstallSource;

    ManifestFetchData::PingData ping_data;
    ManifestFetchData::PingData* optional_ping_data = NULL;
    if (delegate_->GetPingDataForExtension(id, &ping_data))
      optional_ping_data = &ping_data;

    // Find or create a ManifestFetchData to add this extension to.
    bool added = false;
    FetchMap::iterator existing_iter = fetches_preparing_.find(
        std::make_pair(request_id, update_urls[i]));
    if (existing_iter != fetches_preparing_.end() &&
        !existing_iter->second.empty()) {
      // Try to add to the ManifestFetchData at the end of the list.
      ManifestFetchData* existing_fetch = existing_iter->second.back().get();
      if (existing_fetch->AddExtension(id, version.GetString(),
                                       optional_ping_data, update_url_data,
                                       install_source)) {
        added = true;
      }
    }
    if (!added) {
      // Otherwise add a new element to the list, if the list doesn't exist or
      // if its last element is already full.
      linked_ptr<ManifestFetchData> fetch(
          new ManifestFetchData(update_urls[i], request_id));
      fetches_preparing_[std::make_pair(request_id, update_urls[i])].
          push_back(fetch);
      added = fetch->AddExtension(id, version.GetString(),
                                  optional_ping_data,
                                  update_url_data,
                                  install_source);
      DCHECK(added);
    }
  }

  return true;
}

void ExtensionDownloader::ReportStats() const {
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
                           url_stats_.extension_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
                           url_stats_.theme_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
                           url_stats_.app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
                           url_stats_.platform_app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
                           url_stats_.pending_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
                           url_stats_.google_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
                           url_stats_.other_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
                           url_stats_.no_url_count);
}

void ExtensionDownloader::StartUpdateCheck(
    scoped_ptr<ManifestFetchData> fetch_data) {
  const std::set<std::string>& id_set(fetch_data->extension_ids());

  if (CommandLine::ForCurrentProcess()->HasSwitch(
      switches::kDisableBackgroundNetworking)) {
    NotifyExtensionsDownloadFailed(id_set,
                                   fetch_data->request_ids(),
                                   ExtensionDownloaderDelegate::DISABLED);
    return;
  }

  RequestQueue<ManifestFetchData>::iterator i;
  for (i = manifests_queue_.begin(); i != manifests_queue_.end(); ++i) {
    if (fetch_data->full_url() == i->full_url()) {
      // This url is already scheduled to be fetched.
      i->Merge(*fetch_data);
      return;
    }
  }

  if (manifests_queue_.active_request() &&
      manifests_queue_.active_request()->full_url() == fetch_data->full_url()) {
    manifests_queue_.active_request()->Merge(*fetch_data);
  } else {
    UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
        fetch_data->full_url().possibly_invalid_spec().length());

    manifests_queue_.ScheduleRequest(fetch_data.Pass());
  }
}

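// Invoked by |manifests_queue_| when a manifest request becomes active: builds
// the URLFetcher for the active request's full update URL and starts it.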
void ExtensionDownloader::CreateManifestFetcher() {
  if (VLOG_IS_ON(2)) {
    std::vector<std::string> id_vector(
        manifests_queue_.active_request()->extension_ids().begin(),
        manifests_queue_.active_request()->extension_ids().end());
    std::string id_list = JoinString(id_vector, ',');
    VLOG(2) << "Fetching " << manifests_queue_.active_request()->full_url()
            << " for " << id_list;
  }

  manifest_fetcher_.reset(net::URLFetcher::Create(
      kManifestFetcherId, manifests_queue_.active_request()->full_url(),
      net::URLFetcher::GET, this));
  manifest_fetcher_->SetRequestContext(request_context_);
  manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
                                  net::LOAD_DO_NOT_SAVE_COOKIES |
                                  net::LOAD_DISABLE_CACHE);
  // Update checks can be interrupted if a network change is detected; this is
  // common for the retail mode AppPack on ChromeOS. Retrying once should be
  // enough to recover in those cases; let the fetcher retry up to 3 times
  // just in case. http://crosbug.com/130602
  manifest_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
  manifest_fetcher_->Start();
}

void ExtensionDownloader::OnURLFetchComplete(
    const net::URLFetcher* source) {
  VLOG(2) << source->GetResponseCode() << " " << source->GetURL();

  if (source == manifest_fetcher_.get()) {
    std::string data;
    source->GetResponseAsString(&data);
    OnManifestFetchComplete(source->GetURL(),
                            source->GetStatus(),
                            source->GetResponseCode(),
                            source->GetBackoffDelay(),
                            data);
  } else if (source == extension_fetcher_.get()) {
    OnCRXFetchComplete(source,
                       source->GetURL(),
                       source->GetStatus(),
                       source->GetResponseCode(),
                       source->GetBackoffDelay());
  } else {
    NOTREACHED();
  }
}

void ExtensionDownloader::OnManifestFetchComplete(
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay,
    const std::string& data) {
  // We want to try parsing the manifest, and if it indicates updates are
  // available, we want to fire off requests to fetch those updates.
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || (url.SchemeIsFile() && data.length() > 0))) {
    RETRY_HISTOGRAM("ManifestFetchSuccess",
                    manifests_queue_.active_request_failure_count(), url);
    VLOG(2) << "beginning manifest parse for " << url;
    scoped_refptr<SafeManifestParser> safe_parser(
        new SafeManifestParser(
            data,
            manifests_queue_.reset_active_request().release(),
            base::Bind(&ExtensionDownloader::HandleManifestResults,
                       weak_ptr_factory_.GetWeakPtr())));
    safe_parser->Start();
  } else {
    VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        manifests_queue_.active_request_failure_count() < kMaxRetries) {
      manifests_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("ManifestFetchFailure",
                      manifests_queue_.active_request_failure_count(), url);
      NotifyExtensionsDownloadFailed(
          manifests_queue_.active_request()->extension_ids(),
          manifests_queue_.active_request()->request_ids(),
          ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED);
    }
  }
  manifest_fetcher_.reset();
  manifests_queue_.reset_active_request();

  // If we have any pending manifest requests, fire off the next one.
  manifests_queue_.StartNextRequest();
}

void ExtensionDownloader::HandleManifestResults(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results* results) {
  // Keep a list of extensions that will not be updated, so that the
  // |delegate_| can be notified once we're done here.
  std::set<std::string> not_updated(fetch_data.extension_ids());

  if (!results) {
    NotifyExtensionsDownloadFailed(
        not_updated,
        fetch_data.request_ids(),
        ExtensionDownloaderDelegate::MANIFEST_INVALID);
    return;
  }

  // Examine the parsed manifest and kick off fetches of any new crx files.
  std::vector<int> updates;
  DetermineUpdates(fetch_data, *results, &updates);
  for (size_t i = 0; i < updates.size(); i++) {
    const UpdateManifest::Result* update = &(results->list.at(updates[i]));
    const std::string& id = update->extension_id;
    not_updated.erase(id);

    GURL crx_url = update->crx_url;
    if (id != kBlacklistAppID) {
      NotifyUpdateFound(update->extension_id, update->version);
    } else {
      // The URL of the blacklist file is returned by the server and we need to
      // be sure that we continue to be able to reliably detect whether a URL
      // references a blacklist file.
      DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url)) << crx_url;

      // Force https (crbug.com/129587).
      if (!crx_url.SchemeIsSecure()) {
        url::Replacements<char> replacements;
        std::string scheme("https");
        replacements.SetScheme(scheme.c_str(),
                               url::Component(0, scheme.size()));
        crx_url = crx_url.ReplaceComponents(replacements);
      }
    }
    scoped_ptr<ExtensionFetch> fetch(new ExtensionFetch(
        update->extension_id, crx_url, update->package_hash,
        update->version, fetch_data.request_ids()));
    FetchUpdatedExtension(fetch.Pass());
  }

  // If the manifest response included a <daystart> element, we want to save
  // that value for any extensions which had sent a ping in the request.
  if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
      results->daystart_elapsed_seconds >= 0) {
    Time day_start =
        Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);

    const std::set<std::string>& extension_ids = fetch_data.extension_ids();
    std::set<std::string>::const_iterator i;
    for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
      const std::string& id = *i;
      ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
      result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
      result.day_start = day_start;
    }
  }

  NotifyExtensionsDownloadFailed(
      not_updated,
      fetch_data.request_ids(),
      ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
}

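// Fills |result| with the indices of entries in |possible_updates| that should
// be fetched: entries this fetch asked about, that are newer than the
// installed version (unless the extension is still pending install), and whose
// browser_min_version, if any, is satisfied by the running browser.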
void ExtensionDownloader::DetermineUpdates(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results& possible_updates,
    std::vector<int>* result) {
  // This will only be valid if one of possible_updates specifies
  // browser_min_version.
  Version browser_version;

  for (size_t i = 0; i < possible_updates.list.size(); i++) {
    const UpdateManifest::Result* update = &possible_updates.list[i];
    const std::string& id = update->extension_id;

    if (!fetch_data.Includes(id)) {
      VLOG(2) << "Ignoring " << id << " from this manifest";
      continue;
    }

    if (VLOG_IS_ON(2)) {
      if (update->version.empty())
        VLOG(2) << "manifest indicates " << id << " has no update";
      else
        VLOG(2) << "manifest indicates " << id
                << " latest version is '" << update->version << "'";
    }

    if (!delegate_->IsExtensionPending(id)) {
      // If we're not installing a pending extension, and the update
      // version is the same as or older than what's already installed,
      // we don't want it.
      std::string version;
      if (!delegate_->GetExtensionExistingVersion(id, &version)) {
        VLOG(2) << id << " is not installed";
        continue;
      }

      VLOG(2) << id << " is at '" << version << "'";

      Version existing_version(version);
      Version update_version(update->version);

      if (!update_version.IsValid() ||
          update_version.CompareTo(existing_version) <= 0) {
        continue;
      }
    }

    // If the update specifies a browser minimum version, do we qualify?
    if (update->browser_min_version.length() > 0) {
      // First determine the browser version if we haven't already.
      if (!browser_version.IsValid()) {
        chrome::VersionInfo version_info;
        if (version_info.is_valid())
          browser_version = Version(version_info.Version());
      }
      Version browser_min_version(update->browser_min_version);
      if (browser_version.IsValid() && browser_min_version.IsValid() &&
          browser_min_version.CompareTo(browser_version) > 0) {
        // TODO(asargent) - We may want this to show up in the extensions UI
        // eventually. (http://crbug.com/12547).
        LOG(WARNING) << "Updated version of extension " << id
                     << " available, but requires chrome version "
                     << update->browser_min_version;
        continue;
      }
    }
    VLOG(2) << "will try to update " << id;
    result->push_back(i);
  }
}

// Begins (or queues up) download of an updated extension.
void ExtensionDownloader::FetchUpdatedExtension(
    scoped_ptr<ExtensionFetch> fetch_data) {
  if (!fetch_data->url.is_valid()) {
    // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
    LOG(ERROR) << "Invalid URL: '" << fetch_data->url.possibly_invalid_spec()
               << "' for extension " << fetch_data->id;
    return;
  }

  for (RequestQueue<ExtensionFetch>::iterator iter =
           extensions_queue_.begin();
       iter != extensions_queue_.end(); ++iter) {
    if (iter->id == fetch_data->id || iter->url == fetch_data->url) {
      iter->request_ids.insert(fetch_data->request_ids.begin(),
                               fetch_data->request_ids.end());
      return;  // already scheduled
    }
  }

  if (extensions_queue_.active_request() &&
      extensions_queue_.active_request()->url == fetch_data->url) {
    extensions_queue_.active_request()->request_ids.insert(
        fetch_data->request_ids.begin(), fetch_data->request_ids.end());
  } else {
    std::string version;
    if (extension_cache_ &&
        extension_cache_->GetExtension(fetch_data->id, NULL, &version) &&
        version == fetch_data->version) {
      base::FilePath crx_path;
      // Now get .crx file path and mark extension as used.
      extension_cache_->GetExtension(fetch_data->id, &crx_path, &version);
      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, false);
    } else {
      extensions_queue_.ScheduleRequest(fetch_data.Pass());
    }
  }
}

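// Hands the downloaded (or cache-hit) .crx at |crx_path| to the delegate and
// drops the cached ping result for that extension.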
void ExtensionDownloader::NotifyDelegateDownloadFinished(
    scoped_ptr<ExtensionFetch> fetch_data,
    const base::FilePath& crx_path,
    bool file_ownership_passed) {
  delegate_->OnExtensionDownloadFinished(fetch_data->id, crx_path,
      file_ownership_passed, fetch_data->url, fetch_data->version,
      ping_results_[fetch_data->id], fetch_data->request_ids);
  ping_results_.erase(fetch_data->id);
}

void ExtensionDownloader::CreateExtensionFetcher() {
  const ExtensionFetch* fetch = extensions_queue_.active_request();
  extension_fetcher_.reset(net::URLFetcher::Create(
      kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
  extension_fetcher_->SetRequestContext(request_context_);
  extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);

  int load_flags = net::LOAD_DISABLE_CACHE;
  bool is_secure = fetch->url.SchemeIsSecure();
  if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_secure) {
    load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
                  net::LOAD_DO_NOT_SAVE_COOKIES;
  }
  extension_fetcher_->SetLoadFlags(load_flags);

  // Download CRX files to a temp file. The blacklist is small and will be
  // processed in memory, so it is fetched into a string.
  if (fetch->id != kBlacklistAppID) {
    extension_fetcher_->SaveResponseToTemporaryFile(
        BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
  }

  if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
      is_secure) {
    if (access_token_.empty()) {
      // We should try OAuth2, but we have no token cached. The extension
      // fetcher will be started once the token fetch completes, in either
      // OnGetTokenSuccess or OnGetTokenFailure.
      DCHECK(identity_provider_.get());
      OAuth2TokenService::ScopeSet webstore_scopes;
      webstore_scopes.insert(kWebstoreOAuth2Scope);
      access_token_request_ =
          identity_provider_->GetTokenService()->StartRequest(
              identity_provider_->GetActiveAccountId(),
              webstore_scopes,
              this);
      return;
    }
    extension_fetcher_->AddExtraRequestHeader(
        base::StringPrintf("%s: Bearer %s",
            net::HttpRequestHeaders::kAuthorization,
            access_token_.c_str()));
  }

  VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
  extension_fetcher_->Start();
}

void ExtensionDownloader::OnCRXFetchComplete(
    const net::URLFetcher* source,
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay) {
  ExtensionFetch& active_request = *extensions_queue_.active_request();
  const std::string& id = active_request.id;
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || url.SchemeIsFile())) {
    RETRY_HISTOGRAM("CrxFetchSuccess",
                    extensions_queue_.active_request_failure_count(), url);
    base::FilePath crx_path;
    // Take ownership of the file at |crx_path|.
    CHECK(source->GetResponseAsFilePath(true, &crx_path));
    scoped_ptr<ExtensionFetch> fetch_data =
        extensions_queue_.reset_active_request();
    if (extension_cache_) {
      const std::string& version = fetch_data->version;
      extension_cache_->PutExtension(id, crx_path, version,
          base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished,
                     weak_ptr_factory_.GetWeakPtr(),
                     base::Passed(&fetch_data)));
    } else {
      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
    }
  } else if (IterateFetchCredentialsAfterFailure(
                &active_request,
                status,
                response_code)) {
    extensions_queue_.RetryRequest(backoff_delay);
  } else {
    const std::set<int>& request_ids = active_request.request_ids;
    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
    VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        extensions_queue_.active_request_failure_count() < kMaxRetries) {
      extensions_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("CrxFetchFailure",
                      extensions_queue_.active_request_failure_count(), url);
      // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
      UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status.error());
      delegate_->OnExtensionDownloadFailed(
          id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
    }
    ping_results_.erase(id);
    extensions_queue_.reset_active_request();
  }

  extension_fetcher_.reset();

  // If there are any pending downloads left, start the next one.
  extensions_queue_.StartNextRequest();
}

void ExtensionDownloader::NotifyExtensionsDownloadFailed(
    const std::set<std::string>& extension_ids,
    const std::set<int>& request_ids,
    ExtensionDownloaderDelegate::Error error) {
  for (std::set<std::string>::const_iterator it = extension_ids.begin();
       it != extension_ids.end(); ++it) {
    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[*it];
    delegate_->OnExtensionDownloadFailed(*it, error, ping, request_ids);
    ping_results_.erase(*it);
  }
}

void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
                                            const std::string& version) {
  UpdateDetails updateInfo(id, Version(version));
  content::NotificationService::current()->Notify(
      extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
      content::NotificationService::AllBrowserContextsAndSources(),
      content::Details<UpdateDetails>(&updateInfo));
}

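// On an authorization failure (cancellation, 401, or 403), advances |fetch|
// through the credential fallback sequence: anonymous -> OAuth2 token (for
// google.com URLs when an identity provider is available) -> cookies, finally
// incrementing the authuser= index. Returns true if the fetch should be
// retried with the new credentials.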
bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
    ExtensionFetch* fetch,
    const net::URLRequestStatus& status,
    int response_code) {
  bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
                      (status.status() == net::URLRequestStatus::SUCCESS &&
                       (response_code == net::HTTP_UNAUTHORIZED ||
                        response_code == net::HTTP_FORBIDDEN));
  if (!auth_failure) {
    return false;
  }
  // Here we decide what to do next if the server refused to authorize this
  // fetch.
  switch (fetch->credentials) {
    case ExtensionFetch::CREDENTIALS_NONE:
      if (fetch->url.DomainIs(kGoogleDotCom) && identity_provider_) {
        fetch->credentials = ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN;
      } else {
        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
      }
      return true;
    case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN:
      fetch->oauth2_attempt_count++;
      // OAuth2 may fail due to an expired access token, in which case we
      // should invalidate the token and try again.
      if (response_code == net::HTTP_UNAUTHORIZED &&
          fetch->oauth2_attempt_count <= kMaxOAuth2Attempts) {
        DCHECK(identity_provider_.get());
        OAuth2TokenService::ScopeSet webstore_scopes;
        webstore_scopes.insert(kWebstoreOAuth2Scope);
        identity_provider_->GetTokenService()->InvalidateToken(
            identity_provider_->GetActiveAccountId(),
            webstore_scopes,
            access_token_);
        access_token_.clear();
        return true;
      }
      // Either there is no Gaia identity available, the active identity
      // doesn't have access to this resource, or the server keeps returning
      // 401s and we've retried too many times. Fall back on cookies.
      if (access_token_.empty() ||
          response_code == net::HTTP_FORBIDDEN ||
          fetch->oauth2_attempt_count > kMaxOAuth2Attempts) {
        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
        return true;
      }
      // Something else is wrong. Time to give up.
      return false;
    case ExtensionFetch::CREDENTIALS_COOKIES:
      if (response_code == net::HTTP_FORBIDDEN) {
        // Try the next session identity, up to some maximum.
        return IncrementAuthUserIndex(&fetch->url);
      }
      return false;
    default:
      NOTREACHED();
  }
  NOTREACHED();
  return false;
}

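// OAuth2TokenService::Consumer implementation. Both callbacks restart the
// extension fetch that was parked in CreateExtensionFetcher: on success with
// the new bearer token attached, on failure without it so the request can
// fall back on cookies.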
void ExtensionDownloader::OnGetTokenSuccess(
    const OAuth2TokenService::Request* request,
    const std::string& access_token,
    const base::Time& expiration_time) {
  access_token_ = access_token;
  extension_fetcher_->AddExtraRequestHeader(
      base::StringPrintf("%s: Bearer %s",
          net::HttpRequestHeaders::kAuthorization,
          access_token_.c_str()));
  extension_fetcher_->Start();
}

void ExtensionDownloader::OnGetTokenFailure(
    const OAuth2TokenService::Request* request,
    const GoogleServiceAuthError& error) {
  // If we fail to get an access token, kick the pending fetch and let it fall
  // back on cookies.
  extension_fetcher_->Start();
}

}  // namespace extensions