1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "chrome/browser/extensions/updater/extension_downloader.h"
10 #include "base/command_line.h"
11 #include "base/files/file_path.h"
12 #include "base/location.h"
13 #include "base/logging.h"
14 #include "base/metrics/histogram.h"
15 #include "base/metrics/sparse_histogram.h"
16 #include "base/stl_util.h"
17 #include "base/strings/string_number_conversions.h"
18 #include "base/strings/string_util.h"
19 #include "base/strings/stringprintf.h"
20 #include "base/time/time.h"
21 #include "base/version.h"
22 #include "chrome/browser/chrome_notification_types.h"
23 #include "chrome/browser/extensions/updater/extension_cache.h"
24 #include "chrome/browser/extensions/updater/request_queue_impl.h"
25 #include "chrome/common/chrome_switches.h"
26 #include "chrome/common/chrome_version_info.h"
27 #include "chrome/common/extensions/manifest_url_handler.h"
28 #include "content/public/browser/browser_thread.h"
29 #include "content/public/browser/notification_details.h"
30 #include "content/public/browser/notification_service.h"
31 #include "extensions/browser/updater/safe_manifest_parser.h"
32 #include "extensions/common/extension_urls.h"
33 #include "google_apis/gaia/identity_provider.h"
34 #include "net/base/backoff_entry.h"
35 #include "net/base/load_flags.h"
36 #include "net/base/net_errors.h"
37 #include "net/http/http_request_headers.h"
38 #include "net/http/http_status_code.h"
39 #include "net/url_request/url_fetcher.h"
40 #include "net/url_request/url_request_context_getter.h"
41 #include "net/url_request/url_request_status.h"
44 using base::TimeDelta;
45 using content::BrowserThread;
namespace extensions {

// App ID under which the extension blacklist is requested from the webstore
// update servers (it is fetched like a regular extension).
const char ExtensionDownloader::kBlacklistAppID[] = "com.google.crx.blacklist";

// Backoff policy shared by the manifest and extension request queues.
// NOTE(review): the numeric policy values are not visible in this chunk.
const net::BackoffEntry::Policy kDefaultBackoffPolicy = {
  // Number of initial errors (in sequence) to ignore before applying
  // exponential back-off rules.
  // Initial delay for exponential back-off in ms.
  // Factor by which the waiting time will be multiplied.
  // Fuzzing percentage. ex: 10% will spread requests randomly
  // between 90%-100% of the calculated time.
  // Maximum amount of time we are willing to delay our request in ms.
  // Time to keep an entry from being discarded even when it
  // has no significant state, -1 to never discard.
  // Don't use initial delay unless the last request was an error.

// Query parameter whose value selects which signed-in Google session is used
// for cookie-authenticated fetches (see IncrementAuthUserIndex).
const char kAuthUserQueryKey[] = "authuser";

// Hard upper bound on the authuser index tried by IncrementAuthUserIndex().
const int kMaxAuthUserValue = 10;
// Maximum number of OAuth2 token attempts before falling back on cookies.
const int kMaxOAuth2Attempts = 3;

// Install source reported on the extra webstore manifest fetch made for
// extensions whose primary update URL is not the webstore.
const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
const char kDefaultInstallSource[] = "";

// Domain used to classify update URLs as Google-hosted (for histograms and
// credential handling).
const char kGoogleDotCom[] = "google.com";

// Consumer name passed to the OAuth2TokenService::Consumer base class.
const char kTokenServiceConsumerId[] = "extension_downloader";
// OAuth2 scope requested for authenticated (read-only) webstore downloads.
const char kWebstoreOAuth2Scope[] =
    "https://www.googleapis.com/auth/chromewebstore.readonly";
// Records the retry count of a manifest or CRX fetch into a custom-counts
// histogram, bucketed by whether |url| is Google-hosted. A macro (rather than
// a function) because UMA_HISTOGRAM_* requires a literal histogram name.
// No comments inside the macro body: a '//' comment ending in '\' would
// splice the following line into the comment.
#define RETRY_HISTOGRAM(name, retry_count, url) \
  if ((url).DomainIs(kGoogleDotCom)) { \
    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl", \
// Returns true if a fetch should be retried: the server answered with a 5xx
// status (the request itself succeeded), or the request failed outright at
// the network layer.
bool ShouldRetryRequest(const net::URLRequestStatus& status,
  // Retry if the response code is a server error, or the request failed because
  // of network errors as opposed to file errors.
  return ((response_code >= 500 && status.is_success()) ||
          status.status() == net::URLRequestStatus::FAILED);
// This parses and updates a URL query such that the value of the |authuser|
// query parameter is incremented by 1. If parameter was not present in the URL,
// it will be added with a value of 1. All other query keys and values are
// preserved as-is. Returns |false| if the user index exceeds a hard-coded
// maximum (kMaxAuthUserValue).
bool IncrementAuthUserIndex(GURL* url) {
  std::string old_query = url->query();
  std::vector<std::string> new_query_parts;
  // Walk every key=value pair of the existing query string.
  url::Component query(0, old_query.length());
  url::Component key, value;
  while (url::ExtractQueryKeyValue(old_query.c_str(), &query, &key, &value)) {
    std::string key_string = old_query.substr(key.begin, key.len);
    std::string value_string = old_query.substr(value.begin, value.len);
    if (key_string == kAuthUserQueryKey) {
      // Remember the current authuser index; the incremented pair is
      // appended after the loop.
      base::StringToInt(value_string, &user_index);
      // Parameters other than authuser are copied through unchanged.
      new_query_parts.push_back(base::StringPrintf(
          "%s=%s", key_string.c_str(), value_string.c_str()));
  // Give up once the hard-coded maximum session index is reached.
  if (user_index >= kMaxAuthUserValue)
  new_query_parts.push_back(
      base::StringPrintf("%s=%d", kAuthUserQueryKey, user_index + 1));
  // Rebuild the query string and swap it into |url|.
  std::string new_query_string = JoinString(new_query_parts, '&');
  url::Component new_query(0, new_query_string.size());
  url::Replacements<char> replacements;
  replacements.SetQuery(new_query_string.c_str(), new_query);
  *url = url->ReplaceComponents(replacements);
// Payload for the NOTIFICATION_EXTENSION_UPDATE_FOUND notification: the id of
// the extension and the version an update was found for.
UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
    : id(id), version(version) {}

UpdateDetails::~UpdateDetails() {}
// A default-constructed fetch carries an empty URL and no credentials.
ExtensionDownloader::ExtensionFetch::ExtensionFetch()
    : url(), credentials(CREDENTIALS_NONE) {

ExtensionDownloader::ExtensionFetch::ExtensionFetch(
    const std::string& id,
    const std::string& package_hash,
    const std::string& version,
    const std::set<int>& request_ids)
      package_hash(package_hash),
      request_ids(request_ids),
      // Fetches start unauthenticated; credentials are escalated on auth
      // failures (see IterateFetchCredentialsAfterFailure).
      credentials(CREDENTIALS_NONE),
      oauth2_attempt_count(0) {

ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
ExtensionDownloader::ExtensionDownloader(
    ExtensionDownloaderDelegate* delegate,
    net::URLRequestContextGetter* request_context)
    : OAuth2TokenService::Consumer(kTokenServiceConsumerId),
      request_context_(request_context),
      // Both request queues share the default backoff policy; the bound
      // callbacks create a URLFetcher whenever a queued request becomes
      // active. base::Unretained(this) is safe because the queues are
      // members and cannot outlive |this|.
      manifests_queue_(&kDefaultBackoffPolicy,
                       base::Bind(&ExtensionDownloader::CreateManifestFetcher,
                                  base::Unretained(this))),
      extensions_queue_(&kDefaultBackoffPolicy,
                        base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
                                   base::Unretained(this))),
      extension_cache_(NULL),
      enable_extra_update_metrics_(false),
      weak_ptr_factory_(this) {
  DCHECK(request_context_.get());

ExtensionDownloader::~ExtensionDownloader() {}
// Adds an installed extension to the set whose update URLs will be checked
// when StartAllPending() runs; forwards to AddExtensionData().
bool ExtensionDownloader::AddExtension(const Extension& extension,
  // Skip extensions with empty update URLs converted from user scripts.
  if (extension.converted_from_user_script() &&
      ManifestURL::GetUpdateURL(&extension).is_empty()) {
  // If the extension updates itself from the gallery, ignore any update URL
  // data. At the moment there is no extra data that an extension can
  // communicate to the gallery update servers.
  std::string update_url_data;
  if (!ManifestURL::UpdatesFromGallery(&extension))
    update_url_data = delegate_->GetUpdateUrlData(extension.id());
  std::string install_source;
  // The delegate decides whether this update must bypass the version check.
  bool force_update = delegate_->ShouldForceUpdate(extension.id(),
  return AddExtensionData(extension.id(),
                          *extension.version(),
                          ManifestURL::GetUpdateURL(&extension),
// Adds a not-yet-installed (pending) extension to the update-check set;
// forwards to AddExtensionData() with a sentinel zero version.
bool ExtensionDownloader::AddPendingExtension(const std::string& id,
                                              const GURL& update_url,
  // Use a zero version to ensure that a pending extension will always
  // be updated, and thus installed (assuming all extensions have
  // non-zero versions).
  Version version("0.0.0.0");
  DCHECK(version.IsValid());
  return AddExtensionData(id,
                          Manifest::TYPE_UNKNOWN,
// Begins processing everything queued via AddExtension()/AddPendingExtension().
// The fetches are actually started from DoStartAllPending(), invoked once
// |cache| reports it is ready.
void ExtensionDownloader::StartAllPending(ExtensionCache* cache) {
  extension_cache_ = cache;
  extension_cache_->Start(base::Bind(
      &ExtensionDownloader::DoStartAllPending,
      weak_ptr_factory_.GetWeakPtr()));
void ExtensionDownloader::DoStartAllPending() {
  // Reset the per-cycle URL statistics.
  url_stats_ = URLStats();
  // Move every prepared ManifestFetchData into the manifests queue.
  for (FetchMap::iterator it = fetches_preparing_.begin();
       it != fetches_preparing_.end(); ++it) {
    std::vector<linked_ptr<ManifestFetchData> >& list = it->second;
    for (size_t i = 0; i < list.size(); ++i) {
      // Ownership transfers from the linked_ptr into the queue.
      StartUpdateCheck(scoped_ptr<ManifestFetchData>(list[i].release()));
  fetches_preparing_.clear();
// Schedules a manifest fetch for the extension blacklist against the secure
// webstore update URL.
void ExtensionDownloader::StartBlacklistUpdate(
    const std::string& version,
    const ManifestFetchData::PingData& ping_data,
  // Note: it is very important that we use the https version of the update
  // url here to avoid DNS hijacking of the blacklist, which is not validated
  // by a public key signature like .crx files are.
  scoped_ptr<ManifestFetchData> blacklist_fetch(CreateManifestFetchData(
      extension_urls::GetWebstoreUpdateUrl(), request_id));
  DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
  blacklist_fetch->AddExtension(kBlacklistAppID,
                                kDefaultInstallSource,
  StartUpdateCheck(blacklist_fetch.Pass());
// Installs the identity provider used for OAuth2-authenticated webstore
// downloads. Takes ownership of |identity_provider|.
void ExtensionDownloader::SetWebstoreIdentityProvider(
    scoped_ptr<IdentityProvider> identity_provider) {
  identity_provider_.swap(identity_provider);
// Shared implementation behind AddExtension()/AddPendingExtension(): validates
// and normalizes the update URL, tallies URL statistics, and files the
// extension into an existing or new ManifestFetchData keyed by
// (request_id, update_url).
bool ExtensionDownloader::AddExtensionData(
    const std::string& id,
    const Version& version,
    Manifest::Type extension_type,
    const GURL& extension_update_url,
    const std::string& update_url_data,
    const std::string& install_source_override) {
  GURL update_url(extension_update_url);
  // Skip extensions with non-empty invalid update URLs.
  if (!update_url.is_empty() && !update_url.is_valid()) {
    LOG(WARNING) << "Extension " << id << " has invalid update url "
  // Make sure we use SSL for store-hosted extensions.
  if (extension_urls::IsWebstoreUpdateUrl(update_url) &&
      !update_url.SchemeIsSecure())
    update_url = extension_urls::GetWebstoreUpdateUrl();
  // Skip extensions with empty IDs.
    LOG(WARNING) << "Found extension with empty ID";
  // Bucket the update URL for the UMA counts reported by ReportStats().
  if (update_url.DomainIs(kGoogleDotCom)) {
    url_stats_.google_url_count++;
  } else if (update_url.is_empty()) {
    url_stats_.no_url_count++;
    // Fill in default update URL.
    update_url = extension_urls::GetWebstoreUpdateUrl();
    url_stats_.other_url_count++;
  // Tally the extension type for the same statistics.
  switch (extension_type) {
    case Manifest::TYPE_THEME:
      ++url_stats_.theme_count;
    case Manifest::TYPE_EXTENSION:
    case Manifest::TYPE_USER_SCRIPT:
      ++url_stats_.extension_count;
    case Manifest::TYPE_HOSTED_APP:
    case Manifest::TYPE_LEGACY_PACKAGED_APP:
      ++url_stats_.app_count;
    case Manifest::TYPE_PLATFORM_APP:
      ++url_stats_.platform_app_count;
    case Manifest::TYPE_UNKNOWN:
      ++url_stats_.pending_count;
  std::vector<GURL> update_urls;
  update_urls.push_back(update_url);
  // If metrics are enabled, also add to ManifestFetchData for the
  // webstore update URL.
  if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
      enable_extra_update_metrics_) {
    update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
  for (size_t i = 0; i < update_urls.size(); ++i) {
    DCHECK(!update_urls[i].is_empty());
    DCHECK(update_urls[i].is_valid());
    // Only the primary (first) URL uses the default install source; the
    // extra metrics fetch is tagged "notfromwebstore".
    std::string install_source = i == 0 ?
        kDefaultInstallSource : kNotFromWebstoreInstallSource;
    if (!install_source_override.empty()) {
      install_source = install_source_override;
    ManifestFetchData::PingData ping_data;
    ManifestFetchData::PingData* optional_ping_data = NULL;
    if (delegate_->GetPingDataForExtension(id, &ping_data))
      optional_ping_data = &ping_data;
    // Find or create a ManifestFetchData to add this extension to.
    FetchMap::iterator existing_iter = fetches_preparing_.find(
        std::make_pair(request_id, update_urls[i]));
    if (existing_iter != fetches_preparing_.end() &&
        !existing_iter->second.empty()) {
      // Try to add to the ManifestFetchData at the end of the list.
      ManifestFetchData* existing_fetch = existing_iter->second.back().get();
      if (existing_fetch->AddExtension(id, version.GetString(),
                                       optional_ping_data, update_url_data,
    // Otherwise add a new element to the list, if the list doesn't exist or
    // if its last element is already full.
    linked_ptr<ManifestFetchData> fetch(
        CreateManifestFetchData(update_urls[i], request_id));
    fetches_preparing_[std::make_pair(request_id, update_urls[i])].
    added = fetch->AddExtension(id, version.GetString(),
// Reports one UMA count histogram per extension type / update-URL class
// accumulated in |url_stats_| during this update-check cycle.
void ExtensionDownloader::ReportStats() const {
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
                           url_stats_.extension_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
                           url_stats_.theme_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
                           url_stats_.app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
                           url_stats_.platform_app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
                           url_stats_.pending_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
                           url_stats_.google_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
                           url_stats_.other_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
                           url_stats_.no_url_count);
// Queues a manifest update check. A request for a full URL that is already
// queued, or currently in flight, is merged into the existing one instead of
// being duplicated.
void ExtensionDownloader::StartUpdateCheck(
    scoped_ptr<ManifestFetchData> fetch_data) {
  const std::set<std::string>& id_set(fetch_data->extension_ids());
  // Background networking disabled: fail the whole batch immediately.
  if (CommandLine::ForCurrentProcess()->HasSwitch(
          switches::kDisableBackgroundNetworking)) {
    NotifyExtensionsDownloadFailed(id_set,
                                   fetch_data->request_ids(),
                                   ExtensionDownloaderDelegate::DISABLED);
  RequestQueue<ManifestFetchData>::iterator i;
  for (i = manifests_queue_.begin(); i != manifests_queue_.end(); ++i) {
    if (fetch_data->full_url() == i->full_url()) {
      // This url is already scheduled to be fetched.
      i->Merge(*fetch_data);
  // Merge into the in-flight request when it targets the same URL.
  if (manifests_queue_.active_request() &&
      manifests_queue_.active_request()->full_url() == fetch_data->full_url()) {
    manifests_queue_.active_request()->Merge(*fetch_data);
  UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
      fetch_data->full_url().possibly_invalid_spec().length());
  manifests_queue_.ScheduleRequest(fetch_data.Pass());
// Callback invoked by |manifests_queue_| when a manifest request becomes
// active: builds and starts the URLFetcher for it.
void ExtensionDownloader::CreateManifestFetcher() {
  std::vector<std::string> id_vector(
      manifests_queue_.active_request()->extension_ids().begin(),
      manifests_queue_.active_request()->extension_ids().end());
  std::string id_list = JoinString(id_vector, ',');
  VLOG(2) << "Fetching " << manifests_queue_.active_request()->full_url()
          << " for " << id_list;
  manifest_fetcher_.reset(net::URLFetcher::Create(
      kManifestFetcherId, manifests_queue_.active_request()->full_url(),
      net::URLFetcher::GET, this));
  manifest_fetcher_->SetRequestContext(request_context_.get());
  // Manifest checks are made without cookies and bypass the HTTP cache.
  manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
                                  net::LOAD_DO_NOT_SAVE_COOKIES |
                                  net::LOAD_DISABLE_CACHE);
  // Update checks can be interrupted if a network change is detected; this is
  // common for the retail mode AppPack on ChromeOS. Retrying once should be
  // enough to recover in those cases; let the fetcher retry up to 3 times
  // just in case. http://crosbug.com/130602
  manifest_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
  manifest_fetcher_->Start();
// net::URLFetcherDelegate implementation: routes a completed fetch to the
// manifest or CRX handler depending on which fetcher finished.
void ExtensionDownloader::OnURLFetchComplete(
    const net::URLFetcher* source) {
  VLOG(2) << source->GetResponseCode() << " " << source->GetURL();
  if (source == manifest_fetcher_.get()) {
    source->GetResponseAsString(&data);
    OnManifestFetchComplete(source->GetURL(),
                            source->GetResponseCode(),
                            source->GetBackoffDelay(),
  } else if (source == extension_fetcher_.get()) {
    OnCRXFetchComplete(source,
                       source->GetResponseCode(),
                       source->GetBackoffDelay());
// Handles completion of a manifest fetch: on success hands the payload to the
// manifest parser; on failure either retries (with backoff) or reports
// MANIFEST_FETCH_FAILED for every extension in the request.
void ExtensionDownloader::OnManifestFetchComplete(
    const net::URLRequestStatus& status,
    const base::TimeDelta& backoff_delay,
    const std::string& data) {
  // We want to try parsing the manifest, and if it indicates updates are
  // available, we want to fire off requests to fetch those updates.
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || (url.SchemeIsFile() && data.length() > 0))) {
    RETRY_HISTOGRAM("ManifestFetchSuccess",
                    manifests_queue_.active_request_failure_count(), url);
    VLOG(2) << "beginning manifest parse for " << url;
    // The parser runs asynchronously and reports back through
    // HandleManifestResults; it takes ownership of the active request's
    // ManifestFetchData (released from the queue here).
    scoped_refptr<SafeManifestParser> safe_parser(
        new SafeManifestParser(
            manifests_queue_.reset_active_request().release(),
            base::Bind(&ExtensionDownloader::HandleManifestResults,
                       weak_ptr_factory_.GetWeakPtr())));
    safe_parser->Start();
    VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    // Retry server/network errors until the retry budget is exhausted;
    // otherwise fail every extension covered by this request.
    if (ShouldRetryRequest(status, response_code) &&
        manifests_queue_.active_request_failure_count() < kMaxRetries) {
      manifests_queue_.RetryRequest(backoff_delay);
      RETRY_HISTOGRAM("ManifestFetchFailure",
                      manifests_queue_.active_request_failure_count(), url);
      NotifyExtensionsDownloadFailed(
          manifests_queue_.active_request()->extension_ids(),
          manifests_queue_.active_request()->request_ids(),
          ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED);
  manifest_fetcher_.reset();
  manifests_queue_.reset_active_request();
  // If we have any pending manifest requests, fire off the next one.
  manifests_queue_.StartNextRequest();
// Callback from the manifest parser: starts CRX fetches for every update the
// manifest offers, records <daystart> ping results, and reports a failure
// status for extensions with no update.
void ExtensionDownloader::HandleManifestResults(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results* results) {
  // Keep a list of extensions that will not be updated, so that the |delegate_|
  // can be notified once we're done here.
  std::set<std::string> not_updated(fetch_data.extension_ids());
  // An unparsable manifest fails the entire batch as MANIFEST_INVALID.
    NotifyExtensionsDownloadFailed(
        fetch_data.request_ids(),
        ExtensionDownloaderDelegate::MANIFEST_INVALID);
  // Examine the parsed manifest and kick off fetches of any new crx files.
  std::vector<int> updates;
  DetermineUpdates(fetch_data, *results, &updates);
  for (size_t i = 0; i < updates.size(); i++) {
    const UpdateManifest::Result* update = &(results->list.at(updates[i]));
    const std::string& id = update->extension_id;
    not_updated.erase(id);
    GURL crx_url = update->crx_url;
    if (id != kBlacklistAppID) {
      NotifyUpdateFound(update->extension_id, update->version);
      // The URL of the blacklist file is returned by the server and we need to
      // be sure that we continue to be able to reliably detect whether a URL
      // references a blacklist file.
      DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url)) << crx_url;
    // Force https (crbug.com/129587).
    if (!crx_url.SchemeIsSecure()) {
      url::Replacements<char> replacements;
      std::string scheme("https");
      replacements.SetScheme(scheme.c_str(),
                             url::Component(0, scheme.size()));
      crx_url = crx_url.ReplaceComponents(replacements);
    scoped_ptr<ExtensionFetch> fetch(new ExtensionFetch(
        update->extension_id, crx_url, update->package_hash,
        update->version, fetch_data.request_ids()));
    FetchUpdatedExtension(fetch.Pass());
  // If the manifest response included a <daystart> element, we want to save
  // that value for any extensions which had sent a ping in the request.
  if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
      results->daystart_elapsed_seconds >= 0) {
    Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);
    const std::set<std::string>& extension_ids = fetch_data.extension_ids();
    std::set<std::string>::const_iterator i;
    for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
      const std::string& id = *i;
      ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
      result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
      result.day_start = day_start;
  // Everything left in |not_updated| is reported as NO_UPDATE_AVAILABLE.
  NotifyExtensionsDownloadFailed(
      fetch_data.request_ids(),
      ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
// Selects which entries of |possible_updates| should actually be fetched and
// appends their indices to |result|. An entry is skipped if we did not ask
// about it, it offers no newer version (unless the update was forced), or it
// requires a newer browser than the one running.
void ExtensionDownloader::DetermineUpdates(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results& possible_updates,
    std::vector<int>* result) {
  // This will only be valid if one of possible_updates specifies
  // browser_min_version.
  Version browser_version;
  for (size_t i = 0; i < possible_updates.list.size(); i++) {
    const UpdateManifest::Result* update = &possible_updates.list[i];
    const std::string& id = update->extension_id;
    // Ignore entries for extensions this request did not ask about.
    if (!fetch_data.Includes(id)) {
      VLOG(2) << "Ignoring " << id << " from this manifest";
    if (update->version.empty())
      VLOG(2) << "manifest indicates " << id << " has no update";
      VLOG(2) << "manifest indicates " << id
              << " latest version is '" << update->version << "'";
    if (!delegate_->IsExtensionPending(id)) {
      // If we're not installing pending extension, and the update
      // version is the same or older than what's already installed,
      // we should skip it.
      if (!delegate_->GetExtensionExistingVersion(id, &version)) {
        VLOG(2) << id << " is not installed";
      VLOG(2) << id << " is at '" << version << "'";
      // We should skip the version check if update was forced.
      if (!fetch_data.DidForceUpdate(id)) {
        Version existing_version(version);
        Version update_version(update->version);
        if (!update_version.IsValid() ||
            update_version.CompareTo(existing_version) <= 0) {
    // If the update specifies a browser minimum version, do we qualify?
    if (update->browser_min_version.length() > 0) {
      // First determine the browser version if we haven't already.
      if (!browser_version.IsValid()) {
        chrome::VersionInfo version_info;
        if (version_info.is_valid())
          browser_version = Version(version_info.Version());
      Version browser_min_version(update->browser_min_version);
      if (browser_version.IsValid() && browser_min_version.IsValid() &&
          browser_min_version.CompareTo(browser_version) > 0) {
        // TODO(asargent) - We may want this to show up in the extensions UI
        // eventually. (http://crbug.com/12547).
        LOG(WARNING) << "Updated version of extension " << id
                     << " available, but requires chrome version "
                     << update->browser_min_version;
    VLOG(2) << "will try to update " << id;
    result->push_back(i);
// Begins (or queues up) download of an updated extension. Duplicate requests
// for the same extension id or URL are merged; an exact-version cache hit is
// served straight from |extension_cache_| without a network fetch.
void ExtensionDownloader::FetchUpdatedExtension(
    scoped_ptr<ExtensionFetch> fetch_data) {
  if (!fetch_data->url.is_valid()) {
    // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
    LOG(ERROR) << "Invalid URL: '" << fetch_data->url.possibly_invalid_spec()
               << "' for extension " << fetch_data->id;
  // Merge with any already-queued fetch for the same extension or URL.
  for (RequestQueue<ExtensionFetch>::iterator iter =
           extensions_queue_.begin();
       iter != extensions_queue_.end(); ++iter) {
    if (iter->id == fetch_data->id || iter->url == fetch_data->url) {
      iter->request_ids.insert(fetch_data->request_ids.begin(),
                               fetch_data->request_ids.end());
      return;  // already scheduled
  // Likewise merge into the currently active fetch.
  if (extensions_queue_.active_request() &&
      extensions_queue_.active_request()->url == fetch_data->url) {
    extensions_queue_.active_request()->request_ids.insert(
        fetch_data->request_ids.begin(), fetch_data->request_ids.end());
  // Serve the exact requested version from the cache when available.
  if (extension_cache_ &&
      extension_cache_->GetExtension(fetch_data->id, NULL, &version) &&
      version == fetch_data->version) {
    base::FilePath crx_path;
    // Now get .crx file path and mark extension as used.
    extension_cache_->GetExtension(fetch_data->id, &crx_path, &version);
    NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, false);
    extensions_queue_.ScheduleRequest(fetch_data.Pass());
// Hands a finished .crx download to the delegate together with the recorded
// ping result, then drops the ping bookkeeping for that extension.
void ExtensionDownloader::NotifyDelegateDownloadFinished(
    scoped_ptr<ExtensionFetch> fetch_data,
    const base::FilePath& crx_path,
    bool file_ownership_passed) {
  delegate_->OnExtensionDownloadFinished(fetch_data->id, crx_path,
      file_ownership_passed, fetch_data->url, fetch_data->version,
      ping_results_[fetch_data->id], fetch_data->request_ids);
  ping_results_.erase(fetch_data->id);
// Callback invoked by |extensions_queue_| when a .crx request becomes active:
// builds and starts the URLFetcher, attaching credentials according to the
// fetch's current credential state.
void ExtensionDownloader::CreateExtensionFetcher() {
  const ExtensionFetch* fetch = extensions_queue_.active_request();
  extension_fetcher_.reset(net::URLFetcher::Create(
      kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
  extension_fetcher_->SetRequestContext(request_context_.get());
  extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
  int load_flags = net::LOAD_DISABLE_CACHE;
  bool is_secure = fetch->url.SchemeIsSecure();
  // Cookies are only sent/saved when cookie credentials are requested AND
  // the URL is https.
  if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_secure) {
    load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
                  net::LOAD_DO_NOT_SAVE_COOKIES;
  extension_fetcher_->SetLoadFlags(load_flags);
  // Download CRX files to a temp file. The blacklist is small and will be
  // processed in memory, so it is fetched into a string.
  if (fetch->id != kBlacklistAppID) {
    extension_fetcher_->SaveResponseToTemporaryFile(
        BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
  if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
    if (access_token_.empty()) {
      // We should try OAuth2, but we have no token cached. This
      // ExtensionFetcher will be started once the token fetch is complete,
      // in either OnTokenFetchSuccess or OnTokenFetchFailure.
      DCHECK(identity_provider_.get());
      OAuth2TokenService::ScopeSet webstore_scopes;
      webstore_scopes.insert(kWebstoreOAuth2Scope);
      access_token_request_ =
          identity_provider_->GetTokenService()->StartRequest(
              identity_provider_->GetActiveAccountId(),
      // A token is already cached: attach it as a Bearer header.
      extension_fetcher_->AddExtraRequestHeader(
          base::StringPrintf("%s: Bearer %s",
                             net::HttpRequestHeaders::kAuthorization,
                             access_token_.c_str()));
  VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
  extension_fetcher_->Start();
// Handles completion of a .crx fetch: on success hands the file to the cache
// or the delegate; on auth failure iterates the credential scheme and
// retries; otherwise retries with backoff or reports CRX_FETCH_FAILED.
void ExtensionDownloader::OnCRXFetchComplete(
    const net::URLFetcher* source,
    const net::URLRequestStatus& status,
    const base::TimeDelta& backoff_delay) {
  ExtensionFetch& active_request = *extensions_queue_.active_request();
  const std::string& id = active_request.id;
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || url.SchemeIsFile())) {
    RETRY_HISTOGRAM("CrxFetchSuccess",
                    extensions_queue_.active_request_failure_count(), url);
    base::FilePath crx_path;
    // Take ownership of the file at |crx_path|.
    CHECK(source->GetResponseAsFilePath(true, &crx_path));
    scoped_ptr<ExtensionFetch> fetch_data =
        extensions_queue_.reset_active_request();
    if (extension_cache_) {
      // Let the cache take the file; it notifies the delegate when done.
      const std::string& version = fetch_data->version;
      extension_cache_->PutExtension(id, crx_path, version,
          base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished,
                     weak_ptr_factory_.GetWeakPtr(),
                     base::Passed(&fetch_data)));
      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
  } else if (IterateFetchCredentialsAfterFailure(
    // A different credential set is available: retry the fetch with it.
    extensions_queue_.RetryRequest(backoff_delay);
    const std::set<int>& request_ids = active_request.request_ids;
    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
    VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        extensions_queue_.active_request_failure_count() < kMaxRetries) {
      extensions_queue_.RetryRequest(backoff_delay);
      RETRY_HISTOGRAM("CrxFetchFailure",
                      extensions_queue_.active_request_failure_count(), url);
      // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
      UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status.error());
      delegate_->OnExtensionDownloadFailed(
          id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
    ping_results_.erase(id);
    extensions_queue_.reset_active_request();
  extension_fetcher_.reset();
  // If there are any pending downloads left, start the next one.
  extensions_queue_.StartNextRequest();
// Reports |error| to the delegate for every extension in |extension_ids| and
// clears the corresponding ping bookkeeping.
void ExtensionDownloader::NotifyExtensionsDownloadFailed(
    const std::set<std::string>& extension_ids,
    const std::set<int>& request_ids,
    ExtensionDownloaderDelegate::Error error) {
  for (std::set<std::string>::const_iterator it = extension_ids.begin();
       it != extension_ids.end(); ++it) {
    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[*it];
    delegate_->OnExtensionDownloadFailed(*it, error, ping, request_ids);
    ping_results_.erase(*it);
// Broadcasts NOTIFICATION_EXTENSION_UPDATE_FOUND with the id and version of
// the extension an update was located for.
void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
                                            const std::string& version) {
  UpdateDetails updateInfo(id, Version(version));
  content::NotificationService::current()->Notify(
      extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
      content::NotificationService::AllBrowserContextsAndSources(),
      content::Details<UpdateDetails>(&updateInfo));
// After an authentication-related fetch failure, advances the fetch's
// credential state (none -> OAuth2 token -> cookies -> next authuser index).
// Returns true when another credential set is worth retrying with.
bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
    ExtensionFetch* fetch,
    const net::URLRequestStatus& status,
  // A cancellation, 401 or 403 counts as an authentication failure.
  bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
                      (status.status() == net::URLRequestStatus::SUCCESS &&
                       (response_code == net::HTTP_UNAUTHORIZED ||
                        response_code == net::HTTP_FORBIDDEN));
  // Here we decide what to do next if the server refused to authorize this
  // fetch.
  switch (fetch->credentials) {
    case ExtensionFetch::CREDENTIALS_NONE:
      // Prefer OAuth2 for Google-hosted URLs when an identity is available;
      // otherwise fall back on cookies.
      if (fetch->url.DomainIs(kGoogleDotCom) && identity_provider_) {
        fetch->credentials = ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN;
        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
    case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN:
      fetch->oauth2_attempt_count++;
      // OAuth2 may fail due to an expired access token, in which case we
      // should invalidate the token and try again.
      if (response_code == net::HTTP_UNAUTHORIZED &&
          fetch->oauth2_attempt_count <= kMaxOAuth2Attempts) {
        DCHECK(identity_provider_.get());
        OAuth2TokenService::ScopeSet webstore_scopes;
        webstore_scopes.insert(kWebstoreOAuth2Scope);
        identity_provider_->GetTokenService()->InvalidateToken(
            identity_provider_->GetActiveAccountId(),
        access_token_.clear();
      // Either there is no Gaia identity available, the active identity
      // doesn't have access to this resource, or the server keeps returning
      // 401s and we've retried too many times. Fall back on cookies.
      if (access_token_.empty() ||
          response_code == net::HTTP_FORBIDDEN ||
          fetch->oauth2_attempt_count > kMaxOAuth2Attempts) {
        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
      // Something else is wrong. Time to give up.
    case ExtensionFetch::CREDENTIALS_COOKIES:
      if (response_code == net::HTTP_FORBIDDEN) {
        // Try the next session identity, up to some maximum.
        return IncrementAuthUserIndex(&fetch->url);
// OAuth2TokenService::Consumer: an access token arrived. Cache it, attach it
// as a Bearer Authorization header, and start the fetch that was waiting.
void ExtensionDownloader::OnGetTokenSuccess(
    const OAuth2TokenService::Request* request,
    const std::string& access_token,
    const base::Time& expiration_time) {
  access_token_ = access_token;
  extension_fetcher_->AddExtraRequestHeader(
      base::StringPrintf("%s: Bearer %s",
                         net::HttpRequestHeaders::kAuthorization,
                         access_token_.c_str()));
  extension_fetcher_->Start();
// OAuth2TokenService::Consumer: the token request failed.
void ExtensionDownloader::OnGetTokenFailure(
    const OAuth2TokenService::Request* request,
    const GoogleServiceAuthError& error) {
  // If we fail to get an access token, kick the pending fetch and let it fall
  // back on other credentials (see IterateFetchCredentialsAfterFailure).
  extension_fetcher_->Start();
// Builds a new ManifestFetchData for |update_url|, choosing the ping mode
// from whether pings are enabled for that domain and whether extra update
// metrics are switched on. Caller takes ownership of the returned object.
ManifestFetchData* ExtensionDownloader::CreateManifestFetchData(
    const GURL& update_url,
  ManifestFetchData::PingMode ping_mode = ManifestFetchData::NO_PING;
  if (update_url.DomainIs(ping_enabled_domain_.c_str())) {
    if (enable_extra_update_metrics_) {
      ping_mode = ManifestFetchData::PING_WITH_METRICS;
      ping_mode = ManifestFetchData::PING;
  return new ManifestFetchData(
      update_url, request_id, brand_code_, manifest_query_params_, ping_mode);

}  // namespace extensions