1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "chrome/browser/extensions/updater/extension_downloader.h"
10 #include "base/command_line.h"
11 #include "base/files/file_path.h"
12 #include "base/location.h"
13 #include "base/logging.h"
14 #include "base/metrics/histogram.h"
15 #include "base/metrics/sparse_histogram.h"
16 #include "base/stl_util.h"
17 #include "base/strings/string_number_conversions.h"
18 #include "base/strings/string_util.h"
19 #include "base/strings/stringprintf.h"
20 #include "base/time/time.h"
21 #include "base/version.h"
22 #include "chrome/browser/chrome_notification_types.h"
23 #include "chrome/browser/extensions/updater/extension_cache.h"
24 #include "chrome/browser/extensions/updater/request_queue_impl.h"
25 #include "chrome/browser/extensions/updater/safe_manifest_parser.h"
26 #include "chrome/browser/metrics/chrome_metrics_service_accessor.h"
27 #include "chrome/common/chrome_switches.h"
28 #include "chrome/common/chrome_version_info.h"
29 #include "chrome/common/extensions/extension_constants.h"
30 #include "chrome/common/extensions/manifest_url_handler.h"
31 #include "content/public/browser/browser_thread.h"
32 #include "content/public/browser/notification_details.h"
33 #include "content/public/browser/notification_service.h"
34 #include "google_apis/gaia/identity_provider.h"
35 #include "net/base/backoff_entry.h"
36 #include "net/base/load_flags.h"
37 #include "net/base/net_errors.h"
38 #include "net/http/http_request_headers.h"
39 #include "net/http/http_status_code.h"
40 #include "net/url_request/url_fetcher.h"
41 #include "net/url_request/url_request_context_getter.h"
42 #include "net/url_request/url_request_status.h"
45 using base::TimeDelta;
46 using content::BrowserThread;
48 namespace extensions {
// Extension ID under which the blacklist is requested from the webstore
// update servers (see StartBlacklistUpdate() and CreateExtensionFetcher()).
50 const char ExtensionDownloader::kBlacklistAppID[] = "com.google.crx.blacklist";
// Retry/backoff policy shared by |manifests_queue_| and |extensions_queue_|
// (wired up in the ExtensionDownloader constructor).
// NOTE(review): the numeric field values of this aggregate are elided in this
// listing; confirm them against the full source before editing.
54 const net::BackoffEntry::Policy kDefaultBackoffPolicy = {
55 // Number of initial errors (in sequence) to ignore before applying
56 // exponential back-off rules.
59 // Initial delay for exponential back-off in ms.
62 // Factor by which the waiting time will be multiplied.
65 // Fuzzing percentage. ex: 10% will spread requests randomly
66 // between 90%-100% of the calculated time.
69 // Maximum amount of time we are willing to delay our request in ms.
72 // Time to keep an entry from being discarded even when it
73 // has no significant state, -1 to never discard.
76 // Don't use initial delay unless the last request was an error.
// Query key whose value identifies the multi-login session index; see
// IncrementAuthUserIndex().
80 const char kAuthUserQueryKey[] = "authuser";
// Maximum |authuser| index tried before IncrementAuthUserIndex() gives up.
82 const int kMaxAuthUserValue = 10;
// Maximum OAuth2-authenticated fetch attempts before falling back to cookies
// (see IterateFetchCredentialsAfterFailure()).
83 const int kMaxOAuth2Attempts = 3;
// Install-source values sent with update checks: the non-default value is used
// for the extra webstore ManifestFetchData added in AddExtensionData().
85 const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
86 const char kDefaultInstallSource[] = "";
// Domain used to bucket Google-hosted vs. other update URLs in UMA stats.
88 const char kGoogleDotCom[] = "google.com";
// Consumer ID passed to the OAuth2TokenService::Consumer base class.
89 const char kTokenServiceConsumerId[] = "extension_downloader";
// OAuth2 scope requested for authenticated webstore downloads.
90 const char kWebstoreOAuth2Scope[] =
91 "https://www.googleapis.com/auth/chromewebstore.readonly";
// Records |retry_count| into a "GoogleUrl" or "OtherUrl" histogram bucket
// depending on |url|'s domain. NOTE(review): interior continuation lines of
// this macro (histogram bounds and the else branch braces) are elided in this
// listing — do not add comment lines inside the macro body.
93 #define RETRY_HISTOGRAM(name, retry_count, url) \
94 if ((url).DomainIs(kGoogleDotCom)) { \
95 UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
101 UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl", \
// Returns true when a failed fetch is worth retrying: the server answered
// with a 5xx status, or the request failed at the network layer (as opposed
// to a file error). NOTE(review): the |response_code| parameter line is
// elided in this listing.
108 bool ShouldRetryRequest(const net::URLRequestStatus& status,
110 // Retry if the response code is a server error, or the request failed because
111 // of network errors as opposed to file errors.
112 return ((response_code >= 500 && status.is_success()) ||
113 status.status() == net::URLRequestStatus::FAILED);
116 // This parses and updates a URL query such that the value of the |authuser|
117 // query parameter is incremented by 1. If parameter was not present in the URL,
118 // it will be added with a value of 1. All other query keys and values are
119 // preserved as-is. Returns |false| if the user index exceeds a hard-coded
121 bool IncrementAuthUserIndex(GURL* url) {
// NOTE(review): the declaration/initialization of |user_index| is elided in
// this listing; it is written in the loop below and read against
// kMaxAuthUserValue.
123 std::string old_query = url->query();
124 std::vector<std::string> new_query_parts;
125 url::Component query(0, old_query.length());
126 url::Component key, value;
// Walk every key=value pair of the existing query: remember the current
// authuser index, and re-emit every other pair unchanged.
127 while (url::ExtractQueryKeyValue(old_query.c_str(), &query, &key, &value)) {
128 std::string key_string = old_query.substr(key.begin, key.len);
129 std::string value_string = old_query.substr(value.begin, value.len);
130 if (key_string == kAuthUserQueryKey) {
131 base::StringToInt(value_string, &user_index);
133 new_query_parts.push_back(base::StringPrintf(
134 "%s=%s", key_string.c_str(), value_string.c_str()));
// Give up once the hard-coded maximum index is reached.
137 if (user_index >= kMaxAuthUserValue)
// Append authuser with the incremented index and rebuild the query string.
139 new_query_parts.push_back(
140 base::StringPrintf("%s=%d", kAuthUserQueryKey, user_index + 1));
141 std::string new_query_string = JoinString(new_query_parts, '&');
142 url::Component new_query(0, new_query_string.size());
143 url::Replacements<char> replacements;
144 replacements.SetQuery(new_query_string.c_str(), new_query);
145 *url = url->ReplaceComponents(replacements);
// Payload broadcast with the EXTENSION_UPDATE_FOUND notification (see
// NotifyUpdateFound()); pairs an extension ID with its available version.
151 UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
152 : id(id), version(version) {}
154 UpdateDetails::~UpdateDetails() {}
// Default fetch: empty URL, no credentials attached yet.
156 ExtensionDownloader::ExtensionFetch::ExtensionFetch()
157 : url(), credentials(CREDENTIALS_NONE) {
// Fully-specified fetch. Credentials start at CREDENTIALS_NONE and are
// escalated by IterateFetchCredentialsAfterFailure() as needed.
// NOTE(review): several member initializers (id, url, version) are elided in
// this listing.
160 ExtensionDownloader::ExtensionFetch::ExtensionFetch(
161 const std::string& id,
163 const std::string& package_hash,
164 const std::string& version,
165 const std::set<int>& request_ids)
168 package_hash(package_hash),
170 request_ids(request_ids),
171 credentials(CREDENTIALS_NONE),
172 oauth2_attempt_count(0) {
175 ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
// Binds the downloader to its delegate and request context. Both request
// queues share kDefaultBackoffPolicy and call back into this object;
// base::Unretained is safe here because the queues are members and cannot
// outlive |this|. The extension cache is injected later via StartAllPending().
177 ExtensionDownloader::ExtensionDownloader(
178 ExtensionDownloaderDelegate* delegate,
179 net::URLRequestContextGetter* request_context)
180 : OAuth2TokenService::Consumer(kTokenServiceConsumerId),
182 request_context_(request_context),
183 weak_ptr_factory_(this),
184 manifests_queue_(&kDefaultBackoffPolicy,
185 base::Bind(&ExtensionDownloader::CreateManifestFetcher,
186 base::Unretained(this))),
187 extensions_queue_(&kDefaultBackoffPolicy,
188 base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
189 base::Unretained(this))),
190 extension_cache_(NULL) {
192 DCHECK(request_context_);
195 ExtensionDownloader::~ExtensionDownloader() {}
// Queues an update check for an installed |extension|. Returns the result of
// AddExtensionData(); user-script conversions with no update URL are skipped.
197 bool ExtensionDownloader::AddExtension(const Extension& extension,
199 // Skip extensions with empty update URLs converted from user
201 if (extension.converted_from_user_script() &&
202 ManifestURL::GetUpdateURL(&extension).is_empty()) {
206 // If the extension updates itself from the gallery, ignore any update URL
207 // data. At the moment there is no extra data that an extension can
208 // communicate to the the gallery update servers.
209 std::string update_url_data;
210 if (!ManifestURL::UpdatesFromGallery(&extension))
211 update_url_data = delegate_->GetUpdateUrlData(extension.id());
213 return AddExtensionData(extension.id(), *extension.version(),
215 ManifestURL::GetUpdateURL(&extension),
216 update_url_data, request_id);
// Queues an update check for a not-yet-installed extension. A zero version
// guarantees the server's version compares newer, forcing an install.
219 bool ExtensionDownloader::AddPendingExtension(const std::string& id,
220 const GURL& update_url,
222 // Use a zero version to ensure that a pending extension will always
223 // be updated, and thus installed (assuming all extensions have
224 // non-zero versions).
225 Version version("0.0.0.0");
226 DCHECK(version.IsValid());
228 return AddExtensionData(id,
230 Manifest::TYPE_UNKNOWN,
// Injects the (optional) extension cache and kicks off all prepared fetches
// once the cache reports ready; DoStartAllPending() does the actual work.
236 void ExtensionDownloader::StartAllPending(ExtensionCache* cache) {
238 extension_cache_ = cache;
239 extension_cache_->Start(base::Bind(
240 &ExtensionDownloader::DoStartAllPending,
241 weak_ptr_factory_.GetWeakPtr()));
// Drains |fetches_preparing_|: every prepared ManifestFetchData is handed to
// StartUpdateCheck(), and per-run URL stats are reset first.
247 void ExtensionDownloader::DoStartAllPending() {
249 url_stats_ = URLStats();
251 for (FetchMap::iterator it = fetches_preparing_.begin();
252 it != fetches_preparing_.end(); ++it) {
253 std::vector<linked_ptr<ManifestFetchData> >& list = it->second;
254 for (size_t i = 0; i < list.size(); ++i) {
// Ownership moves out of the linked_ptr into the scoped_ptr parameter.
255 StartUpdateCheck(scoped_ptr<ManifestFetchData>(list[i].release()));
258 fetches_preparing_.clear();
// Schedules a fetch of the extension blacklist under kBlacklistAppID, always
// against the https webstore update URL.
261 void ExtensionDownloader::StartBlacklistUpdate(
262 const std::string& version,
263 const ManifestFetchData::PingData& ping_data,
265 // Note: it is very important that we use the https version of the update
266 // url here to avoid DNS hijacking of the blacklist, which is not validated
267 // by a public key signature like .crx files are.
268 scoped_ptr<ManifestFetchData> blacklist_fetch(
269 new ManifestFetchData(extension_urls::GetWebstoreUpdateUrl(),
271 DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
272 blacklist_fetch->AddExtension(kBlacklistAppID,
276 kDefaultInstallSource);
277 StartUpdateCheck(blacklist_fetch.Pass());
// Takes ownership of the identity provider used for OAuth2-authenticated
// webstore downloads (see CreateExtensionFetcher()).
280 void ExtensionDownloader::SetWebstoreIdentityProvider(
281 scoped_ptr<IdentityProvider> identity_provider) {
282 identity_provider_.swap(identity_provider);
// Validates the update URL, updates per-type URL stats, and files the
// extension into a ManifestFetchData in |fetches_preparing_| (batching it with
// existing fetches for the same request_id/URL pair where possible). When UMA
// is enabled, non-webstore extensions are additionally reported to the
// webstore update URL with the "notfromwebstore" install source.
285 bool ExtensionDownloader::AddExtensionData(const std::string& id,
286 const Version& version,
287 Manifest::Type extension_type,
288 const GURL& extension_update_url,
289 const std::string& update_url_data,
291 GURL update_url(extension_update_url);
292 // Skip extensions with non-empty invalid update URLs.
293 if (!update_url.is_empty() && !update_url.is_valid()) {
294 LOG(WARNING) << "Extension " << id << " has invalid update url "
299 // Make sure we use SSL for store-hosted extensions.
300 if (extension_urls::IsWebstoreUpdateUrl(update_url) &&
301 !update_url.SchemeIsSecure())
302 update_url = extension_urls::GetWebstoreUpdateUrl()
304 // Skip extensions with empty IDs.
306 LOG(WARNING) << "Found extension with empty ID";
// Bucket the update URL for the ReportStats() histograms.
310 if (update_url.DomainIs(kGoogleDotCom)) {
311 url_stats_.google_url_count++;
312 } else if (update_url.is_empty()) {
313 url_stats_.no_url_count++;
314 // Fill in default update URL.
315 update_url = extension_urls::GetWebstoreUpdateUrl();
317 url_stats_.other_url_count++;
// Per-type counters; NOTE(review): break statements between cases are elided
// in this listing.
320 switch (extension_type) {
321 case Manifest::TYPE_THEME:
322 ++url_stats_.theme_count;
324 case Manifest::TYPE_EXTENSION:
325 case Manifest::TYPE_USER_SCRIPT:
326 ++url_stats_.extension_count;
328 case Manifest::TYPE_HOSTED_APP:
329 case Manifest::TYPE_LEGACY_PACKAGED_APP:
330 ++url_stats_.app_count;
332 case Manifest::TYPE_PLATFORM_APP:
333 ++url_stats_.platform_app_count;
335 case Manifest::TYPE_UNKNOWN:
337 ++url_stats_.pending_count;
341 std::vector<GURL> update_urls;
342 update_urls.push_back(update_url);
343 // If UMA is enabled, also add to ManifestFetchData for the
344 // webstore update URL.
345 if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
346 ChromeMetricsServiceAccessor::IsMetricsReportingEnabled()) {
347 update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
// i == 0 is the extension's own update URL; i == 1 (if present) is the extra
// webstore report, tagged with the non-default install source.
350 for (size_t i = 0; i < update_urls.size(); ++i) {
351 DCHECK(!update_urls[i].is_empty());
352 DCHECK(update_urls[i].is_valid());
354 std::string install_source = i == 0 ?
355 kDefaultInstallSource : kNotFromWebstoreInstallSource;
357 ManifestFetchData::PingData ping_data;
358 ManifestFetchData::PingData* optional_ping_data = NULL;
359 if (delegate_->GetPingDataForExtension(id, &ping_data))
360 optional_ping_data = &ping_data;
362 // Find or create a ManifestFetchData to add this extension to.
364 FetchMap::iterator existing_iter = fetches_preparing_.find(
365 std::make_pair(request_id, update_urls[i]));
366 if (existing_iter != fetches_preparing_.end() &&
367 !existing_iter->second.empty()) {
368 // Try to add to the ManifestFetchData at the end of the list.
369 ManifestFetchData* existing_fetch = existing_iter->second.back().get();
370 if (existing_fetch->AddExtension(id, version.GetString(),
371 optional_ping_data, update_url_data,
377 // Otherwise add a new element to the list, if the list doesn't exist or
378 // if its last element is already full.
379 linked_ptr<ManifestFetchData> fetch(
380 new ManifestFetchData(update_urls[i], request_id));
381 fetches_preparing_[std::make_pair(request_id, update_urls[i])].
383 added = fetch->AddExtension(id, version.GetString(),
// Emits the per-run URL/type counters accumulated in |url_stats_| by
// AddExtensionData() as UMA histograms.
394 void ExtensionDownloader::ReportStats() const {
395 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
396 url_stats_.extension_count);
397 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
398 url_stats_.theme_count);
399 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
400 url_stats_.app_count);
401 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
402 url_stats_.platform_app_count);
403 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
404 url_stats_.pending_count);
405 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
406 url_stats_.google_url_count);
407 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
408 url_stats_.other_url_count);
409 UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
410 url_stats_.no_url_count);
// Schedules a manifest fetch. Fails all extensions immediately when background
// networking is disabled; merges with an already-queued or active request for
// the same full URL instead of scheduling a duplicate.
413 void ExtensionDownloader::StartUpdateCheck(
414 scoped_ptr<ManifestFetchData> fetch_data) {
415 const std::set<std::string>& id_set(fetch_data->extension_ids());
417 if (CommandLine::ForCurrentProcess()->HasSwitch(
418 switches::kDisableBackgroundNetworking)) {
419 NotifyExtensionsDownloadFailed(id_set,
420 fetch_data->request_ids(),
421 ExtensionDownloaderDelegate::DISABLED);
// Deduplicate against requests already waiting in the queue.
425 RequestQueue<ManifestFetchData>::iterator i;
426 for (i = manifests_queue_.begin(); i != manifests_queue_.end(); ++i) {
427 if (fetch_data->full_url() == i->full_url()) {
428 // This url is already scheduled to be fetched.
429 i->Merge(*fetch_data);
// Deduplicate against the in-flight request, if any.
434 if (manifests_queue_.active_request() &&
435 manifests_queue_.active_request()->full_url() == fetch_data->full_url()) {
436 manifests_queue_.active_request()->Merge(*fetch_data);
438 UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
439 fetch_data->full_url().possibly_invalid_spec().length());
441 manifests_queue_.ScheduleRequest(fetch_data.Pass());
// RequestQueue callback: builds and starts the URLFetcher for the active
// manifest request. Cookies and cache are disabled for update checks.
445 void ExtensionDownloader::CreateManifestFetcher() {
447 std::vector<std::string> id_vector(
448 manifests_queue_.active_request()->extension_ids().begin(),
449 manifests_queue_.active_request()->extension_ids().end());
450 std::string id_list = JoinString(id_vector, ',');
451 VLOG(2) << "Fetching " << manifests_queue_.active_request()->full_url()
452 << " for " << id_list;
455 manifest_fetcher_.reset(net::URLFetcher::Create(
456 kManifestFetcherId, manifests_queue_.active_request()->full_url(),
457 net::URLFetcher::GET, this));
458 manifest_fetcher_->SetRequestContext(request_context_);
459 manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
460 net::LOAD_DO_NOT_SAVE_COOKIES |
461 net::LOAD_DISABLE_CACHE);
462 // Update checks can be interrupted if a network change is detected; this is
463 // common for the retail mode AppPack on ChromeOS. Retrying once should be
464 // enough to recover in those cases; let the fetcher retry up to 3 times
465 // just in case. http://crosbug.com/130602
466 manifest_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
467 manifest_fetcher_->Start();
// net::URLFetcherDelegate override: dispatches completion to the manifest or
// CRX handler depending on which fetcher finished.
470 void ExtensionDownloader::OnURLFetchComplete(
471 const net::URLFetcher* source) {
472 VLOG(2) << source->GetResponseCode() << " " << source->GetURL();
474 if (source == manifest_fetcher_.get()) {
476 source->GetResponseAsString(&data);
477 OnManifestFetchComplete(source->GetURL(),
479 source->GetResponseCode(),
480 source->GetBackoffDelay(),
482 } else if (source == extension_fetcher_.get()) {
483 OnCRXFetchComplete(source,
486 source->GetResponseCode(),
487 source->GetBackoffDelay());
// Handles completion of a manifest fetch: parses the manifest out-of-process
// on success, retries (up to kMaxRetries) on retryable failures, or reports
// MANIFEST_FETCH_FAILED. Always advances the manifest queue at the end.
493 void ExtensionDownloader::OnManifestFetchComplete(
495 const net::URLRequestStatus& status,
497 const base::TimeDelta& backoff_delay,
498 const std::string& data) {
499 // We want to try parsing the manifest, and if it indicates updates are
500 // available, we want to fire off requests to fetch those updates.
501 if (status.status() == net::URLRequestStatus::SUCCESS &&
502 (response_code == 200 || (url.SchemeIsFile() && data.length() > 0))) {
503 RETRY_HISTOGRAM("ManifestFetchSuccess",
504 manifests_queue_.active_request_failure_count(), url);
505 VLOG(2) << "beginning manifest parse for " << url;
// The parser takes ownership of the active request and calls back into
// HandleManifestResults() when done.
506 scoped_refptr<SafeManifestParser> safe_parser(
507 new SafeManifestParser(
509 manifests_queue_.reset_active_request().release(),
510 base::Bind(&ExtensionDownloader::HandleManifestResults,
511 weak_ptr_factory_.GetWeakPtr())));
512 safe_parser->Start();
514 VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
515 << "' response code:" << response_code;
516 if (ShouldRetryRequest(status, response_code) &&
517 manifests_queue_.active_request_failure_count() < kMaxRetries) {
518 manifests_queue_.RetryRequest(backoff_delay);
520 RETRY_HISTOGRAM("ManifestFetchFailure",
521 manifests_queue_.active_request_failure_count(), url);
522 NotifyExtensionsDownloadFailed(
523 manifests_queue_.active_request()->extension_ids(),
524 manifests_queue_.active_request()->request_ids(),
525 ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED);
528 manifest_fetcher_.reset();
529 manifests_queue_.reset_active_request();
531 // If we have any pending manifest requests, fire off the next one.
532 manifests_queue_.StartNextRequest();
// SafeManifestParser callback: turns parsed manifest |results| into CRX
// fetches for every extension that has an update, records ping results for
// daystart bookkeeping, and reports NO_UPDATE_AVAILABLE for the rest.
535 void ExtensionDownloader::HandleManifestResults(
536 const ManifestFetchData& fetch_data,
537 const UpdateManifest::Results* results) {
538 // Keep a list of extensions that will not be updated, so that the |delegate_|
539 // can be notified once we're done here.
540 std::set<std::string> not_updated(fetch_data.extension_ids());
// NOTE(review): the null-|results| check guarding this failure path is elided
// in this listing.
543 NotifyExtensionsDownloadFailed(
545 fetch_data.request_ids(),
546 ExtensionDownloaderDelegate::MANIFEST_INVALID);
550 // Examine the parsed manifest and kick off fetches of any new crx files.
551 std::vector<int> updates;
552 DetermineUpdates(fetch_data, *results, &updates);
553 for (size_t i = 0; i < updates.size(); i++) {
554 const UpdateManifest::Result* update = &(results->list.at(updates[i]));
555 const std::string& id = update->extension_id;
556 not_updated.erase(id);
558 GURL crx_url = update->crx_url;
559 if (id != kBlacklistAppID) {
560 NotifyUpdateFound(update->extension_id, update->version);
562 // The URL of the blacklist file is returned by the server and we need to
563 // be sure that we continue to be able to reliably detect whether a URL
564 // references a blacklist file.
565 DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url)) << crx_url;
567 // Force https (crbug.com/129587).
568 if (!crx_url.SchemeIsSecure()) {
569 url::Replacements<char> replacements;
570 std::string scheme("https");
571 replacements.SetScheme(scheme.c_str(),
572 url::Component(0, scheme.size()));
573 crx_url = crx_url.ReplaceComponents(replacements);
576 scoped_ptr<ExtensionFetch> fetch(new ExtensionFetch(
577 update->extension_id, crx_url, update->package_hash,
578 update->version, fetch_data.request_ids()));
579 FetchUpdatedExtension(fetch.Pass());
582 // If the manifest response included a <daystart> element, we want to save
583 // that value for any extensions which had sent a ping in the request.
584 if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
585 results->daystart_elapsed_seconds >= 0) {
587 Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);
589 const std::set<std::string>& extension_ids = fetch_data.extension_ids();
590 std::set<std::string>::const_iterator i;
591 for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
592 const std::string& id = *i;
593 ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
594 result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
595 result.day_start = day_start;
// Everything left in |not_updated| had no update available.
599 NotifyExtensionsDownloadFailed(
601 fetch_data.request_ids(),
602 ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
// Filters |possible_updates| down to the indices worth fetching: the entry
// must belong to this fetch, be newer than the installed version (unless the
// extension is pending), and satisfy any browser_min_version requirement.
605 void ExtensionDownloader::DetermineUpdates(
606 const ManifestFetchData& fetch_data,
607 const UpdateManifest::Results& possible_updates,
608 std::vector<int>* result) {
609 // This will only be valid if one of possible_updates specifies
610 // browser_min_version.
611 Version browser_version;
613 for (size_t i = 0; i < possible_updates.list.size(); i++) {
614 const UpdateManifest::Result* update = &possible_updates.list[i];
615 const std::string& id = update->extension_id;
617 if (!fetch_data.Includes(id)) {
618 VLOG(2) << "Ignoring " << id << " from this manifest";
623 if (update->version.empty())
624 VLOG(2) << "manifest indicates " << id << " has no update";
626 VLOG(2) << "manifest indicates " << id
627 << " latest version is '" << update->version << "'";
630 if (!delegate_->IsExtensionPending(id)) {
631 // If we're not installing pending extension, and the update
632 // version is the same or older than what's already installed,
635 if (!delegate_->GetExtensionExistingVersion(id, &version)) {
636 VLOG(2) << id << " is not installed";
640 VLOG(2) << id << " is at '" << version << "'";
642 Version existing_version(version);
643 Version update_version(update->version);
// Skip updates that are invalid or not strictly newer than what we have.
645 if (!update_version.IsValid() ||
646 update_version.CompareTo(existing_version) <= 0) {
651 // If the update specifies a browser minimum version, do we qualify?
652 if (update->browser_min_version.length() > 0) {
653 // First determine the browser version if we haven't already.
654 if (!browser_version.IsValid()) {
655 chrome::VersionInfo version_info;
656 if (version_info.is_valid())
657 browser_version = Version(version_info.Version());
659 Version browser_min_version(update->browser_min_version);
660 if (browser_version.IsValid() && browser_min_version.IsValid() &&
661 browser_min_version.CompareTo(browser_version) > 0) {
662 // TODO(asargent) - We may want this to show up in the extensions UI
663 // eventually. (http://crbug.com/12547).
664 LOG(WARNING) << "Updated version of extension " << id
665 << " available, but requires chrome version "
666 << update->browser_min_version;
670 VLOG(2) << "will try to update " << id;
671 result->push_back(i);
675 // Begins (or queues up) download of an updated extension.
676 void ExtensionDownloader::FetchUpdatedExtension(
677 scoped_ptr<ExtensionFetch> fetch_data) {
678 if (!fetch_data->url.is_valid()) {
679 // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
680 LOG(ERROR) << "Invalid URL: '" << fetch_data->url.possibly_invalid_spec()
681 << "' for extension " << fetch_data->id;
// Merge request IDs into an already-queued fetch for the same id or URL
// instead of downloading twice.
685 for (RequestQueue<ExtensionFetch>::iterator iter =
686 extensions_queue_.begin();
687 iter != extensions_queue_.end(); ++iter) {
688 if (iter->id == fetch_data->id || iter->url == fetch_data->url) {
689 iter->request_ids.insert(fetch_data->request_ids.begin(),
690 fetch_data->request_ids.end());
691 return; // already scheduled
// Same merge for the in-flight fetch.
695 if (extensions_queue_.active_request() &&
696 extensions_queue_.active_request()->url == fetch_data->url) {
697 extensions_queue_.active_request()->request_ids.insert(
698 fetch_data->request_ids.begin(), fetch_data->request_ids.end());
// Serve from the extension cache when it already holds this exact version.
701 if (extension_cache_ &&
702 extension_cache_->GetExtension(fetch_data->id, NULL, &version) &&
703 version == fetch_data->version) {
704 base::FilePath crx_path;
705 // Now get .crx file path and mark extension as used.
706 extension_cache_->GetExtension(fetch_data->id, &crx_path, &version);
707 NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, false);
709 extensions_queue_.ScheduleRequest(fetch_data.Pass());
// Forwards a finished download (from the network or the cache) to the
// delegate along with its recorded ping result, then drops that ping entry.
714 void ExtensionDownloader::NotifyDelegateDownloadFinished(
715 scoped_ptr<ExtensionFetch> fetch_data,
716 const base::FilePath& crx_path,
717 bool file_ownership_passed) {
718 delegate_->OnExtensionDownloadFinished(fetch_data->id, crx_path,
719 file_ownership_passed, fetch_data->url, fetch_data->version,
720 ping_results_[fetch_data->id], fetch_data->request_ids);
721 ping_results_.erase(fetch_data->id);
// RequestQueue callback: builds and starts the URLFetcher for the active CRX
// request, applying the fetch's current credential mode (none / cookies /
// OAuth2). When an OAuth2 token is needed but not cached, the fetch is
// deferred until the token request completes (OnGetTokenSuccess/Failure).
724 void ExtensionDownloader::CreateExtensionFetcher() {
725 const ExtensionFetch* fetch = extensions_queue_.active_request();
726 extension_fetcher_.reset(net::URLFetcher::Create(
727 kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
728 extension_fetcher_->SetRequestContext(request_context_);
729 extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
// Cookies are only sent for CREDENTIALS_COOKIES fetches over a secure scheme.
731 int load_flags = net::LOAD_DISABLE_CACHE;
732 bool is_secure = fetch->url.SchemeIsSecure();
733 if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_secure) {
734 load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
735 net::LOAD_DO_NOT_SAVE_COOKIES;
737 extension_fetcher_->SetLoadFlags(load_flags);
739 // Download CRX files to a temp file. The blacklist is small and will be
740 // processed in memory, so it is fetched into a string.
741 if (fetch->id != kBlacklistAppID) {
742 extension_fetcher_->SaveResponseToTemporaryFile(
743 BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
746 if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
748 if (access_token_.empty()) {
749 // We should try OAuth2, but we have no token cached. This
750 // ExtensionFetcher will be started once the token fetch is complete,
751 // in either OnTokenFetchSuccess or OnTokenFetchFailure.
752 DCHECK(identity_provider_.get());
753 OAuth2TokenService::ScopeSet webstore_scopes;
754 webstore_scopes.insert(kWebstoreOAuth2Scope);
755 access_token_request_ =
756 identity_provider_->GetTokenService()->StartRequest(
757 identity_provider_->GetActiveAccountId(),
// Token already cached: attach it and fall through to start immediately.
762 extension_fetcher_->AddExtraRequestHeader(
763 base::StringPrintf("%s: Bearer %s",
764 net::HttpRequestHeaders::kAuthorization,
765 access_token_.c_str()));
768 VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
769 extension_fetcher_->Start();
// Handles completion of a CRX download: hands the file to the cache/delegate
// on success; on failure, first tries a credential escalation retry
// (IterateFetchCredentialsAfterFailure), then a plain backoff retry, and
// finally reports CRX_FETCH_FAILED. Always advances the extensions queue.
772 void ExtensionDownloader::OnCRXFetchComplete(
773 const net::URLFetcher* source,
775 const net::URLRequestStatus& status,
777 const base::TimeDelta& backoff_delay) {
778 ExtensionFetch& active_request = *extensions_queue_.active_request();
779 const std::string& id = active_request.id;
780 if (status.status() == net::URLRequestStatus::SUCCESS &&
781 (response_code == 200 || url.SchemeIsFile())) {
782 RETRY_HISTOGRAM("CrxFetchSuccess",
783 extensions_queue_.active_request_failure_count(), url);
784 base::FilePath crx_path;
785 // Take ownership of the file at |crx_path|.
786 CHECK(source->GetResponseAsFilePath(true, &crx_path));
787 scoped_ptr<ExtensionFetch> fetch_data =
788 extensions_queue_.reset_active_request();
789 if (extension_cache_) {
// The cache notifies the delegate once the file has been stored.
790 const std::string& version = fetch_data->version;
791 extension_cache_->PutExtension(id, crx_path, version,
792 base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished,
793 weak_ptr_factory_.GetWeakPtr(),
794 base::Passed(&fetch_data)));
796 NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
798 } else if (IterateFetchCredentialsAfterFailure(
// Retrying with different credentials; the queue re-issues the request.
802 extensions_queue_.RetryRequest(backoff_delay);
804 const std::set<int>& request_ids = active_request.request_ids;
805 const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
806 VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
807 << "' response code:" << response_code;
808 if (ShouldRetryRequest(status, response_code) &&
809 extensions_queue_.active_request_failure_count() < kMaxRetries) {
810 extensions_queue_.RetryRequest(backoff_delay);
812 RETRY_HISTOGRAM("CrxFetchFailure",
813 extensions_queue_.active_request_failure_count(), url);
814 // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
815 UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status.error());
816 delegate_->OnExtensionDownloadFailed(
817 id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
819 ping_results_.erase(id);
820 extensions_queue_.reset_active_request();
823 extension_fetcher_.reset();
825 // If there are any pending downloads left, start the next one.
826 extensions_queue_.StartNextRequest();
// Reports |error| to the delegate for every ID in |extension_ids|, passing
// along each extension's recorded ping result and erasing it afterwards.
829 void ExtensionDownloader::NotifyExtensionsDownloadFailed(
830 const std::set<std::string>& extension_ids,
831 const std::set<int>& request_ids,
832 ExtensionDownloaderDelegate::Error error) {
833 for (std::set<std::string>::const_iterator it = extension_ids.begin();
834 it != extension_ids.end(); ++it) {
835 const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[*it];
836 delegate_->OnExtensionDownloadFailed(*it, error, ping, request_ids);
837 ping_results_.erase(*it);
// Broadcasts NOTIFICATION_EXTENSION_UPDATE_FOUND with an UpdateDetails
// payload for the given extension ID and available version.
841 void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
842 const std::string& version) {
843 UpdateDetails updateInfo(id, Version(version));
844 content::NotificationService::current()->Notify(
845 extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
846 content::NotificationService::AllBrowserContextsAndSources(),
847 content::Details<UpdateDetails>(&updateInfo));
// After an auth-related failure (401/403, or a canceled request), advances
// |fetch| to the next credential mode: none -> OAuth2 (Google-hosted URLs
// with an identity provider) or cookies; OAuth2 -> token invalidation+retry,
// then cookies; cookies -> next |authuser| session index. Returns true when a
// retry with new credentials should be attempted.
850 bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
851 ExtensionFetch* fetch,
852 const net::URLRequestStatus& status,
854 bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
855 (status.status() == net::URLRequestStatus::SUCCESS &&
856 (response_code == net::HTTP_UNAUTHORIZED ||
857 response_code == net::HTTP_FORBIDDEN));
861 // Here we decide what to do next if the server refused to authorize this
863 switch (fetch->credentials) {
864 case ExtensionFetch::CREDENTIALS_NONE:
865 if (fetch->url.DomainIs(kGoogleDotCom) && identity_provider_) {
866 fetch->credentials = ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN;
868 fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
871 case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN:
872 fetch->oauth2_attempt_count++;
873 // OAuth2 may fail due to an expired access token, in which case we
874 // should invalidate the token and try again.
875 if (response_code == net::HTTP_UNAUTHORIZED &&
876 fetch->oauth2_attempt_count <= kMaxOAuth2Attempts) {
877 DCHECK(identity_provider_.get());
878 OAuth2TokenService::ScopeSet webstore_scopes;
879 webstore_scopes.insert(kWebstoreOAuth2Scope);
880 identity_provider_->GetTokenService()->InvalidateToken(
881 identity_provider_->GetActiveAccountId(),
// Drop the cached token so the next attempt requests a fresh one.
884 access_token_.clear();
887 // Either there is no Gaia identity available, the active identity
888 // doesn't have access to this resource, or the server keeps returning
889 // 401s and we've retried too many times. Fall back on cookies.
890 if (access_token_.empty() ||
891 response_code == net::HTTP_FORBIDDEN ||
892 fetch->oauth2_attempt_count > kMaxOAuth2Attempts) {
893 fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
896 // Something else is wrong. Time to give up.
898 case ExtensionFetch::CREDENTIALS_COOKIES:
899 if (response_code == net::HTTP_FORBIDDEN) {
900 // Try the next session identity, up to some maximum.
901 return IncrementAuthUserIndex(&fetch->url);
// OAuth2TokenService::Consumer override: caches the token, attaches it as a
// Bearer Authorization header, and starts the deferred extension fetch.
911 void ExtensionDownloader::OnGetTokenSuccess(
912 const OAuth2TokenService::Request* request,
913 const std::string& access_token,
914 const base::Time& expiration_time) {
915 access_token_ = access_token;
916 extension_fetcher_->AddExtraRequestHeader(
917 base::StringPrintf("%s: Bearer %s",
918 net::HttpRequestHeaders::kAuthorization,
919 access_token_.c_str()));
920 extension_fetcher_->Start();
// OAuth2TokenService::Consumer override: the token request failed, so start
// the deferred fetch without an Authorization header and let the normal
// failure handling (IterateFetchCredentialsAfterFailure) take over.
923 void ExtensionDownloader::OnGetTokenFailure(
924 const OAuth2TokenService::Request* request,
925 const GoogleServiceAuthError& error) {
926 // If we fail to get an access token, kick the pending fetch and let it fall
928 extension_fetcher_->Start();
931 } // namespace extensions