#include "base/files/file_path.h"
#include "base/location.h"
#include "base/logging.h"
-#include "base/memory/scoped_handle.h"
#include "base/metrics/histogram.h"
#include "base/metrics/sparse_histogram.h"
-#include "base/platform_file.h"
#include "base/stl_util.h"
+#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
+#include "base/strings/stringprintf.h"
#include "base/time/time.h"
#include "base/version.h"
#include "chrome/browser/chrome_notification_types.h"
+#include "chrome/browser/extensions/updater/extension_cache.h"
#include "chrome/browser/extensions/updater/request_queue_impl.h"
-#include "chrome/browser/extensions/updater/safe_manifest_parser.h"
-#include "chrome/browser/metrics/metrics_service.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/chrome_version_info.h"
-#include "chrome/common/extensions/extension_constants.h"
#include "chrome/common/extensions/manifest_url_handler.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_details.h"
#include "content/public/browser/notification_service.h"
+#include "extensions/browser/updater/safe_manifest_parser.h"
+#include "extensions/common/extension_urls.h"
+#include "google_apis/gaia/identity_provider.h"
#include "net/base/backoff_entry.h"
#include "net/base/load_flags.h"
#include "net/base/net_errors.h"
+#include "net/http/http_request_headers.h"
+#include "net/http/http_status_code.h"
#include "net/url_request/url_fetcher.h"
+#include "net/url_request/url_request_context_getter.h"
#include "net/url_request/url_request_status.h"
using base::Time;
false,
};
+const char kAuthUserQueryKey[] = "authuser";
+
+const int kMaxAuthUserValue = 10;
+const int kMaxOAuth2Attempts = 3;
+
const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
const char kDefaultInstallSource[] = "";
-#define RETRY_HISTOGRAM(name, retry_count, url) \
-  if ((url).DomainIs("google.com")) \
-    UMA_HISTOGRAM_CUSTOM_COUNTS( \
-        "Extensions." name "RetryCountGoogleUrl", retry_count, 1, \
-        kMaxRetries, kMaxRetries+1); \
-  else \
-    UMA_HISTOGRAM_CUSTOM_COUNTS( \
-        "Extensions." name "RetryCountOtherUrl", retry_count, 1, \
-        kMaxRetries, kMaxRetries+1)
+const char kGoogleDotCom[] = "google.com";
+const char kTokenServiceConsumerId[] = "extension_downloader";
+// OAuth2 scope granting read-only access to the Chrome Web Store.
+const char kWebstoreOAuth2Scope[] =
+    "https://www.googleapis.com/auth/chromewebstore.readonly";
+
+// Records |retry_count| into one of two UMA histograms, chosen by whether
+// |url| points at a google.com host. |name| is pasted into the histogram name
+// at preprocessing time, so it must be a string literal.
+#define RETRY_HISTOGRAM(name, retry_count, url)                           \
+  if ((url).DomainIs(kGoogleDotCom)) {                                    \
+    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
+                                retry_count,                              \
+                                1,                                        \
+                                kMaxRetries,                              \
+                                kMaxRetries + 1);                         \
+  } else {                                                                \
+    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl",  \
+                                retry_count,                              \
+                                1,                                        \
+                                kMaxRetries,                              \
+                                kMaxRetries + 1);                         \
+  }
bool ShouldRetryRequest(const net::URLRequestStatus& status,
                        int response_code) {
  // Retry if the response code is a server error, or the request failed because
  // of network errors as opposed to file errors. Note that a 5xx code together
  // with status.is_success() means the HTTP transaction itself completed.
-  return (response_code >= 500 && status.is_success()) ||
-      status.status() == net::URLRequestStatus::FAILED;
+  return ((response_code >= 500 && status.is_success()) ||
+          status.status() == net::URLRequestStatus::FAILED);
+}
+
+// This parses and updates a URL query such that the value of the |authuser|
+// query parameter is incremented by 1. If parameter was not present in the URL,
+// it will be added with a value of 1. All other query keys and values are
+// preserved as-is. Returns |false| (leaving |url| unmodified) once the current
+// user index has already reached the hard-coded maximum, kMaxAuthUserValue.
+bool IncrementAuthUserIndex(GURL* url) {
+  int user_index = 0;
+  std::string old_query = url->query();
+  std::vector<std::string> new_query_parts;
+  url::Component query(0, old_query.length());
+  url::Component key, value;
+  while (url::ExtractQueryKeyValue(old_query.c_str(), &query, &key, &value)) {
+    std::string key_string = old_query.substr(key.begin, key.len);
+    std::string value_string = old_query.substr(value.begin, value.len);
+    if (key_string == kAuthUserQueryKey) {
+      // Every |authuser| occurrence is dropped here and a single one is
+      // re-appended below; if it appears several times, the last value wins.
+      base::StringToInt(value_string, &user_index);
+    } else {
+      new_query_parts.push_back(base::StringPrintf(
+          "%s=%s", key_string.c_str(), value_string.c_str()));
+    }
+  }
+  if (user_index >= kMaxAuthUserValue)
+    return false;
+  // Re-append a single |authuser| key carrying the incremented index.
+  new_query_parts.push_back(
+      base::StringPrintf("%s=%d", kAuthUserQueryKey, user_index + 1));
+  std::string new_query_string = JoinString(new_query_parts, '&');
+  url::Component new_query(0, new_query_string.size());
+  url::Replacements<char> replacements;
+  replacements.SetQuery(new_query_string.c_str(), new_query);
+  *url = url->ReplaceComponents(replacements);
+  return true;
+}
} // namespace
// UpdateDetails carries the payload sent with
// NOTIFICATION_EXTENSION_UPDATE_FOUND (see NotifyUpdateFound below).
UpdateDetails::~UpdateDetails() {}
-ExtensionDownloader::ExtensionFetch::ExtensionFetch() : url() {}
+// Default-constructed fetch: empty URL and no credentials attached yet.
+ExtensionDownloader::ExtensionFetch::ExtensionFetch()
+    : url(), credentials(CREDENTIALS_NONE) {
+}
// NOTE(review): the initializer list below uses |url(url)|, but no |url|
// parameter is visible in this signature. If a |const GURL& url| parameter was
// not merely elided from this view of the patch, |url(url)| self-initializes
// the member from itself (indeterminate value) — confirm against the header.
ExtensionDownloader::ExtensionFetch::ExtensionFetch(
    const std::string& id,
    const std::string& package_hash,
    const std::string& version,
    const std::set<int>& request_ids)
-    : id(id), url(url), package_hash(package_hash), version(version),
-      request_ids(request_ids) {}
+    : id(id),
+      url(url),
+      package_hash(package_hash),
+      version(version),
+      request_ids(request_ids),
+      credentials(CREDENTIALS_NONE),
+      oauth2_attempt_count(0) {
+}
// Out-of-line empty destructor.
ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
// Registers this downloader as an OAuth2 token consumer under a fixed id.
// |extension_cache_| stays NULL until StartAllPending supplies one, and extra
// update metrics default to off. |weak_ptr_factory_| is initialized last so
// its weak pointers are invalidated before the other members are torn down.
ExtensionDownloader::ExtensionDownloader(
    ExtensionDownloaderDelegate* delegate,
    net::URLRequestContextGetter* request_context)
-    : delegate_(delegate),
+    : OAuth2TokenService::Consumer(kTokenServiceConsumerId),
+      delegate_(delegate),
      request_context_(request_context),
-      weak_ptr_factory_(this),
      manifests_queue_(&kDefaultBackoffPolicy,
-                      base::Bind(&ExtensionDownloader::CreateManifestFetcher,
-                                 base::Unretained(this))),
+          base::Bind(&ExtensionDownloader::CreateManifestFetcher,
+                     base::Unretained(this))),
      extensions_queue_(&kDefaultBackoffPolicy,
-                       base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
-                                  base::Unretained(this))),
+          base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
+                     base::Unretained(this))),
+      extension_cache_(NULL),
+      enable_extra_update_metrics_(false),
+      weak_ptr_factory_(this) {
  DCHECK(delegate_);
-  DCHECK(request_context_);
+  DCHECK(request_context_.get());
}
// Out-of-line empty destructor.
ExtensionDownloader::~ExtensionDownloader() {}
if (!ManifestURL::UpdatesFromGallery(&extension))
update_url_data = delegate_->GetUpdateUrlData(extension.id());
- return AddExtensionData(extension.id(), *extension.version(),
+ std::string install_source;
+ bool force_update = delegate_->ShouldForceUpdate(extension.id(),
+ &install_source);
+ return AddExtensionData(extension.id(),
+ *extension.version(),
extension.GetType(),
ManifestURL::GetUpdateURL(&extension),
- update_url_data, request_id);
+ update_url_data,
+ request_id,
+ force_update,
+ install_source);
}
bool ExtensionDownloader::AddPendingExtension(const std::string& id,
Manifest::TYPE_UNKNOWN,
update_url,
std::string(),
- request_id);
+ request_id,
+ false,
+ std::string());
+}
+
+// Kicks off all queued update checks. When a cache is supplied, the actual
+// start is deferred until the cache finishes initializing; DoStartAllPending
+// is then invoked through a weak pointer so a destroyed downloader is never
+// called back.
+void ExtensionDownloader::StartAllPending(ExtensionCache* cache) {
+  if (cache) {
+    extension_cache_ = cache;
+    extension_cache_->Start(base::Bind(
+        &ExtensionDownloader::DoStartAllPending,
+        weak_ptr_factory_.GetWeakPtr()));
+  } else {
+    DoStartAllPending();
+  }
+}
-void ExtensionDownloader::StartAllPending() {
+void ExtensionDownloader::DoStartAllPending() {
ReportStats();
url_stats_ = URLStats();
// Note: it is very important that we use the https version of the update
// url here to avoid DNS hijacking of the blacklist, which is not validated
// by a public key signature like .crx files are.
- scoped_ptr<ManifestFetchData> blacklist_fetch(
- new ManifestFetchData(extension_urls::GetWebstoreUpdateUrl(),
- request_id));
+ scoped_ptr<ManifestFetchData> blacklist_fetch(CreateManifestFetchData(
+ extension_urls::GetWebstoreUpdateUrl(), request_id));
DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
blacklist_fetch->AddExtension(kBlacklistAppID,
version,
&ping_data,
std::string(),
- kDefaultInstallSource);
+ kDefaultInstallSource,
+ false);
StartUpdateCheck(blacklist_fetch.Pass());
}
-bool ExtensionDownloader::AddExtensionData(const std::string& id,
- const Version& version,
- Manifest::Type extension_type,
- const GURL& extension_update_url,
- const std::string& update_url_data,
- int request_id) {
+void ExtensionDownloader::SetWebstoreIdentityProvider(
+ scoped_ptr<IdentityProvider> identity_provider) {
+ identity_provider_.swap(identity_provider);
+}
+
+bool ExtensionDownloader::AddExtensionData(
+ const std::string& id,
+ const Version& version,
+ Manifest::Type extension_type,
+ const GURL& extension_update_url,
+ const std::string& update_url_data,
+ int request_id,
+ bool force_update,
+ const std::string& install_source_override) {
GURL update_url(extension_update_url);
// Skip extensions with non-empty invalid update URLs.
if (!update_url.is_empty() && !update_url.is_valid()) {
return false;
}
- if (update_url.DomainIs("google.com")) {
+ if (update_url.DomainIs(kGoogleDotCom)) {
url_stats_.google_url_count++;
} else if (update_url.is_empty()) {
url_stats_.no_url_count++;
std::vector<GURL> update_urls;
update_urls.push_back(update_url);
- // If UMA is enabled, also add to ManifestFetchData for the
+ // If metrics are enabled, also add to ManifestFetchData for the
// webstore update URL.
if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
- MetricsServiceHelper::IsMetricsReportingEnabled()) {
+ enable_extra_update_metrics_) {
update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
}
std::string install_source = i == 0 ?
kDefaultInstallSource : kNotFromWebstoreInstallSource;
+ if (!install_source_override.empty()) {
+ install_source = install_source_override;
+ }
ManifestFetchData::PingData ping_data;
ManifestFetchData::PingData* optional_ping_data = NULL;
ManifestFetchData* existing_fetch = existing_iter->second.back().get();
if (existing_fetch->AddExtension(id, version.GetString(),
optional_ping_data, update_url_data,
- install_source)) {
+ install_source,
+ force_update)) {
added = true;
}
}
// Otherwise add a new element to the list, if the list doesn't exist or
// if its last element is already full.
linked_ptr<ManifestFetchData> fetch(
- new ManifestFetchData(update_urls[i], request_id));
+ CreateManifestFetchData(update_urls[i], request_id));
fetches_preparing_[std::make_pair(request_id, update_urls[i])].
push_back(fetch);
added = fetch->AddExtension(id, version.GetString(),
optional_ping_data,
update_url_data,
- install_source);
+ install_source,
+ force_update);
DCHECK(added);
}
}
manifest_fetcher_.reset(net::URLFetcher::Create(
kManifestFetcherId, manifests_queue_.active_request()->full_url(),
net::URLFetcher::GET, this));
- manifest_fetcher_->SetRequestContext(request_context_);
+ manifest_fetcher_->SetRequestContext(request_context_.get());
manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
net::LOAD_DO_NOT_SAVE_COOKIES |
net::LOAD_DISABLE_CACHE);
// Force https (crbug.com/129587).
if (!crx_url.SchemeIsSecure()) {
- url_canon::Replacements<char> replacements;
+ url::Replacements<char> replacements;
std::string scheme("https");
replacements.SetScheme(scheme.c_str(),
- url_parse::Component(0, scheme.size()));
+ url::Component(0, scheme.size()));
crx_url = crx_url.ReplaceComponents(replacements);
}
}
// If the manifest response included a <daystart> element, we want to save
// that value for any extensions which had sent a ping in the request.
- if (fetch_data.base_url().DomainIs("google.com") &&
+ if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
results->daystart_elapsed_seconds >= 0) {
Time day_start =
Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);
VLOG(2) << id << " is at '" << version << "'";
- Version existing_version(version);
- Version update_version(update->version);
-
- if (!update_version.IsValid() ||
- update_version.CompareTo(existing_version) <= 0) {
- continue;
+ // We should skip the version check if update was forced.
+ if (!fetch_data.DidForceUpdate(id)) {
+ Version existing_version(version);
+ Version update_version(update->version);
+ if (!update_version.IsValid() ||
+ update_version.CompareTo(existing_version) <= 0) {
+ continue;
+ }
}
}
extensions_queue_.active_request()->request_ids.insert(
fetch_data->request_ids.begin(), fetch_data->request_ids.end());
} else {
- extensions_queue_.ScheduleRequest(fetch_data.Pass());
+ std::string version;
+ if (extension_cache_ &&
+ extension_cache_->GetExtension(fetch_data->id, NULL, &version) &&
+ version == fetch_data->version) {
+ base::FilePath crx_path;
+ // Now get .crx file path and mark extension as used.
+ extension_cache_->GetExtension(fetch_data->id, &crx_path, &version);
+ NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, false);
+ } else {
+ extensions_queue_.ScheduleRequest(fetch_data.Pass());
+ }
}
}
+// Hands a finished .crx download to the delegate along with the ping result
+// recorded for this extension; the ping result is then erased so it is not
+// reported a second time.
+void ExtensionDownloader::NotifyDelegateDownloadFinished(
+    scoped_ptr<ExtensionFetch> fetch_data,
+    const base::FilePath& crx_path,
+    bool file_ownership_passed) {
+  delegate_->OnExtensionDownloadFinished(fetch_data->id, crx_path,
+      file_ownership_passed, fetch_data->url, fetch_data->version,
+      ping_results_[fetch_data->id], fetch_data->request_ids);
+  ping_results_.erase(fetch_data->id);
+}
+
// Configures and starts the URLFetcher for the extension at the head of
// |extensions_queue_|. May instead start an OAuth2 token request and defer
// the fetch until the token arrives.
void ExtensionDownloader::CreateExtensionFetcher() {
+  const ExtensionFetch* fetch = extensions_queue_.active_request();
  extension_fetcher_.reset(net::URLFetcher::Create(
-      kExtensionFetcherId, extensions_queue_.active_request()->url,
-      net::URLFetcher::GET, this));
-  extension_fetcher_->SetRequestContext(request_context_);
-  extension_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
-                                   net::LOAD_DO_NOT_SAVE_COOKIES |
-                                   net::LOAD_DISABLE_CACHE);
+      kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
+  extension_fetcher_->SetRequestContext(request_context_.get());
  extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
+
+  // Cookies are only sent/saved when this fetch explicitly uses cookie
+  // credentials AND the URL is secure; otherwise suppress them.
+  int load_flags = net::LOAD_DISABLE_CACHE;
+  bool is_secure = fetch->url.SchemeIsSecure();
+  if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_secure) {
+    load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
+                  net::LOAD_DO_NOT_SAVE_COOKIES;
+  }
+  extension_fetcher_->SetLoadFlags(load_flags);
+
  // Download CRX files to a temp file. The blacklist is small and will be
  // processed in memory, so it is fetched into a string.
-  if (extensions_queue_.active_request()->id != kBlacklistAppID) {
+  if (fetch->id != kBlacklistAppID) {
    extension_fetcher_->SaveResponseToTemporaryFile(
        BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
  }
-  VLOG(2) << "Starting fetch of " << extensions_queue_.active_request()->url
-          << " for " << extensions_queue_.active_request()->id;
+  if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
+      is_secure) {
+    if (access_token_.empty()) {
+      // We should try OAuth2, but we have no token cached. This
+      // ExtensionFetcher will be started once the token fetch is complete,
+      // in either OnGetTokenSuccess or OnGetTokenFailure.
+      DCHECK(identity_provider_.get());
+      OAuth2TokenService::ScopeSet webstore_scopes;
+      webstore_scopes.insert(kWebstoreOAuth2Scope);
+      access_token_request_ =
+          identity_provider_->GetTokenService()->StartRequest(
+              identity_provider_->GetActiveAccountId(),
+              webstore_scopes,
+              this);
+      return;
+    }
+    // A token is already cached: attach it as a Bearer Authorization header.
+    extension_fetcher_->AddExtraRequestHeader(
+        base::StringPrintf("%s: Bearer %s",
+                           net::HttpRequestHeaders::kAuthorization,
+                           access_token_.c_str()));
+  }
+  VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
  extension_fetcher_->Start();
}
const net::URLRequestStatus& status,
int response_code,
const base::TimeDelta& backoff_delay) {
- const std::string& id = extensions_queue_.active_request()->id;
- const std::set<int>& request_ids =
- extensions_queue_.active_request()->request_ids;
- const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
-
+ ExtensionFetch& active_request = *extensions_queue_.active_request();
+ const std::string& id = active_request.id;
if (status.status() == net::URLRequestStatus::SUCCESS &&
(response_code == 200 || url.SchemeIsFile())) {
RETRY_HISTOGRAM("CrxFetchSuccess",
base::FilePath crx_path;
// Take ownership of the file at |crx_path|.
CHECK(source->GetResponseAsFilePath(true, &crx_path));
- delegate_->OnExtensionDownloadFinished(
- id, crx_path, url, extensions_queue_.active_request()->version,
- ping, request_ids);
+ scoped_ptr<ExtensionFetch> fetch_data =
+ extensions_queue_.reset_active_request();
+ if (extension_cache_) {
+ const std::string& version = fetch_data->version;
+ extension_cache_->PutExtension(id, crx_path, version,
+ base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished,
+ weak_ptr_factory_.GetWeakPtr(),
+ base::Passed(&fetch_data)));
+ } else {
+ NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
+ }
+ } else if (IterateFetchCredentialsAfterFailure(
+ &active_request,
+ status,
+ response_code)) {
+ extensions_queue_.RetryRequest(backoff_delay);
} else {
+ const std::set<int>& request_ids = active_request.request_ids;
+ const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
<< "' response code:" << response_code;
if (ShouldRetryRequest(status, response_code) &&
delegate_->OnExtensionDownloadFailed(
id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
}
+ ping_results_.erase(id);
+ extensions_queue_.reset_active_request();
}
extension_fetcher_.reset();
- if (extensions_queue_.active_request())
- ping_results_.erase(id);
- extensions_queue_.reset_active_request();
// If there are any pending downloads left, start the next one.
extensions_queue_.StartNextRequest();
const std::string& version) {
UpdateDetails updateInfo(id, Version(version));
content::NotificationService::current()->Notify(
- chrome::NOTIFICATION_EXTENSION_UPDATE_FOUND,
+ extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
content::NotificationService::AllBrowserContextsAndSources(),
content::Details<UpdateDetails>(&updateInfo));
}
+// Decides how to recover after the server refused an extension fetch.
+// Mutates |fetch| (credential mode, authuser index) and/or invalidates the
+// cached OAuth2 token, then returns true if the fetch should be retried;
+// returns false to give up and surface the failure.
+bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
+    ExtensionFetch* fetch,
+    const net::URLRequestStatus& status,
+    int response_code) {
+  // NOTE(review): CANCELED is counted as an auth failure alongside 401/403 —
+  // presumably covering requests aborted while awaiting credentials; confirm.
+  bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
+                      (status.status() == net::URLRequestStatus::SUCCESS &&
+                       (response_code == net::HTTP_UNAUTHORIZED ||
+                        response_code == net::HTTP_FORBIDDEN));
+  if (!auth_failure) {
+    return false;
+  }
+  // Here we decide what to do next if the server refused to authorize this
+  // fetch.
+  switch (fetch->credentials) {
+    case ExtensionFetch::CREDENTIALS_NONE:
+      // First failure: escalate to OAuth2 for google.com URLs when an
+      // identity provider is available; otherwise fall back on cookies.
+      if (fetch->url.DomainIs(kGoogleDotCom) && identity_provider_) {
+        fetch->credentials = ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN;
+      } else {
+        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
+      }
+      return true;
+    case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN:
+      fetch->oauth2_attempt_count++;
+      // OAuth2 may fail due to an expired access token, in which case we
+      // should invalidate the token and try again.
+      if (response_code == net::HTTP_UNAUTHORIZED &&
+          fetch->oauth2_attempt_count <= kMaxOAuth2Attempts) {
+        DCHECK(identity_provider_.get());
+        OAuth2TokenService::ScopeSet webstore_scopes;
+        webstore_scopes.insert(kWebstoreOAuth2Scope);
+        identity_provider_->GetTokenService()->InvalidateToken(
+            identity_provider_->GetActiveAccountId(),
+            webstore_scopes,
+            access_token_);
+        access_token_.clear();
+        return true;
+      }
+      // Either there is no Gaia identity available, the active identity
+      // doesn't have access to this resource, or the server keeps returning
+      // 401s and we've retried too many times. Fall back on cookies.
+      if (access_token_.empty() ||
+          response_code == net::HTTP_FORBIDDEN ||
+          fetch->oauth2_attempt_count > kMaxOAuth2Attempts) {
+        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
+        return true;
+      }
+      // Something else is wrong. Time to give up.
+      return false;
+    case ExtensionFetch::CREDENTIALS_COOKIES:
+      if (response_code == net::HTTP_FORBIDDEN) {
+        // Try the next session identity, up to some maximum.
+        return IncrementAuthUserIndex(&fetch->url);
+      }
+      return false;
+    default:
+      NOTREACHED();
+  }
+  // Unreachable: every case above returns. Kept to satisfy compilers that
+  // cannot prove the switch exhaustive.
+  NOTREACHED();
+  return false;
+}
+
+// OAuth2TokenService::Consumer callback for the token request started in
+// CreateExtensionFetcher. Caches the token, attaches it as a Bearer
+// Authorization header, and starts the previously deferred extension fetch.
+void ExtensionDownloader::OnGetTokenSuccess(
+    const OAuth2TokenService::Request* request,
+    const std::string& access_token,
+    const base::Time& expiration_time) {
+  access_token_ = access_token;
+  extension_fetcher_->AddExtraRequestHeader(
+      base::StringPrintf("%s: Bearer %s",
+                         net::HttpRequestHeaders::kAuthorization,
+                         access_token_.c_str()));
+  extension_fetcher_->Start();
+}
+
+// OAuth2TokenService::Consumer failure callback: start the already-configured
+// fetcher anyway (without an Authorization header) so the request can fall
+// back on cookies via IterateFetchCredentialsAfterFailure.
+void ExtensionDownloader::OnGetTokenFailure(
+    const OAuth2TokenService::Request* request,
+    const GoogleServiceAuthError& error) {
+  // If we fail to get an access token, kick the pending fetch and let it fall
+  // back on cookies.
+  extension_fetcher_->Start();
+}
+
+// Builds a ManifestFetchData for |update_url|, choosing the ping mode: pings
+// (with metrics when enabled) are attached only for the ping-enabled domain.
+// Caller takes ownership of the returned object.
+ManifestFetchData* ExtensionDownloader::CreateManifestFetchData(
+    const GURL& update_url,
+    int request_id) {
+  ManifestFetchData::PingMode ping_mode = ManifestFetchData::NO_PING;
+  // NOTE(review): if |ping_enabled_domain_| is empty, DomainIs("") decides
+  // this branch — confirm the member is always set before first use.
+  if (update_url.DomainIs(ping_enabled_domain_.c_str())) {
+    if (enable_extra_update_metrics_) {
+      ping_mode = ManifestFetchData::PING_WITH_METRICS;
+    } else {
+      ping_mode = ManifestFetchData::PING;
+    }
+  }
+  return new ManifestFetchData(
+      update_url, request_id, brand_code_, manifest_query_params_, ping_mode);
+}
+
} // namespace extensions