#include "base/message_loop/message_loop.h"
#include "base/metrics/field_trial.h"
#include "base/metrics/histogram.h"
+#include "base/profiler/scoped_tracker.h"
#include "base/rand_util.h"
#include "base/strings/string_util.h"
#include "base/time/time.h"
#include "net/http/http_transaction.h"
#include "net/http/http_transaction_factory.h"
#include "net/http/http_util.h"
+#include "net/proxy/proxy_info.h"
#include "net/ssl/ssl_cert_request_info.h"
#include "net/ssl/ssl_config_service.h"
#include "net/url_request/fraudulent_certificate_reporter.h"
class URLRequestHttpJob::HttpFilterContext : public FilterContext {
public:
explicit HttpFilterContext(URLRequestHttpJob* job);
- virtual ~HttpFilterContext();
+ ~HttpFilterContext() override;
// FilterContext implementation.
- virtual bool GetMimeType(std::string* mime_type) const OVERRIDE;
- virtual bool GetURL(GURL* gurl) const OVERRIDE;
- virtual bool GetContentDisposition(std::string* disposition) const OVERRIDE;
- virtual base::Time GetRequestTime() const OVERRIDE;
- virtual bool IsCachedContent() const OVERRIDE;
- virtual bool IsDownload() const OVERRIDE;
- virtual bool IsSdchResponse() const OVERRIDE;
- virtual int64 GetByteReadCount() const OVERRIDE;
- virtual int GetResponseCode() const OVERRIDE;
- virtual void RecordPacketStats(StatisticSelector statistic) const OVERRIDE;
+ bool GetMimeType(std::string* mime_type) const override;
+ bool GetURL(GURL* gurl) const override;
+ bool GetContentDisposition(std::string* disposition) const override;
+ base::Time GetRequestTime() const override;
+ bool IsCachedContent() const override;
+ bool IsDownload() const override;
+ bool SdchResponseExpected() const override;
+ int64 GetByteReadCount() const override;
+ int GetResponseCode() const override;
+ const URLRequestContext* GetURLRequestContext() const override;
+ void RecordPacketStats(StatisticSelector statistic) const override;
// Method to allow us to reset filter context for a response that should have
// been SDCH encoded when there is an update due to an explicit HTTP header.
job_->sdch_dictionary_advertised_ = false;
}
-bool URLRequestHttpJob::HttpFilterContext::IsSdchResponse() const {
+// Returns true when the request advertised an SDCH dictionary, i.e. an
+// SDCH-encoded response is expected (renamed from IsSdchResponse to make
+// clear this reflects the request side, not the actual response encoding).
+bool URLRequestHttpJob::HttpFilterContext::SdchResponseExpected() const {
return job_->sdch_dictionary_advertised_;
}
return job_->GetResponseCode();
}
+// Returns the URLRequestContext of the job's request, or NULL if the
+// request has already been detached from the job (job_->request() is NULL
+// after the request is destroyed).
+const URLRequestContext*
+URLRequestHttpJob::HttpFilterContext::GetURLRequestContext() const {
+ return job_->request() ? job_->request()->context() : NULL;
+}
+
void URLRequestHttpJob::HttpFilterContext::RecordPacketStats(
StatisticSelector statistic) const {
job_->RecordPacketStats(statistic);
request_time_snapshot_(),
final_packet_time_(),
filter_context_(new HttpFilterContext(this)),
- weak_factory_(this),
on_headers_received_callback_(
base::Bind(&URLRequestHttpJob::OnHeadersReceivedCallback,
base::Unretained(this))),
awaiting_callback_(false),
- http_user_agent_settings_(http_user_agent_settings) {
+ http_user_agent_settings_(http_user_agent_settings),
+ weak_factory_(this) {
URLRequestThrottlerManager* manager = request->context()->throttler_manager();
if (manager)
throttling_entry_ = manager->RegisterRequestUrl(request->url());
// filter_context_ is still alive.
DestroyFilters();
- if (sdch_dictionary_url_.is_valid()) {
- // Prior to reaching the destructor, request_ has been set to a NULL
- // pointer, so request_->url() is no longer valid in the destructor, and we
- // use an alternate copy |request_info_.url|.
- SdchManager* manager = SdchManager::Global();
- // To be extra safe, since this is a "different time" from when we decided
- // to get the dictionary, we'll validate that an SdchManager is available.
- // At shutdown time, care is taken to be sure that we don't delete this
- // globally useful instance "too soon," so this check is just defensive
- // coding to assure that IF the system is shutting down, we don't have any
- // problem if the manager was deleted ahead of time.
- if (manager) // Defensive programming.
- manager->FetchDictionary(request_info_.url, sdch_dictionary_url_);
- }
DoneWithRequest(ABORTED);
}
// Privacy mode could still be disabled in OnCookiesLoaded if we are going
// to send previously saved cookies.
request_info_.privacy_mode = enable_privacy_mode ?
- kPrivacyModeEnabled : kPrivacyModeDisabled;
+ PRIVACY_MODE_ENABLED : PRIVACY_MODE_DISABLED;
// Strip Referer from request_info_.extra_headers to prevent, e.g., plugins
// from overriding headers that are controlled using other means. Otherwise a
URLRequestJob::Kill();
}
+// Invoked by the transaction just before the headers destined for the proxy
+// are sent (wired up via SetBeforeProxyHeadersSentCallback below). Forwards
+// the notification to the network delegate, if one is set, so it can inspect
+// or modify |request_headers|. Must not be reached after cancellation.
+void URLRequestHttpJob::NotifyBeforeSendProxyHeadersCallback(
+ const ProxyInfo& proxy_info,
+ HttpRequestHeaders* request_headers) {
+ DCHECK(request_headers);
+ DCHECK_NE(URLRequestStatus::CANCELED, GetStatus().status());
+ if (network_delegate()) {
+ network_delegate()->NotifyBeforeSendProxyHeaders(
+ request_,
+ proxy_info,
+ request_headers);
+ }
+}
+
void URLRequestHttpJob::NotifyHeadersComplete() {
DCHECK(!response_info_);
ProcessStrictTransportSecurityHeader();
ProcessPublicKeyPinsHeader();
- if (SdchManager::Global() &&
- SdchManager::Global()->IsInSupportedDomain(request_->url())) {
+ SdchManager* sdch_manager(request()->context()->sdch_manager());
+ if (sdch_manager && sdch_manager->IsInSupportedDomain(request_->url())) {
const std::string name = "Get-Dictionary";
std::string url_text;
void* iter = NULL;
// Eventually we should wait until a dictionary is requested several times
// before we even download it (so that we don't waste memory or bandwidth).
if (GetResponseHeaders()->EnumerateHeader(&iter, name, &url_text)) {
- // request_->url() won't be valid in the destructor, so we use an
- // alternate copy.
- DCHECK_EQ(request_->url(), request_info_.url);
// Resolve suggested URL relative to request url.
- sdch_dictionary_url_ = request_info_.url.Resolve(url_text);
+ GURL sdch_dictionary_url = request_->url().Resolve(url_text);
+ if (sdch_dictionary_url.is_valid()) {
+ sdch_manager->OnGetDictionary(request_->url(), sdch_dictionary_url);
+ }
}
}
priority_, &transaction_);
if (rv == OK && request_info_.url.SchemeIsWSOrWSS()) {
- // TODO(ricea): Implement WebSocket throttling semantics as defined in
- // RFC6455 Section 4.1.
base::SupportsUserData::Data* data = request_->GetUserData(
WebSocketHandshakeStreamBase::CreateHelper::DataKey());
if (data) {
transaction_->SetBeforeNetworkStartCallback(
base::Bind(&URLRequestHttpJob::NotifyBeforeNetworkStart,
base::Unretained(this)));
+ transaction_->SetBeforeProxyHeadersSentCallback(
+ base::Bind(&URLRequestHttpJob::NotifyBeforeSendProxyHeadersCallback,
+ base::Unretained(this)));
if (!throttling_entry_.get() ||
- !throttling_entry_->ShouldRejectRequest(*request_)) {
+ !throttling_entry_->ShouldRejectRequest(*request_,
+ network_delegate())) {
rv = transaction_->Start(
&request_info_, start_callback_, request_->net_log());
start_time_ = base::TimeTicks::Now();
}
void URLRequestHttpJob::AddExtraHeaders() {
+ SdchManager* sdch_manager = request()->context()->sdch_manager();
+
// Supply Accept-Encoding field only if it is not already provided.
// It should be provided IF the content is known to have restrictions on
// potential encoding, such as streaming multi-media.
// simple_data_source.
if (!request_info_.extra_headers.HasHeader(
HttpRequestHeaders::kAcceptEncoding)) {
- bool advertise_sdch = SdchManager::Global() &&
- SdchManager::Global()->IsInSupportedDomain(request_->url());
+ bool advertise_sdch = sdch_manager &&
+ // We don't support SDCH responses to POST as there is a possibility
+ // of having SDCH encoded responses returned (e.g. by the cache)
+ // which we cannot decode, and in those situations, we will need
+ // to retransmit the request without SDCH, which is illegal for a POST.
+ request()->method() != "POST" &&
+ sdch_manager->IsInSupportedDomain(request_->url());
std::string avail_dictionaries;
if (advertise_sdch) {
- SdchManager::Global()->GetAvailDictionaryList(request_->url(),
- &avail_dictionaries);
+ sdch_manager->GetAvailDictionaryList(request_->url(),
+ &avail_dictionaries);
// The AllowLatencyExperiment() is only true if we've successfully done a
// full SDCH compression recently in this browser session for this host.
// Note that for this path, there might be no applicable dictionaries,
// and hence we can't participate in the experiment.
if (!avail_dictionaries.empty() &&
- SdchManager::Global()->AllowLatencyExperiment(request_->url())) {
+ sdch_manager->AllowLatencyExperiment(request_->url())) {
// We are participating in the test (or control), and hence we'll
// eventually record statistics via either SDCH_EXPERIMENT_DECODE or
// SDCH_EXPERIMENT_HOLDBACK, and we'll need some packet timing data.
if (!advertise_sdch) {
// Tell the server what compression formats we support (other than SDCH).
request_info_.extra_headers.SetHeader(
- HttpRequestHeaders::kAcceptEncoding, "gzip,deflate");
+ HttpRequestHeaders::kAcceptEncoding, "gzip, deflate");
} else {
// Include SDCH in acceptable list.
request_info_.extra_headers.SetHeader(
- HttpRequestHeaders::kAcceptEncoding, "gzip,deflate,sdch");
+ HttpRequestHeaders::kAcceptEncoding, "gzip, deflate, sdch");
if (!avail_dictionaries.empty()) {
request_info_.extra_headers.SetHeader(
kAvailDictionaryHeader,
request_info_.extra_headers.SetHeader(
HttpRequestHeaders::kCookie, cookie_line);
// Disable privacy mode as we are sending cookies anyway.
- request_info_.privacy_mode = kPrivacyModeDisabled;
+ request_info_.privacy_mode = PRIVACY_MODE_DISABLED;
}
DoStartTransaction();
}
}
void URLRequestHttpJob::OnStartCompleted(int result) {
+ // TODO(vadimt): Remove ScopedTracker below once crbug.com/424359 is fixed.
+ tracked_objects::ScopedTracker tracking_profile(
+ FROM_HERE_WITH_EXPLICIT_FUNCTION(
+ "424359 URLRequestHttpJob::OnStartCompleted"));
+
RecordTimer();
// If the request was destroyed, then there is no more work to do.
context->fraudulent_certificate_reporter();
if (reporter != NULL) {
const SSLInfo& ssl_info = transaction_->GetResponseInfo()->ssl_info;
- bool sni_available = SSLConfigService::IsSNIAvailable(
- context->ssl_config_service());
const std::string& host = request_->url().host();
- reporter->SendReport(host, ssl_info, sni_available);
+ reporter->SendReport(host, ssl_info);
}
}
if (result == OK) {
+ if (transaction_ && transaction_->GetResponseInfo()) {
+ SetProxyServer(transaction_->GetResponseInfo()->proxy_server);
+ }
scoped_refptr<HttpResponseHeaders> headers = GetResponseHeaders();
if (network_delegate()) {
// Note that |this| may not be deleted until
NotifySSLCertificateError(info, true);
} else {
// Maybe overridable, maybe not. Ask the delegate to decide.
- TransportSecurityState::DomainState domain_state;
const URLRequestContext* context = request_->context();
- const bool fatal = context->transport_security_state() &&
- context->transport_security_state()->GetDomainState(
- request_info_.url.host(),
- SSLConfigService::IsSNIAvailable(context->ssl_config_service()),
- &domain_state) &&
- domain_state.ShouldSSLErrorsBeFatal();
+ TransportSecurityState* state = context->transport_security_state();
+ const bool fatal =
+ state && state->ShouldSSLErrorsBeFatal(request_info_.url.host());
NotifySSLCertificateError(
transaction_->GetResponseInfo()->ssl_info, fatal);
}
}
void URLRequestHttpJob::OnReadCompleted(int result) {
+ // TODO(vadimt): Remove ScopedTracker below once crbug.com/424359 is fixed.
+ tracked_objects::ScopedTracker tracking_profile(
+ FROM_HERE_WITH_EXPLICIT_FUNCTION(
+ "424359 URLRequestHttpJob::OnReadCompleted"));
+
read_in_progress_ = false;
if (ShouldFixMismatchedContentLength(result))
encoding_types.push_back(Filter::ConvertEncodingToType(encoding_type));
}
- if (filter_context_->IsSdchResponse()) {
+ if (filter_context_->SdchResponseExpected()) {
// We are wary of proxies that discard or damage SDCH encoding. If a server
// explicitly states that this is not SDCH content, then we can correct our
// assumption that this is an SDCH response, and avoid the need to recover
? Filter::Factory(encoding_types, *filter_context_) : NULL;
}
+// Returns whether the original URL's reference fragment should be copied
+// onto |location| when following a redirect.
+bool URLRequestHttpJob::CopyFragmentOnRedirect(const GURL& location) const {
+ // Allow modification of reference fragments by default, unless
+ // |allowed_unsafe_redirect_url_| is set and equal to the redirect URL.
+ // When this is the case, we assume that the network delegate has set the
+ // desired redirect URL (with or without fragment), so it must not be changed
+ // any more.
+ return !allowed_unsafe_redirect_url_.is_valid() ||
+ allowed_unsafe_redirect_url_ != location;
+}
+
bool URLRequestHttpJob::IsSafeRedirect(const GURL& location) {
// HTTP is always safe.
// TODO(pauljensen): Remove once crbug.com/146591 is fixed.
(location.scheme() == "http" || location.scheme() == "https")) {
return true;
}
- // Delegates may mark an URL as safe for redirection.
- if (allowed_unsafe_redirect_url_.is_valid()) {
- GURL::Replacements replacements;
- replacements.ClearRef();
- if (allowed_unsafe_redirect_url_.ReplaceComponents(replacements) ==
- location.ReplaceComponents(replacements)) {
- return true;
- }
+ // Delegates may mark a URL as safe for redirection.
+ if (allowed_unsafe_redirect_url_.is_valid() &&
+ allowed_unsafe_redirect_url_ == location) {
+ return true;
}
// Query URLRequestJobFactory as to whether |location| would be safe to
// redirect to.
} else {
// Otherwise, |override_response_headers_| must be non-NULL and contain
// bogus headers indicating a redirect.
- DCHECK(override_response_headers_);
+ DCHECK(override_response_headers_.get());
DCHECK(override_response_headers_->IsRedirect(NULL));
transaction_->StopCaching();
}
return; // No new bytes have arrived.
}
- final_packet_time_ = base::Time::Now();
+ base::Time now(base::Time::Now());
if (!bytes_observed_in_packets_)
- request_time_snapshot_ = request_ ? request_->request_time() : base::Time();
+ request_time_snapshot_ = now;
+ final_packet_time_ = now;
bytes_observed_in_packets_ = filter_input_byte_count();
}
}
case FilterContext::SDCH_EXPERIMENT_DECODE: {
- UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment2_Decode",
+ UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Decode",
duration,
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);
return;
}
case FilterContext::SDCH_EXPERIMENT_HOLDBACK: {
- UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment2_Holdback",
+ UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Holdback",
duration,
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);