Various small cleanups in URLRequestJob:
  - Remove unused or unneeded methods.
  - Make (almost) all data private.

R=eroman@chromium.org
BUG=none
TEST=none


Review URL: http://codereview.chromium.org/6697035

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@78319 0039d316-1c4b-4281-b951-d872f2087c98
adamk@chromium.org committed Mar 16, 2011
1 parent 81ad7f4 commit 0da15e5
Showing 2 changed files with 23 additions and 39 deletions.
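A rough sketch of the cleanup pattern described above, using a hypothetical ExampleJob class rather than the actual Chromium code: an unused accessor is dropped, data members stay private instead of protected, and fields that used to be assigned in the constructor body are initialized in the initializer list instead.

// Hypothetical ExampleJob -- a minimal sketch of the cleanup style only,
// not the real URLRequestJob.
class ExampleJob {
 public:
  explicit ExampleJob(int load_flags)
      : done_(false),             // was previously assigned in the ctor body
        load_flags_(load_flags),  // cached up front via the initializer list
        is_profiling_(false) {}

  bool is_done() const { return done_; }
  // bool is_profiling() const { return is_profiling_; }  // unused accessor: removed

 private:  // data members kept private rather than protected
  bool done_;
  int load_flags_;
  bool is_profiling_;
};

int main() {
  ExampleJob job(0);
  return job.is_done() ? 1 : 0;
}

In the real change below, load_flags_ and is_profiling_ are likewise cached from the request in the initializer list, and the formerly protected data members of URLRequestJob move into its private section.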
21 changes: 7 additions & 14 deletions net/url_request/url_request_job.cc
@@ -1,4 +1,4 @@
- // Copyright (c) 2010 The Chromium Authors. All rights reserved.
+ // Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

@@ -28,11 +28,13 @@ namespace net {

URLRequestJob::URLRequestJob(URLRequest* request)
: request_(request),
+ done_(false),
+ load_flags_(request_->load_flags()),
+ is_profiling_(request_->enable_profiling()),
prefilter_bytes_read_(0),
postfilter_bytes_read_(0),
is_compressible_content_(false),
is_compressed_(false),
- done_(false),
filter_needs_more_output_space_(false),
filtered_read_buffer_len_(0),
has_handled_response_(false),
@@ -43,9 +45,7 @@ URLRequestJob::URLRequestJob(URLRequest* request)
bytes_observed_in_packets_(0),
max_packets_timed_(0),
observed_packet_count_(0) {
- load_flags_ = request_->load_flags();
- is_profiling_ = request->enable_profiling();
- if (is_profiling()) {
+ if (is_profiling_) {
metrics_.reset(new URLRequestJobMetrics());
metrics_->start_time_ = TimeTicks::Now();
}
@@ -215,13 +215,6 @@ void URLRequestJob::FollowDeferredRedirect() {
FollowRedirect(redirect_url, redirect_status_code);
}

- URLRequestJobMetrics* URLRequestJob::RetrieveMetrics() {
-   if (is_profiling())
-     return metrics_.release();
-   else
-     return NULL;
- }

bool URLRequestJob::GetMimeType(std::string* mime_type) const {
return false;
}
@@ -562,7 +555,7 @@ void URLRequestJob::NotifyDone(const URLRequestStatus &status) {

RecordCompressionHistograms();

- if (is_profiling() && metrics_->total_bytes_read_ > 0) {
+ if (is_profiling_ && metrics_->total_bytes_read_ > 0) {
// There are valid IO statistics. Fill in other fields of metrics for
// profiling consumers to retrieve information.
metrics_->original_url_.reset(new GURL(request_->original_url()));
@@ -830,7 +823,7 @@ void URLRequestJob::OnRawReadComplete(int bytes_read) {
}

void URLRequestJob::RecordBytesRead(int bytes_read) {
- if (is_profiling()) {
+ if (is_profiling_) {
++(metrics_->number_of_read_IO_);
metrics_->total_bytes_read_ += bytes_read;
}
41 changes: 16 additions & 25 deletions net/url_request/url_request_job.h
@@ -18,7 +18,6 @@
#include "net/base/host_port_pair.h"
#include "net/base/load_states.h"


namespace net {

class AuthChallengeInfo;
@@ -180,14 +179,6 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
// NotifyDone on the request.
bool is_done() const { return done_; }

- // Returns true if the job is doing performance profiling
- bool is_profiling() const { return is_profiling_; }
-
- // Retrieve the performance measurement of the job. The data is encapsulated
- // with a URLRequestJobMetrics object. The caller owns this object from now
- // on.
- URLRequestJobMetrics* RetrieveMetrics();

// Get/Set expected content size
int64 expected_content_size() const { return expected_content_size_; }
void set_expected_content_size(const int64& size) {
@@ -286,22 +277,6 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
// request was released by DetachRequest().
net::URLRequest* request_;

- // Whether the job is doing performance profiling
- bool is_profiling_;
-
- // Contains IO performance measurement when profiling is enabled.
- scoped_ptr<URLRequestJobMetrics> metrics_;
-
- // The number of bytes read before passing to the filter.
- int prefilter_bytes_read_;
- // The number of bytes read after passing through the filter.
- int postfilter_bytes_read_;
- // True when (we believe) the content in this net::URLRequest was
- // compressible.
- bool is_compressible_content_;
- // True when the content in this net::URLRequest was compressed.
- bool is_compressed_;

private:
// When data filtering is enabled, this function is used to read data
// for the filter. Returns true if raw data was read. Returns false if
@@ -344,6 +319,22 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
// Cache the load flags from request_ because it might go away.
int load_flags_;

+ // Whether the job is doing performance profiling
+ bool is_profiling_;
+
+ // Contains IO performance measurement when profiling is enabled.
+ scoped_ptr<URLRequestJobMetrics> metrics_;
+
+ // The number of bytes read before passing to the filter.
+ int prefilter_bytes_read_;
+ // The number of bytes read after passing through the filter.
+ int postfilter_bytes_read_;
+ // True when (we believe) the content in this net::URLRequest was
+ // compressible.
+ bool is_compressible_content_;
+ // True when the content in this net::URLRequest was compressed.
+ bool is_compressed_;

// The data stream filter which is enabled on demand.
scoped_ptr<Filter> filter_;

