Fix several problems with the content verification code
- While we're doing the initial scan of files to build up
  computed_hashes.json, if we notice a mismatch, report it right away
  (see the first sketch below). This also fixes the problem that
  enforcement would never work if computed_hashes.json had been
  modified or deleted.

- Avoid an infinite loop in ContentVerifyJob by making sure we stop
  checking newly read bytes after we've already reported a failure;
  previously we got stuck failing to advance to the next block after
  reporting a failure (see the second sketch below).
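
Editor's sketch of the first fix, for illustration only. The real check lands in ContentHashFetcherJob::CreateHashes() (see the .cc diff below), which now compares each file's computed tree-hash root against the signed root from verified_contents.json and records any mismatch instead of silently writing hashes. FakeRoot() here is a hypothetical stand-in for the real SHA-256 block hashing plus ComputeTreeHashRoot() folding, so the control flow is runnable on its own:

#include <functional>
#include <iostream>
#include <map>
#include <set>
#include <string>

// Hypothetical stand-in for the real hashing: the actual job hashes
// 4096-byte blocks with SHA-256 and folds them with ComputeTreeHashRoot().
std::string FakeRoot(const std::string& contents) {
  return std::to_string(std::hash<std::string>{}(contents));
}

int main() {
  // Expected roots, as parsed from the signed verified_contents.json.
  std::map<std::string, std::string> expected_roots = {
      {"manifest.json", FakeRoot("{\"name\": \"ext\"}")},
      {"script.js", FakeRoot("alert(1);")}};

  // What is actually on disk; script.js has been tampered with.
  std::map<std::string, std::string> on_disk = {
      {"manifest.json", "{\"name\": \"ext\"}"},
      {"script.js", "alert(2);"}};

  std::set<std::string> hash_mismatch_paths;
  for (const auto& file : on_disk) {
    auto it = expected_roots.find(file.first);
    if (it == expected_roots.end())
      continue;  // No signed hash for this path; nothing to verify.
    if (FakeRoot(file.second) != it->second) {
      // New behavior: record the mismatch right away rather than writing a
      // computed_hashes.json entry that would mask the tampering.
      hash_mismatch_paths.insert(file.first);
      continue;
    }
    // Matching files would get their block hashes written to
    // computed_hashes.json here.
  }

  for (const auto& path : hash_mismatch_paths)
    std::cout << "content mismatch for " << path << "\n";
  return 0;
}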

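The second fix touches ContentVerifyJob, whose diff is not among the files shown below, so the following sketch is hypothetical: it assumes a streaming verifier whose read loop previously failed to advance past a bad block, re-checking the same bytes on every later read. A failure flag that short-circuits further checking, plus a loop that always consumes the block it examined, captures the behavior the message describes; none of these names come from the Chromium tree.

#include <cstddef>
#include <iostream>
#include <string>

// Hypothetical streaming verifier, not the actual ContentVerifyJob.
class BlockVerifier {
 public:
  // Called as extension file bytes stream in; returns false once a
  // verification failure has been reported.
  bool BytesRead(const std::string& data) {
    if (failure_reported_)
      return false;  // The fix: stop checking new bytes after a failure.
    buffer_ += data;
    while (buffer_.size() >= kBlockSize) {
      std::string block = buffer_.substr(0, kBlockSize);
      buffer_.erase(0, kBlockSize);  // Always consume the examined block.
      if (!VerifyBlock(block)) {
        // The old bug: reporting a failure without advancing past the bad
        // block, so every subsequent read re-checked the same bytes forever.
        failure_reported_ = true;
        std::cout << "verification failure reported\n";
        return false;
      }
    }
    return true;
  }

 private:
  static constexpr std::size_t kBlockSize = 4096;

  // Mock check; the real job compares a SHA-256 block hash against the
  // expected hash from computed_hashes.json.
  bool VerifyBlock(const std::string& block) {
    return block.find("EVIL") == std::string::npos;
  }

  bool failure_reported_ = false;
  std::string buffer_;
};

int main() {
  BlockVerifier verifier;
  std::string tampered(4096, 'a');
  tampered.replace(0, 4, "EVIL");
  verifier.BytesRead(tampered);              // Reports one failure...
  verifier.BytesRead(std::string(10, 'b'));  // ...and ignores later bytes.
  return 0;
}
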
BUG=375992,375953

Review URL: https://codereview.chromium.org/329303007

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@278071 0039d316-1c4b-4281-b951-d872f2087c98
asargent@chromium.org committed Jun 18, 2014
1 parent 6dc5125 commit b6641db
Showing 9 changed files with 301 additions and 95 deletions.
127 changes: 100 additions & 27 deletions extensions/browser/content_hash_fetcher.cc
@@ -20,7 +20,9 @@
#include "crypto/secure_hash.h"
#include "crypto/sha2.h"
#include "extensions/browser/computed_hashes.h"
#include "extensions/browser/content_hash_tree.h"
#include "extensions/browser/extension_registry.h"
#include "extensions/browser/verified_contents.h"
#include "extensions/common/constants.h"
#include "extensions/common/extension.h"
#include "extensions/common/file_util.h"
@@ -47,9 +49,11 @@ class ContentHashFetcherJob
public:
typedef base::Callback<void(ContentHashFetcherJob*)> CompletionCallback;
ContentHashFetcherJob(net::URLRequestContextGetter* request_context,
ContentVerifierKey key,
const std::string& extension_id,
const base::FilePath& extension_path,
const GURL& fetch_url,
bool force,
const CompletionCallback& callback);

void Start();
@@ -58,20 +62,28 @@ class ContentHashFetcherJob
// just waiting for the entire job to complete. Safe to call from any thread.
void Cancel();

// Returns whether this job was completely successful (we have both verified
// contents and computed hashes).
// Checks whether this job has been cancelled. Safe to call from any thread.
bool IsCancelled();

// Returns whether this job was successful (we have both verified contents
// and computed hashes). Even if the job was a success, there might have been
// files that were found to have contents not matching expectations; these
// are available by calling hash_mismatch_paths().
bool success() { return success_; }

// Do we have a verified_contents.json file?
bool have_verified_contents() { return have_verified_contents_; }
bool force() { return force_; }

const std::string& extension_id() { return extension_id_; }

// Returns the set of paths that had a hash mismatch.
const std::set<base::FilePath>& hash_mismatch_paths() {
return hash_mismatch_paths_;
}

private:
friend class base::RefCountedThreadSafe<ContentHashFetcherJob>;
virtual ~ContentHashFetcherJob();

// Checks whether this job has been cancelled. Safe to call from any thread.
bool IsCancelled();

// Callback for when we're done doing file I/O to see if we already have
// a verified contents file. If we don't, this will kick off a network
// request to get one.
@@ -110,18 +122,22 @@ class ContentHashFetcherJob
// The url we'll need to use to fetch a verified_contents.json file.
GURL fetch_url_;

bool force_;

CompletionCallback callback_;
content::BrowserThread::ID creation_thread_;

// Used for fetching content signatures.
scoped_ptr<net::URLFetcher> url_fetcher_;

// The key used to validate verified_contents.json.
ContentVerifierKey key_;

// Whether this job succeeded.
bool success_;

// Whether we either found a verified contents file, or were successful in
// fetching one and saving it to disk.
bool have_verified_contents_;
// Paths that were found to have a mismatching hash.
std::set<base::FilePath> hash_mismatch_paths_;

// The block size to use for hashing.
int block_size_;
@@ -132,21 +148,26 @@

// A lock for synchronizing access to |cancelled_|.
base::Lock cancelled_lock_;

DISALLOW_COPY_AND_ASSIGN(ContentHashFetcherJob);
};

ContentHashFetcherJob::ContentHashFetcherJob(
net::URLRequestContextGetter* request_context,
ContentVerifierKey key,
const std::string& extension_id,
const base::FilePath& extension_path,
const GURL& fetch_url,
bool force,
const CompletionCallback& callback)
: request_context_(request_context),
extension_id_(extension_id),
extension_path_(extension_path),
fetch_url_(fetch_url),
force_(force),
callback_(callback),
key_(key),
success_(false),
have_verified_contents_(false),
// TODO(asargent) - use the value from verified_contents.json for each
// file, instead of using a constant.
block_size_(4096),
@@ -172,21 +193,24 @@ void ContentHashFetcherJob::Cancel() {
cancelled_ = true;
}

ContentHashFetcherJob::~ContentHashFetcherJob() {
}

bool ContentHashFetcherJob::IsCancelled() {
base::AutoLock autolock(cancelled_lock_);
bool result = cancelled_;
return result;
}

ContentHashFetcherJob::~ContentHashFetcherJob() {
}

void ContentHashFetcherJob::DoneCheckingForVerifiedContents(bool found) {
if (IsCancelled())
return;
if (found) {
VLOG(1) << "Found verified contents for " << extension_id_;
DoneFetchingVerifiedContents(true);
} else {
VLOG(1) << "Missing verified contents for " << extension_id_
<< ", fetching...";
url_fetcher_.reset(
net::URLFetcher::Create(fetch_url_, net::URLFetcher::GET, this));
url_fetcher_->SetRequestContext(request_context_);
@@ -209,6 +233,9 @@ static int WriteFileHelper(const base::FilePath& path,
}

void ContentHashFetcherJob::OnURLFetchComplete(const net::URLFetcher* source) {
VLOG(1) << "URLFetchComplete for " << extension_id_
<< " is_success:" << url_fetcher_->GetStatus().is_success() << " "
<< fetch_url_.possibly_invalid_spec();
if (IsCancelled())
return;
scoped_ptr<std::string> response(new std::string);
@@ -224,6 +251,8 @@ void ContentHashFetcherJob::OnURLFetchComplete(const net::URLFetcher* source) {
// move to parsing this in a sandboxed helper (crbug.com/372878).
scoped_ptr<base::Value> parsed(base::JSONReader::Read(*response));
if (parsed) {
VLOG(1) << "JSON parsed ok for " << extension_id_;

parsed.reset(); // no longer needed
base::FilePath destination =
file_util::GetVerifiedContentsPath(extension_path_);
@@ -247,14 +276,13 @@ void ContentHashFetcherJob::OnVerifiedContentsWritten(size_t expected_size,
}

void ContentHashFetcherJob::DoneFetchingVerifiedContents(bool success) {
have_verified_contents_ = success;

if (IsCancelled())
return;

// TODO(asargent) - eventually we should abort here on !success, but for
// testing purposes it's actually still helpful to continue on to create the
// computed hashes.
if (!success) {
DispatchCallback();
return;
}

content::BrowserThread::PostBlockingPoolSequencedTask(
"ContentHashFetcher",
@@ -268,10 +296,13 @@ void ContentHashFetcherJob::MaybeCreateHashes() {
base::FilePath hashes_file =
file_util::GetComputedHashesPath(extension_path_);

if (base::PathExists(hashes_file))
if (!force_ && base::PathExists(hashes_file)) {
success_ = true;
else
} else {
if (force_)
base::DeleteFile(hashes_file, false /* recursive */);
success_ = CreateHashes(hashes_file);
}

content::BrowserThread::PostTask(
creation_thread_,
@@ -286,6 +317,12 @@ bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) {
if (!base::CreateDirectoryAndGetError(hashes_file.DirName(), NULL))
return false;

base::FilePath verified_contents_path =
file_util::GetVerifiedContentsPath(extension_path_);
VerifiedContents verified_contents(key_.data, key_.size);
if (!verified_contents.InitFrom(verified_contents_path, false))
return false;

base::FileEnumerator enumerator(extension_path_,
true, /* recursive */
base::FileEnumerator::FILES);
@@ -310,6 +347,12 @@ bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) {
const base::FilePath& full_path = *i;
base::FilePath relative_path;
extension_path_.AppendRelativePath(full_path, &relative_path);

const std::string* expected_root =
verified_contents.GetTreeHashRoot(relative_path);
if (!expected_root)
continue;

std::string contents;
if (!base::ReadFileToString(full_path, &contents)) {
LOG(ERROR) << "Could not read " << full_path.MaybeAsASCII();
@@ -339,6 +382,14 @@ bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) {
// Get ready for next iteration.
offset += bytes_to_read;
}
std::string root =
ComputeTreeHashRoot(hashes, block_size_ / crypto::kSHA256Length);
if (expected_root && *expected_root != root) {
VLOG(1) << "content mismatch for " << relative_path.AsUTF8Unsafe();
hash_mismatch_paths_.insert(relative_path);
continue;
}

writer.AddHashes(relative_path, block_size_, hashes);
}
return writer.WriteToFile(hashes_file);
@@ -356,9 +407,11 @@ void ContentHashFetcherJob::DispatchCallback() {
// ----

ContentHashFetcher::ContentHashFetcher(content::BrowserContext* context,
ContentVerifierDelegate* delegate)
ContentVerifierDelegate* delegate,
const FetchCallback& callback)
: context_(context),
delegate_(delegate),
fetch_callback_(callback),
observer_(this),
weak_ptr_factory_(this) {
}
@@ -374,13 +427,22 @@ void ContentHashFetcher::Start() {
observer_.Add(registry);
}

void ContentHashFetcher::DoFetch(const Extension* extension) {
void ContentHashFetcher::DoFetch(const Extension* extension, bool force) {
if (!extension || !delegate_->ShouldBeVerified(*extension))
return;

IdAndVersion key(extension->id(), extension->version()->GetString());
if (ContainsKey(jobs_, key))
return;
JobMap::iterator found = jobs_.find(key);
if (found != jobs_.end()) {
if (!force || found->second->force()) {
// Just let the existing job keep running.
return;
} else {
// Kill the existing non-force job, so we can start a new one below.
found->second->Cancel();
jobs_.erase(found);
}
}

// TODO(asargent) - we should do something here to remember recent attempts
// to fetch signatures by extension id, and use exponential backoff to avoid
@@ -392,9 +454,11 @@
delegate_->GetSignatureFetchUrl(extension->id(), *extension->version());
ContentHashFetcherJob* job =
new ContentHashFetcherJob(context_->GetRequestContext(),
delegate_->PublicKey(),
extension->id(),
extension->path(),
url,
force,
base::Bind(&ContentHashFetcher::JobFinished,
weak_ptr_factory_.GetWeakPtr()));
jobs_.insert(std::make_pair(key, job));
@@ -405,7 +469,7 @@ void ContentHashFetcher::OnExtensionLoaded(
content::BrowserContext* browser_context,
const Extension* extension) {
CHECK(extension);
DoFetch(extension);
DoFetch(extension, false);
}

void ContentHashFetcher::OnExtensionUnloaded(
Expand All @@ -415,11 +479,20 @@ void ContentHashFetcher::OnExtensionUnloaded(
CHECK(extension);
IdAndVersion key(extension->id(), extension->version()->GetString());
JobMap::iterator found = jobs_.find(key);
if (found != jobs_.end())
if (found != jobs_.end()) {
found->second->Cancel();
jobs_.erase(found);
}
}

void ContentHashFetcher::JobFinished(ContentHashFetcherJob* job) {
if (!job->IsCancelled()) {
fetch_callback_.Run(job->extension_id(),
job->success(),
job->force(),
job->hash_mismatch_paths());
}

for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) {
if (i->second.get() == job) {
jobs_.erase(i);
25 changes: 22 additions & 3 deletions extensions/browser/content_hash_fetcher.h
@@ -5,6 +5,11 @@
#ifndef EXTENSIONS_BROWSER_CONTENT_HASH_FETCHER_H_
#define EXTENSIONS_BROWSER_CONTENT_HASH_FETCHER_H_

#include <set>
#include <string>

#include "base/callback.h"
#include "base/files/file_path.h"
#include "base/memory/weak_ptr.h"
#include "base/scoped_observer.h"
#include "extensions/browser/content_verifier_delegate.h"
@@ -28,18 +33,31 @@ class ContentHashFetcherJob;
// sure they match the signed treehash root hash).
class ContentHashFetcher : public ExtensionRegistryObserver {
public:
// A callback for when a fetch is complete. This reports back:
// -extension id
// -whether we were successful or not (have verified_contents.json and
//  computed_hashes.json files)
// -was it a forced check?
// -a set of paths whose contents didn't match expected values
typedef base::Callback<
void(const std::string&, bool, bool, const std::set<base::FilePath>&)>
FetchCallback;

// The consumer of this class needs to ensure that context and delegate
// outlive this object.
ContentHashFetcher(content::BrowserContext* context,
ContentVerifierDelegate* delegate);
ContentVerifierDelegate* delegate,
const FetchCallback& callback);
virtual ~ContentHashFetcher();

// Begins the process of trying to fetch any needed verified contents, and
// listening for extension load/unload.
void Start();

// Explicitly ask to fetch hashes for |extension|.
void DoFetch(const Extension* extension);
// Explicitly ask to fetch hashes for |extension|. If |force| is true,
// we will always check the validity of the verified_contents.json and
// re-check the contents of the files in the filesystem.
void DoFetch(const Extension* extension, bool force);

// ExtensionRegistryObserver interface
virtual void OnExtensionLoaded(content::BrowserContext* browser_context,
@@ -55,6 +73,7 @@ class ContentHashFetcher : public ExtensionRegistryObserver {

content::BrowserContext* context_;
ContentVerifierDelegate* delegate_;
FetchCallback fetch_callback_;

// We keep around pointers to in-progress jobs, both so we can avoid
// scheduling duplicate work if fetching is already in progress, and so that

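Editor's sketch of the FetchCallback contract added in content_hash_fetcher.h above, for illustration only: it uses std::function and std::string in place of base::Callback and base::FilePath so it compiles standalone, and the handler name is invented. The real callback carries the extension id, overall success, whether the check was forced, and the set of mismatched paths:

#include <functional>
#include <iostream>
#include <set>
#include <string>

// Mirrors the shape of ContentHashFetcher::FetchCallback with standard types.
using FetchCallback = std::function<void(
    const std::string& extension_id, bool success, bool was_force_check,
    const std::set<std::string>& mismatched_paths)>;

int main() {
  FetchCallback on_fetch_complete = [](const std::string& id, bool success,
                                       bool force,
                                       const std::set<std::string>& paths) {
    std::cout << "fetch for " << id << (success ? " succeeded" : " failed")
              << (force ? " (forced)" : "") << ", " << paths.size()
              << " mismatched file(s)\n";
  };
  // A consumer (ContentVerifier in the real tree) would pass this callback
  // to the fetcher's constructor; here we just invoke it directly.
  on_fetch_complete("abcdefgh", true, false, {"script.js"});
  return 0;
}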