Skip to content

Commit

Permalink
Convert dom_perf into a telemetry test.
Browse files Browse the repository at this point in the history
BUG=230435

NOTRY=true

Review URL: https://chromiumcodereview.appspot.com/15065003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@199545 0039d316-1c4b-4281-b951-d872f2087c98
  • Loading branch information
chrisgao@chromium.org committed May 10, 2013
1 parent d2414bb commit 7a5121f
Show file tree
Hide file tree
Showing 2 changed files with 76 additions and 0 deletions.
15 changes: 15 additions & 0 deletions tools/perf/page_sets/dom_perf.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
{
"description": "Dom Perf benchmark",
"pages": [
{ "url": "file:///../../../data/dom_perf/run.html?run=Accessors&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=CloneNodes&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=CreateNodes&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=DOMDivWalk&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=DOMTable&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=DOMWalk&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=Events&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=Get+Elements&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=GridSort&reportInJS=1" },
{ "url": "file:///../../../data/dom_perf/run.html?run=Template&reportInJS=1" }
]
}
61 changes: 61 additions & 0 deletions tools/perf/perf_tools/dom_perf.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import math

from telemetry.core import util
from telemetry.page import page_measurement


def _GeometricMean(values):
"""Compute a rounded geometric mean from an array of values."""
if not values:
return None
# To avoid infinite value errors, make sure no value is less than 0.001.
new_values = []
for value in values:
if value > 0.001:
new_values.append(value)
else:
new_values.append(0.001)
# Compute the sum of the log of the values.
log_sum = sum(map(math.log, new_values))
# Raise e to that sum over the number of values.
mean = math.pow(math.e, (log_sum / len(new_values)))
# Return the rounded mean.
return int(round(mean))


# Trace label and unit string shared by the per-page results and the
# page-set summary so they land on the same graph.
SCORE_UNIT = 'score (bigger is better)'
SCORE_TRACE_NAME = 'score'


class DomPerf(page_measurement.PageMeasurement):
  """Runs the dom_perf benchmark pages and collects their scores.

  Each page in the set runs one dom_perf sub-benchmark and publishes its
  result via a cookie and a JS global; the summary over the whole page
  set is the geometric mean of the per-page scores.
  """

  @property
  def results_are_the_same_on_every_page(self):
    # Every page runs a different sub-benchmark, so per-page results differ.
    return False

  def WillNavigateToPage(self, page, tab):
    # Clear the completion flag before navigating; the benchmark page
    # sets it to '1' when it finishes.
    tab.EvaluateJavaScript('document.cookie = "__domperf_finished=0"')

  def MeasurePage(self, page, tab, results):
    # Block (up to 600s, polling every 5s) until the page signals
    # completion through the cookie.
    util.WaitFor(
        lambda: tab.GetCookieByName('__domperf_finished') == '1',
        600, poll_interval=5)

    report = json.loads(tab.EvaluateJavaScript('__domperf_result'))
    for suite in report['BenchmarkSuites']:
      # Skip benchmarks that we didn't actually run this time around.
      if not suite['Benchmarks'] and not suite['score']:
        continue
      results.Add(SCORE_TRACE_NAME, SCORE_UNIT, suite['score'],
                  suite['name'], 'unimportant')

  def DidRunPageSet(self, tab, results):
    # Now give the geometric mean as the total for the combined runs.
    per_page_scores = [result[SCORE_TRACE_NAME].output_value
                       for result in results.page_results]
    results.AddSummary(SCORE_TRACE_NAME, SCORE_UNIT,
                       _GeometricMean(per_page_scores), 'Total')

0 comments on commit 7a5121f

Please sign in to comment.