author     Alan Jeffrey <ajeffrey@mozilla.com>  2017-12-06 21:53:34 -0600
committer  Alan Jeffrey <ajeffrey@mozilla.com>  2017-12-06 21:57:08 -0600
commit     ee766b5e162cb4b151d58d8b8b36a528a3ea604e (patch)
tree       2c4a5a58e6fe17bc3bc820a7614ca3c81fce165c
parent     6aae59e7e50531f45d78495cf07970001ef05f86 (diff)
download   servo-ee766b5e162cb4b151d58d8b8b36a528a3ea604e.tar.gz
           servo-ee766b5e162cb4b151d58d8b8b36a528a3ea604e.zip
Add a --date option to test-perf.
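The new flag lets callers pin the datestamp that ends up in the perf CSV instead of always stamping results with the current day; when it is omitted, the runner still falls back to today's date. A minimal sketch of the defaulting behaviour this commit adds to runner.py, using a hypothetical command line:

    import argparse
    from datetime import datetime

    # Fallback used by runner.py: today's date, in the same format as --date.
    DATE = datetime.now().strftime("%Y-%m-%d")

    parser = argparse.ArgumentParser()
    parser.add_argument("--date",
                        type=str,
                        default=None,
                        help="the date to use in the CSV file.")

    args = parser.parse_args(["--date", "2017-12-06"])  # hypothetical invocation
    date = args.date or DATE
    print(date)  # 2017-12-06; omitting --date would print today's date instead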
 -rw-r--r--  etc/ci/performance/gecko_driver.py    2
 -rw-r--r--  etc/ci/performance/runner.py         25
 -rwxr-xr-x  etc/ci/performance/test_all.sh       10
 -rw-r--r--  python/servo/testing_commands.py      6
4 files changed, 29 insertions, 14 deletions
diff --git a/etc/ci/performance/gecko_driver.py b/etc/ci/performance/gecko_driver.py
index 9dec4dfda3b..99d041548ce 100644
--- a/etc/ci/performance/gecko_driver.py
+++ b/etc/ci/performance/gecko_driver.py
@@ -71,7 +71,7 @@ def generate_placeholder(testcase):
return [timings]
-def run_gecko_test(testcase, url, timeout, is_async):
+def run_gecko_test(testcase, url, date, timeout, is_async):
with create_gecko_session() as driver:
driver.set_page_load_timeout(timeout)
try:
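The only change to the gecko driver is the extra date parameter; judging by the hunk it exists so both engines expose the same call shape and runner.main can invoke either one through run_test. A rough, self-contained sketch of that shared signature (the bodies and values below are stand-ins, not the real drivers):

    def run_servo_test(testcase, url, date, timeout, is_async):
        return [{"testcase": testcase, "date": date}]  # stand-in result

    def run_gecko_test(testcase, url, date, timeout, is_async):
        return [{"testcase": testcase, "date": date}]  # stand-in result

    run_test = run_gecko_test  # runner.main picks this when --engine gecko is used
    results = run_test("about:blank", "http://localhost:8123/about:blank",
                       "2017-12-06", 300, False)
    print(results)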
diff --git a/etc/ci/performance/runner.py b/etc/ci/performance/runner.py
index 7b895f11ca0..e3ffe31b456 100644
--- a/etc/ci/performance/runner.py
+++ b/etc/ci/performance/runner.py
@@ -17,7 +17,7 @@ from statistics import median, StatisticsError
from urllib.parse import urlsplit, urlunsplit, urljoin
-DATE = datetime.now().strftime("%Y%m%d")
+DATE = datetime.now().strftime("%Y-%m-%d")
MACHINE = platform.machine()
SYSTEM = platform.system()
@@ -66,11 +66,11 @@ def execute_test(url, command, timeout):
return ""
-def run_servo_test(testcase, url, timeout, is_async):
+def run_servo_test(testcase, url, date, timeout, is_async):
if is_async:
print("Servo does not support async test!")
# Return a placeholder
- return parse_log("", testcase, url)
+ return parse_log("", testcase, url, date)
ua_script_path = "{}/user-agent-js".format(os.getcwd())
command = [
@@ -92,10 +92,10 @@ def run_servo_test(testcase, url, timeout, is_async):
))
except subprocess.TimeoutExpired:
print("Test FAILED due to timeout: {}".format(testcase))
- return parse_log(log, testcase, url)
+ return parse_log(log, testcase, url, date)
-def parse_log(log, testcase, url):
+def parse_log(log, testcase, url, date):
blocks = []
block = []
copy = False
@@ -149,7 +149,7 @@ def parse_log(log, testcase, url):
return {
"system": SYSTEM,
"machine": MACHINE,
- "date": DATE,
+ "date": date,
"testcase": testcase,
"title": "",
"navigationStart": 0,
@@ -177,15 +177,15 @@ def parse_log(log, testcase, url):
# Set the testcase field to contain the original testcase name,
# rather than the url.
- def set_testcase(timing, testcase=None):
+ def set_testcase(timing, testcase=None, date=None):
timing['testcase'] = testcase
timing['system'] = SYSTEM
timing['machine'] = MACHINE
- timing['date'] = DATE
+ timing['date'] = date
return timing
valid_timing_for_case = partial(valid_timing, url=url)
- set_testcase_for_case = partial(set_testcase, testcase=testcase)
+ set_testcase_for_case = partial(set_testcase, testcase=testcase, date=date)
timings = list(map(set_testcase_for_case, filter(valid_timing_for_case, map(parse_block, blocks))))
if len(timings) == 0:
@@ -329,6 +329,10 @@ def main():
default=300, # 5 min
help=("kill the test if not finished in time (sec)."
" Default: 5 min"))
+ parser.add_argument("--date",
+ type=str,
+ default=None, # defaults to today's date
+ help=("the date to use in the CSV file."))
parser.add_argument("--engine",
type=str,
default='servo',
@@ -340,6 +344,7 @@ def main():
elif args.engine == 'gecko':
import gecko_driver # Load this only when we need gecko test
run_test = gecko_driver.run_gecko_test
+ date = args.date or DATE
try:
# Assume the server is up and running
testcases = load_manifest(args.tp5_manifest)
@@ -352,7 +357,7 @@ def main():
url))
# results will be a mixture of timing dicts and testcase strings
# testcase string indicates a failed test
- results += run_test(testcase, url, args.timeout, is_async)
+ results += run_test(testcase, url, date, args.timeout, is_async)
print("Finished")
# TODO: Record and analyze other performance.timing properties
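Within parse_log, the requested date is stamped onto each timing dict the same way the testcase name already was: set_testcase gains a date keyword and functools.partial binds both before the function is mapped over the parsed blocks. A simplified sketch of that pattern with made-up timing data:

    from functools import partial

    def set_testcase(timing, testcase=None, date=None):
        timing["testcase"] = testcase
        timing["date"] = date
        return timing

    timings = [{"navigationStart": 1}, {"navigationStart": 2}]  # made-up blocks

    set_testcase_for_case = partial(set_testcase,
                                    testcase="about:blank", date="2017-12-06")
    stamped = list(map(set_testcase_for_case, timings))
    # Each dict now carries the testcase name and the requested date.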
diff --git a/etc/ci/performance/test_all.sh b/etc/ci/performance/test_all.sh
index 6602bc1f20e..e5e4f6dae08 100755
--- a/etc/ci/performance/test_all.sh
+++ b/etc/ci/performance/test_all.sh
@@ -14,6 +14,7 @@ set -o pipefail
# https://groups.google.com/forum/#!topic/mozilla.dev.servo/JlAZoRgcnpA
port="8123"
base="http://localhost:${port}"
+date="$(date +%Y-%m-%d)"
while (( "${#}" ))
do
@@ -33,6 +34,10 @@ case "${1}" in
base="${2}"
shift
;;
+ --date)
+ date="${2}"
+ shift
+ ;;
*)
echo "Unknown option ${1}."
exit
@@ -56,11 +61,12 @@ trap 'kill $(jobs -pr)' SIGINT SIGTERM EXIT
# MANIFEST="page_load_test/tp5n/20160509.manifest"
MANIFEST="page_load_test/test.manifest" # A manifest that excludes
# timeout test cases
-PERF_KEY="perf-$(uname -s)-$(uname -m)-$(date +%s).csv"
+PERF_KEY="perf-$(uname -s)-$(uname -m)-${date}.csv"
PERF_FILE="output/${PERF_KEY}"
echo "Running tests"
-python3 runner.py ${engine} --runs 4 --timeout "${timeout}" --base "${base}" \
+python3 runner.py ${engine} --runs 4 --timeout "${timeout}" \
+ --base "${base}" --date "${date}" \
"${MANIFEST}" "${PERF_FILE}"
if [[ "${submit:-}" ]];
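A side effect of deriving PERF_KEY from the calendar date rather than `date +%s` is that the output filename is now stable for a given day, and the shell default matches the %Y-%m-%d format runner.py uses. A small illustration of the resulting key (the Linux/x86_64 values stand in for `uname -s` and `uname -m`):

    from datetime import datetime

    stamp = datetime.now().strftime("%Y-%m-%d")  # same shape as `date +%Y-%m-%d`
    perf_key = "perf-{}-{}-{}.csv".format("Linux", "x86_64", stamp)
    print(perf_key)  # e.g. perf-Linux-x86_64-2017-12-06.csv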
diff --git a/python/servo/testing_commands.py b/python/servo/testing_commands.py
index a72b6ef8f70..60839938465 100644
--- a/python/servo/testing_commands.py
+++ b/python/servo/testing_commands.py
@@ -171,9 +171,11 @@ class MachCommands(CommandBase):
category='testing')
@CommandArgument('--base', default=None,
help="the base URL for testcases")
+ @CommandArgument('--date', default=None,
+ help="the datestamp for the data")
@CommandArgument('--submit', '-a', default=False, action="store_true",
help="submit the data to perfherder")
- def test_perf(self, base=None, submit=False):
+ def test_perf(self, base=None, date=None, submit=False):
self.set_software_rendering_env(True)
self.ensure_bootstrapped()
@@ -181,6 +183,8 @@ class MachCommands(CommandBase):
cmd = ["bash", "test_perf.sh"]
if base:
cmd += ["--base", base]
+ if date:
+ cmd += ["--date", date]
if submit:
cmd += ["--submit"]
return call(cmd,
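The mach command only forwards --date when it was actually supplied, so the shell script's own default (today's date) still applies otherwise. A rough sketch of the command list test_perf builds, with hypothetical argument values:

    # Mirrors the flag forwarding in test_perf(); the values are hypothetical.
    base, date, submit = None, "2017-12-06", True

    cmd = ["bash", "test_perf.sh"]
    if base:
        cmd += ["--base", base]
    if date:
        cmd += ["--date", date]
    if submit:
        cmd += ["--submit"]
    print(cmd)  # ['bash', 'test_perf.sh', '--date', '2017-12-06', '--submit']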