aboutsummaryrefslogtreecommitdiffstats
path: root/etc/ci/performance/runner.py
diff options
context:
space:
mode:
authorbors-servo <lbergstrom+bors@mozilla.com>2017-02-07 18:59:03 -0800
committerGitHub <noreply@github.com>2017-02-07 18:59:03 -0800
commite9933f36b771e9aad24b7abcac0ae81b49539d35 (patch)
tree20f79794987390267fd5a94256f329b50564d906 /etc/ci/performance/runner.py
parente2b494b1d08b8929ca7e5ae369304f41af81ace2 (diff)
parent7aa3350d4532e77be011167daeae83eeb5c00525 (diff)
downloadservo-e9933f36b771e9aad24b7abcac0ae81b49539d35.tar.gz
servo-e9933f36b771e9aad24b7abcac0ae81b49539d35.zip
Auto merge of #15067 - shinglyu:stylo-perf-async, r=Manishearth
Added async performance test <!-- Please describe your changes on the following line: --> Add a new way to test arbitrary timing from JavaScript (only for Gecko). This is for Stylo testing. --- <!-- Thank you for contributing to Servo! Please replace each `[ ]` by `[X]` when the step is complete, and replace `__` with appropriate data: --> - [x] `./mach build -d` does not report any errors - [x] `./mach test-tidy` does not report any errors - [ ] These changes fix #__ (github issue number if applicable). <!-- Either: --> - [x] There are tests for these changes OR - [ ] These changes do not require tests because _____ <!-- Pull requests that do not address these steps are welcome, but they will require additional verification as part of the review process. --> <!-- Reviewable:start --> --- This change is [<img src="https://reviewable.io/review_button.svg" height="34" align="absmiddle" alt="Reviewable"/>](https://reviewable.io/reviews/servo/servo/15067) <!-- Reviewable:end -->
Diffstat (limited to 'etc/ci/performance/runner.py')
-rw-r--r--etc/ci/performance/runner.py26
1 file changed, 19 insertions, 7 deletions
diff --git a/etc/ci/performance/runner.py b/etc/ci/performance/runner.py
index 414eb12aeab..864703de312 100644
--- a/etc/ci/performance/runner.py
+++ b/etc/ci/performance/runner.py
@@ -20,8 +20,15 @@ def load_manifest(filename):
def parse_manifest(text):
- return filter(lambda x: x != "" and not x.startswith("#"),
- map(lambda x: x.strip(), text.splitlines()))
+ lines = filter(lambda x: x != "" and not x.startswith("#"),
+ map(lambda x: x.strip(), text.splitlines()))
+ output = []
+ for line in lines:
+ if line.split(" ")[0] == "async":
+ output.append((line.split(" ")[1], True))
+ else:
+ output.append((line.split(" ")[0], False))
+ return output
def execute_test(url, command, timeout):
@@ -39,7 +46,12 @@ def execute_test(url, command, timeout):
return ""
-def run_servo_test(url, timeout):
+def run_servo_test(url, timeout, is_async):
+ if is_async:
+ print("Servo does not support async test!")
+ # Return a placeholder
+ return parse_log("", url)
+
ua_script_path = "{}/user-agent-js".format(os.getcwd())
command = [
"../../../target/release/servo", url,
@@ -157,7 +169,7 @@ def parse_log(log, testcase):
def filter_result_by_manifest(result_json, manifest):
filtered = []
- for name in manifest:
+ for name, is_async in manifest:
match = [tc for tc in result_json if tc['testcase'] == name]
if len(match) == 0:
raise Exception(("Missing test result: {}. This will cause a "
@@ -177,7 +189,7 @@ def take_result_median(result_json, expected_runs):
median_result = {}
for k, _ in group[0].items():
- if k == "testcase":
+ if k == "testcase" or k == "title":
median_result[k] = group[0][k]
else:
try:
@@ -257,14 +269,14 @@ def main():
# Assume the server is up and running
testcases = load_manifest(args.tp5_manifest)
results = []
- for testcase in testcases:
+ for testcase, is_async in testcases:
for run in range(args.runs):
print("Running test {}/{} on {}".format(run + 1,
args.runs,
testcase))
            # results will be a mixture of timings dict and testcase strings
# testcase string indicates a failed test
- results += run_test(testcase, args.timeout)
+ results += run_test(testcase, args.timeout, is_async)
print("Finished")
# TODO: Record and analyze other performance.timing properties