Merge pull request #1886 from dgquintas/run_tests_shuffling

Improvements to the way we randomize test runs.
pull/1892/head
Craig Tiller 10 years ago
commit 82c8d6de1e
Changed files:
  tools/run_tests/jobset.py (40 lines changed)
  tools/run_tests/run_tests.py (22 lines changed)

tools/run_tests/jobset.py
@@ -33,7 +33,6 @@ import hashlib
 import multiprocessing
 import os
 import platform
-import random
 import signal
 import subprocess
 import sys
@@ -59,40 +58,6 @@ else:
   signal.signal(signal.SIGALRM, alarm_handler)
-def shuffle_iteratable(it):
-  """Return an iterable that randomly walks it"""
-  # take a random sampling from the passed in iterable
-  # we take an element with probability 1/p and rapidly increase
-  # p as we take elements - this gives us a somewhat random set of values before
-  # we've seen all the values, but starts producing values without having to
-  # compute ALL of them at once, allowing tests to start a little earlier
-  LARGE_THRESHOLD = 1000
-  nextit = []
-  p = 1
-  for val in it:
-    if random.randint(0, p) == 0:
-      p = min(p*2, 100)
-      yield val
-    else:
-      nextit.append(val)
-    # if the input iterates over a large number of values (potentially
-    # infinite, we'd be in the loop for a while (again, potentially forever).
-    # We need to reset "nextit" every so often to, in the case of an infinite
-    # iterator, avoid growing "nextit" without ever freeing it.
-    if len(nextit) > LARGE_THRESHOLD:
-      random.shuffle(nextit)
-      for val in nextit:
-        yield val
-      nextit = []
-      p = 1
-  # after taking a random sampling, we shuffle the rest of the elements and
-  # yield them
-  random.shuffle(nextit)
-  for val in nextit:
-    yield val
 _SUCCESS = object()
 _FAILURE = object()
 _RUNNING = object()
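
For reference, the sampling trick in the deleted helper is easiest to see in isolation. Below is a minimal standalone sketch, not the repo's code: the function name sample_then_shuffle and the range(20) driver are made up, and the LARGE_THRESHOLD reset for unbounded inputs is omitted. Early values are emitted with a chance of roughly 1/p, p ramps up quickly, and whatever was held back is shuffled and flushed at the end.

import random

def sample_then_shuffle(it):
  """Yield the values of `it` in a loosely randomized order, starting early."""
  held_back = []
  p = 1
  for val in it:
    if random.randint(0, p) == 0:   # roughly a 1/p chance of emitting now
      p = min(p * 2, 100)           # quickly back off to ~1% sampling
      yield val
    else:
      held_back.append(val)
  random.shuffle(held_back)         # flush the remainder in random order
  for val in held_back:
    yield val

print(list(sample_then_shuffle(range(20))))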
@@ -358,11 +323,6 @@ def run(cmdlines,
               maxjobs if maxjobs is not None else _DEFAULT_MAX_JOBS,
               newline_on_success, travis, stop_on_failure,
               cache if cache is not None else NoCache())
-  # We can't sort an infinite sequence of runs.
-  if not travis or infinite_runs:
-    cmdlines = shuffle_iteratable(cmdlines)
-  else:
-    cmdlines = sorted(cmdlines, key=lambda x: x.shortname)
   for cmdline in cmdlines:
     if not js.start(cmdline):
       break
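
The deleted branch above existed because sorting and laziness do not mix: sorted() must exhaust its input before it can return anything, so it can never be applied to an infinite stream of runs, whereas a generator keeps yielding. A small hedged illustration, where itertools.count() is only a stand-in for an endless cmdlines stream:

import itertools

endless = itertools.count()                       # stand-in for infinite runs
# sorted(endless)                                 # would never return
first_ten = list(itertools.islice(endless, 10))   # lazy consumption is fine
print(first_ten)                                  # [0, 1, 2, ..., 9]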

tools/run_tests/run_tests.py
@@ -36,16 +36,16 @@ import itertools
 import json
 import multiprocessing
 import os
+import platform
+import random
 import re
+import subprocess
 import sys
 import time
-import platform
-import subprocess
 import jobset
 import watch_dirs
 ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
 os.chdir(ROOT)
@@ -488,13 +488,21 @@ def _build_and_run(check_cancelled, newline_on_success, travis, cache):
     return 1
   # start antagonists
   antagonists = [subprocess.Popen(['tools/run_tests/antagonist.py'])
                  for _ in range(0, args.antagonists)]
   try:
     infinite_runs = runs_per_test == 0
-    # run all the tests
-    runs_sequence = (itertools.repeat(one_run) if infinite_runs
-                     else itertools.repeat(one_run, runs_per_test))
+    # When running on travis, we want our test runs to be as similar as possible
+    # for reproducibility purposes.
+    if travis:
+      massaged_one_run = sorted(one_run, key=lambda x: x.shortname)
+    else:
+      # whereas otherwise, we want to shuffle things up to give all tests a
+      # chance to run.
+      massaged_one_run = list(one_run)  # random.shuffle needs an indexable seq.
+      random.shuffle(massaged_one_run)  # which it modifies in-place.
+    runs_sequence = (itertools.repeat(massaged_one_run) if infinite_runs
+                     else itertools.repeat(massaged_one_run, runs_per_test))
     all_runs = itertools.chain.from_iterable(runs_sequence)
     if not jobset.run(all_runs, check_cancelled,
                       newline_on_success=newline_on_success, travis=travis,
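
The net effect of the new block is easiest to see with toy data. In this sketch, Spec and the three shortnames are invented for illustration; only the sorted-vs-shuffled branch and the repeat/chain plumbing mirror the code above. Note that the shuffle happens once per invocation, so every repeated run uses the same randomized order.

import collections
import itertools
import random

Spec = collections.namedtuple('Spec', 'shortname')       # hypothetical job spec
one_run = {Spec('c_test'), Spec('node_test'), Spec('python_test')}

travis = False
runs_per_test = 2
infinite_runs = runs_per_test == 0

if travis:
  massaged_one_run = sorted(one_run, key=lambda x: x.shortname)  # reproducible
else:
  massaged_one_run = list(one_run)   # random.shuffle needs an indexable sequence
  random.shuffle(massaged_one_run)   # shuffled in place, once per invocation

runs_sequence = (itertools.repeat(massaged_one_run) if infinite_runs
                 else itertools.repeat(massaged_one_run, runs_per_test))
all_runs = itertools.chain.from_iterable(runs_sequence)   # flatten runs to specs
print([spec.shortname for spec in all_runs])              # 3 specs x 2 runs = 6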
