Merge master

reviewable/pr13058/r5
Yash Tibrewal, 7 years ago, commit 5cce8cf2c3
  1. src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc (4 changes)
  2. tools/interop_matrix/client_matrix.py (23 changes)
  3. tools/interop_matrix/run_interop_matrix_tests.py (13 changes)
  4. tools/jenkins/run_full_performance.sh (2 changes)

@@ -1236,12 +1236,12 @@ static void glb_notify_on_state_change_locked(grpc_lb_policy* pol,
 static void lb_call_on_retry_timer_locked(void* arg, grpc_error* error) {
   glb_lb_policy* glb_policy = (glb_lb_policy*)arg;
   glb_policy->retry_timer_active = false;
-  if (!glb_policy->shutting_down && error == GRPC_ERROR_NONE) {
+  if (!glb_policy->shutting_down && glb_policy->lb_call == NULL &&
+      error == GRPC_ERROR_NONE) {
     if (GRPC_TRACER_ON(grpc_lb_glb_trace)) {
       gpr_log(GPR_INFO, "Restaring call to LB server (grpclb %p)",
               (void*)glb_policy);
     }
-    GPR_ASSERT(glb_policy->lb_call == NULL);
     query_for_backends_locked(glb_policy);
   }
   GRPC_LB_POLICY_WEAK_UNREF(&glb_policy->base, "grpclb_retry_timer");
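
The hunk above replaces the GPR_ASSERT with a condition in the guard itself: the retry-timer callback now restarts the call to the LB server only when no lb_call is already in flight, instead of asserting that this is the case. A rough Python sketch of the same guard pattern is below; the names are hypothetical and this is illustrative only, not the gRPC C-core code (which is the C shown above).

    # Illustrative sketch of the retry-timer guard; hypothetical names.
    class GrpclbPolicy:
        def __init__(self):
            self.shutting_down = False
            self.retry_timer_active = False
            self.lb_call = None  # in-flight call to the LB server, if any

        def on_retry_timer(self, error):
            self.retry_timer_active = False
            # Restart the balancer call only if we are not shutting down,
            # no call is already in flight, and the timer fired cleanly
            # (i.e. it was not cancelled with an error).
            if not self.shutting_down and self.lb_call is None and error is None:
                self.lb_call = self.query_for_backends()

        def query_for_backends(self):
            # Placeholder for starting a new call to the load balancer.
            return object()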

@@ -27,7 +27,7 @@ LANG_RUNTIME_MATRIX = {
     'cxx': ['cxx'],  # This is actually debian8.
     'go': ['go1.7', 'go1.8'],
     'java': ['java_oracle8'],
-    'python': ['python'],
+    #'python': ['python'],  # All python versions fail the tests due to timeout.
     'node': ['node'],
     'ruby': ['ruby'],
     'php': ['php', 'php7'],
@@ -64,14 +64,14 @@ LANG_RELEASE_MATRIX = {
         'v1.6.1',
         'v1.7.0',
     ],
-    'python': [
+    #'python': [
         #'v1.0.x', #Fail to run the test. #13230.
-        'v1.1.4',
-        'v1.2.5',
-        'v1.3.9',
-        'v1.4.2',
-        'v1.6.6',
-    ],
+    #    'v1.1.4',
+    #    'v1.2.5',
+    #    'v1.3.9',
+    #    'v1.4.2',
+    #    'v1.6.6',
+    #],
     'node': [
         'v1.0.1',
         'v1.1.4',
@@ -97,11 +97,10 @@ LANG_RELEASE_MATRIX = {
         'v1.6.6',
     ],
     'csharp': [
-        # Fail to build images due to #13278.
         #'v1.0.1',
-        #'v1.1.4',
-        #'v1.2.5',
-        #'v1.3.9',
+        #'v1.1.4', Fail to build.
+        #'v1.2.5', Fail to run test with csharp image.
+        'v1.3.9',
         'v1.4.2',
         'v1.6.6',
     ],
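
The two dictionaries edited above, LANG_RUNTIME_MATRIX and LANG_RELEASE_MATRIX, drive which docker images the interop matrix enumerates, so commenting an entry out drops that language, runtime, or release from the run. A rough sketch of how such a matrix can be expanded into image tags is below; the image-name format and the enumerate_images helper are assumptions for illustration, not the script's actual code.

    # Hypothetical sketch; the real naming scheme lives in the interop_matrix tools.
    LANG_RUNTIME_MATRIX = {
        'go': ['go1.7', 'go1.8'],
        # 'python': ['python'],  # disabled: all python versions time out
    }

    LANG_RELEASE_MATRIX = {
        'go': ['v1.0.1', 'v1.7.0'],
    }

    def enumerate_images(gcr_path):
        """Yield one image tag per (runtime, release) pair for every enabled language."""
        for lang, runtimes in LANG_RUNTIME_MATRIX.items():
            for runtime in runtimes:
                for release in LANG_RELEASE_MATRIX.get(lang, []):
                    yield '%s/grpc_interop_%s:%s' % (gcr_path, runtime, release)

    for image in enumerate_images('gcr.io/grpc-testing'):
        print(image)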

@@ -83,6 +83,8 @@ argp.add_argument('--bq_result_table',
 args = argp.parse_args()
+print(str(args))
+

 def find_all_images_for_lang(lang):
   """Find docker images for a language across releases and runtimes.
@@ -170,7 +172,6 @@ def run_tests_for_lang(lang, runtime, images):
       jobset.message('START', 'Testing %s' % image, do_newline=True)
       # Download the docker image before running each test case.
       subprocess.check_call(['gcloud', 'docker', '--', 'pull', image])
-      _docker_images_cleanup.append(image)
       suite_name = '%s__%s_%s' % (lang, runtime, release)
       job_spec_list = find_test_cases(lang, runtime, release, suite_name)
@ -198,16 +199,16 @@ def run_tests_for_lang(lang, runtime, images):
suite_name,
str(uuid.uuid4()))
if not args.keep:
cleanup(image)
return total_num_failures
_docker_images_cleanup = []
def cleanup():
if not args.keep:
for image in _docker_images_cleanup:
def cleanup(image):
jobset.message('START', 'Cleanup docker image %s' % image, do_newline=True)
dockerjob.remove_image(image, skip_nonexistent=True)
atexit.register(cleanup)
languages = args.language if args.language != ['all'] else _LANGUAGES
total_num_failures = 0
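
The net effect of the run_interop_matrix_tests.py hunks above is that images are no longer accumulated in _docker_images_cleanup for a single atexit pass; instead each image is pulled, tested, and then removed right away unless --keep is passed. A condensed sketch of that per-image flow is below; run_one_image and the argument handling are simplifications standing in for the jobset/dockerjob wrappers, not the script's exact code.

    import subprocess

    # Simplified sketch of the per-image flow; jobset/dockerjob wrappers are stubbed.
    def cleanup(image, keep=False):
        if keep:
            return
        print('Cleanup docker image %s' % image)
        subprocess.call(['docker', 'rmi', '-f', image])

    def run_tests_for_lang(images, keep=False):
        total_num_failures = 0
        for image in images:
            # Download the docker image before running each test case.
            subprocess.check_call(['gcloud', 'docker', '--', 'pull', image])
            try:
                total_num_failures += run_one_image(image)  # placeholder for the jobset run
            finally:
                # Remove the image as soon as its tests finish, instead of
                # deferring all removals to an atexit handler.
                cleanup(image, keep)
        return total_num_failures

    def run_one_image(image):
        return 0  # stub for running the test cases against one image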

@@ -21,7 +21,7 @@ cd $(dirname $0)/../..
 # run 8core client vs 8core server
 tools/run_tests/run_performance_tests.py \
-    -l c++ csharp node ruby java python go node_express php7 php7_protobuf_c \
+    -l c++ csharp ruby java python go php7 php7_protobuf_c \
     --netperf \
     --category scalable \
     --bq_result_table performance_test.performance_experiment \
