Merge pull request #6223 from sreecha/set_ulimit

Enable core-dumps for stress clients and servers when launching stress tests in GKE
pull/6240/head
Nicolas Noble 9 years ago
commit 2aec201200
  1. 6
      tools/gcp/stress_test/run_client.py
  2. 12
      tools/gcp/stress_test/run_server.py

@@ -31,6 +31,7 @@
import datetime
import os
import re
import resource
import select
import subprocess
import sys
@@ -89,6 +90,11 @@ def run_client():
examining logs). This is the reason why the script waits forever
in case of failures
"""
# Set the 'core file' size to 'unlimited' so that 'core' files are generated
# if the client crashes (Note: This is not relevant for Java and Go clients)
resource.setrlimit(resource.RLIMIT_CORE,
(resource.RLIM_INFINITY, resource.RLIM_INFINITY))
env = dict(os.environ)
image_type = env['STRESS_TEST_IMAGE_TYPE']
stress_client_cmd = env['STRESS_TEST_CMD'].split()

@@ -30,6 +30,7 @@
import datetime
import os
import resource
import select
import subprocess
import sys
@@ -56,6 +57,10 @@ def run_server():
might want to connect to the pod for examining logs). This is the
reason why the script waits forever in case of failures.
"""
# Set the 'core file' size to 'unlimited' so that 'core' files are generated
# if the server crashes (Note: This is not relevant for Java and Go servers)
resource.setrlimit(resource.RLIMIT_CORE,
(resource.RLIM_INFINITY, resource.RLIM_INFINITY))
# Read the parameters from environment variables
env = dict(os.environ)
@@ -78,9 +83,10 @@ def run_server():
logfile_name = env.get('LOGFILE_NAME')
print('pod_name: %s, project_id: %s, run_id: %s, dataset_id: %s, '
'summary_table_id: %s, qps_table_id: %s') % (
pod_name, project_id, run_id, dataset_id, summary_table_id,
qps_table_id)
'summary_table_id: %s, qps_table_id: %s') % (pod_name, project_id,
run_id, dataset_id,
summary_table_id,
qps_table_id)
bq_helper = BigQueryHelper(run_id, image_type, pod_name, project_id,
dataset_id, summary_table_id, qps_table_id)

Loading…
Cancel
Save