From 30d9eddef99e568355972d66126a1c6e36560728 Mon Sep 17 00:00:00 2001 From: DongHun Kwak Date: Wed, 23 Sep 2020 10:39:14 +0900 Subject: [PATCH] Imported Upstream version 1.28.2 --- BUILD | 2 +- CMakeLists.txt | 6 +- Makefile | 4 +- build.yaml | 2 +- gRPC-C++.podspec | 2 +- gRPC-Core.podspec | 2 +- gRPC-ProtoRPC.podspec | 2 +- gRPC-RxLibrary.podspec | 2 +- gRPC.podspec | 2 +- package.xml | 6 +- src/cpp/common/version_cc.cc | 2 +- src/csharp/Grpc.Core.Api/VersionInfo.cs | 4 +- src/csharp/build/dependencies.props | 2 +- src/csharp/build_unitypackage.bat | 2 +- .../!ProtoCompiler-gRPCCppPlugin.podspec | 2 +- src/objective-c/!ProtoCompiler-gRPCPlugin.podspec | 2 +- src/objective-c/GRPCClient/version.h | 2 +- src/objective-c/tests/version.h | 2 +- src/php/composer.json | 2 +- src/php/ext/grpc/version.h | 2 +- src/python/grpcio/grpc/_grpcio_metadata.py | 2 +- src/python/grpcio/grpc_version.py | 2 +- src/python/grpcio_channelz/grpc_version.py | 2 +- src/python/grpcio_health_checking/grpc_version.py | 2 +- src/python/grpcio_reflection/grpc_version.py | 2 +- src/python/grpcio_status/grpc_version.py | 2 +- src/python/grpcio_testing/grpc_version.py | 2 +- src/python/grpcio_tests/grpc_version.py | 2 +- src/ruby/lib/grpc/version.rb | 2 +- src/ruby/tools/version.rb | 2 +- tools/distrib/pylint_code.sh | 2 +- tools/distrib/python/grpcio_tools/grpc_version.py | 2 +- tools/doxygen/Doxyfile.c++ | 2 +- tools/doxygen/Doxyfile.c++.internal | 2 +- tools/doxygen/Doxyfile.objc | 2 +- tools/doxygen/Doxyfile.objc.internal | 2 +- .../linux/grpc_xds_bazel_python_test_in_docker.sh | 2 + .../linux/grpc_xds_bazel_test_in_docker.sh | 2 + tools/run_tests/run_xds_tests.py | 714 ++++++++++++--------- 39 files changed, 465 insertions(+), 337 deletions(-) diff --git a/BUILD b/BUILD index d1d6da3..db90a56 100644 --- a/BUILD +++ b/BUILD @@ -79,7 +79,7 @@ g_stands_for = "galactic" core_version = "9.0.0" -version = "1.28.1" +version = "1.28.2" GPR_PUBLIC_HDRS = [ "include/grpc/support/alloc.h", diff --git a/CMakeLists.txt b/CMakeLists.txt index f7d6191..ee93e52 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -25,12 +25,12 @@ cmake_minimum_required(VERSION 3.5.1) set(PACKAGE_NAME "grpc") -set(PACKAGE_VERSION "1.28.1") +set(PACKAGE_VERSION "1.28.2") set(gRPC_CORE_VERSION "9.0.0") set(gRPC_CORE_SOVERSION "9") -set(gRPC_CPP_VERSION "1.28.1") +set(gRPC_CPP_VERSION "1.28.2") set(gRPC_CPP_SOVERSION "1") -set(gRPC_CSHARP_VERSION "2.28.1") +set(gRPC_CSHARP_VERSION "2.28.2") set(gRPC_CSHARP_SOVERSION "2") set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}") set(PACKAGE_TARNAME "${PACKAGE_NAME}-${PACKAGE_VERSION}") diff --git a/Makefile b/Makefile index 19058aa..11ce476 100644 --- a/Makefile +++ b/Makefile @@ -470,8 +470,8 @@ Q = @ endif CORE_VERSION = 9.0.0 -CPP_VERSION = 1.28.1 -CSHARP_VERSION = 2.28.1 +CPP_VERSION = 1.28.2 +CSHARP_VERSION = 2.28.2 CPPFLAGS_NO_ARCH += $(addprefix -I, $(INCLUDES)) $(addprefix -D, $(DEFINES)) CPPFLAGS += $(CPPFLAGS_NO_ARCH) $(ARCH_FLAGS) diff --git a/build.yaml b/build.yaml index 0f5c953..a9abed7 100644 --- a/build.yaml +++ b/build.yaml @@ -15,7 +15,7 @@ settings: core_version: 9.0.0 csharp_major_version: 2 g_stands_for: galactic - version: 1.28.1 + version: 1.28.2 filegroups: - name: alts_tsi headers: diff --git a/gRPC-C++.podspec b/gRPC-C++.podspec index 63ba335..824b3ba 100644 --- a/gRPC-C++.podspec +++ b/gRPC-C++.podspec @@ -22,7 +22,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-C++' # TODO (mxyan): use version that match gRPC version when pod is stabilized - version = '1.28.1' + version = '1.28.2' 
s.version = version s.summary = 'gRPC C++ library' s.homepage = 'https://grpc.io' diff --git a/gRPC-Core.podspec b/gRPC-Core.podspec index 5fc1911..db4a7f9 100644 --- a/gRPC-Core.podspec +++ b/gRPC-Core.podspec @@ -21,7 +21,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-Core' - version = '1.28.1' + version = '1.28.2' s.version = version s.summary = 'Core cross-platform gRPC library, written in C' s.homepage = 'https://grpc.io' diff --git a/gRPC-ProtoRPC.podspec b/gRPC-ProtoRPC.podspec index dd1c4a4..06d22e3 100644 --- a/gRPC-ProtoRPC.podspec +++ b/gRPC-ProtoRPC.podspec @@ -21,7 +21,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-ProtoRPC' - version = '1.28.1' + version = '1.28.2' s.version = version s.summary = 'RPC library for Protocol Buffers, based on gRPC' s.homepage = 'https://grpc.io' diff --git a/gRPC-RxLibrary.podspec b/gRPC-RxLibrary.podspec index 03e136c..b36e2d7 100644 --- a/gRPC-RxLibrary.podspec +++ b/gRPC-RxLibrary.podspec @@ -21,7 +21,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-RxLibrary' - version = '1.28.1' + version = '1.28.2' s.version = version s.summary = 'Reactive Extensions library for iOS/OSX.' s.homepage = 'https://grpc.io' diff --git a/gRPC.podspec b/gRPC.podspec index 3487894..c9f3aeb 100644 --- a/gRPC.podspec +++ b/gRPC.podspec @@ -20,7 +20,7 @@ Pod::Spec.new do |s| s.name = 'gRPC' - version = '1.28.1' + version = '1.28.2' s.version = version s.summary = 'gRPC client library for iOS/OSX' s.homepage = 'https://grpc.io' diff --git a/package.xml b/package.xml index 0b5935d..7c28b6a 100644 --- a/package.xml +++ b/package.xml @@ -13,8 +13,8 @@ 2019-09-24 - 1.28.1 - 1.28.1 + 1.28.2 + 1.28.2 stable @@ -22,7 +22,7 @@ Apache 2.0 -- gRPC Core 1.28.1 update +- gRPC Core 1.28.2 update diff --git a/src/cpp/common/version_cc.cc b/src/cpp/common/version_cc.cc index 44ea226..4285a58 100644 --- a/src/cpp/common/version_cc.cc +++ b/src/cpp/common/version_cc.cc @@ -22,5 +22,5 @@ #include namespace grpc { -grpc::string Version() { return "1.28.1"; } +grpc::string Version() { return "1.28.2"; } } // namespace grpc diff --git a/src/csharp/Grpc.Core.Api/VersionInfo.cs b/src/csharp/Grpc.Core.Api/VersionInfo.cs index 2c34b9e..550fb0d 100644 --- a/src/csharp/Grpc.Core.Api/VersionInfo.cs +++ b/src/csharp/Grpc.Core.Api/VersionInfo.cs @@ -33,11 +33,11 @@ namespace Grpc.Core /// /// Current AssemblyFileVersion of gRPC C# assemblies /// - public const string CurrentAssemblyFileVersion = "2.28.1.0"; + public const string CurrentAssemblyFileVersion = "2.28.2.0"; /// /// Current version of gRPC C# /// - public const string CurrentVersion = "2.28.1"; + public const string CurrentVersion = "2.28.2"; } } diff --git a/src/csharp/build/dependencies.props b/src/csharp/build/dependencies.props index da53b77..736cb15 100644 --- a/src/csharp/build/dependencies.props +++ b/src/csharp/build/dependencies.props @@ -1,7 +1,7 @@ - 2.28.1 + 2.28.2 3.11.2 diff --git a/src/csharp/build_unitypackage.bat b/src/csharp/build_unitypackage.bat index a434386..192a249 100644 --- a/src/csharp/build_unitypackage.bat +++ b/src/csharp/build_unitypackage.bat @@ -13,7 +13,7 @@ @rem limitations under the License. 
@rem Current package versions -set VERSION=2.28.1 +set VERSION=2.28.2 @rem Adjust the location of nuget.exe set NUGET=C:\nuget\nuget.exe diff --git a/src/objective-c/!ProtoCompiler-gRPCCppPlugin.podspec b/src/objective-c/!ProtoCompiler-gRPCCppPlugin.podspec index 8a88ba2..5250ecf 100644 --- a/src/objective-c/!ProtoCompiler-gRPCCppPlugin.podspec +++ b/src/objective-c/!ProtoCompiler-gRPCCppPlugin.podspec @@ -42,7 +42,7 @@ Pod::Spec.new do |s| # exclamation mark ensures that other "regular" pods will be able to find it as it'll be installed # before them. s.name = '!ProtoCompiler-gRPCCppPlugin' - v = '1.28.1' + v = '1.28.2' s.version = v s.summary = 'The gRPC ProtoC plugin generates C++ files from .proto services.' s.description = <<-DESC diff --git a/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec b/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec index f6600b5..c6674b4 100644 --- a/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec +++ b/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec @@ -42,7 +42,7 @@ Pod::Spec.new do |s| # exclamation mark ensures that other "regular" pods will be able to find it as it'll be installed # before them. s.name = '!ProtoCompiler-gRPCPlugin' - v = '1.28.1' + v = '1.28.2' s.version = v s.summary = 'The gRPC ProtoC plugin generates Objective-C files from .proto services.' s.description = <<-DESC diff --git a/src/objective-c/GRPCClient/version.h b/src/objective-c/GRPCClient/version.h index c41c5e3..2c9a54b 100644 --- a/src/objective-c/GRPCClient/version.h +++ b/src/objective-c/GRPCClient/version.h @@ -22,4 +22,4 @@ // instead. This file can be regenerated from the template by running // `tools/buildgen/generate_projects.sh`. -#define GRPC_OBJC_VERSION_STRING @"1.28.1" +#define GRPC_OBJC_VERSION_STRING @"1.28.2" diff --git a/src/objective-c/tests/version.h b/src/objective-c/tests/version.h index b037458..21f78af 100644 --- a/src/objective-c/tests/version.h +++ b/src/objective-c/tests/version.h @@ -22,5 +22,5 @@ // instead. This file can be regenerated from the template by running // `tools/buildgen/generate_projects.sh`. -#define GRPC_OBJC_VERSION_STRING @"1.28.1" +#define GRPC_OBJC_VERSION_STRING @"1.28.2" #define GRPC_C_VERSION_STRING @"9.0.0" diff --git a/src/php/composer.json b/src/php/composer.json index 78ff79b..dcdd719 100644 --- a/src/php/composer.json +++ b/src/php/composer.json @@ -2,7 +2,7 @@ "name": "grpc/grpc-dev", "description": "gRPC library for PHP - for Development use only", "license": "Apache-2.0", - "version": "1.28.1", + "version": "1.28.2", "require": { "php": ">=5.5.0", "google/protobuf": "^v3.3.0" diff --git a/src/php/ext/grpc/version.h b/src/php/ext/grpc/version.h index 1ddc513..6213ece 100644 --- a/src/php/ext/grpc/version.h +++ b/src/php/ext/grpc/version.h @@ -20,6 +20,6 @@ #ifndef VERSION_H #define VERSION_H -#define PHP_GRPC_VERSION "1.28.1" +#define PHP_GRPC_VERSION "1.28.2" #endif /* VERSION_H */ diff --git a/src/python/grpcio/grpc/_grpcio_metadata.py b/src/python/grpcio/grpc/_grpcio_metadata.py index 83bfa2f..41294e2 100644 --- a/src/python/grpcio/grpc/_grpcio_metadata.py +++ b/src/python/grpcio/grpc/_grpcio_metadata.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc/_grpcio_metadata.py.template`!!! 
-__version__ = """1.28.1""" +__version__ = """1.28.2""" diff --git a/src/python/grpcio/grpc_version.py b/src/python/grpcio/grpc_version.py index 491b5c5..1348953 100644 --- a/src/python/grpcio/grpc_version.py +++ b/src/python/grpcio/grpc_version.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_version.py.template`!!! -VERSION = '1.28.1' +VERSION = '1.28.2' diff --git a/src/python/grpcio_channelz/grpc_version.py b/src/python/grpcio_channelz/grpc_version.py index d72530b..8c46676 100644 --- a/src/python/grpcio_channelz/grpc_version.py +++ b/src/python/grpcio_channelz/grpc_version.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_channelz/grpc_version.py.template`!!! -VERSION = '1.28.1' +VERSION = '1.28.2' diff --git a/src/python/grpcio_health_checking/grpc_version.py b/src/python/grpcio_health_checking/grpc_version.py index 19006b9..add2c16 100644 --- a/src/python/grpcio_health_checking/grpc_version.py +++ b/src/python/grpcio_health_checking/grpc_version.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_health_checking/grpc_version.py.template`!!! -VERSION = '1.28.1' +VERSION = '1.28.2' diff --git a/src/python/grpcio_reflection/grpc_version.py b/src/python/grpcio_reflection/grpc_version.py index aff4f53..e92f2c7 100644 --- a/src/python/grpcio_reflection/grpc_version.py +++ b/src/python/grpcio_reflection/grpc_version.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_reflection/grpc_version.py.template`!!! -VERSION = '1.28.1' +VERSION = '1.28.2' diff --git a/src/python/grpcio_status/grpc_version.py b/src/python/grpcio_status/grpc_version.py index 115020e..14ed74a 100644 --- a/src/python/grpcio_status/grpc_version.py +++ b/src/python/grpcio_status/grpc_version.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_status/grpc_version.py.template`!!! -VERSION = '1.28.1' +VERSION = '1.28.2' diff --git a/src/python/grpcio_testing/grpc_version.py b/src/python/grpcio_testing/grpc_version.py index 7601663..e895687 100644 --- a/src/python/grpcio_testing/grpc_version.py +++ b/src/python/grpcio_testing/grpc_version.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_testing/grpc_version.py.template`!!! -VERSION = '1.28.1' +VERSION = '1.28.2' diff --git a/src/python/grpcio_tests/grpc_version.py b/src/python/grpcio_tests/grpc_version.py index d15c5b6..ccc3224 100644 --- a/src/python/grpcio_tests/grpc_version.py +++ b/src/python/grpcio_tests/grpc_version.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!! -VERSION = '1.28.1' +VERSION = '1.28.2' diff --git a/src/ruby/lib/grpc/version.rb b/src/ruby/lib/grpc/version.rb index f92a8ee..69247fc 100644 --- a/src/ruby/lib/grpc/version.rb +++ b/src/ruby/lib/grpc/version.rb @@ -14,5 +14,5 @@ # GRPC contains the General RPC module. 
module GRPC - VERSION = '1.28.1' + VERSION = '1.28.2' end diff --git a/src/ruby/tools/version.rb b/src/ruby/tools/version.rb index 0100d1f..a8fa6e2 100644 --- a/src/ruby/tools/version.rb +++ b/src/ruby/tools/version.rb @@ -14,6 +14,6 @@ module GRPC module Tools - VERSION = '1.28.1' + VERSION = '1.28.2' end end diff --git a/tools/distrib/pylint_code.sh b/tools/distrib/pylint_code.sh index 25189db..5e6b818 100755 --- a/tools/distrib/pylint_code.sh +++ b/tools/distrib/pylint_code.sh @@ -40,7 +40,7 @@ python3 -m virtualenv $VIRTUALENV -p $(which python3) PYTHON=$VIRTUALENV/bin/python $PYTHON -m pip install --upgrade pip==19.3.1 -$PYTHON -m pip install --upgrade pylint==2.2.2 +$PYTHON -m pip install --upgrade astroid==2.3.3 pylint==2.2.2 EXIT=0 for dir in "${DIRS[@]}"; do diff --git a/tools/distrib/python/grpcio_tools/grpc_version.py b/tools/distrib/python/grpcio_tools/grpc_version.py index 370bfb4..a41da24 100644 --- a/tools/distrib/python/grpcio_tools/grpc_version.py +++ b/tools/distrib/python/grpcio_tools/grpc_version.py @@ -14,4 +14,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/tools/distrib/python/grpcio_tools/grpc_version.py.template`!!! -VERSION = '1.28.1' +VERSION = '1.28.2' diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++ index 82bb2a7..75316ff 100644 --- a/tools/doxygen/Doxyfile.c++ +++ b/tools/doxygen/Doxyfile.c++ @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.28.1 +PROJECT_NUMBER = 1.28.2 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal index 5a08109..d23ae56 100644 --- a/tools/doxygen/Doxyfile.c++.internal +++ b/tools/doxygen/Doxyfile.c++.internal @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.28.1 +PROJECT_NUMBER = 1.28.2 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.objc b/tools/doxygen/Doxyfile.objc index 6804a1a..7131ee7 100644 --- a/tools/doxygen/Doxyfile.objc +++ b/tools/doxygen/Doxyfile.objc @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC Objective-C" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.28.1 +PROJECT_NUMBER = 1.28.2 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.objc.internal b/tools/doxygen/Doxyfile.objc.internal index 9eae6eb..141083a 100644 --- a/tools/doxygen/Doxyfile.objc.internal +++ b/tools/doxygen/Doxyfile.objc.internal @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC Objective-C" # could be handy for archiving the generated documentation or if some version # control system is used. 
-PROJECT_NUMBER = 1.28.1 +PROJECT_NUMBER = 1.28.2 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/internal_ci/linux/grpc_xds_bazel_python_test_in_docker.sh b/tools/internal_ci/linux/grpc_xds_bazel_python_test_in_docker.sh index fa6f1bd..d97d7fb 100755 --- a/tools/internal_ci/linux/grpc_xds_bazel_python_test_in_docker.sh +++ b/tools/internal_ci/linux/grpc_xds_bazel_python_test_in_docker.sh @@ -52,6 +52,8 @@ GRPC_VERBOSITY=debug GRPC_TRACE=xds_client,xds_resolver,cds_lb,xds_lb "$PYTHON" tools/run_tests/run_xds_tests.py \ --test_case=all \ --project_id=grpc-testing \ + --source_image=projects/grpc-testing/global/images/xds-test-server \ + --path_to_server_binary=/java_server/grpc-java/interop-testing/build/install/grpc-interop-testing/bin/xds-test-server \ --gcp_suffix=$(date '+%s') \ --verbose \ --client_cmd='bazel run //src/python/grpcio_tests/tests_py3_only/interop:xds_interop_client -- --server=xds-experimental:///{server_uri} --stats_port={stats_port} --qps={qps}' diff --git a/tools/internal_ci/linux/grpc_xds_bazel_test_in_docker.sh b/tools/internal_ci/linux/grpc_xds_bazel_test_in_docker.sh index e6a8377..d0a1338 100755 --- a/tools/internal_ci/linux/grpc_xds_bazel_test_in_docker.sh +++ b/tools/internal_ci/linux/grpc_xds_bazel_test_in_docker.sh @@ -52,6 +52,8 @@ GRPC_VERBOSITY=debug GRPC_TRACE=xds_client,xds_resolver,cds_lb,xds_lb "$PYTHON" tools/run_tests/run_xds_tests.py \ --test_case=all \ --project_id=grpc-testing \ + --source_image=projects/grpc-testing/global/images/xds-test-server \ + --path_to_server_binary=/java_server/grpc-java/interop-testing/build/install/grpc-interop-testing/bin/xds-test-server \ --gcp_suffix=$(date '+%s') \ --verbose \ --client_cmd='bazel-bin/test/cpp/interop/xds_interop_client --server=xds-experimental:///{server_uri} --stats_port={stats_port} --qps={qps}' diff --git a/tools/run_tests/run_xds_tests.py b/tools/run_tests/run_xds_tests.py index 3f9fab3..8f1d9b1 100755 --- a/tools/run_tests/run_xds_tests.py +++ b/tools/run_tests/run_xds_tests.py @@ -58,6 +58,8 @@ _TEST_CASES = [ def parse_test_cases(arg): if arg == 'all': return _TEST_CASES + if arg == '': + return [] test_cases = arg.split(',') if all([test_case in _TEST_CASES for test_case in test_cases]): return test_cases @@ -87,11 +89,16 @@ argp.add_argument( help='Comma-separated list of test cases to run, or \'all\' to run every ' 'test. Available tests: %s' % ' '.join(_TEST_CASES)) argp.add_argument( + '--bootstrap_file', + default='', + help='File to reference via GRPC_XDS_BOOTSTRAP. Disables built-in ' + 'bootstrap generation') +argp.add_argument( '--client_cmd', default=None, - help='Command to launch xDS test client. This script will fill in ' - '{server_uri}, {stats_port} and {qps} parameters using str.format(), and ' - 'generate the GRPC_XDS_BOOTSTRAP file.') + help='Command to launch xDS test client. {server_uri}, {stats_port} and ' + '{qps} references will be replaced using str.format(). 
GRPC_XDS_BOOTSTRAP ' 'will be set for the command.') argp.add_argument('--zone', default='us-central1-a') argp.add_argument('--secondary_zone', default='us-west1-b', help='Zone to use for secondary TD locality tests') @@ -104,6 +111,13 @@ argp.add_argument( help='Time limit for waiting for created backend services to report ' 'healthy when launching or updating GCP resources') argp.add_argument( + '--use_existing_gcp_resources', + default=False, + action='store_true', + help= + 'If set, find and use already created GCP resources instead of creating new' + ' ones.') +argp.add_argument( '--keep_gcp_resources', default=False, action='store_true', @@ -117,20 +131,22 @@ argp.add_argument( help= 'If provided, uses this file instead of retrieving via the GCP discovery ' 'API') +argp.add_argument( + '--alpha_compute_discovery_document', + default=None, + type=str, + help='If provided, uses this file instead of retrieving via the alpha GCP ' + 'discovery API') argp.add_argument('--network', default='global/networks/default', help='GCP network to use') argp.add_argument('--service_port_range', - default='80', + default='8080:8110', type=parse_port_range, help='Listening port for created gRPC backends. Specified as ' 'either a single int or as a range in the format min:max, in ' 'which case an available port p will be chosen s.t. min <= p ' '<= max') -argp.add_argument('--forwarding_rule_ip_prefix', - default='172.16.0.', - help='If set, an available IP with this prefix followed by ' - '0-255 will be used for the generated forwarding rule.') argp.add_argument( '--stats_port', default=8079, @@ -142,6 +158,11 @@ argp.add_argument('--xds_server', argp.add_argument('--source_image', default='projects/debian-cloud/global/images/family/debian-9', help='Source image for VMs created during the test') +argp.add_argument('--path_to_server_binary', + default=None, + type=str, + help='If set, the server binary must already be pre-built on ' + 'the specified source image') argp.add_argument('--machine_type', default='e2-standard-2', help='Machine type for VMs created during the test') @@ -152,18 +173,20 @@ argp.add_argument( help='Number of VMs to create per instance group. Certain test cases (e.g., ' 'round_robin) may not give meaningful results if this is set to a value ' 'less than 2.') -argp.add_argument( - '--tolerate_gcp_errors', - default=False, - action='store_true', - help= - 'Continue with test even when an error occurs during setup. Intended for ' - 'manual testing, where attempts to recreate any GCP resources already ' - 'existing will result in an error') argp.add_argument('--verbose', help='verbose log output', default=False, action='store_true') # TODO(ericgribkoff) Remove this param once the sponge-formatted log files are # visible in all test environments.
+argp.add_argument('--log_client_output', + help='Log captured client output', + default=False, + action='store_true') +argp.add_argument('--only_stable_gcp_apis', + help='Do not use alpha compute APIs', + default=False, + action='store_true') args = argp.parse_args() if args.verbose: @@ -176,6 +199,7 @@ _INSTANCE_GROUP_SIZE = args.instance_group_size _NUM_TEST_RPCS = 10 * args.qps _WAIT_FOR_STATS_SEC = 180 _WAIT_FOR_URL_MAP_PATCH_SEC = 300 +_GCP_API_RETRIES = 5 _BOOTSTRAP_TEMPLATE = """ {{ "node": {{ @@ -233,7 +257,7 @@ def get_client_stats(num_rpcs, timeout_sec): logger.debug('Invoked GetClientStats RPC: %s', response) return response except grpc.RpcError as rpc_error: - raise Exception('GetClientStats RPC failed') + logger.exception('GetClientStats RPC failed') def _verify_rpcs_to_given_backends(backends, timeout_sec, num_rpcs, @@ -322,9 +346,9 @@ def test_change_backend_service(gcp, original_backend_service, instance_group, _WAIT_FOR_STATS_SEC) try: patch_url_map_backend_service(gcp, alternate_backend_service) - stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC) - if stats.num_failures > 0: - raise Exception('Unexpected failure: %s', stats) + # TODO(ericgribkoff) Verify no RPCs fail during backend switch. + # Currently TD may briefly send an update with no localities if adding + # the MIG to the backend service above races with the URL map patch. wait_until_all_rpcs_go_to_given_backends(alternate_backend_instances, _WAIT_FOR_URL_MAP_PATCH_SEC) finally: @@ -473,7 +497,27 @@ def test_secondary_locality_gets_requests_on_primary_failure( patch_backend_instances(gcp, backend_service, [primary_instance_group]) -def create_instance_template(gcp, name, network, source_image, machine_type): +def get_startup_script(path_to_server_binary, service_port): + if path_to_server_binary: + return "nohup %s --port=%d 1>/dev/null &" % (path_to_server_binary, + service_port) + else: + return """#!/bin/bash +sudo apt update +sudo apt install -y git default-jdk +mkdir java_server +pushd java_server +git clone https://github.com/grpc/grpc-java.git +pushd grpc-java +pushd interop-testing +../gradlew installDist -x test -PskipCodegen=true -PskipAndroid=true + +nohup build/install/grpc-interop-testing/bin/xds-test-server \ + --port=%d 1>/dev/null &""" % service_port + + +def create_instance_template(gcp, name, network, source_image, machine_type, + startup_script): config = { 'name': name, 'properties': { @@ -499,29 +543,16 @@ def create_instance_template(gcp, name, network, source_image, machine_type): }], 'metadata': { 'items': [{ - 'key': - 'startup-script', - 'value': - """#!/bin/bash -sudo apt update -sudo apt install -y git default-jdk -mkdir java_server -pushd java_server -git clone https://github.com/grpc/grpc-java.git -pushd grpc-java -pushd interop-testing -../gradlew installDist -x test -PskipCodegen=true -PskipAndroid=true - -nohup build/install/grpc-interop-testing/bin/xds-test-server --port=%d 1>/dev/null &""" - % gcp.service_port + 'key': 'startup-script', + 'value': startup_script }] } } } logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.instanceTemplates().insert(project=gcp.project, - body=config).execute() + result = gcp.compute.instanceTemplates().insert( + project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) gcp.instance_template = GcpResource(config['name'], result['targetLink']) @@ -538,13 +569,14 @@ def add_instance_group(gcp, zone, name, size): } logger.debug('Sending GCP request 
with body=%s', config) - result = gcp.compute.instanceGroupManagers().insert(project=gcp.project, - zone=zone, - body=config).execute() + result = gcp.compute.instanceGroupManagers().insert( + project=gcp.project, zone=zone, + body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_zone_operation(gcp, zone, result['name']) result = gcp.compute.instanceGroupManagers().get( project=gcp.project, zone=zone, - instanceGroupManager=config['name']).execute() + instanceGroupManager=config['name']).execute( + num_retries=_GCP_API_RETRIES) instance_group = InstanceGroup(config['name'], result['instanceGroup'], zone) gcp.instance_groups.append(instance_group) @@ -552,16 +584,27 @@ def add_instance_group(gcp, zone, name, size): def create_health_check(gcp, name): - config = { - 'name': name, - 'type': 'TCP', - 'tcpHealthCheck': { - 'portName': 'grpc' + if gcp.alpha_compute: + config = { + 'name': name, + 'type': 'GRPC', + 'grpcHealthCheck': { + 'portSpecification': 'USE_SERVING_PORT' + } } - } + compute_to_use = gcp.alpha_compute + else: + config = { + 'name': name, + 'type': 'TCP', + 'tcpHealthCheck': { + 'portName': 'grpc' + } + } + compute_to_use = gcp.compute logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.healthChecks().insert(project=gcp.project, - body=config).execute() + result = compute_to_use.healthChecks().insert( + project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) gcp.health_check = GcpResource(config['name'], result['targetLink']) @@ -577,24 +620,30 @@ def create_health_check_firewall_rule(gcp, name): 'targetTags': ['allow-health-checks'], } logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.firewalls().insert(project=gcp.project, - body=config).execute() + result = gcp.compute.firewalls().insert( + project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) gcp.health_check_firewall_rule = GcpResource(config['name'], result['targetLink']) def add_backend_service(gcp, name): + if gcp.alpha_compute: + protocol = 'GRPC' + compute_to_use = gcp.alpha_compute + else: + protocol = 'HTTP2' + compute_to_use = gcp.compute config = { 'name': name, 'loadBalancingScheme': 'INTERNAL_SELF_MANAGED', 'healthChecks': [gcp.health_check.url], 'portName': 'grpc', - 'protocol': 'HTTP2' + 'protocol': protocol } logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.backendServices().insert(project=gcp.project, - body=config).execute() + result = compute_to_use.backendServices().insert( + project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) backend_service = GcpResource(config['name'], result['targetLink']) gcp.backend_services.append(backend_service) @@ -615,56 +664,164 @@ def create_url_map(gcp, name, backend_service, host_name): }] } logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.urlMaps().insert(project=gcp.project, - body=config).execute() + result = gcp.compute.urlMaps().insert( + project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) gcp.url_map = GcpResource(config['name'], result['targetLink']) -def create_target_http_proxy(gcp, name): +def patch_url_map_host_rule_with_port(gcp, name, backend_service, host_name): config = { - 'name': name, - 'url_map': gcp.url_map.url, + 'hostRules': [{ + 'hosts': ['%s:%d' % (host_name, 
gcp.service_port)], + 'pathMatcher': _PATH_MATCHER_NAME + }] } logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.targetHttpProxies().insert(project=gcp.project, - body=config).execute() + result = gcp.compute.urlMaps().patch( + project=gcp.project, urlMap=name, + body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) - gcp.target_http_proxy = GcpResource(config['name'], result['targetLink']) -def create_global_forwarding_rule(gcp, name, ip, port): - config = { - 'name': name, - 'loadBalancingScheme': 'INTERNAL_SELF_MANAGED', - 'portRange': str(port), - 'IPAddress': ip, - 'network': args.network, - 'target': gcp.target_http_proxy.url, - } - logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.globalForwardingRules().insert(project=gcp.project, - body=config).execute() +def create_target_proxy(gcp, name): + if gcp.alpha_compute: + config = { + 'name': name, + 'url_map': gcp.url_map.url, + 'validate_for_proxyless': True, + } + logger.debug('Sending GCP request with body=%s', config) + result = gcp.alpha_compute.targetGrpcProxies().insert( + project=gcp.project, + body=config).execute(num_retries=_GCP_API_RETRIES) + else: + config = { + 'name': name, + 'url_map': gcp.url_map.url, + } + logger.debug('Sending GCP request with body=%s', config) + result = gcp.compute.targetHttpProxies().insert( + project=gcp.project, + body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) - gcp.global_forwarding_rule = GcpResource(config['name'], - result['targetLink']) + gcp.target_proxy = GcpResource(config['name'], result['targetLink']) + + +def create_global_forwarding_rule(gcp, name, potential_ports): + if gcp.alpha_compute: + compute_to_use = gcp.alpha_compute + else: + compute_to_use = gcp.compute + for port in potential_ports: + try: + config = { + 'name': name, + 'loadBalancingScheme': 'INTERNAL_SELF_MANAGED', + 'portRange': str(port), + 'IPAddress': '0.0.0.0', + 'network': args.network, + 'target': gcp.target_proxy.url, + } + logger.debug('Sending GCP request with body=%s', config) + result = compute_to_use.globalForwardingRules().insert( + project=gcp.project, + body=config).execute(num_retries=_GCP_API_RETRIES) + wait_for_global_operation(gcp, result['name']) + gcp.global_forwarding_rule = GcpResource(config['name'], + result['targetLink']) + gcp.service_port = port + return + except googleapiclient.errors.HttpError as http_error: + logger.warning( + 'Got error %s when attempting to create forwarding rule to ' + '0.0.0.0:%d. Retrying with another port.' 
% (http_error, port)) + + +def get_health_check(gcp, health_check_name): + result = gcp.compute.healthChecks().get( + project=gcp.project, healthCheck=health_check_name).execute() + gcp.health_check = GcpResource(health_check_name, result['selfLink']) + + +def get_health_check_firewall_rule(gcp, firewall_name): + result = gcp.compute.firewalls().get(project=gcp.project, + firewall=firewall_name).execute() + gcp.health_check_firewall_rule = GcpResource(firewall_name, + result['selfLink']) + + +def get_backend_service(gcp, backend_service_name): + result = gcp.compute.backendServices().get( + project=gcp.project, backendService=backend_service_name).execute() + backend_service = GcpResource(backend_service_name, result['selfLink']) + gcp.backend_services.append(backend_service) + return backend_service + + +def get_url_map(gcp, url_map_name): + result = gcp.compute.urlMaps().get(project=gcp.project, + urlMap=url_map_name).execute() + gcp.url_map = GcpResource(url_map_name, result['selfLink']) + + +def get_target_proxy(gcp, target_proxy_name): + if gcp.alpha_compute: + result = gcp.alpha_compute.targetGrpcProxies().get( + project=gcp.project, targetGrpcProxy=target_proxy_name).execute() + else: + result = gcp.compute.targetHttpProxies().get( + project=gcp.project, targetHttpProxy=target_proxy_name).execute() + gcp.target_proxy = GcpResource(target_proxy_name, result['selfLink']) + + +def get_global_forwarding_rule(gcp, forwarding_rule_name): + result = gcp.compute.globalForwardingRules().get( + project=gcp.project, forwardingRule=forwarding_rule_name).execute() + gcp.global_forwarding_rule = GcpResource(forwarding_rule_name, + result['selfLink']) + + +def get_instance_template(gcp, template_name): + result = gcp.compute.instanceTemplates().get( + project=gcp.project, instanceTemplate=template_name).execute() + gcp.instance_template = GcpResource(template_name, result['selfLink']) + + +def get_instance_group(gcp, zone, instance_group_name): + result = gcp.compute.instanceGroups().get( + project=gcp.project, zone=zone, + instanceGroup=instance_group_name).execute() + gcp.service_port = result['namedPorts'][0]['port'] + instance_group = InstanceGroup(instance_group_name, result['selfLink'], + zone) + gcp.instance_groups.append(instance_group) + return instance_group def delete_global_forwarding_rule(gcp): try: result = gcp.compute.globalForwardingRules().delete( project=gcp.project, - forwardingRule=gcp.global_forwarding_rule.name).execute() + forwardingRule=gcp.global_forwarding_rule.name).execute( + num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) except googleapiclient.errors.HttpError as http_error: logger.info('Delete failed: %s', http_error) -def delete_target_http_proxy(gcp): +def delete_target_proxy(gcp): try: - result = gcp.compute.targetHttpProxies().delete( - project=gcp.project, - targetHttpProxy=gcp.target_http_proxy.name).execute() + if gcp.alpha_compute: + result = gcp.alpha_compute.targetGrpcProxies().delete( + project=gcp.project, + targetGrpcProxy=gcp.target_proxy.name).execute( + num_retries=_GCP_API_RETRIES) + else: + result = gcp.compute.targetHttpProxies().delete( + project=gcp.project, + targetHttpProxy=gcp.target_proxy.name).execute( + num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) except googleapiclient.errors.HttpError as http_error: logger.info('Delete failed: %s', http_error) @@ -673,7 +830,8 @@ def delete_target_http_proxy(gcp): def delete_url_map(gcp): try: result = gcp.compute.urlMaps().delete( - 
project=gcp.project, urlMap=gcp.url_map.name).execute() + project=gcp.project, + urlMap=gcp.url_map.name).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) except googleapiclient.errors.HttpError as http_error: logger.info('Delete failed: %s', http_error) @@ -684,7 +842,8 @@ def delete_backend_services(gcp): try: result = gcp.compute.backendServices().delete( project=gcp.project, - backendService=backend_service.name).execute() + backendService=backend_service.name).execute( + num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) except googleapiclient.errors.HttpError as http_error: logger.info('Delete failed: %s', http_error) @@ -694,7 +853,8 @@ def delete_firewall(gcp): try: result = gcp.compute.firewalls().delete( project=gcp.project, - firewall=gcp.health_check_firewall_rule.name).execute() + firewall=gcp.health_check_firewall_rule.name).execute( + num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) except googleapiclient.errors.HttpError as http_error: logger.info('Delete failed: %s', http_error) @@ -703,7 +863,8 @@ def delete_firewall(gcp): def delete_health_check(gcp): try: result = gcp.compute.healthChecks().delete( - project=gcp.project, healthCheck=gcp.health_check.name).execute() + project=gcp.project, healthCheck=gcp.health_check.name).execute( + num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) except googleapiclient.errors.HttpError as http_error: logger.info('Delete failed: %s', http_error) @@ -715,7 +876,8 @@ def delete_instance_groups(gcp): result = gcp.compute.instanceGroupManagers().delete( project=gcp.project, zone=instance_group.zone, - instanceGroupManager=instance_group.name).execute() + instanceGroupManager=instance_group.name).execute( + num_retries=_GCP_API_RETRIES) wait_for_zone_operation(gcp, instance_group.zone, result['name'], @@ -728,7 +890,8 @@ def delete_instance_template(gcp): try: result = gcp.compute.instanceTemplates().delete( project=gcp.project, - instanceTemplate=gcp.instance_template.name).execute() + instanceTemplate=gcp.instance_template.name).execute( + num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) except googleapiclient.errors.HttpError as http_error: logger.info('Delete failed: %s', http_error) @@ -738,6 +901,10 @@ def patch_backend_instances(gcp, backend_service, instance_groups, balancing_mode='UTILIZATION'): + if gcp.alpha_compute: + compute_to_use = gcp.alpha_compute + else: + compute_to_use = gcp.compute config = { 'backends': [{ 'group': instance_group.url, @@ -746,10 +913,12 @@ def patch_backend_instances(gcp, } for instance_group in instance_groups], } logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.backendServices().patch( + result = compute_to_use.backendServices().patch( project=gcp.project, backendService=backend_service.name, - body=config).execute() - wait_for_global_operation(gcp, result['name']) + body=config).execute(num_retries=_GCP_API_RETRIES) + wait_for_global_operation(gcp, + result['name'], + timeout_sec=_WAIT_FOR_BACKEND_SEC) def resize_instance_group(gcp, @@ -760,7 +929,7 @@ def resize_instance_group(gcp, project=gcp.project, zone=instance_group.zone, instanceGroupManager=instance_group.name, - size=new_size).execute() + size=new_size).execute(num_retries=_GCP_API_RETRIES) wait_for_zone_operation(gcp, instance_group.zone, result['name'], @@ -772,7 +941,7 @@ def resize_instance_group(gcp, break if time.time() - start_time > timeout_sec: raise 
Exception('Failed to resize primary instance group') - time.sleep(1) + time.sleep(2) def patch_url_map_backend_service(gcp, backend_service): @@ -785,9 +954,9 @@ def patch_url_map_backend_service(gcp, backend_service): }] } logger.debug('Sending GCP request with body=%s', config) - result = gcp.compute.urlMaps().patch(project=gcp.project, - urlMap=gcp.url_map.name, - body=config).execute() + result = gcp.compute.urlMaps().patch( + project=gcp.project, urlMap=gcp.url_map.name, + body=config).execute(num_retries=_GCP_API_RETRIES) wait_for_global_operation(gcp, result['name']) @@ -797,12 +966,13 @@ start_time = time.time() while time.time() - start_time <= timeout_sec: result = gcp.compute.globalOperations().get( - project=gcp.project, operation=operation).execute() + project=gcp.project, + operation=operation).execute(num_retries=_GCP_API_RETRIES) if result['status'] == 'DONE': if 'error' in result: raise Exception(result['error']) return - time.sleep(1) + time.sleep(2) raise Exception('Operation %s did not complete within %d' % (operation, timeout_sec)) @@ -814,12 +984,13 @@ start_time = time.time() while time.time() - start_time <= timeout_sec: result = gcp.compute.zoneOperations().get( - project=gcp.project, zone=zone, operation=operation).execute() + project=gcp.project, zone=zone, + operation=operation).execute(num_retries=_GCP_API_RETRIES) if result['status'] == 'DONE': if 'error' in result: raise Exception(result['error']) return - time.sleep(1) + time.sleep(2) raise Exception('Operation %s did not complete within %d' % (operation, timeout_sec)) @@ -834,7 +1005,7 @@ result = gcp.compute.backendServices().getHealth( project=gcp.project, backendService=backend_service.name, - body=config).execute() + body=config).execute(num_retries=_GCP_API_RETRIES) if 'healthStatus' in result: healthy = True for instance in result['healthStatus']: @@ -843,7 +1014,7 @@ break if healthy: return - time.sleep(1) + time.sleep(2) raise Exception('Not all backends became healthy within %d seconds: %s' % (timeout_sec, result)) @@ -856,7 +1027,7 @@ instanceGroup=instance_group.name, body={ 'instanceState': 'ALL' - }).execute() + }).execute(num_retries=_GCP_API_RETRIES) if 'items' not in result: return [] for item in result['items']: @@ -869,26 +1040,11 @@ return instance_names -def start_xds_client(cmd): - bootstrap_path = None - with tempfile.NamedTemporaryFile(delete=False) as bootstrap_file: - bootstrap_file.write( - _BOOTSTRAP_TEMPLATE.format( - node_id=socket.gethostname()).encode('utf-8')) - bootstrap_path = bootstrap_file.name - - client_process = subprocess.Popen(shlex.split(cmd), - env=dict( - os.environ, - GRPC_XDS_BOOTSTRAP=bootstrap_path)) - return client_process - - def clean_up(gcp): if gcp.global_forwarding_rule: delete_global_forwarding_rule(gcp) - if gcp.target_http_proxy: - delete_target_http_proxy(gcp) + if gcp.target_proxy: + delete_target_proxy(gcp) if gcp.url_map: delete_url_map(gcp) delete_backend_services(gcp) @@ -918,76 +1074,97 @@ class GcpResource(object): class GcpState(object): - def __init__(self, compute, project): + def __init__(self, compute, alpha_compute, project): self.compute = compute + self.alpha_compute = alpha_compute self.project = project self.health_check = None self.health_check_firewall_rule = None self.backend_services = [] self.url_map = None -
self.target_http_proxy = None + self.target_proxy = None self.global_forwarding_rule = None self.service_port = None self.instance_template = None self.instance_groups = [] +alpha_compute = None if args.compute_discovery_document: with open(args.compute_discovery_document, 'r') as discovery_doc: compute = googleapiclient.discovery.build_from_document( discovery_doc.read()) + if not args.only_stable_gcp_apis and args.alpha_compute_discovery_document: + with open(args.alpha_compute_discovery_document, 'r') as discovery_doc: + alpha_compute = googleapiclient.discovery.build_from_document( + discovery_doc.read()) else: compute = googleapiclient.discovery.build('compute', 'v1') + if not args.only_stable_gcp_apis: + alpha_compute = googleapiclient.discovery.build('compute', 'alpha') try: - gcp = GcpState(compute, args.project_id) + gcp = GcpState(compute, alpha_compute, args.project_id) health_check_name = _BASE_HEALTH_CHECK_NAME + args.gcp_suffix firewall_name = _BASE_FIREWALL_RULE_NAME + args.gcp_suffix backend_service_name = _BASE_BACKEND_SERVICE_NAME + args.gcp_suffix alternate_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-alternate' + args.gcp_suffix url_map_name = _BASE_URL_MAP_NAME + args.gcp_suffix service_host_name = _BASE_SERVICE_HOST + args.gcp_suffix - target_http_proxy_name = _BASE_TARGET_PROXY_NAME + args.gcp_suffix + target_proxy_name = _BASE_TARGET_PROXY_NAME + args.gcp_suffix forwarding_rule_name = _BASE_FORWARDING_RULE_NAME + args.gcp_suffix - template_name = _BASE_TARGET_PROXY_NAME + args.gcp_suffix + template_name = _BASE_TEMPLATE_NAME + args.gcp_suffix instance_group_name = _BASE_INSTANCE_GROUP_NAME + args.gcp_suffix same_zone_instance_group_name = _BASE_INSTANCE_GROUP_NAME + '-same-zone' + args.gcp_suffix if _USE_SECONDARY_IG: secondary_zone_instance_group_name = _BASE_INSTANCE_GROUP_NAME + '-secondary-zone' + args.gcp_suffix - try: + if args.use_existing_gcp_resources: + logger.info('Reusing existing GCP resources') + get_health_check(gcp, health_check_name) + try: + get_health_check_firewall_rule(gcp, firewall_name) + except googleapiclient.errors.HttpError as http_error: + # Firewall rule may be auto-deleted periodically depending on GCP + # project settings. 
+ logger.exception('Failed to find firewall rule, recreating') + create_health_check_firewall_rule(gcp, firewall_name) + backend_service = get_backend_service(gcp, backend_service_name) + alternate_backend_service = get_backend_service( + gcp, alternate_backend_service_name) + get_url_map(gcp, url_map_name) + get_target_proxy(gcp, target_proxy_name) + get_global_forwarding_rule(gcp, forwarding_rule_name) + get_instance_template(gcp, template_name) + instance_group = get_instance_group(gcp, args.zone, instance_group_name) + same_zone_instance_group = get_instance_group( + gcp, args.zone, same_zone_instance_group_name) + if _USE_SECONDARY_IG: + secondary_zone_instance_group = get_instance_group( + gcp, args.secondary_zone, secondary_zone_instance_group_name) + else: create_health_check(gcp, health_check_name) create_health_check_firewall_rule(gcp, firewall_name) backend_service = add_backend_service(gcp, backend_service_name) alternate_backend_service = add_backend_service( gcp, alternate_backend_service_name) create_url_map(gcp, url_map_name, backend_service, service_host_name) - create_target_http_proxy(gcp, target_http_proxy_name) + create_target_proxy(gcp, target_proxy_name) potential_service_ports = list(args.service_port_range) random.shuffle(potential_service_ports) - if args.forwarding_rule_ip_prefix == '': - potential_ips = ['0.0.0.0'] - else: - potential_ips = [ - args.forwarding_rule_ip_prefix + str(x) for x in range(256) - ] - random.shuffle(potential_ips) - for port in potential_service_ports: - for ip in potential_ips: - try: - create_global_forwarding_rule(gcp, forwarding_rule_name, ip, - port) - gcp.service_port = port - break - except googleapiclient.errors.HttpError as http_error: - logger.warning( - 'Got error %s when attempting to create forwarding rule to ' - '%s:%d. Retrying with another ip:port.' % - (http_error, ip, port)) + create_global_forwarding_rule(gcp, forwarding_rule_name, + potential_service_ports) if not gcp.service_port: raise Exception( 'Failed to find a valid ip:port for the forwarding rule') + if gcp.service_port != _DEFAULT_SERVICE_PORT: + patch_url_map_host_rule_with_port(gcp, url_map_name, + backend_service, + service_host_name) + startup_script = get_startup_script(args.path_to_server_binary, + gcp.service_port) create_instance_template(gcp, template_name, args.network, - args.source_image, args.machine_type) + args.source_image, args.machine_type, + startup_script) instance_group = add_instance_group(gcp, args.zone, instance_group_name, _INSTANCE_GROUP_SIZE) patch_backend_instances(gcp, backend_service, [instance_group]) @@ -997,157 +1174,104 @@ try: secondary_zone_instance_group = add_instance_group( gcp, args.secondary_zone, secondary_zone_instance_group_name, _INSTANCE_GROUP_SIZE) - except googleapiclient.errors.HttpError as http_error: - if args.tolerate_gcp_errors: - logger.warning( - 'Failed to set up backends: %s. 
Attempting to continue since ' - '--tolerate_gcp_errors=true', http_error) - if not gcp.instance_template: - result = compute.instanceTemplates().get( - project=args.project_id, - instanceTemplate=template_name).execute() - gcp.instance_template = GcpResource(template_name, - result['selfLink']) - if not gcp.backend_services: - result = compute.backendServices().get( - project=args.project_id, - backendService=backend_service_name).execute() - backend_service = GcpResource(backend_service_name, - result['selfLink']) - gcp.backend_services.append(backend_service) - result = compute.backendServices().get( - project=args.project_id, - backendService=alternate_backend_service_name).execute() - alternate_backend_service = GcpResource( - alternate_backend_service_name, result['selfLink']) - gcp.backend_services.append(alternate_backend_service) - if not gcp.instance_groups: - result = compute.instanceGroups().get( - project=args.project_id, - zone=args.zone, - instanceGroup=instance_group_name).execute() - instance_group = InstanceGroup(instance_group_name, - result['selfLink'], args.zone) - gcp.instance_groups.append(instance_group) - result = compute.instanceGroups().get( - project=args.project_id, - zone=args.zone, - instanceGroup=same_zone_instance_group_name).execute() - same_zone_instance_group = InstanceGroup( - same_zone_instance_group_name, result['selfLink'], - args.zone) - gcp.instance_groups.append(same_zone_instance_group) - if _USE_SECONDARY_IG: - result = compute.instanceGroups().get( - project=args.project_id, - zone=args.secondary_zone, - instanceGroup=secondary_zone_instance_group_name - ).execute() - secondary_zone_instance_group = InstanceGroup( - secondary_zone_instance_group_name, result['selfLink'], - args.secondary_zone) - gcp.instance_groups.append(secondary_zone_instance_group) - if not gcp.health_check: - result = compute.healthChecks().get( - project=args.project_id, - healthCheck=health_check_name).execute() - gcp.health_check = GcpResource(health_check_name, - result['selfLink']) - if not gcp.url_map: - result = compute.urlMaps().get(project=args.project_id, - urlMap=url_map_name).execute() - gcp.url_map = GcpResource(url_map_name, result['selfLink']) - if not gcp.service_port: - gcp.service_port = args.service_port_range[0] - logger.warning('Using arbitrary service port in range: %d' % - gcp.service_port) - else: - raise http_error wait_for_healthy_backends(gcp, backend_service, instance_group) - if gcp.service_port == _DEFAULT_SERVICE_PORT: - server_uri = service_host_name - else: - server_uri = service_host_name + ':' + str(gcp.service_port) - with tempfile.NamedTemporaryFile(delete=False) as bootstrap_file: - bootstrap_file.write( - _BOOTSTRAP_TEMPLATE.format( - node_id=socket.gethostname()).encode('utf-8')) - bootstrap_path = bootstrap_file.name - client_env = dict(os.environ, GRPC_XDS_BOOTSTRAP=bootstrap_path) - client_cmd = shlex.split( - args.client_cmd.format(server_uri=server_uri, - stats_port=args.stats_port, - qps=args.qps)) - - test_results = {} - failed_tests = [] - for test_case in args.test_case: - result = jobset.JobResult() - log_dir = os.path.join(_TEST_LOG_BASE_DIR, test_case) - if not os.path.exists(log_dir): - os.makedirs(log_dir) - test_log_file = open(os.path.join(log_dir, _SPONGE_LOG_NAME), 'w+') - client_process = None - try: - client_process = subprocess.Popen(client_cmd, - env=client_env, - stderr=subprocess.STDOUT, - stdout=test_log_file) - if test_case == 'backends_restart': - test_backends_restart(gcp, backend_service, instance_group) 
- elif test_case == 'change_backend_service': - test_change_backend_service(gcp, backend_service, - instance_group, - alternate_backend_service, - same_zone_instance_group) - elif test_case == 'new_instance_group_receives_traffic': - test_new_instance_group_receives_traffic( - gcp, backend_service, instance_group, - same_zone_instance_group) - elif test_case == 'ping_pong': - test_ping_pong(gcp, backend_service, instance_group) - elif test_case == 'remove_instance_group': - test_remove_instance_group(gcp, backend_service, instance_group, - same_zone_instance_group) - elif test_case == 'round_robin': - test_round_robin(gcp, backend_service, instance_group) - elif test_case == 'secondary_locality_gets_no_requests_on_partial_primary_failure': - test_secondary_locality_gets_no_requests_on_partial_primary_failure( - gcp, backend_service, instance_group, - secondary_zone_instance_group) - elif test_case == 'secondary_locality_gets_requests_on_primary_failure': - test_secondary_locality_gets_requests_on_primary_failure( - gcp, backend_service, instance_group, - secondary_zone_instance_group) - else: - logger.error('Unknown test case: %s', test_case) - sys.exit(1) - result.state = 'PASSED' - result.returncode = 0 - except Exception as e: - logger.error('Test case %s failed: %s', test_case, e) - failed_tests.append(test_case) - result.state = 'FAILED' - result.message = str(e) - finally: - if client_process: - client_process.terminate() - # Workaround for Python 3, as report_utils will invoke decode() on - # result.message, which has a default value of ''. - result.message = result.message.encode('UTF-8') - test_results[test_case] = [result] - if not os.path.exists(_TEST_LOG_BASE_DIR): - os.makedirs(_TEST_LOG_BASE_DIR) - report_utils.render_junit_xml_report(test_results, - os.path.join(_TEST_LOG_BASE_DIR, - _SPONGE_XML_NAME), - suite_name='xds_tests', - multi_target=True) - if failed_tests: - logger.error('Test case(s) %s failed', failed_tests) - sys.exit(1) + if args.test_case: + + if gcp.service_port == _DEFAULT_SERVICE_PORT: + server_uri = service_host_name + else: + server_uri = service_host_name + ':' + str(gcp.service_port) + if args.bootstrap_file: + bootstrap_path = os.path.abspath(args.bootstrap_file) + else: + with tempfile.NamedTemporaryFile(delete=False) as bootstrap_file: + bootstrap_file.write( + _BOOTSTRAP_TEMPLATE.format( + node_id=socket.gethostname()).encode('utf-8')) + bootstrap_path = bootstrap_file.name + client_env = dict(os.environ, GRPC_XDS_BOOTSTRAP=bootstrap_path) + client_cmd = shlex.split( + args.client_cmd.format(server_uri=server_uri, + stats_port=args.stats_port, + qps=args.qps)) + + test_results = {} + failed_tests = [] + for test_case in args.test_case: + result = jobset.JobResult() + log_dir = os.path.join(_TEST_LOG_BASE_DIR, test_case) + if not os.path.exists(log_dir): + os.makedirs(log_dir) + test_log_filename = os.path.join(log_dir, _SPONGE_LOG_NAME) + test_log_file = open(test_log_filename, 'w+') + client_process = None + try: + client_process = subprocess.Popen(client_cmd, + env=client_env, + stderr=subprocess.STDOUT, + stdout=test_log_file) + if test_case == 'backends_restart': + test_backends_restart(gcp, backend_service, instance_group) + elif test_case == 'change_backend_service': + test_change_backend_service(gcp, backend_service, + instance_group, + alternate_backend_service, + same_zone_instance_group) + elif test_case == 'new_instance_group_receives_traffic': + test_new_instance_group_receives_traffic( + gcp, backend_service, instance_group, + 
same_zone_instance_group) + elif test_case == 'ping_pong': + test_ping_pong(gcp, backend_service, instance_group) + elif test_case == 'remove_instance_group': + test_remove_instance_group(gcp, backend_service, + instance_group, + same_zone_instance_group) + elif test_case == 'round_robin': + test_round_robin(gcp, backend_service, instance_group) + elif test_case == 'secondary_locality_gets_no_requests_on_partial_primary_failure': + test_secondary_locality_gets_no_requests_on_partial_primary_failure( + gcp, backend_service, instance_group, + secondary_zone_instance_group) + elif test_case == 'secondary_locality_gets_requests_on_primary_failure': + test_secondary_locality_gets_requests_on_primary_failure( + gcp, backend_service, instance_group, + secondary_zone_instance_group) + else: + logger.error('Unknown test case: %s', test_case) + sys.exit(1) + result.state = 'PASSED' + result.returncode = 0 + except Exception as e: + logger.exception('Test case %s failed', test_case) + failed_tests.append(test_case) + result.state = 'FAILED' + result.message = str(e) + finally: + if client_process: + client_process.terminate() + test_log_file.close() + # Workaround for Python 3, as report_utils will invoke decode() on + # result.message, which has a default value of ''. + result.message = result.message.encode('UTF-8') + test_results[test_case] = [result] + if args.log_client_output: + logger.info('Client output:') + with open(test_log_filename, 'r') as client_output: + logger.info(client_output.read()) + if not os.path.exists(_TEST_LOG_BASE_DIR): + os.makedirs(_TEST_LOG_BASE_DIR) + report_utils.render_junit_xml_report(test_results, + os.path.join( + _TEST_LOG_BASE_DIR, + _SPONGE_XML_NAME), + suite_name='xds_tests', + multi_target=True) + if failed_tests: + logger.error('Test case(s) %s failed', failed_tests) + sys.exit(1) finally: if not args.keep_gcp_resources: logger.info('Cleaning up GCP resources. This may take some time.') -- 2.7.4
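For reference, the client-launch contract behind the new --bootstrap_file and --client_cmd flags is small enough to state in one standalone sketch: the harness writes (or reuses) an xDS bootstrap file, exports its path through GRPC_XDS_BOOTSTRAP, fills {server_uri}, {stats_port} and {qps} via str.format(), and starts the client under that environment. The sketch below reassembles those steps from run_xds_tests.py; it is not the script itself. _EXAMPLE_BOOTSTRAP_TEMPLATE is a simplified stand-in for the script's _BOOTSTRAP_TEMPLATE (whose full contents are not shown in this patch), and launch_test_client is a hypothetical helper name.

import os
import shlex
import socket
import subprocess
import tempfile

# Simplified stand-in for _BOOTSTRAP_TEMPLATE in run_xds_tests.py; the real
# template carries additional node metadata and credential settings.
_EXAMPLE_BOOTSTRAP_TEMPLATE = """{{
  "node": {{ "id": "{node_id}" }},
  "xds_servers": [{{ "server_uri": "trafficdirector.googleapis.com:443" }}]
}}"""


def launch_test_client(client_cmd_template, server_uri, stats_port, qps,
                       bootstrap_file=''):
    # --bootstrap_file: reuse a caller-provided bootstrap and skip generation.
    if bootstrap_file:
        bootstrap_path = os.path.abspath(bootstrap_file)
    else:
        with tempfile.NamedTemporaryFile(delete=False) as f:
            f.write(
                _EXAMPLE_BOOTSTRAP_TEMPLATE.format(
                    node_id=socket.gethostname()).encode('utf-8'))
            bootstrap_path = f.name
    # GRPC_XDS_BOOTSTRAP points the client at the bootstrap file, and the
    # {server_uri}/{stats_port}/{qps} placeholders are filled via str.format().
    env = dict(os.environ, GRPC_XDS_BOOTSTRAP=bootstrap_path)
    cmd = shlex.split(
        client_cmd_template.format(server_uri=server_uri,
                                   stats_port=stats_port,
                                   qps=qps))
    return subprocess.Popen(cmd, env=env, stderr=subprocess.STDOUT)

With the C++ client command used in grpc_xds_bazel_test_in_docker.sh above, this would be invoked as launch_test_client('bazel-bin/test/cpp/interop/xds_interop_client --server=xds-experimental:///{server_uri} --stats_port={stats_port} --qps={qps}', server_uri, 8079, 10).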