Kafka: Add broker-level metrics-collecting filter (#8188)

Signed-off-by: Adam Kotwasinski <adam.kotwasinski@gmail.com>

Mirrored from https://github.com/envoyproxy/envoy @ a60f6853a2c2ebbbfed79dfff0b5b644fd735980
Branch: master-ci-test
Author: data-plane-api (CircleCI)
parent ce247dc63a
commit 47d665dff3
Changed files (8):

1. BUILD (2 lines changed)
2. bazel/repositories.bzl (25 lines changed)
3. bazel/repository_locations.bzl (7 lines changed)
4. docs/BUILD (1 line changed)
5. envoy/config/filter/network/kafka_broker/v2alpha1/BUILD (9 lines changed)
6. envoy/config/filter/network/kafka_broker/v2alpha1/kafka_broker.proto (21 lines changed)
7. envoy/extensions/filters/network/kafka_broker/v3alpha/BUILD (12 lines changed)
8. envoy/extensions/filters/network/kafka_broker/v3alpha/kafka_broker.proto (23 lines changed)

BUILD:

@@ -59,6 +59,7 @@ proto_library(
         "//envoy/config/filter/network/dubbo_proxy/v2alpha1:pkg",
         "//envoy/config/filter/network/ext_authz/v2:pkg",
         "//envoy/config/filter/network/http_connection_manager/v2:pkg",
+        "//envoy/config/filter/network/kafka_broker/v2alpha1:pkg",
         "//envoy/config/filter/network/local_rate_limit/v2alpha:pkg",
         "//envoy/config/filter/network/local_rate_limit/v3alpha:pkg",
         "//envoy/config/filter/network/mongo_proxy/v2:pkg",
@@ -137,6 +138,7 @@ proto_library(
         "//envoy/extensions/filters/network/dubbo_proxy/v3alpha:pkg",
         "//envoy/extensions/filters/network/ext_authz/v3alpha:pkg",
         "//envoy/extensions/filters/network/http_connection_manager/v3alpha:pkg",
+        "//envoy/extensions/filters/network/kafka_broker/v3alpha:pkg",
         "//envoy/extensions/filters/network/mongo_proxy/v3alpha:pkg",
         "//envoy/extensions/filters/network/mysql_proxy/v3alpha:pkg",
         "//envoy/extensions/filters/network/ratelimit/v3alpha:pkg",

bazel/repositories.bzl:

@@ -33,11 +33,6 @@ def api_dependencies():
         name = "rules_proto",
         locations = REPOSITORY_LOCATIONS,
     )
-    envoy_http_archive(
-        name = "kafka_source",
-        locations = REPOSITORY_LOCATIONS,
-        build_file_content = KAFKASOURCE_BUILD_CONTENT,
-    )
     envoy_http_archive(
         name = "com_github_openzipkin_zipkinapi",
         locations = REPOSITORY_LOCATIONS,
@@ -84,26 +79,6 @@ go_proto_library(
 )
 """
-
-KAFKASOURCE_BUILD_CONTENT = """
-filegroup(
-    name = "request_protocol_files",
-    srcs = glob([
-        "*Request.json",
-    ]),
-    visibility = ["//visibility:public"],
-)
-
-filegroup(
-    name = "response_protocol_files",
-    srcs = glob([
-        "*Response.json",
-    ]),
-    visibility = ["//visibility:public"],
-)
-
-"""
-
 ZIPKINAPI_BUILD_CONTENT = """
 load("@envoy_api//bazel:api_build_system.bzl", "api_cc_py_proto_library")

bazel/repository_locations.bzl:

@@ -13,8 +13,6 @@ GOOGLEAPIS_SHA = "a45019af4d3290f02eaeb1ce10990166978c807cb33a9692141a076ba46d14
 PROMETHEUS_GIT_SHA = "99fa1f4be8e564e8a6b613da7fa6f46c9edafc6c" # Nov 17, 2017
 PROMETHEUS_SHA = "783bdaf8ee0464b35ec0c8704871e1e72afa0005c3f3587f65d9d6694bf3911b"
-KAFKA_SOURCE_SHA = "ae7a1696c0a0302b43c5b21e515c37e6ecd365941f68a510a7e442eebddf39a1" # 2.2.0-rc2
-
 UDPA_GIT_SHA = "edbea6a78f6d1ba34edc69c53a396b1d88d59651" # Dec 30, 2019
 UDPA_SHA256 = "8cabd617b68354fa8b4adab8a031f80c10e2ea43f57d5f6210bc7b3ebb79b684"
@@ -60,11 +58,6 @@ REPOSITORY_LOCATIONS = dict(
         strip_prefix = "rules_proto-" + RULES_PROTO_GIT_SHA + "",
         urls = ["https://github.com/bazelbuild/rules_proto/archive/" + RULES_PROTO_GIT_SHA + ".tar.gz"],
     ),
-    kafka_source = dict(
-        sha256 = KAFKA_SOURCE_SHA,
-        strip_prefix = "kafka-2.2.0-rc2/clients/src/main/resources/common/message",
-        urls = ["https://github.com/apache/kafka/archive/2.2.0-rc2.zip"],
-    ),
     com_github_openzipkin_zipkinapi = dict(
         sha256 = ZIPKINAPI_SHA256,
         strip_prefix = "zipkin-api-" + ZIPKINAPI_RELEASE,

docs/BUILD:

@@ -59,6 +59,7 @@ proto_library(
         "//envoy/config/filter/network/dubbo_proxy/v2alpha1:pkg",
         "//envoy/config/filter/network/ext_authz/v2:pkg",
         "//envoy/config/filter/network/http_connection_manager/v2:pkg",
+        "//envoy/config/filter/network/kafka_broker/v2alpha1:pkg",
         "//envoy/config/filter/network/local_rate_limit/v2alpha:pkg",
         "//envoy/config/filter/network/mongo_proxy/v2:pkg",
         "//envoy/config/filter/network/mysql_proxy/v1alpha1:pkg",

envoy/config/filter/network/kafka_broker/v2alpha1/BUILD (new file):

@@ -0,0 +1,9 @@
+# DO NOT EDIT. This file is generated by tools/proto_sync.py.
+
+load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package")
+
+licenses(["notice"]) # Apache 2
+
+api_proto_package(
+    deps = ["@com_github_cncf_udpa//udpa/annotations:pkg"],
+)

envoy/config/filter/network/kafka_broker/v2alpha1/kafka_broker.proto (new file):

@@ -0,0 +1,21 @@
+syntax = "proto3";
+
+package envoy.config.filter.network.kafka_broker.v2alpha1;
+
+import "udpa/annotations/migrate.proto";
+import "validate/validate.proto";
+
+option java_package = "io.envoyproxy.envoy.config.filter.network.kafka_broker.v2alpha1";
+option java_outer_classname = "KafkaBrokerProto";
+option java_multiple_files = true;
+option (udpa.annotations.file_migrate).move_to_package =
+    "envoy.extensions.filters.network.kafka_broker.v3alpha";
+
+// [#protodoc-title: Kafka Broker]
+// Kafka Broker :ref:`configuration overview <config_network_filters_kafka_broker>`.
+// [#extension: envoy.filters.network.kafka_broker]
+
+message KafkaBroker {
+  // The prefix to use when emitting :ref:`statistics <config_network_filters_kafka_broker_stats>`.
+  string stat_prefix = 1 [(validate.rules).string = {min_bytes: 1}];
+}
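
Note: KafkaBroker is consumed as a listener network filter. Below is a minimal, hypothetical
listener sketch, not part of this commit: the addresses, ports, the cluster name "localkafka",
and the stat_prefix value "exampleprefix" are illustrative assumptions. Since the filter only
collects metrics, it is assumed to be chained in front of tcp_proxy, which does the actual
forwarding to the broker.

    # Hypothetical YAML config; names, ports, and addresses are placeholders.
    static_resources:
      listeners:
      - address:
          socket_address: {address: 127.0.0.1, port_value: 19092}  # Kafka clients connect here
        filter_chains:
        - filters:
          - name: envoy.filters.network.kafka_broker
            typed_config:
              "@type": type.googleapis.com/envoy.config.filter.network.kafka_broker.v2alpha1.KafkaBroker
              stat_prefix: exampleprefix
          - name: envoy.filters.network.tcp_proxy
            typed_config:
              "@type": type.googleapis.com/envoy.config.filter.network.tcp_proxy.v2.TcpProxy
              stat_prefix: tcp
              cluster: localkafka
      clusters:
      - name: localkafka
        connect_timeout: 0.25s
        type: STATIC
        load_assignment:
          cluster_name: localkafka
          endpoints:
          - lb_endpoints:
            - endpoint:
                address:
                  socket_address: {address: 127.0.0.1, port_value: 9092}  # the actual broker

With something like this in place, the filter emits its broker-level statistics under the
configured stat_prefix, per the field comment above.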

envoy/extensions/filters/network/kafka_broker/v3alpha/BUILD (new file):

@@ -0,0 +1,12 @@
+# DO NOT EDIT. This file is generated by tools/proto_sync.py.
+
+load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package")
+
+licenses(["notice"]) # Apache 2
+
+api_proto_package(
+    deps = [
+        "//envoy/config/filter/network/kafka_broker/v2alpha1:pkg",
+        "@com_github_cncf_udpa//udpa/annotations:pkg",
+    ],
+)

envoy/extensions/filters/network/kafka_broker/v3alpha/kafka_broker.proto (new file):

@@ -0,0 +1,23 @@
+syntax = "proto3";
+
+package envoy.extensions.filters.network.kafka_broker.v3alpha;
+
+import "udpa/annotations/versioning.proto";
+
+import "validate/validate.proto";
+
+option java_package = "io.envoyproxy.envoy.extensions.filters.network.kafka_broker.v3alpha";
+option java_outer_classname = "KafkaBrokerProto";
+option java_multiple_files = true;
+
+// [#protodoc-title: Kafka Broker]
+// Kafka Broker :ref:`configuration overview <config_network_filters_kafka_broker>`.
+// [#extension: envoy.filters.network.kafka_broker]
+
+message KafkaBroker {
+  option (udpa.annotations.versioning).previous_message_type =
+      "envoy.config.filter.network.kafka_broker.v2alpha1.KafkaBroker";
+
+  // The prefix to use when emitting :ref:`statistics <config_network_filters_kafka_broker_stats>`.
+  string stat_prefix = 1 [(validate.rules).string = {min_bytes: 1}];
+}
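
Note: the versioning annotation above ties this v3alpha message to its v2alpha1 predecessor,
so a configuration migrates by swapping only the type URL. Continuing the hypothetical
snippet from the previous file (same placeholder stat_prefix):

    - name: envoy.filters.network.kafka_broker
      typed_config:
        "@type": type.googleapis.com/envoy.extensions.filters.network.kafka_broker.v3alpha.KafkaBroker
        stat_prefix: exampleprefix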