Kafka-mesh filter (#11936)

Signed-off-by: Adam Kotwasinski <adam.kotwasinski@gmail.com>

Mirrored from https://github.com/envoyproxy/envoy @ 0c8942dd82e3991d5162c4c53ec5b2e5c1d71010
parent 12ae102529
commit dd7cde1c7b
Changed files (4):
  1. BUILD (+1)
  2. contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/BUILD (+9)
  3. contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.proto (+58)
  4. versioning/BUILD (+1)

@@ -60,6 +60,7 @@ proto_library(
     "//contrib/envoy/extensions/filters/http/squash/v3:pkg",
     "//contrib/envoy/extensions/filters/http/sxg/v3alpha:pkg",
     "//contrib/envoy/extensions/filters/network/kafka_broker/v3:pkg",
+    "//contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha:pkg",
     "//contrib/envoy/extensions/filters/network/mysql_proxy/v3:pkg",
     "//contrib/envoy/extensions/filters/network/postgres_proxy/v3alpha:pkg",
     "//contrib/envoy/extensions/filters/network/rocketmq_proxy/v3:pkg",

@@ -0,0 +1,9 @@
# DO NOT EDIT. This file is generated by tools/proto_format/proto_sync.py.

load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package")

licenses(["notice"])  # Apache 2

api_proto_package(
    deps = ["@com_github_cncf_udpa//udpa/annotations:pkg"],
)

@@ -0,0 +1,58 @@
syntax = "proto3";

package envoy.extensions.filters.network.kafka_mesh.v3alpha;

import "udpa/annotations/status.proto";
import "validate/validate.proto";

option java_package = "io.envoyproxy.envoy.extensions.filters.network.kafka_mesh.v3alpha";
option java_outer_classname = "KafkaMeshProto";
option java_multiple_files = true;
option (udpa.annotations.file_status).work_in_progress = true;
option (udpa.annotations.file_status).package_version_status = ACTIVE;

// [#protodoc-title: Kafka Mesh]
// Kafka Mesh :ref:`configuration overview <config_network_filters_kafka_mesh>`.
// [#extension: envoy.filters.network.kafka_mesh]

message KafkaMesh {
  // Envoy's host that's advertised to clients.
  // Has the same meaning as corresponding Kafka broker properties.
  // Usually equal to filter chain's listener config, but needs to be reachable by clients
  // (so 0.0.0.0 will not work).
  string advertised_host = 1 [(validate.rules).string = {min_len: 1}];

  // Envoy's port that's advertised to clients.
  int32 advertised_port = 2 [(validate.rules).int32 = {gt: 0}];

  // Upstream clusters this filter will connect to.
  repeated KafkaClusterDefinition upstream_clusters = 3;

  // Rules that will decide which cluster gets which request.
  repeated ForwardingRule forwarding_rules = 4;
}

message KafkaClusterDefinition {
  // Cluster name.
  string cluster_name = 1 [(validate.rules).string = {min_len: 1}];

  // Kafka cluster address.
  string bootstrap_servers = 2 [(validate.rules).string = {min_len: 1}];

  // Default number of partitions present in this cluster.
  // This is especially important for clients that do not specify partition in their payloads and depend on this value for hashing.
  int32 partition_count = 3 [(validate.rules).int32 = {gt: 0}];

  // Custom configuration passed to Kafka producer.
  map<string, string> producer_config = 4;
}

message ForwardingRule {
  // Cluster name.
  string target_cluster = 1;

  oneof trigger {
    // Intended place for future types of forwarding rules.
    string topic_prefix = 2;
  }
}
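
For illustration, a minimal instance of the KafkaMesh message could look like the textproto sketch below. It is not part of this commit; the host, port, cluster names, bootstrap servers, producer settings, and topic prefixes are all hypothetical placeholders.

# Hypothetical kafka_mesh filter configuration (textproto sketch, not from this commit).
advertised_host: "envoy.internal.example.com"  # must be reachable by clients, so not 0.0.0.0
advertised_port: 19092
upstream_clusters {
  cluster_name: "analytics"
  bootstrap_servers: "kafka-analytics-1:9092,kafka-analytics-2:9092"
  partition_count: 5
  producer_config { key: "acks" value: "all" }  # custom producer configuration entry
}
upstream_clusters {
  cluster_name: "audit"
  bootstrap_servers: "kafka-audit-1:9092"
  partition_count: 1
}
forwarding_rules {
  target_cluster: "analytics"
  topic_prefix: "events-"
}
forwarding_rules {
  target_cluster: "audit"
  topic_prefix: "audit-"
}

With rules like these, a request for a topic whose name starts with "events-" would be directed to the "analytics" cluster, while topics starting with "audit-" would go to the "audit" cluster.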

@@ -12,6 +12,7 @@ proto_library(
     "//contrib/envoy/extensions/filters/http/squash/v3:pkg",
     "//contrib/envoy/extensions/filters/http/sxg/v3alpha:pkg",
     "//contrib/envoy/extensions/filters/network/kafka_broker/v3:pkg",
+    "//contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha:pkg",
     "//contrib/envoy/extensions/filters/network/mysql_proxy/v3:pkg",
     "//contrib/envoy/extensions/filters/network/postgres_proxy/v3alpha:pkg",
     "//contrib/envoy/extensions/filters/network/rocketmq_proxy/v3:pkg",
