Metrics Service Support (#220)

Signed-off-by: Rama <ramaraochavali@gmail.com>
pull/224/head
Authored by ramaraochavali, committed by Matt Klein
parent b236cc7c73
commit c54d0ded74
  1. README.md (1 line changed)
  2. api/BUILD (11 lines changed)
  3. api/metrics_service.proto (40 lines changed)
  4. bazel/api_build_system.bzl (5 lines changed)
  5. bazel/repositories.bzl (29 lines changed)
  6. test/build/BUILD (1 line changed)
  7. test/build/build_test.cc (1 line changed)

@@ -108,6 +108,7 @@ Unless otherwise stated, the APIs with the same names as v1 APIs have a similar
endpoints. The health check subset may not be a subset of the Envoy instance's
EDS endpoints.
* [Listener Discovery Service (LDS)](api/lds.proto). This new API supports dynamic discovery of the listener configuration (which ports to bind to, TLS details, filter chains, etc.).
* [Metric Service (MS)](api/metrics_service.proto). This new API allows Envoy to push (stream) metrics indefinitely for servers to consume; an illustrative streamed message is sketched after this list.
* [Rate Limit Service (RLS)](api/rls.proto)
* [Route Discovery Service (RDS)](api/rds.proto).
* [Secret Discovery Service (SDS)](api/sds.proto).
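For illustration only (not part of this change): a single message pushed on the metrics stream could look like the following, shown in protobuf text format. The node id, metric name, and value are hypothetical.

identifier {
  node {
    id: "envoy-node-1"                # hypothetical node identifier
  }
}
envoy_metrics {
  name: "cluster.upstream_rq_total"   # hypothetical Envoy counter
  type: COUNTER
  metric {
    counter {
      value: 42
    }
  }
}

The identifier block is only expected on the first message of a stream; subsequent messages carry just the metric families.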

@@ -132,6 +132,17 @@ api_proto_library(
    ],
)

api_proto_library(
    name = "metrics",
    srcs = ["metrics_service.proto"],
    has_services = 1,
    deps = [
        ":base",
        "@promotheus_metrics_model//:client_model_protos_lib",
    ],
    require_py = 0,
)

api_proto_library(
    name = "protocol",
    srcs = ["protocol.proto"],

@@ -0,0 +1,40 @@
syntax = "proto3";

package envoy.api.v2;

import "api/base.proto";
import "google/api/annotations.proto";
import "metrics.proto";

// Service for streaming metrics to a server that consumes the metrics data. It uses the
// Prometheus metric data model as the standard representation of metrics information.
service MetricsService {
  // Envoy will connect and send StreamMetricsMessage messages forever. It does not expect any
  // response to be sent, as nothing would be done in the case of failure.
  rpc StreamMetrics(stream StreamMetricsMessage) returns (StreamMetricsResponse) {
  }
}

message StreamMetricsResponse {}

message StreamMetricsMessage {
  message Identifier {
    // The node sending the metrics over the stream.
    Node node = 1;
  }

  // The identifier data is effectively structured metadata. As a performance optimization, it
  // will only be sent in the first message on the stream.
  Identifier identifier = 1;

  // A list of metric entries.
  repeated io.prometheus.client.MetricFamily envoy_metrics = 2;
}

// Configuration structure.
message MetricsServiceConfig {
  // The name of the upstream cluster that hosts the metrics service. The cluster must be
  // configured in the cluster manager.
  string cluster_name = 1;
}
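As a sketch of what a consumer of this service could look like (not part of this change), the following uses the synchronous gRPC C++ API. The generated header path, listening port, and logging are assumptions; the class names follow from the package and service defined above.

#include <iostream>
#include <memory>

#include <grpc++/grpc++.h>

#include "api/metrics_service.grpc.pb.h" // assumed path of the generated stubs

// Minimal collector for the client-streaming StreamMetrics RPC.
class MetricsCollector final : public envoy::api::v2::MetricsService::Service {
public:
  grpc::Status StreamMetrics(grpc::ServerContext* /*context*/,
                             grpc::ServerReader<envoy::api::v2::StreamMetricsMessage>* reader,
                             envoy::api::v2::StreamMetricsResponse* /*response*/) override {
    envoy::api::v2::StreamMetricsMessage message;
    while (reader->Read(&message)) {
      // The identifier is only expected on the first message of the stream.
      if (message.has_identifier()) {
        std::cout << "stream opened by node " << message.identifier().node().id() << std::endl;
      }
      for (const auto& family : message.envoy_metrics()) {
        std::cout << family.name() << ": " << family.metric_size() << " sample(s)" << std::endl;
      }
    }
    // Envoy does not act on the response, so it is only sent once the stream ends.
    return grpc::Status::OK;
  }
};

int main() {
  MetricsCollector collector;
  grpc::ServerBuilder builder;
  builder.AddListeningPort("0.0.0.0:9999", grpc::InsecureServerCredentials()); // hypothetical port
  builder.RegisterService(&collector);
  std::unique_ptr<grpc::Server> server = builder.BuildAndStart();
  server->Wait();
  return 0;
}

A real collector would typically add transport security and translate the io.prometheus.client.MetricFamily entries into its own storage format rather than printing them.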

@@ -23,7 +23,7 @@ def api_py_proto_library(name, srcs = [], deps = [], has_services = 0):
# TODO(htuch): has_services is currently ignored but will in future support
# gRPC stub generation.
def api_proto_library(name, srcs = [], deps = [], has_services = 0, require_py = 1):
    native.proto_library(
        name = name,
        srcs = srcs,
@@ -43,7 +43,8 @@ def api_proto_library(name, srcs = [], deps = [], has_services = 0):
        deps = [name],
        visibility = ["//visibility:public"],
    )
    if (require_py == 1):
        api_py_proto_library(name, srcs, deps, has_services)

def api_cc_test(name, srcs, proto_deps):
    native.cc_test(
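To illustrate the new flag (hypothetical target, not part of this change), a proto target that should skip Python stub generation would now be declared as:

api_proto_library(
    name = "example_api",       # hypothetical target name
    srcs = ["example.proto"],   # hypothetical proto source
    has_services = 1,
    require_py = 0,             # do not generate the api_py_proto_library() target
)

The metrics target in api/BUILD above opts out the same way, presumably because its Prometheus dependency only provides proto_library and cc_proto_library targets.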

@@ -1,4 +1,5 @@
GOOGLEAPIS_SHA = "5c6df0cd18c6a429eab739fb711c27f6e1393366" # May 14, 2017
PROMETHEUS_SHA = "6f3806018612930941127f2a7c6c453ba2c527d2" # Nov 02, 2017

def api_dependencies():
    native.new_http_archive(
@@ -44,3 +45,31 @@ py_proto_library(
)
""",
    )
    native.new_http_archive(
        name = "promotheus_metrics_model",
        strip_prefix = "client_model-" + PROMETHEUS_SHA,
        url = "https://github.com/prometheus/client_model/archive/" + PROMETHEUS_SHA + ".tar.gz",
        build_file_content = """
filegroup(
    name = "client_model_protos_src",
    srcs = [
        "metrics.proto",
    ],
    visibility = ["//visibility:public"],
)

proto_library(
    name = "client_model_protos_lib",
    srcs = [":client_model_protos_src"],
    visibility = ["//visibility:public"],
)

cc_proto_library(
    name = "client_model_protos",
    deps = [":client_model_protos_lib"],
    visibility = ["//visibility:public"],
)
""",
    )

@@ -12,6 +12,7 @@ api_cc_test(
        "//api:eds",
        "//api:hds",
        "//api:lds",
        "//api:metrics",
        "//api:rds",
        "//api:rls",
    ],

@@ -16,6 +16,7 @@ int main(int argc, char *argv[]) {
      "envoy.api.v2.HealthDiscoveryService.StreamHealthCheck",
      "envoy.api.v2.ListenerDiscoveryService.FetchListeners",
      "envoy.api.v2.ListenerDiscoveryService.StreamListeners",
      "envoy.api.v2.MetricsService.StreamMetrics",
      "envoy.api.v2.RouteDiscoveryService.FetchRoutes",
      "envoy.api.v2.RouteDiscoveryService.StreamRoutes",
      "envoy.api.v2.RateLimitService.ShouldRateLimit",
