From 74e4ac6997020eab4e9d1e9a7c61cd9d63f37ec1 Mon Sep 17 00:00:00 2001
From: Shashi Ranjan
Date: Wed, 12 Apr 2017 17:17:45 -0700
Subject: [PATCH] feat(plugins/datadog) new metrics and more flexible
 configuration

- Each metric now supports a configurable stat type, sample rate and
  consumer identifier, where applicable.
- Custom tags for Datadog.
- New metrics `upstream_latency`, `kong_latency` and `status_count_per_user`.
---
 kong-0.10.1-0.rockspec                        |   2 +
 kong/plugins/datadog/handler.lua              | 130 +++++++++----
 kong/plugins/datadog/migrations/cassandra.lua |  89 +++++++++
 kong/plugins/datadog/migrations/postgres.lua  |  95 ++++++++++
 kong/plugins/datadog/schema.lua               | 175 +++++++++++++++---
 kong/plugins/datadog/statsd_logger.lua        |   3 +-
 spec/03-plugins/08-datadog/01-log_spec.lua    |  91 ++++++---
 spec/03-plugins/08-datadog/02-schema_spec.lua | 140 ++++++++++++++
 8 files changed, 644 insertions(+), 81 deletions(-)
 create mode 100644 kong/plugins/datadog/migrations/cassandra.lua
 create mode 100644 kong/plugins/datadog/migrations/postgres.lua
 create mode 100644 spec/03-plugins/08-datadog/02-schema_spec.lua

diff --git a/kong-0.10.1-0.rockspec b/kong-0.10.1-0.rockspec
index 98608daa80f..b62d254ab82 100644
--- a/kong-0.10.1-0.rockspec
+++ b/kong-0.10.1-0.rockspec
@@ -260,6 +260,8 @@ build = {
     ["kong.plugins.loggly.handler"] = "kong/plugins/loggly/handler.lua",
     ["kong.plugins.loggly.schema"] = "kong/plugins/loggly/schema.lua",
 
+    ["kong.plugins.datadog.migrations.cassandra"] = "kong/plugins/datadog/migrations/cassandra.lua",
+    ["kong.plugins.datadog.migrations.postgres"] = "kong/plugins/datadog/migrations/postgres.lua",
     ["kong.plugins.datadog.handler"] = "kong/plugins/datadog/handler.lua",
     ["kong.plugins.datadog.schema"] = "kong/plugins/datadog/schema.lua",
     ["kong.plugins.datadog.statsd_logger"] = "kong/plugins/datadog/statsd_logger.lua",
diff --git a/kong/plugins/datadog/handler.lua b/kong/plugins/datadog/handler.lua
index 1c41bd72932..2a52c80b5fb 100644
--- a/kong/plugins/datadog/handler.lua
+++ b/kong/plugins/datadog/handler.lua
@@ -9,60 +9,122 @@ DatadogHandler.PRIORITY = 1
 local ngx_timer_at = ngx.timer.at
 local string_gsub = string.gsub
 local ipairs = ipairs
+local NGX_ERR = ngx.ERR
+local ngx_log = ngx.log
+
+local function allow_user_metric(message, identifier)
+  if message.consumer ~= nil and
+     message.consumer[identifier] ~= nil then
+    return true, message.consumer[identifier]
+  end
+  return false
+end
 
 local gauges = {
-  request_size = function (api_name, message, logger, tags)
-    local stat = api_name..".request.size"
-    logger:gauge(stat, message.request.size, 1, tags)
+  gauge = function (stat_name, stat_value, metric_config, logger)
+    logger:gauge(stat_name, stat_value, metric_config.sample_rate, metric_config.tags)
   end,
-  response_size = function (api_name, message, logger, tags)
-    local stat = api_name..".response.size"
-    logger:gauge(stat, message.response.size, 1, tags)
+  timer = function (stat_name, stat_value, metric_config, logger)
+    logger:timer(stat_name, stat_value, metric_config.tags)
   end,
-  status_count = function (api_name, message, logger, tags)
-    local stat = api_name..".request.status."..message.response.status
-    logger:counter(stat, 1, 1, tags)
+  counter = function (stat_name, _, metric_config, logger)
+    logger:counter(stat_name, 1, metric_config.sample_rate, metric_config.tags)
   end,
-  latency = function (api_name, message, logger, tags)
-    local stat = api_name..".latency"
-    logger:gauge(stat, message.latencies.request, 1, tags)
+  set = function (stat_name, stat_value, metric_config, logger)
+    logger:set(stat_name, stat_value, metric_config.tags)
   end,
-  request_count = function (api_name, message, logger, tags)
-    local stat = api_name..".request.count"
-    logger:counter(stat, 1, 1, tags)
+  histogram = function (stat_name, stat_value, metric_config, logger)
+    logger:histogram(stat_name, stat_value, metric_config.tags)
   end,
-  unique_users = function (api_name, message, logger, tags)
-    if message.authenticated_entity ~= nil and message.authenticated_entity.consumer_id ~= nil then
+  meter = function (stat_name, stat_value, metric_config, logger)
+    logger:meter(stat_name, stat_value, metric_config.tags)
+  end,
+  status_count = function (api_name, message, metric_config, logger)
+    local stat = api_name..".request.status."..message.response.status
+    local total_count = api_name..".request.status.total"
+    local sample_rate = metric_config.sample_rate
+    logger:counter(stat, 1, sample_rate, metric_config.tags)
+    logger:counter(total_count, 1, sample_rate, metric_config.tags)
+  end,
+  unique_users = function (api_name, message, metric_config, logger)
+    local identifier = metric_config.consumer_identifier
+    local allow_metric, cust_id = allow_user_metric(message, identifier)
+    if allow_metric then
       local stat = api_name..".user.uniques"
-      logger:set(stat, message.authenticated_entity.consumer_id, tags)
+      logger:set(stat, cust_id, metric_config.tags)
     end
   end,
-  request_per_user = function (api_name, message, logger, tags)
-    if message.authenticated_entity ~= nil and message.authenticated_entity.consumer_id ~= nil then
-      local stat = api_name.."."..string_gsub(message.authenticated_entity.consumer_id, "-", "_")..".request.count"
-      logger:counter(stat, 1, 1, tags)
+  request_per_user = function (api_name, message, metric_config, logger)
+    local identifier = metric_config.consumer_identifier
+    local allow_metric, cust_id = allow_user_metric(message, identifier)
+    if allow_metric then
+      local sample_rate = metric_config.sample_rate
+      local stat = api_name..".user."..string_gsub(cust_id, "-", "_")
+                   ..".request.count"
+      logger:counter(stat, 1, sample_rate, metric_config.tags)
     end
   end,
-  upstream_latency = function (api_name, message, logger, tags)
-    local stat = api_name..".upstream_latency"
-    logger:gauge(stat, message.latencies.proxy, 1, tags)
+  status_count_per_user = function (api_name, message, metric_config, logger)
+    local identifier = metric_config.consumer_identifier
+    local allow_metric, cust_id = allow_user_metric(message, identifier)
+    if allow_metric then
+      local stat = api_name..".user."..string_gsub(cust_id, "-", "_")
+                   ..".request.status."..message.response.status
+      local total_count = api_name..".user."..string_gsub(cust_id, "-", "_")
+                          ..".request.status.total"
+      local sample_rate = metric_config.sample_rate
+      logger:counter(stat, 1, sample_rate, metric_config.tags)
+      logger:counter(total_count, 1, sample_rate, metric_config.tags)
+    end
   end
 }
 
 local function log(premature, conf, message)
   if premature then return end
+
+  local api_name = string_gsub(message.api.name, "%.", "_")
+
+  local stat_name = {
+    request_size = api_name..".request.size",
+    response_size = api_name..".response.size",
+    latency = api_name..".latency",
+    upstream_latency = api_name..".upstream_latency",
+    kong_latency = api_name..".kong_latency",
+    request_count = api_name..".request.count"
+  }
+
+  local stat_value = {
+    request_size = message.request.size,
+    response_size = message.response.size,
+    latency = message.latencies.request,
+    upstream_latency = message.latencies.proxy,
+    kong_latency = message.latencies.kong,
+    request_count = 1
+  }
+
   local logger, err = statsd_logger:new(conf)
+
   if err then
-    ngx.log(ngx.ERR, "failed to create Statsd logger: ", err)
+    ngx_log(NGX_ERR, "failed to create Statsd logger: ", err)
     return
   end
-
-  local api_name = string_gsub(message.api.name, "%.", "_")
-  for _, metric in ipairs(conf.metrics) do
-    local gauge = gauges[metric]
-    if gauge then
-
-      gauge(api_name, message, logger, conf.tags[metric])
+  for _, metric_config in ipairs(conf.metrics) do
+    if metric_config.name ~= "status_count"
+       and metric_config.name ~= "unique_users"
+       and metric_config.name ~= "request_per_user"
+       and metric_config.name ~= "status_count_per_user" then
+      local stat_name = stat_name[metric_config.name]
+      local stat_value = stat_value[metric_config.name]
+      local gauge = gauges[metric_config.stat_type]
+      if stat_name ~= nil and gauge ~= nil and stat_value ~= nil then
+        gauge(stat_name, stat_value, metric_config, logger)
+      end
+
+    else
+      local gauge = gauges[metric_config.name]
+      if gauge ~= nil then
+        gauge(api_name, message, metric_config, logger)
+      end
     end
   end
 
@@ -79,7 +141,7 @@ function DatadogHandler:log(conf)
 
   local ok, err = ngx_timer_at(0, log, conf, message)
   if not ok then
-    ngx.log(ngx.ERR, "failed to create timer: ", err)
+    ngx_log(NGX_ERR, "failed to create timer: ", err)
   end
 end
 
diff --git a/kong/plugins/datadog/migrations/cassandra.lua b/kong/plugins/datadog/migrations/cassandra.lua
new file mode 100644
index 00000000000..c457e457da5
--- /dev/null
+++ b/kong/plugins/datadog/migrations/cassandra.lua
@@ -0,0 +1,89 @@
+return {
+  {
+    name = "2017-02-09-160000_datadog_schema_changes",
+    up = function(_, _, factory)
+
+      local plugins, err = factory.plugins:find_all {name = "datadog"}
+      if err then
+        return err
+      end
+
+      local default_metrics = {
+        request_count = {
+          name = "request_count",
+          stat_type = "counter",
+          sample_rate = 1,
+          tags = {"app:kong"}
+        },
+        latency = {
+          name = "latency",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        request_size = {
+          name = "request_size",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        status_count = {
+          name = "status_count",
+          stat_type = "counter",
+          sample_rate = 1,
+          tags = {"app:kong"}
+        },
+        response_size = {
+          name = "response_size",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        unique_users = {
+          name = "unique_users",
+          stat_type = "set",
+          consumer_identifier = "custom_id",
+          tags = {"app:kong"}
+        },
+        request_per_user = {
+          name = "request_per_user",
+          stat_type = "counter",
+          sample_rate = 1,
+          consumer_identifier = "custom_id",
+          tags = {"app:kong"}
+        },
+        upstream_latency = {
+          name = "upstream_latency",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        kong_latency = {
+          name = "kong_latency",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        status_count_per_user = {
+          name = "status_count_per_user",
+          stat_type = "counter",
+          sample_rate = 1,
+          consumer_identifier = "custom_id",
+          tags = {"app:kong"}
+        }
+      }
+
+      for _, plugin in ipairs(plugins) do
+        local new_metrics = {}
+        plugin.config.tags = nil
+        plugin.config.timeout = nil
+        if plugin.config.metrics ~= nil then
+          for _, metric in ipairs(plugin.config.metrics) do
+            table.insert(new_metrics, default_metrics[metric])
+          end
+          plugin.config.metrics = new_metrics
+          plugin.config.timeout = nil
+          local _, err = factory.plugins:update(plugin, plugin, {full = true})
+          if err then
+            return err
+          end
+        end
+      end
+    end
+  }
+}
diff --git a/kong/plugins/datadog/migrations/postgres.lua b/kong/plugins/datadog/migrations/postgres.lua
new file mode 100644
index 00000000000..fbbc6bd72f1
--- /dev/null
+++ b/kong/plugins/datadog/migrations/postgres.lua
@@ -0,0 +1,95 @@
+return {
+  {
+    name = "2017-02-09-160000_datadog_schema_changes",
+    up = function(_, _, dao)
+      local rows, err = dao.plugins:find_all {name = "datadog"}
+      if err then return err end
+
+      local default_metrics = {
+        request_count = {
+          name = "request_count",
+          stat_type = "counter",
+          sample_rate = 1,
+          tags = {"app:kong"}
+        },
+        latency = {
+          name = "latency",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        request_size = {
+          name = "request_size",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        status_count = {
+          name = "status_count",
+          stat_type = "counter",
+          sample_rate = 1,
+          tags = {"app:kong"}
+        },
+        response_size = {
+          name = "response_size",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        unique_users = {
+          name = "unique_users",
+          stat_type = "set",
+          consumer_identifier = "custom_id",
+          tags = {"app:kong"}
+        },
+        request_per_user = {
+          name = "request_per_user",
+          stat_type = "counter",
+          sample_rate = 1,
+          consumer_identifier = "custom_id",
+          tags = {"app:kong"}
+        },
+        upstream_latency = {
+          name = "upstream_latency",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        kong_latency = {
+          name = "kong_latency",
+          stat_type = "timer",
+          tags = {"app:kong"}
+        },
+        status_count_per_user = {
+          name = "status_count_per_user",
+          stat_type = "counter",
+          sample_rate = 1,
+          consumer_identifier = "custom_id",
+          tags = {"app:kong"}
+        }
+      }
+
+
+      for i = 1, #rows do
+        local row = rows[i]
+
+        local _, err = dao.plugins:delete(row)
+        if err then return err end
+        local new_metrics = {}
+        if row.config.metrics ~= nil then
+          for _, metric in ipairs(row.config.metrics) do
+            table.insert(new_metrics, default_metrics[metric])
+          end
+        end
+
+        local _, err = dao.plugins:insert {
+          name = "datadog",
+          api_id = row.api_id,
+          enabled = row.enabled,
+          config = {
+            host = row.config.host,
+            port = row.config.port,
+            metrics = new_metrics
+          }
+        }
+        if err then return err end
+      end
+    end
+  }
+}
diff --git a/kong/plugins/datadog/schema.lua b/kong/plugins/datadog/schema.lua
index 6c8e29b59b6..3368fc42535 100644
--- a/kong/plugins/datadog/schema.lua
+++ b/kong/plugins/datadog/schema.lua
@@ -8,41 +8,174 @@ local metrics = {
   "response_size",
   "unique_users",
   "request_per_user",
-  "upstream_latency"
+  "upstream_latency",
+  "kong_latency",
+  "status_count_per_user"
 }
+
+local stat_types = {
+  "gauge",
+  "timer",
+  "counter",
+  "histogram",
+  "meter",
+  "set"
+}
+
+local default_metrics = {
+  {
+    name = "request_count",
+    stat_type = "counter",
+    sample_rate = 1,
+    tags = {"app:kong"}
+  },
+  {
+    name = "latency",
+    stat_type = "timer",
+    tags = {"app:kong"}
+  },
+  {
+    name = "request_size",
+    stat_type = "timer",
+    tags = {"app:kong"}
+  },
+  {
+    name = "status_count",
+    stat_type = "counter",
+    sample_rate = 1,
+    tags = {"app:kong"}
+  },
+  {
+    name = "response_size",
+    stat_type = "timer",
+    tags = {"app:kong"}
+  },
+  {
+    name = "unique_users",
+    stat_type = "set",
+    consumer_identifier = "custom_id",
+    tags = {"app:kong"}
+  },
+  {
+    name = "request_per_user",
+    stat_type = "counter",
+    sample_rate = 1,
+    consumer_identifier = "custom_id",
+    tags = {"app:kong"}
+  },
+  {
+    name = "upstream_latency",
+    stat_type = "timer",
+    tags = {"app:kong"}
+  },
+  {
+    name = "kong_latency",
+    stat_type = "timer",
+    tags = {"app:kong"}
+  },
+  {
+    name = "status_count_per_user",
+    stat_type = "counter",
+    sample_rate = 1,
+    consumer_identifier = "custom_id",
+    tags = {"app:kong"}
+  }
+}
+
+local consumer_identifiers = {
+  "consumer_id",
+  "custom_id",
+  "username"
+}
+
+local function check_entry(entry)
+  local allowed_entry = {
+    name = true,
+    stat_type = true,
+    tags = true,
+    sample_rate = true,
+    consumer_identifier = true
+  }
+  for property, _ in pairs(entry) do
+    if allowed_entry[property] == nil then
+      return false, "property '"..property.."' is not supported"
+    end
+    allowed_entry[property] = nil
+  end
+  return true
+end
+
+local function check_value(table, element)
+  for _, value in pairs(table) do
+    if value == element then
+      return true
+    end
+  end
+  return false
+end
+
 -- entries must have colons to set the key and value apart
-local function check_for_value(value)
+local function check_tag_value(value)
+  if value == nil then return true end
   for i, entry in ipairs(value) do
     local ok = find(entry, ":")
-    if ok then 
+    if ok then
       local _,next = pl_utils.splitv(entry, ':')
       if not next or #next == 0 then
-        return false, "key '"..entry.."' has no value, "
+        return false, "key '"..entry.."' has no value"
+      end
+    end
+  end
+  return true
+end
+
+local function check_schema(value)
+  for _, entry in ipairs(value) do
+    local entry_ok, entry_error = check_entry(entry)
+    local name_ok = check_value(metrics, entry.name)
+    local type_ok = check_value(stat_types, entry.stat_type)
+    local tag_ok, tag_error = check_tag_value(entry.tags)
+    if not entry_ok then
+      return false, "malformed metrics:"..entry_error.."."
+    end
+    if entry.name == nil or entry.stat_type == nil then
+      return false, "name and stat_type must be defined for all stats"
+    end
+    if not name_ok then
+      return false, "unrecognized metric name: "..entry.name
+    end
+    if not type_ok then
+      return false, "unrecognized stat_type: "..entry.stat_type
+    end
+    if not tag_ok then
+      return false, "malformed tags: "..tag_error..". Tags must be list of key[:value]"
+    end
+    if entry.name == "unique_users" and entry.stat_type ~= "set" then
+      return false, "unique_users metric only works with stat_type 'set'"
+    end
+    if (entry.stat_type == "counter" or entry.stat_type == "gauge")
+       and ((entry.sample_rate == nil)
+            or (entry.sample_rate ~= nil and type(entry.sample_rate) ~= "number")
+            or (entry.sample_rate ~= nil and entry.sample_rate < 1)) then
+      return false, "sample rate must be defined for counters and gauges."
+    end
+    if (entry.name == "status_count_per_user" or entry.name == "request_per_user"
+        or entry.name == "unique_users") and entry.consumer_identifier == nil then
+      return false, "consumer_identifier must be defined for metric "..entry.name
+    end
+    if (entry.name == "status_count_per_user" or entry.name == "request_per_user"
+        or entry.name == "unique_users") and entry.consumer_identifier ~= nil then
+      local identifier_ok = check_value(consumer_identifiers, entry.consumer_identifier)
+      if not identifier_ok then
+        return false, "invalid consumer_identifier for metric "..entry.name..". Choices are consumer_id, custom_id, and username"
       end
     end
+    if (entry.name == "status_count" or entry.name == "status_count_per_user"
+        or entry.name == "request_per_user") and entry.stat_type ~= "counter" then
+      return false, entry.name.." metric only works with stat_type 'counter'"
+    end
   end
   return true
 end
 
+
 return {
   fields = {
     host = {required = true, type = "string", default = "localhost"},
     port = {required = true, type = "number", default = 8125},
-    metrics = {required = true, type = "array", enum = metrics, default = metrics},
-    tags = {
-      type = "table",
-      schema = {
-        fields = {
-          request_count = {type = "array", default = {}, func = check_for_value},
-          latency = {type = "array", default = {}, func = check_for_value},
-          request_size = {type = "array", default = {}, func = check_for_value},
-          status_count = {type = "array", default = {}, func = check_for_value},
-          response_size = {type = "array", default = {}, func = check_for_value},
-          unique_users = {type = "array", default = {}, func = check_for_value},
-          request_per_user = {type = "array", default = {}, func = check_for_value},
-          upstream_latency = {type = "array", default = {}, func = check_for_value}
-        }
-      }
-    },
-    timeout = {type = "number", default = 10000}
+    metrics = {required = true, type = "array", default = default_metrics, func = check_schema}
   }
 }
diff --git a/kong/plugins/datadog/statsd_logger.lua b/kong/plugins/datadog/statsd_logger.lua
index dd78a9b0a41..663fd2421e8 100644
--- a/kong/plugins/datadog/statsd_logger.lua
+++ b/kong/plugins/datadog/statsd_logger.lua
@@ -10,7 +10,6 @@ statsd_mt.__index = statsd_mt
 
 function statsd_mt:new(conf)
   local sock = ngx_socket_udp()
-  sock:settimeout(conf.timeout)
   local ok, err = sock:setpeername(conf.host, conf.port)
   if not ok then
     return nil, "failed to connect to "..conf.host..":"..conf.port..": "..err
@@ -31,7 +30,7 @@ function statsd_mt:create_statsd_message(stat, delta, kind, sample_rate, tags)
   if sample_rate and sample_rate ~= 1 then
     rate = "|@"..sample_rate
   end
-  
+
   if tags and #tags > 0 then
     str_tags = "|#"..table_concat(tags, ",")
   end
diff --git a/spec/03-plugins/08-datadog/01-log_spec.lua b/spec/03-plugins/08-datadog/01-log_spec.lua
index e1ff2cd14eb..d82bc3a7894 100644
--- a/spec/03-plugins/08-datadog/01-log_spec.lua
+++ b/spec/03-plugins/08-datadog/01-log_spec.lua
@@ -4,11 +4,24 @@ local threads = require "llthreads2.ex"
 describe("Plugin: datadog (log)", function()
   local client
   setup(function()
+    local consumer1 = assert(helpers.dao.consumers:insert {
+      username = "foo",
+      custom_id = "bar"
+    })
+    assert(helpers.dao.keyauth_credentials:insert {
+      key = "kong",
+      consumer_id = consumer1.id
+    })
+
     local api1 = assert(helpers.dao.apis:insert {
       name = "datadog1_com",
       hosts = { "datadog1.com" },
       upstream_url = "http://mockbin.com"
     })
+    assert(helpers.dao.plugins:insert {
+      name = "key-auth",
+      api_id = api1.id
+    })
     local api2 = assert(helpers.dao.apis:insert {
       name = "datadog2_com",
       hosts = { "datadog2.com" },
@@ -25,8 +38,7 @@ describe("Plugin: datadog (log)", function()
       api_id = api1.id,
       config = {
         host = "127.0.0.1",
-        port = 9999,
-        tags = {}
+        port = 9999
       }
     })
     assert(helpers.dao.plugins:insert {
@@ -35,8 +47,18 @@ describe("Plugin: datadog (log)", function()
       config = {
         host = "127.0.0.1",
         port = 9999,
-        metrics = "request_count,status_count",
-        tags = {}
+        metrics = {
+          {
+            name = "status_count",
+            stat_type = "counter",
+            sample_rate = 1
+          },
+          {
+            name = "request_count",
+            stat_type = "counter",
+            sample_rate = 1
+          }
+        }
       }
     })
     assert(helpers.dao.plugins:insert {
@@ -45,11 +67,25 @@ describe("Plugin: datadog (log)", function()
       config = {
         host = "127.0.0.1",
         port = 9999,
-        metrics = "request_count,status_count,latency",
-        tags = {
-          request_count = {"T1:V1"},
-          status_count = {"T2:V2,T3:V3,T4"},
-          latency = {"T2:V2:V3,T4"},
+        metrics = {
+          {
+            name = "status_count",
+            stat_type = "counter",
+            sample_rate = 1,
+            tags = {"T1:V1"},
+          },
+          {
+            name = "request_count",
+            stat_type = "counter",
+            sample_rate = 1,
+            tags = {"T2:V2,T3:V3,T4"},
+          },
+          {
+            name = "latency",
+            stat_type = "gauge",
+            sample_rate = 1,
+            tags = {"T2:V2:V3,T4"}
+          }
         }
       }
     })
@@ -71,7 +107,7 @@ describe("Plugin: datadog (log)", function()
       server:setoption("reuseaddr", true)
       server:setsockname("127.0.0.1", 9999)
       local gauges = {}
-      for _ = 1, 6 do
+      for _ = 1, 12 do
         gauges[#gauges+1] = server:receive()
       end
       server:close()
@@ -82,7 +118,7 @@ describe("Plugin: datadog (log)", function()
 
     local res = assert(client:send {
       method = "GET",
-      path = "/status/200",
+      path = "/status/200/?apikey=kong",
       headers = {
        ["Host"] = "datadog1.com"
      }
@@ -91,13 +127,19 @@ describe("Plugin: datadog (log)", function()
 
     local ok, gauges = thread:join()
     assert.True(ok)
-    assert.equal(6, #gauges)
-    assert.contains("kong.datadog1_com.request.count:1|c", gauges)
-    assert.contains("kong.datadog1_com.latency:%d+|g", gauges, true)
-    assert.contains("kong.datadog1_com.request.size:%d+|g", gauges, true)
-    assert.contains("kong.datadog1_com.request.status.200:1|c", gauges)
-    assert.contains("kong.datadog1_com.response.size:%d+|g", gauges, true)
-    assert.contains("kong.datadog1_com.upstream_latency:%d+|g", gauges, true)
+    assert.equal(12, #gauges)
+    assert.contains("kong.datadog1_com.request.count:1|c|#app:kong", gauges)
+    assert.contains("kong.datadog1_com.latency:%d+|ms|#app:kong", gauges, true)
+    assert.contains("kong.datadog1_com.request.size:%d+|ms|#app:kong", gauges, true)
+    assert.contains("kong.datadog1_com.request.status.200:1|c|#app:kong", gauges)
+    assert.contains("kong.datadog1_com.request.status.total:1|c|#app:kong", gauges)
+    assert.contains("kong.datadog1_com.response.size:%d+|ms|#app:kong", gauges, true)
+    assert.contains("kong.datadog1_com.upstream_latency:%d+|ms|#app:kong", gauges, true)
+    assert.contains("kong.datadog1_com.kong_latency:%d*|ms|#app:kong", gauges, true)
+    assert.contains("kong.datadog1_com.user.uniques:.*|s|#app:kong", gauges, true)
+    assert.contains("kong.datadog1_com.user.*.request.count:1|c|#app:kong", gauges, true)
+    assert.contains("kong.datadog1_com.user.*.request.status.total:1|c|#app:kong", gauges, true)
+    assert.contains("kong.datadog1_com.user.*.request.status.200:1|c|#app:kong", gauges, true)
   end)
 
   it("logs only given metrics", function()
@@ -109,7 +151,7 @@ describe("Plugin: datadog (log)", function()
       server:setoption("reuseaddr", true)
      server:setsockname("127.0.0.1", 9999)
      local gauges = {}
-      for _ = 1, 2 do
+      for _ = 1, 3 do
        gauges[#gauges+1] = server:receive()
      end
      server:close()
@@ -129,9 +171,10 @@ describe("Plugin: datadog (log)", function()
 
     local ok, gauges = thread:join()
     assert.True(ok)
-    assert.equal(2, #gauges)
+    assert.equal(3, #gauges)
     assert.contains("kong.datadog2_com.request.count:1|c", gauges)
     assert.contains("kong.datadog2_com.request.status.200:1|c", gauges)
+    assert.contains("kong.datadog2_com.request.status.total:1|c", gauges)
   end)
 
   it("logs metrics with tags", function()
@@ -143,7 +186,7 @@ describe("Plugin: datadog (log)", function()
       server:setoption("reuseaddr", true)
      server:setsockname("127.0.0.1", 9999)
      local gauges = {}
-      for _ = 1, 3 do
+      for _ = 1, 4 do
        gauges[#gauges+1] = server:receive()
      end
      server:close()
@@ -163,9 +206,9 @@ describe("Plugin: datadog (log)", function()
 
     local ok, gauges = thread:join()
     assert.True(ok)
-    assert.equal(3, #gauges)
-    assert.contains("kong.datadog3_com.request.count:1|c|#T1:V1", gauges)
-    assert.contains("kong.datadog3_com.request.status.200:1|c|#T2:V2,T3:V3,T4", gauges)
+    assert.contains("kong.datadog3_com.request.count:1|c|#T2:V2,T3:V3,T4", gauges)
+    assert.contains("kong.datadog3_com.request.status.200:1|c|#T1:V1", gauges)
+    assert.contains("kong.datadog3_com.request.status.total:1|c|#T1:V1", gauges)
     assert.contains("kong.datadog3_com.latency:%d+|g|#T2:V2:V3,T4", gauges, true)
   end)
 end)
diff --git a/spec/03-plugins/08-datadog/02-schema_spec.lua b/spec/03-plugins/08-datadog/02-schema_spec.lua
new file mode 100644
index 00000000000..45e4d9a776e
--- /dev/null
+++ b/spec/03-plugins/08-datadog/02-schema_spec.lua
@@ -0,0 +1,140 @@
+local schemas = require "kong.dao.schemas_validation"
+local datadog_schema = require "kong.plugins.datadog.schema"
+local validate_entity = schemas.validate_entity
+
+describe("Plugin: datadog (schema)", function()
+  it("accepts empty config #o", function()
+    local ok, err = validate_entity({}, datadog_schema)
+    assert.is_nil(err)
+    assert.True(ok)
+  end)
+  it("accepts empty metrics", function()
+    local metrics_input = {}
+    local ok, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.is_nil(err)
+    assert.True(ok)
+  end)
+  it("accepts just one metric", function()
+    local metrics_input = {
+      {
+        name = "request_count",
+        stat_type = "counter",
+        sample_rate = 1,
+        tags = {"K1:V1"}
+      }
+    }
+    local ok, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.is_nil(err)
+    assert.True(ok)
+  end)
+  it("rejects if name or stat not defined", function()
+    local metrics_input = {
+      {
+        name = "request_count",
+        sample_rate = 1
+      }
+    }
+    local _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+    assert.equal("name and stat_type must be defined for all stats", err.metrics)
+    local metrics_input = {
+      {
+        stat_type = "counter",
+        sample_rate = 1
+      }
+    }
+    _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+    assert.equal("name and stat_type must be defined for all stats", err.metrics)
+  end)
+  it("rejects counters without sample rate", function()
+    local metrics_input = {
+      {
+        name = "request_count",
+        stat_type = "counter",
+      }
+    }
+    local _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+  end)
+  it("rejects invalid metrics name", function()
+    local metrics_input = {
+      {
+        name = "invalid_name",
+        stat_type = "counter",
+      }
+    }
+    local _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+    assert.equal("unrecognized metric name: invalid_name", err.metrics)
+  end)
+  it("rejects invalid stat type", function()
+    local metrics_input = {
+      {
+        name = "request_count",
+        stat_type = "invalid_stat",
+      }
+    }
+    local _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+    assert.equal("unrecognized stat_type: invalid_stat", err.metrics)
+  end)
+  it("rejects if consumer identifier missing", function()
+    local metrics_input = {
+      {
+        name = "status_count_per_user",
+        stat_type = "counter",
+        sample_rate = 1
+      }
+    }
+    local _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+    assert.equal("consumer_identifier must be defined for metric status_count_per_user", err.metrics)
+  end)
+  it("rejects if metric has wrong stat type", function()
+    local metrics_input = {
+      {
+        name = "unique_users",
+        stat_type = "counter"
+      }
+    }
+    local _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+    assert.equal("unique_users metric only works with stat_type 'set'", err.metrics)
+    metrics_input = {
+      {
+        name = "status_count",
+        stat_type = "set",
+        sample_rate = 1
+      }
+    }
+    _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+    assert.equal("status_count metric only works with stat_type 'counter'", err.metrics)
+  end)
+  it("rejects if tags malformed", function()
+    local metrics_input = {
+      {
+        name = "status_count",
+        stat_type = "counter",
+        sample_rate = 1,
+        tags = {"T1:"}
+      }
+    }
+    local _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.not_nil(err)
+    assert.equal("malformed tags: key 'T1:' has no value. Tags must be list of key[:value]", err.metrics)
+  end)
+  it("accepts if tags is an empty list", function()
+    local metrics_input = {
+      {
+        name = "status_count",
+        stat_type = "counter",
+        sample_rate = 1,
+        tags = {}
+      }
+    }
+    local _, err = validate_entity({ metrics = metrics_input}, datadog_schema)
+    assert.is_nil(err)
+  end)
+end)
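
For reference only (not part of the patch): a minimal sketch of how a plugin row configured against the new per-metric schema might look, modelled on the spec setup above. The api_id value and the chosen metric entries are illustrative placeholders, not a recommended set.

-- Hypothetical example of the new `metrics` configuration shape.
assert(helpers.dao.plugins:insert {
  name = "datadog",
  api_id = api1.id,                    -- placeholder API id
  config = {
    host = "127.0.0.1",
    port = 8125,
    metrics = {
      {
        name = "request_count",        -- must be one of the allowed metric names
        stat_type = "counter",         -- counters and gauges require a numeric sample_rate >= 1
        sample_rate = 1,
        tags = {"app:kong", "env:dev"} -- free-form key[:value] Datadog tags
      },
      {
        name = "unique_users",
        stat_type = "set",               -- unique_users only accepts stat_type "set"
        consumer_identifier = "username" -- consumer_id, custom_id or username
      }
    }
  }
})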