feat(datadog) new metrics and more flexible configuration
- Each metric now supports a configurable stat type,
  sample rate, and consumer identifier where applicable.
- Custom prefix for stat names.
- Custom tags for Datadog.
- New metrics `upstream_latency`, `kong_latency` and `status_count_per_user`.
- Code style cleanup.
Shashi Ranjan committed Jun 22, 2017
1 parent 04e5127 commit 44b446c
Showing 8 changed files with 799 additions and 161 deletions.
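Taken together with the handler and migration code below, each entry in config.metrics becomes a table rather than a bare metric name. A hypothetical plugin configuration under the new layout might look like the sketch below; the field names (name, stat_type, sample_rate, consumer_identifier, tags, prefix) come from this commit, but the host, port, and tag values are illustrative assumptions, and the authoritative definition is the plugin schema, which is among the changed files not shown on this page.

-- Illustrative only: an assumed configuration under the new per-metric layout.
-- Values are made up; field names are taken from the handler and migrations below.
local example_config = {
  host   = "127.0.0.1",
  port   = 8125,
  prefix = "kong",               -- custom prefix prepended to every stat name
  metrics = {
    {
      name        = "request_count",
      stat_type   = "counter",
      sample_rate = 1,
      tags        = { "app:kong" },           -- hypothetical Datadog tags
    },
    {
      name                = "status_count_per_user",
      stat_type           = "counter",
      sample_rate         = 1,
      consumer_identifier = "consumer_id",    -- or "custom_id" / "username"
    },
  },
}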
2 changes: 2 additions & 0 deletions kong-0.10.3-0.rockspec
@@ -255,6 +255,8 @@ build = {
["kong.plugins.loggly.handler"] = "kong/plugins/loggly/handler.lua",
["kong.plugins.loggly.schema"] = "kong/plugins/loggly/schema.lua",

["kong.plugins.datadog.migrations.cassandra"] = "kong/plugins/datadog/migrations/cassandra.lua",
["kong.plugins.datadog.migrations.postgres"] = "kong/plugins/datadog/migrations/postgres.lua",
["kong.plugins.datadog.handler"] = "kong/plugins/datadog/handler.lua",
["kong.plugins.datadog.schema"] = "kong/plugins/datadog/schema.lua",
["kong.plugins.datadog.statsd_logger"] = "kong/plugins/datadog/statsd_logger.lua",
148 changes: 104 additions & 44 deletions kong/plugins/datadog/handler.lua
@@ -1,76 +1,136 @@
-local BasePlugin = require "kong.plugins.base_plugin"
+local BasePlugin       = require "kong.plugins.base_plugin"
 local basic_serializer = require "kong.plugins.log-serializers.basic"
-local statsd_logger = require "kong.plugins.datadog.statsd_logger"
+local statsd_logger    = require "kong.plugins.datadog.statsd_logger"

-local DatadogHandler = BasePlugin:extend()
+local ngx_log       = ngx.log
+local ngx_timer_at  = ngx.timer.at
+local string_gsub   = string.gsub
+local pairs         = pairs
+local string_format = string.format
+local NGX_ERR       = ngx.ERR
+
+
+local DatadogHandler    = BasePlugin:extend()
 DatadogHandler.PRIORITY = 1

-local ngx_timer_at = ngx.timer.at
-local string_gsub = string.gsub
-local ipairs = ipairs
-
-local gauges = {
-  request_size = function (api_name, message, logger, tags)
-    local stat = api_name .. ".request.size"
-    logger:gauge(stat, message.request.size, 1, tags)
-  end,
-  response_size = function (api_name, message, logger, tags)
-    local stat = api_name .. ".response.size"
-    logger:gauge(stat, message.response.size, 1, tags)
-  end,
-  status_count = function (api_name, message, logger, tags)
-    local stat = api_name .. ".request.status." .. message.response.status
-    logger:counter(stat, 1, 1, tags)
-  end,
-  latency = function (api_name, message, logger, tags)
-    local stat = api_name .. ".latency"
-    logger:gauge(stat, message.latencies.request, 1, tags)
-  end,
-  request_count = function (api_name, message, logger, tags)
-    local stat = api_name .. ".request.count"
-    logger:counter(stat, 1, 1, tags)
-  end,
-  unique_users = function (api_name, message, logger, tags)
-    if message.authenticated_entity ~= nil and message.authenticated_entity.consumer_id ~= nil then
-      local stat = api_name .. ".user.uniques"
-      logger:set(stat, message.authenticated_entity.consumer_id, tags)
-    end
-  end,
-  request_per_user = function (api_name, message, logger, tags)
-    if message.authenticated_entity ~= nil and message.authenticated_entity.consumer_id ~= nil then
-      local stat = api_name .. "." .. string_gsub(message.authenticated_entity.consumer_id, "-", "_") .. ".request.count"
-      logger:counter(stat, 1, 1, tags)
-    end
-  end,
-  upstream_latency = function (api_name, message, logger, tags)
-    local stat = api_name .. ".upstream_latency"
-    logger:gauge(stat, message.latencies.proxy, 1, tags)
-  end
-}
+local get_consumer_id = {
+  consumer_id = function(consumer)
+    return consumer and string_gsub(consumer.id, "-", "_")
+  end,
+  custom_id = function(consumer)
+    return consumer and consumer.custom_id
+  end,
+  username = function(consumer)
+    return consumer and consumer.username
+  end
+}
+
+
+local metrics = {
+  status_count = function (api_name, message, metric_config, logger)
+    local fmt = string_format("%s.request.status", api_name,
+                              message.response.status)
+
+    logger:send_statsd(string_format("%s.%s", fmt, message.response.status),
+                       1, logger.stat_types.counter,
+                       metric_config.sample_rate, metric_config.tags)
+
+    logger:send_statsd(string_format("%s.%s", fmt, "total"), 1,
+                       logger.stat_types.counter,
+                       metric_config.sample_rate, metric_config.tags)
+  end,
+  unique_users = function (api_name, message, metric_config, logger)
+    local get_consumer_id = get_consumer_id[metric_config.consumer_identifier]
+    local consumer_id = get_consumer_id(message.consumer)
+
+    if consumer_id then
+      local stat = string_format("%s.user.uniques", api_name)
+
+      logger:send_statsd(stat, consumer_id, logger.stat_types.set,
+                         nil, metric_config.tags)
+    end
+  end,
+  request_per_user = function (api_name, message, metric_config, logger)
+    local get_consumer_id = get_consumer_id[metric_config.consumer_identifier]
+    local consumer_id = get_consumer_id(message.consumer)
+
+    if consumer_id then
+      local stat = string_format("%s.user.%s.request.count", api_name, consumer_id)
+
+      logger:send_statsd(stat, 1, logger.stat_types.counter,
+                         metric_config.sample_rate, metric_config.tags)
+    end
+  end,
+  status_count_per_user = function (api_name, message, metric_config, logger)
+    local get_consumer_id = get_consumer_id[metric_config.consumer_identifier]
+    local consumer_id = get_consumer_id(message.consumer)
+
+    if consumer_id then
+      local fmt = string_format("%s.user.%s.request.status", api_name, consumer_id)
+
+      logger:send_statsd(string_format("%s.%s", fmt, message.response.status),
+                         1, logger.stat_types.counter,
+                         metric_config.sample_rate, metric_config.tags)
+
+      logger:send_statsd(string_format("%s.%s", fmt, "total"),
+                         1, logger.stat_types.counter,
+                         metric_config.sample_rate, metric_config.tags)
+    end
+  end,
+}

+
 local function log(premature, conf, message)
   if premature then
     return
   end

+  local api_name = string_gsub(message.api.name, "%.", "_")
+  local stat_name = {
+    request_size = api_name .. ".request.size",
+    response_size = api_name .. ".response.size",
+    latency = api_name .. ".latency",
+    upstream_latency = api_name .. ".upstream_latency",
+    kong_latency = api_name .. ".kong_latency",
+    request_count = api_name .. ".request.count",
+  }
+  local stat_value = {
+    request_size = message.request.size,
+    response_size = message.response.size,
+    latency = message.latencies.request,
+    upstream_latency = message.latencies.proxy,
+    kong_latency = message.latencies.kong,
+    request_count = 1,
+  }
+
   local logger, err = statsd_logger:new(conf)
   if err then
-    ngx.log(ngx.ERR, "failed to create Statsd logger: ", err)
+    ngx_log(NGX_ERR, "failed to create Statsd logger: ", err)
     return
   end

-  local api_name = string_gsub(message.api.name, "%.", "_")
-  for _, metric in ipairs(conf.metrics) do
-    local gauge = gauges[metric]
-    if gauge then
-      gauge(api_name, message, logger, conf.tags[metric])
-    end
-  end
+  for _, metric_config in pairs(conf.metrics) do
+    local metric = metrics[metric_config.name]
+
+    if metric then
+      metric(api_name, message, metric_config, logger)
+
+    else
+      local stat_name = stat_name[metric_config.name]
+      local stat_value = stat_value[metric_config.name]
+
+      logger:send_statsd(stat_name, stat_value,
+                         logger.stat_types[metric_config.stat_type],
+                         metric_config.sample_rate, metric_config.tags)
+    end
+  end

   logger:close_socket()
 end


 function DatadogHandler:new()
   DatadogHandler.super.new(self, "datadog")
 end
@@ -81,7 +141,7 @@ function DatadogHandler:log(conf)

   local ok, err = ngx_timer_at(0, log, conf, message)
   if not ok then
-    ngx.log(ngx.ERR, "failed to create timer: ", err)
+    ngx_log(NGX_ERR, "failed to create timer: ", err)
   end
 end

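The statsd_logger module referenced above is not part of this diff, so its exact wire behavior is an assumption; conceptually, each logger:send_statsd(stat, value, type, sample_rate, tags) call maps onto one DogStatsD datagram. A minimal sketch of that mapping, assuming the standard DogStatsD line format metric:value|type|@sample_rate|#tags:

-- Illustrative sketch only, not the plugin's actual statsd_logger: how a
-- send_statsd-style call could be serialized into a DogStatsD datagram,
-- assuming the standard "metric:value|type|@rate|#tag1,tag2" format.
local function format_dogstatsd(prefix, stat, value, stat_type, sample_rate, tags)
  local line = string.format("%s.%s:%s|%s", prefix, stat, value, stat_type)
  if sample_rate and sample_rate ~= 1 then
    line = line .. "|@" .. sample_rate
  end
  if tags and #tags > 0 then
    line = line .. "|#" .. table.concat(tags, ",")
  end
  return line
end

-- format_dogstatsd("kong", "my_api.request.count", 1, "c", 1, { "app:kong" })
--   --> "kong.my_api.request.count:1|c|#app:kong"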
91 changes: 91 additions & 0 deletions kong/plugins/datadog/migrations/cassandra.lua
@@ -0,0 +1,91 @@
return {
  {
    name = "2017-06-09-160000_datadog_schema_changes",
    up = function(_, _, dao)

      local plugins, err = dao.plugins:find_all { name = "datadog" }
      if err then
        return err
      end

      local default_metrics = {
        request_count = {
          name = "request_count",
          stat_type = "counter",
          sample_rate = 1,
        },
        latency = {
          name = "latency",
          stat_type = "gauge",
          sample_rate = 1,
        },
        request_size = {
          name = "request_size",
          stat_type = "gauge",
          sample_rate = 1,
        },
        status_count = {
          name = "status_count",
          stat_type = "counter",
          sample_rate = 1,
        },
        response_size = {
          name = "response_size",
          stat_type = "timer",
        },
        unique_users = {
          name = "unique_users",
          stat_type = "set",
          consumer_identifier = "consumer_id",
        },
        request_per_user = {
          name = "request_per_user",
          stat_type = "counter",
          sample_rate = 1,
          consumer_identifier = "consumer_id",
        },
        upstream_latency = {
          name = "upstream_latency",
          stat_type = "gauge",
          sample_rate = 1,
        },
      }

      for i = 1, #plugins do
        local datadog = plugins[i]
        local _, err = dao.plugins:delete(datadog)
        if err then
          return err
        end

        local tags = datadog.config.tags or {}
        local new_metrics = {}
        if datadog.config.metrics then
          for _, metric in ipairs(datadog.config.metrics) do
            local new_metric = default_metrics[metric]
            if new_metric then
              new_metric.tags = tags[metric]
              table.insert(new_metrics, new_metric)
            end
          end
        end

        local _, err = dao.plugins:insert {
          name = "datadog",
          api_id = datadog.api_id,
          enabled = datadog.enabled,
          config = {
            host = datadog.config.host,
            port = datadog.config.port,
            metrics = new_metrics,
            prefix = "kong",
          }
        }

        if err then
          return err
        end
      end
    end
  }
}
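Concretely, this migration rewrites each existing datadog plugin row from the old list-of-names config into the new per-metric tables, carrying over host, port, and any per-metric tags, and setting the prefix to "kong". A hedged before/after sketch; the old shape is inferred from the handler code removed in this commit, and the host, port, and tag values are illustrative:

-- Illustrative only: inferred old-style config vs. the config this migration
-- writes back for the same plugin row.
local old_config = {
  host    = "127.0.0.1",
  port    = 8125,
  metrics = { "request_count", "latency" },       -- bare metric names
  tags    = { request_count = { "app:kong" } },   -- per-metric tags (assumed shape)
}

-- After the migration, the plugin is re-inserted with roughly:
local new_config = {
  host    = "127.0.0.1",
  port    = 8125,
  prefix  = "kong",
  metrics = {
    { name = "request_count", stat_type = "counter", sample_rate = 1,
      tags = { "app:kong" } },
    { name = "latency", stat_type = "gauge", sample_rate = 1 },
  },
}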
93 changes: 93 additions & 0 deletions kong/plugins/datadog/migrations/postgres.lua
@@ -0,0 +1,93 @@
return {
  {
    name = "2017-06-09-160000_datadog_schema_changes",
    up = function(_, _, dao)

      local plugins, err = dao.plugins:find_all { name = "datadog" }
      if err then
        return err
      end

      local default_metrics = {
        request_count = {
          name = "request_count",
          stat_type = "counter",
          sample_rate = 1,
        },
        latency = {
          name = "latency",
          stat_type = "gauge",
          sample_rate = 1,
        },
        request_size = {
          name = "request_size",
          stat_type = "gauge",
          sample_rate = 1,
        },
        status_count = {
          name = "status_count",
          stat_type = "counter",
          sample_rate = 1,
        },
        response_size = {
          name = "response_size",
          stat_type = "timer",
        },
        unique_users = {
          name = "unique_users",
          stat_type = "set",
          consumer_identifier = "consumer_id",
        },
        request_per_user = {
          name = "request_per_user",
          stat_type = "counter",
          sample_rate = 1,
          consumer_identifier = "consumer_id",
        },
        upstream_latency = {
          name = "upstream_latency",
          stat_type = "gauge",
          sample_rate = 1,
        },
      }

      for i = 1, #plugins do
        local datadog = plugins[i]
        local _, err = dao.plugins:delete(datadog)
        if err then
          return err
        end

        local tags = datadog.config.tags or {}
        local new_metrics = {}
        if datadog.config.metrics then
          for _, metric in ipairs(datadog.config.metrics) do
            local new_metric = default_metrics[metric]
            if new_metric then
              new_metric.tags = tags[metric]
              table.insert(new_metrics, new_metric)
            end
          end
        end

        local _, err = dao.plugins:insert {
          name = "datadog",
          api_id = datadog.api_id,
          enabled = datadog.enabled,
          config = {
            host = datadog.config.host,
            port = datadog.config.port,
            metrics = new_metrics,
            prefix = "kong",
          }
        }

        if err then
          return err
        end
      end
    end
  },
  down = function()
  end,
}