diff --git a/kong/plugins/apianalytics/handler.lua b/kong/plugins/apianalytics/handler.lua
index 36f79f0e1cc..e59b90dc93e 100644
--- a/kong/plugins/apianalytics/handler.lua
+++ b/kong/plugins/apianalytics/handler.lua
@@ -9,12 +9,19 @@ local APIANALYTICS_SOCKET = {
 }
 
 local function send_batch(premature, conf, alf)
+  -- Abort if there are no entries to send: the delayed timer may have fired
+  -- after the batch was already flushed by a request that reached the limit.
+  if table.getn(alf.har.log.entries) < 1 then
+    return
+  end
+
   local message = alf:to_json_string(conf.service_token)
+  local ok, err
 
   local client = http:new()
   client:set_timeout(50000) -- 5 sec
 
-  local ok, err = client:connect(APIANALYTICS_SOCKET.host, APIANALYTICS_SOCKET.port)
+  ok, err = client:connect(APIANALYTICS_SOCKET.host, APIANALYTICS_SOCKET.port)
   if not ok then
     ngx.log(ngx.ERR, "[apianalytics] failed to connect to the socket: "..err)
     return
@@ -27,7 +34,7 @@ local function send_batch(premature, conf, alf)
 
   -- close connection, or put it into the connection pool
   if res.headers["connection"] == "close" then
-    local ok, err = client:close()
+    ok, err = client:close()
     if not ok then
       ngx.log(ngx.ERR, "[apianalytics] failed to close: "..err)
     end
@@ -100,11 +107,19 @@ function APIAnalyticsHandler:log(conf)
   -- Simply adding the entry to the ALF
   local n_entries = ngx.shared.apianalytics[api_id]:add_entry(ngx)
 
-  -- Batch size reached, let's send the data
   if n_entries >= conf.batch_size then
+    -- Batch size reached, let's send the data
     local ok, err = ngx.timer.at(0, send_batch, conf, ngx.shared.apianalytics[api_id])
     if not ok then
-      ngx.log(ngx.ERR, "[apianalytics] failed to create timer: ", err)
+      ngx.log(ngx.ERR, "[apianalytics] failed to create batch sending timer: ", err)
+    end
+  else
+    -- Batch size not yet reached: schedule a delayed send so the data is
+    -- still flushed when traffic is too low for batch_size to be reached
+    -- in a reasonable time.
+    local ok, err = ngx.timer.at(conf.delay, send_batch, conf, ngx.shared.apianalytics[api_id])
+    if not ok then
+      ngx.log(ngx.ERR, "[apianalytics] failed to create delayed batch sending timer: ", err)
     end
   end
 end
diff --git a/kong/plugins/apianalytics/schema.lua b/kong/plugins/apianalytics/schema.lua
index c0d1b7c0bf2..444c12cc6d9 100644
--- a/kong/plugins/apianalytics/schema.lua
+++ b/kong/plugins/apianalytics/schema.lua
@@ -1,5 +1,6 @@
 return {
   service_token = { type = "string", required = true },
   batch_size = { type = "number", default = 100 },
-  log_body = { type = "boolean", default = false }
+  log_body = { type = "boolean", default = false },
+  delay = { type = "number", default = 10 }
 }
diff --git a/kong/plugins/filelog/handler.lua b/kong/plugins/filelog/handler.lua
index a1ab4dda6cc..4a950e5ef41 100644
--- a/kong/plugins/filelog/handler.lua
+++ b/kong/plugins/filelog/handler.lua
@@ -1,6 +1,5 @@
-local json = require "cjson"
+local log = require "kong.plugins.filelog.log"
 local BasePlugin = require "kong.plugins.base_plugin"
-local basic_serializer = require "kong.plugins.log_serializers.basic"
 
 local FileLogHandler = BasePlugin:extend()
 
@@ -10,9 +9,7 @@ end
 
 function FileLogHandler:log(conf)
   FileLogHandler.super.log(self)
-
-  local message = basic_serializer.serialize(ngx)
-  ngx.log(ngx.INFO, json.encode(message))
+  log.execute(conf)
 end
 
 return FileLogHandler
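For reference, below is a minimal sketch (not part of the patch) of the flushing strategy the handler change implements: entries accumulate until either conf.batch_size is reached, which triggers an immediate send, or conf.delay seconds pass with the batch only partially filled, which triggers a delayed send. The sketch assumes an OpenResty environment (ngx.timer.at, ngx.log); the buffer table and the on_log helper are hypothetical stand-ins for the plugin's actual ALF object and log handler.

-- Illustrative sketch only; "buffer" and "on_log" are hypothetical, the
-- send_batch / conf.batch_size / conf.delay names mirror the patch.
local function send_batch(premature, conf, buffer)
  -- A delayed timer may fire after the batch was already flushed by a
  -- request that reached conf.batch_size; in that case there is nothing to do.
  if #buffer.entries < 1 then
    return
  end
  -- ... serialize buffer.entries and send them over the socket ...
  buffer.entries = {}
end

local function on_log(conf, buffer, entry)
  table.insert(buffer.entries, entry)

  if #buffer.entries >= conf.batch_size then
    -- Batch is full: flush right away from a 0-delay timer (cosockets are
    -- not usable directly in the log phase).
    local ok, err = ngx.timer.at(0, send_batch, conf, buffer)
    if not ok then
      ngx.log(ngx.ERR, "failed to create batch sending timer: ", err)
    end
  else
    -- Batch not full yet: schedule a delayed flush so low-traffic APIs
    -- still get their data sent after conf.delay seconds.
    local ok, err = ngx.timer.at(conf.delay, send_batch, conf, buffer)
    if not ok then
      ngx.log(ngx.ERR, "failed to create delayed batch sending timer: ", err)
    end
  end
end

Note that every request below the threshold schedules its own delayed timer; the empty-entries guard at the top of send_batch is what turns the redundant firings into no-ops, which is the purpose of the new check added at the start of send_batch in the patch.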