refactor(galileo) use lua-cjson empty_array_mt
Now that openresty/lua-cjson#6 has been merged
and OpenResty has been bumped to 1.9.15.1, we can use more granular
empty array encoding for ALF serialization.
thibaultcha committed Aug 6, 2016
1 parent 1d773b7 commit 04d8c4d
Showing 3 changed files with 11 additions and 51 deletions.
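
The commit message refers to lua-cjson's `empty_array_mt` metatable. As a quick illustration of the behaviour this change relies on, here is a minimal sketch, assuming the lua-cjson build bundled with OpenResty 1.9.15.1 or later (which exposes `cjson.empty_array_mt`): an empty Lua table normally encodes as a JSON object, while a table tagged with the metatable encodes as an empty JSON array.

```lua
-- Minimal sketch: granular empty-array encoding with the OpenResty lua-cjson fork.
-- Assumes cjson.empty_array_mt is available (OpenResty >= 1.9.15.1).
local cjson = require "cjson.safe"

print(cjson.encode({}))                                      -- {}
print(cjson.encode(setmetatable({}, cjson.empty_array_mt)))  -- []

-- once the table is non-empty, it encodes as a regular JSON array
local headers = setmetatable({}, cjson.empty_array_mt)
headers[1] = { name = "Accept", value = "*/*" }  -- hypothetical header entry
print(cjson.encode(headers))  -- [{"name":"Accept","value":"*/*"}]
```
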
CHANGELOG.md: 7 changes (4 additions & 3 deletions)
@@ -23,11 +23,12 @@ The main focus of this release is Kong's new CLI. With a simpler configuration f
- `request_host`: internationalized url support; utf-8 domain names through punycode support and paths through %-encoding. [#1300](https://github.com/Mashape/kong/issues/1300)
- Implements caching locks when fetching database configuration (APIs, Plugins...) to avoid dog pile effect on cold nodes. [#1402](https://github.com/Mashape/kong/pull/1402)
- Plugins:
- Bot Detection: Added a new plugin that protects APIs by detecting common bots and crawlers. [#1413](https://github.com/Mashape/kong/pull/1413)
- :fireworks: **New bot-detection plugin**: protect your APIs by detecting and rejecting common bots and crawlers. [#1413](https://github.com/Mashape/kong/pull/1413)
- correlation-id: new "tracker" generator, identifying requests per worker and connection. [#1288](https://github.com/Mashape/kong/pull/1288)
- request/response-transformer: ability to add strings including colon characters. [#1353](https://github.com/Mashape/kong/pull/1353)
- Rate Limiting: support for new rate-limiting policies (`cluster`, `local` and `redis`), and for a new `limit_by` property to force rate-limiting by `consumer`, `credential` or `ip`.
- Response Rate Limiting: support for new rate-limiting policies (`cluster`, `local` and `redis`), and for a new `limit_by` property to force rate-limiting by `consumer`, `credential` or `ip`.
- rate-limiting: support for new rate-limiting policies (`cluster`, `local` and `redis`), and for a new `limit_by` property to force rate-limiting by `consumer`, `credential` or `ip`.
- response-rate-limiting: support for new rate-limiting policies (`cluster`, `local` and `redis`), and for a new `limit_by` property to force rate-limiting by `consumer`, `credential` or `ip`.
- galileo: performance improvements of ALF serialization. ALFs are not discarded when exceeding 20MBs anymore. [#1463](https://github.com/Mashape/kong/issues/1463)

### Removed

kong/plugins/galileo/alf.lua: 41 changes (3 additions & 38 deletions)
@@ -19,12 +19,6 @@
-- * bodyCaptured properties are determined using HTTP headers
-- * timings.blocked is ignored
-- * timings.connect is ignored
--
-- * we are waiting for lua-cjson support of granular 'empty table
-- as JSON arrays' to be released in OpenResty. Until then, we
-- have to use a workaround involving string substitution, which
-- is slower and limits the maximum size of ALFs we can deal with.
-- ALFs are thus limited to 20MB

local cjson = require "cjson.safe"
local resp_get_headers = ngx.resp.get_headers
@@ -61,18 +55,10 @@ function _M.new(log_bodies, server_addr)
return setmetatable(alf, _mt)
end

local _empty_arr_placeholder = "__empty_array_placeholder__"
local _empty_arr_t = {_empty_arr_placeholder}

-- Convert a table such as returned by ngx.*.get_headers()
-- to integer-indexed arrays.
-- @warn Encoding of empty arrays workaround forces us
-- to replace empty arrays with a placeholder to be substituted
-- at serialization time.
-- waiting on the release of:
-- https://github.com/openresty/lua-cjson/pull/6
local function hash_to_array(t)
local arr = {}
local arr = setmetatable({}, cjson.empty_array_mt)
for k, v in pairs(t) do
if type(v) == "table" then
for i = 1, #v do
@@ -82,12 +68,7 @@ local function hash_to_array(t)
arr[#arr+1] = {name = k, value = v}
end
end

if #arr == 0 then
return _empty_arr_t
else
return arr
end
return arr
end

local function get_header(t, name, default)
@@ -228,10 +209,6 @@ local buf = {
}
}

local gsub = string.gsub
local pat = '"'.._empty_arr_placeholder..'"'
local _alf_max_size = 20 * 2^20

--- Encode the current ALF to JSON
-- @param[type=string] service_token The ALF `serviceToken`
-- @param[type=string] environment (optional) The ALF `environment`
@@ -249,19 +226,7 @@ function _M:serialize(service_token, environment)
buf.environment = environment
buf.har.log.entries = self.entries

-- tmp workaround for empty arrays
-- this prevents us from dealing with ALFs
-- larger than a few MBs at once
local encoded = cjson.encode(buf)

if #encoded > _alf_max_size then
return nil, "ALF too large (> 20MB)"
end

encoded = gsub(encoded, pat, "")
return gsub(encoded, "\\/", "/"), #self.entries

--return cjson.encode(buf)
return cjson.encode(buf)
end

--- Empty the ALF
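
For context on the `hash_to_array` change above, the following standalone sketch (not the plugin module itself; same lua-cjson assumption as before, and the `X-Foo` header is just an illustrative value) shows how a headers hash is flattened into HAR-style name/value pairs, and how an empty input now serializes straight to `[]` with no placeholder pass:

```lua
-- Standalone sketch of the post-commit hash_to_array behaviour.
-- Assumes cjson.empty_array_mt is available (OpenResty >= 1.9.15.1).
local cjson = require "cjson.safe"

local function hash_to_array(t)
  -- tag the table so that, if it stays empty, it encodes as [] rather than {}
  local arr = setmetatable({}, cjson.empty_array_mt)
  for k, v in pairs(t) do
    if type(v) == "table" then
      -- multi-value header: one name/value entry per value
      for i = 1, #v do
        arr[#arr + 1] = { name = k, value = v[i] }
      end
    else
      arr[#arr + 1] = { name = k, value = v }
    end
  end
  return arr
end

print(cjson.encode(hash_to_array({})))                           -- []
print(cjson.encode(hash_to_array({ ["X-Foo"] = { "a", "b" } })))
-- [{"name":"X-Foo","value":"a"},{"name":"X-Foo","value":"b"}]
```

Because `serialize` is now a single `cjson.encode(buf)` call with no string substitution pass, the 20MB cap removed in the last hunk is no longer needed.
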
spec/03-plugins/05-galileo/01-alf_spec.lua: 14 changes (4 additions & 10 deletions)
@@ -463,21 +463,15 @@ describe("ALF serializer", function()
_G.ngx.resp.get_headers = function()
return {}
end
_G.ngx.req.get_uri_args = function()
return {}
end
reload_alf_serializer()
local alf = alf_serializer.new()
assert(alf:add_entry(_ngx))
local json_encoded_alf = assert(alf:serialize("abcd"))
assert.matches('"headers":[]', json_encoded_alf, nil, true)
end)
-- @TODO: get rid of once lua-cjson gets a release including
-- granular 'empty table as arrays'
it("limits ALF sizes to 20MB", function()
local alf = alf_serializer.new(true)
local body_12mb = string.rep(".", 21 * 2^20)
assert(alf:add_entry(_ngx, body_12mb))
local json_encoded_alf, err = alf:serialize("abcd")
assert.equal("ALF too large (> 20MB)", err)
assert.is_nil(json_encoded_alf)
assert.matches('"queryString":[]', json_encoded_alf, nil, true)
end)
it("handles nil environment", function()
local alf = alf_serializer.new()
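
The removed "limits ALF sizes to 20MB" test above only existed because of the old placeholder workaround in `alf.lua`. For contrast, here is a minimal standalone sketch reconstructed from the removed lines (the `headers` field name is just illustrative) of how that workaround produced empty JSON arrays and why it forced a size cap:

```lua
-- Sketch of the pre-commit workaround: empty arrays were seeded with a
-- placeholder string, then stripped out of the encoded JSON with gsub.
local cjson = require "cjson.safe"

local _empty_arr_placeholder = "__empty_array_placeholder__"
local _empty_arr_t = { _empty_arr_placeholder }

local encoded = cjson.encode({ headers = _empty_arr_t })
-- '{"headers":["__empty_array_placeholder__"]}'

encoded = encoded:gsub('"' .. _empty_arr_placeholder .. '"', "")
print(encoded)  -- '{"headers":[]}'

-- running gsub over the whole serialized payload is what forced the old
-- _alf_max_size cap of 20 * 2^20 bytes on ALFs
```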
