From 0354778de73400dbe1dcac2b85de4cd006bd0495 Mon Sep 17 00:00:00 2001
From: Aapo Talvensaari
Date: Fri, 31 Mar 2023 14:22:37 +0300
Subject: [PATCH] fix(proxy): fix request buffering for HTTP/2.0 (#10595)

### Summary

Turning request/response buffering off was not possible for HTTP/2.0 requests: a check in the
proxy runloop made the route `request_buffering`/`response_buffering` options take effect only
for HTTP/1.1. The check was probably added because of an issue in nginx that was fixed in
version 1.9.14 (http://nginx.org/en/docs/http/ngx_http_v2_module.html):

> Before version 1.9.14, buffering of a client request body could not be
> disabled regardless of [proxy_request_buffering](http://nginx.org/en/docs/http/ngx_http_proxy_module.html#proxy_request_buffering),
> [fastcgi_request_buffering](http://nginx.org/en/docs/http/ngx_http_fastcgi_module.html#fastcgi_request_buffering),
> [uwsgi_request_buffering](http://nginx.org/en/docs/http/ngx_http_uwsgi_module.html#uwsgi_request_buffering), and
> [scgi_request_buffering](http://nginx.org/en/docs/http/ngx_http_scgi_module.html#scgi_request_buffering) directive values.

Kong now ships with an Nginx newer than 1.9.14, so the check is no longer needed; this patch
removes it, and the buffering options now also apply to HTTP/2.0 requests. (A quick way to
verify the behaviour by hand is sketched after the diff below.)

The work was done by @PidgeyBE, thank you very much!

### Issues Resolved

Fix #7418
Close #10204

Signed-off-by: Aapo Talvensaari
Co-authored-by: PidgeyBE
---
 .devcontainer/Dockerfile                        |   3 +-
 .devcontainer/docker-compose.yml                |   8 +-
 .github/workflows/build_and_test.yml            |   2 +-
 CHANGELOG.md                                    |   4 +
 Makefile                                        |   6 +-
 kong/runloop/handler.lua                        |  18 +-
 .../05-proxy/27-unbuffered_spec.lua             | 226 +++++++++++++++---
 7 files changed, 219 insertions(+), 48 deletions(-)

diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 92016901500b..4a1cc8a67812 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -9,4 +9,5 @@ RUN apt-get install -y \
     unzip \
     git \
     m4 \
-    libyaml-dev
+    libyaml-dev \
+    libcurl4-openssl-dev
diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml
index 0b69da3395dc..48e238d3f9ca 100644
--- a/.devcontainer/docker-compose.yml
+++ b/.devcontainer/docker-compose.yml
@@ -19,8 +19,8 @@ services:
       - ..:/workspace:cached
       # Uncomment the next line to use Docker from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker-compose for details.
-      - /var/run/docker.sock:/var/run/docker.sock
-
+      - /var/run/docker.sock:/var/run/docker.sock
+
     # Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
     cap_add:
       - SYS_PTRACE
@@ -37,12 +37,12 @@ services:
       CRYPTO_DIR: /usr/local/kong

     # Overrides default command so things don't shut down after the process ends.
-    command: /bin/sh -c "while sleep 1000; do :; done"
+    command: /bin/sh -c "while sleep 1000; do :; done"

     # Runs app on the same network as the service container, allows "forwardPorts" in devcontainer.json function.
     network_mode: service:db

-    # Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
+    # Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
     # (Adding the "ports" property to this file will not forward from a Codespace.)

     # Uncomment the next line to use a non-root user for all processes - See https://aka.ms/vscode-remote/containers/non-root for details.
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index f027dd5a7449..a2f133b82cad 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -43,7 +43,7 @@ jobs: - name: Install packages if: steps.cache-deps.outputs.cache-hit != 'true' - run: sudo apt update && sudo apt install libyaml-dev valgrind libprotobuf-dev + run: sudo apt update && sudo apt install libyaml-dev valgrind libprotobuf-dev libcurl4-openssl-dev - name: Build Kong if: steps.cache-deps.outputs.cache-hit != 'true' diff --git a/CHANGELOG.md b/CHANGELOG.md index bc08272fa396..625aaa8e642f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -100,6 +100,10 @@ [#10389](https://github.com/Kong/kong/pull/10389) - Support for configurable Node IDs [#10385](https://github.com/Kong/kong/pull/10385) +- Request and response buffering options are now enabled for incoming HTTP 2.0 requests too. + Thanks [@PidgeyBE](https://github.com/PidgeyBE) for contributing this change. + [#10595](https://github.com/Kong/kong/pull/10595) + [#10204](https://github.com/Kong/kong/pull/10204) #### Admin API diff --git a/Makefile b/Makefile index 45e0059940c6..28b01e265271 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ OS := $(shell uname | awk '{print tolower($$0)}') MACHINE := $(shell uname -m) -DEV_ROCKS = "busted 2.1.2" "busted-htest 1.0.0" "luacheck 1.1.0" "lua-llthreads2 0.1.6" "http 0.4" "ldoc 1.4.6" +DEV_ROCKS = "busted 2.1.2" "busted-htest 1.0.0" "luacheck 1.1.0" "lua-llthreads2 0.1.6" "http 0.4" "ldoc 1.4.6" "Lua-cURL 0.3.13" WIN_SCRIPTS = "bin/busted" "bin/kong" "bin/kong-health" BUSTED_ARGS ?= -v TEST_CMD ?= bin/busted $(BUSTED_ARGS) @@ -12,10 +12,12 @@ ifeq ($(OS), darwin) OPENSSL_DIR ?= $(shell brew --prefix)/opt/openssl GRPCURL_OS ?= osx YAML_DIR ?= $(shell brew --prefix)/opt/libyaml +CURL_INCDIR ?= $(shell brew --prefix)/opt/curl/include else OPENSSL_DIR ?= /usr GRPCURL_OS ?= $(OS) YAML_DIR ?= /usr +CURL_INCDIR ?= /usr/include/x86_64-linux-gnu endif ifeq ($(MACHINE), aarch64) @@ -77,7 +79,7 @@ install-dev-rocks: build-venv else \ echo $$rock not found, installing via luarocks... 
; \ LIBRARY_PREFIX=$$(pwd)/bazel-bin/build/$(BUILD_NAME)/kong ; \ - luarocks install $$rock OPENSSL_DIR=$$LIBRARY_PREFIX CRYPTO_DIR=$$LIBRARY_PREFIX YAML_DIR=$(YAML_DIR) || exit 1; \ + luarocks install $$rock OPENSSL_DIR=$$LIBRARY_PREFIX CRYPTO_DIR=$$LIBRARY_PREFIX YAML_DIR=$(YAML_DIR) CURL_INCDIR=$(CURL_INCDIR) || exit 1; \ fi \ done; diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index 3a7a94345091..f7a8585d9e75 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -1280,18 +1280,16 @@ return { return exec("@grpc") end - if protocol_version == 1.1 then - if route.request_buffering == false then - if route.response_buffering == false then - return exec("@unbuffered") - end - - return exec("@unbuffered_request") - end - + if route.request_buffering == false then if route.response_buffering == false then - return exec("@unbuffered_response") + return exec("@unbuffered") end + + return exec("@unbuffered_request") + end + + if route.response_buffering == false then + return exec("@unbuffered_response") end end end, diff --git a/spec/02-integration/05-proxy/27-unbuffered_spec.lua b/spec/02-integration/05-proxy/27-unbuffered_spec.lua index 5a32aca49112..9bfe040b3f78 100644 --- a/spec/02-integration/05-proxy/27-unbuffered_spec.lua +++ b/spec/02-integration/05-proxy/27-unbuffered_spec.lua @@ -1,10 +1,11 @@ local helpers = require "spec.helpers" local random = require "resty.random" local rstring = require "resty.string" +local curl = require("cURL") -- HTTP 1.1 Chunked Body (5 MB) -local function body() +local function chunked_random_body() local chunk = "2000" .."\r\n" .. rstring.to_hex(random.bytes(4096)) .. "\r\n" local i = 0 return function() @@ -23,6 +24,55 @@ local function body() end +--- Curl HTTP Body (5 MB) +local function ramdom_body() + return rstring.to_hex(random.bytes(5*1024*1024/2)) +end + + +local HTTP_VERSIONS = { + CURL_HTTP_VERSION_NONE = 0, + CURL_HTTP_VERSION_1_0 = 1, + CURL_HTTP_VERSION_1_1 = 2, + CURL_HTTP_VERSION_2_0 = 3, + CURL_HTTP_VERSION_2TLS = 4, + CURL_HTTP_VERSION_2_PRIOR_KNOWLEDGE = 5 +} + + +local function curl_post(url, body, http_version) + http_version = http_version or HTTP_VERSIONS["CURL_HTTP_VERSION_2_0"] + + local c = curl.easy{ + url = url, + ssl_verifypeer = false, + ssl_verifyhost = false, + post = true, + postfields = body, + --[curl.OPT_VERBOSE] = true, + [curl.OPT_HEADER] = true, + [curl.OPT_HTTP_VERSION] = http_version, + } + local response = {} + c:setopt_writefunction(table.insert, response) + c:perform() + + local status = c:getinfo_response_code() + local full_response = table.concat(response) + local raw_headers = string.sub(full_response, 1, c:getinfo(curl.INFO_HEADER_SIZE)) + local return_body = string.sub(full_response, c:getinfo(curl.INFO_HEADER_SIZE)) + + --parse headers + local return_headers = {} + for header in string.gmatch(raw_headers, "[%w%-]+:[^\n]+") do + local index = string.find(header, ":") + return_headers[string.lower(string.sub(header, 1, index-1))] = string.sub(header, index+2) + end + + return status, return_headers, return_body +end + + for _, strategy in helpers.each_strategy() do describe("HTTP 1.1 Chunked [#" .. strategy .. 
"]", function() local proxy_client @@ -72,7 +122,6 @@ for _, strategy in helpers.each_strategy() do database = strategy, nginx_conf = "spec/fixtures/custom_nginx.template", }) - end) lazy_teardown(function() @@ -101,7 +150,7 @@ for _, strategy in helpers.each_strategy() do local res = proxy_client:send({ method = "POST", path = "/buffered/post", - body = body(), + body = chunked_random_body(), headers = { ["Transfer-Encoding"] = "chunked", ["Content-Type"] = "application/octet-stream", @@ -121,7 +170,7 @@ for _, strategy in helpers.each_strategy() do local res = proxy_client:send({ method = "POST", path = "/unbuffered/post", - body = body(), + body = chunked_random_body(), headers = { ["Transfer-Encoding"] = "chunked", ["Content-Type"] = "application/octet-stream", @@ -141,7 +190,7 @@ for _, strategy in helpers.each_strategy() do local res = proxy_client:send({ method = "POST", path = "/unbuffered-request/post", - body = body(), + body = chunked_random_body(), headers = { ["Transfer-Encoding"] = "chunked", ["Content-Type"] = "application/octet-stream", @@ -161,7 +210,7 @@ for _, strategy in helpers.each_strategy() do local res = proxy_client:send({ method = "POST", path = "/unbuffered-response/post", - body = body(), + body = chunked_random_body(), headers = { ["Transfer-Encoding"] = "chunked", ["Content-Type"] = "application/octet-stream", @@ -200,18 +249,12 @@ for _, strategy in helpers.each_strategy() do it("is calculated for buffered", function() warmup_client:get("/buffered/stream/1") - local res = proxy_client:get("/buffered/stream/1000") - assert.equal(200, res.status) - local reader = res.body_reader - repeat local chunk, err = reader(8192 * 640) - assert.equal(nil, err) - if chunk then buffered_chunks = buffered_chunks + 1 end @@ -220,18 +263,12 @@ for _, strategy in helpers.each_strategy() do it("is calculated for unbuffered", function() warmup_client:get("/unbuffered/stream/1") - local res = proxy_client:get("/unbuffered/stream/1000") - assert.equal(200, res.status) - local reader = res.body_reader - repeat local chunk, err = reader(8192 * 640) - assert.equal(nil, err) - if chunk then unbuffered_chunks = unbuffered_chunks + 1 end @@ -240,18 +277,12 @@ for _, strategy in helpers.each_strategy() do it("is calculated for unbuffered request", function() warmup_client:get("/unbuffered-request/stream/1") - local res = proxy_client:get("/unbuffered-request/stream/1000") - assert.equal(200, res.status) - local reader = res.body_reader - repeat local chunk, err = reader(8192 * 640) - assert.equal(nil, err) - if chunk then unbuffered_request_chunks = unbuffered_request_chunks + 1 end @@ -260,18 +291,12 @@ for _, strategy in helpers.each_strategy() do it("is calculated for unbuffered response", function() warmup_client:get("/unbuffered-response/stream/1") - local res = proxy_client:get("/unbuffered-response/stream/1000") - assert.equal(200, res.status) - local reader = res.body_reader - repeat local chunk, err = reader(8192 * 640) - assert.equal(nil, err) - if chunk then unbuffered_response_chunks = unbuffered_response_chunks + 1 end @@ -296,3 +321,144 @@ for _, strategy in helpers.each_strategy() do end) end) end + + +for _, version in pairs({ "CURL_HTTP_VERSION_1_1", "CURL_HTTP_VERSION_2_0" }) do + local http_version = HTTP_VERSIONS[version] + + for _, strategy in helpers.each_strategy() do + describe("HTTP #" .. version .. " buffered requests/response [#" .. strategy .. 
"]", function() + local warmup_client + local base_url + local proxy_ip + local proxy_port + + lazy_setup(function() + local bp = helpers.get_db_utils(strategy, { + "routes", + "services" + }) + + local service = bp.services:insert() + + bp.routes:insert({ + protocols = { "http", "https" }, + paths = { "/buffered" }, + request_buffering = true, + response_buffering = true, + service = service, + }) + + bp.routes:insert({ + protocols = { "http", "https" }, + paths = { "/unbuffered" }, + request_buffering = false, + response_buffering = false, + service = service, + }) + + bp.routes:insert({ + protocols = { "http", "https" }, + paths = { "/unbuffered-request" }, + request_buffering = false, + response_buffering = true, + service = service, + }) + + bp.routes:insert({ + protocols = { "http", "https" }, + paths = { "/unbuffered-response" }, + request_buffering = true, + response_buffering = false, + service = service, + }) + + assert(helpers.start_kong { + database = strategy, + nginx_conf = "spec/fixtures/custom_nginx.template", + }) + + end) + + lazy_teardown(function() + warmup_client:close() + helpers.stop_kong() + end) + + before_each(function() + proxy_ip = helpers.get_proxy_ip(true, true) + proxy_port = helpers.get_proxy_port(true, true) + warmup_client = helpers.proxy_client() + base_url = "https://" .. proxy_ip .. ":" .. proxy_port + end) + + describe("request latency", function() + local buffered_latency + local unbuffered_latency + local unbuffered_request_latency + local unbuffered_response_latency + local status, headers, _ + + it("is calculated for buffered", function() + warmup_client:post("/buffered/post", { body = "warmup" }) + status, headers, _ = curl_post( + base_url .. "/buffered/post", ramdom_body(), + http_version + ) + assert.equal(200, status) + buffered_latency = tonumber(headers["x-kong-proxy-latency"]) + assert.is_number(buffered_latency) + end) + + it("is calculated for unbuffered", function() + warmup_client:post("/unbuffered/post", { body = "warmup" }) + status, headers, _ = curl_post( + base_url .. "/unbuffered/post", ramdom_body(), + http_version + ) + assert.equal(200, status) + unbuffered_latency = tonumber(headers["x-kong-proxy-latency"]) + assert.is_number(unbuffered_latency) + end) + + it("is calculated for unbuffered request", function() + warmup_client:post("/unbuffered-request/post", { body = "warmup" }) + status, headers, _ = curl_post( + base_url .. "/unbuffered-request/post", ramdom_body(), + http_version + ) + assert.equal(200, status) + unbuffered_request_latency = tonumber(headers["x-kong-proxy-latency"]) + assert.is_number(unbuffered_request_latency) + end) + + it("is calculated for unbuffered response", function() + warmup_client:post("/unbuffered-response/post", { body = "warmup" }) + status, headers, _ = curl_post( + base_url .. 
"/unbuffered-response/post", ramdom_body(), + http_version + ) + assert.equal(200, status) + unbuffered_response_latency = tonumber(headers["x-kong-proxy-latency"]) + assert.is_number(unbuffered_response_latency) + end) + + it("is greater for buffered than unbuffered", function() + assert.equal(true, buffered_latency > unbuffered_latency) + end) + + it("is greater for buffered than unbuffered request", function() + assert.equal(true, buffered_latency > unbuffered_request_latency) + end) + + it("is greater for unbuffered response than unbuffered", function() + assert.equal(true, unbuffered_response_latency > unbuffered_latency) + end) + + it("is greater for unbuffered response than unbuffered request", function() + assert.equal(true, unbuffered_response_latency > unbuffered_request_latency) + end) + end) + end) + end +end