From a0d1743f41704eb5ca093910898e54a3a886b2ff Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 20:57:39 +0800 Subject: [PATCH 01/96] feat: mvp --- apisix/init.lua | 170 ++++++++++++++++++++++++++++++ apisix/plugins/kafka-consumer.lua | 73 +++++++++++++ apisix/pubsub.proto | 54 ++++++++++ apisix/schema_def.lua | 3 +- 4 files changed, 299 insertions(+), 1 deletion(-) create mode 100644 apisix/plugins/kafka-consumer.lua create mode 100644 apisix/pubsub.proto diff --git a/apisix/init.lua b/apisix/init.lua index 429d449262b7..4ce9a2d7dca1 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -40,8 +40,13 @@ local apisix_ssl = require("apisix.ssl") local upstream_util = require("apisix.utils.upstream") local xrpc = require("apisix.stream.xrpc") local ctxdump = require("resty.ctxdump") +local ws_server = require("resty.websocket.server") local ngx_balancer = require("ngx.balancer") local debug = require("apisix.debug") +local kafka_bconsumer = require("resty.kafka.basic-consumer") +local protoc = require("protoc") +local pb = require("pb") +local ffi = require("ffi") local ngx = ngx local get_method = ngx.req.get_method local ngx_exit = ngx.exit @@ -54,6 +59,8 @@ local str_byte = string.byte local str_sub = string.sub local tonumber = tonumber local pairs = pairs +local C = ffi.C +local ffi_new = ffi.new local control_api_router local is_http = false @@ -267,6 +274,160 @@ local function common_phase(phase_name) return plugin.run_plugin(phase_name, nil, api_ctx) end +protoc.reload() +local pubsub_protoc = protoc.new() +pb.option("int64_as_string") +ffi.cdef[[ + int64_t atoll(const char *num); +]] +local function kafka_access_phase(api_ctx) + local ws, err = ws_server:new() + if not ws then + ngx.log(ngx.ERR, "failed to new websocket: ", err) + return ngx.exit(444) + end + + local up_nodes = api_ctx.matched_upstream.nodes + + -- kafka client broker-related configuration + local broker_list = {} + for i, node in ipairs(up_nodes) do + broker_list[i] = { + host = node.host, + port = node.port, + } + + if api_ctx.kafka_consumer_enable_sasl then + broker_list[i].sasl_config = { + mechanism = "PLAIN", + user = api_ctx.kafka_consumer_sasl_username, + password = api_ctx.kafka_consumer_sasl_password, + } + end + end + + -- kafka client socket-related configuration + local client_config = { + ssl = api_ctx.kafka_consumer_enable_tls, + ssl_verify = api_ctx.kafka_consumer_ssl_verify, + refresh_interval = 30 * 60 * 1000 + } + + -- load and create the consumer instance when it is determined + -- that the websocket connection was created successfully + local c = kafka_bconsumer:new(broker_list, client_config) + + -- compile the protobuf file on initial connection + -- ensure that each worker is loaded once + if not pubsub_protoc.loaded["pubsub.proto"] then + pubsub_protoc:addpath("apisix") + local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto") + if not ok then + pubsub_protoc:reset() + ngx.log(ngx.ERR, "failed to load pubsub protocol, err:", err) + return ngx.exit(444) + end + end + + while true do + -- read raw data frames from websocket connection + local raw_data, raw_type, err = ws:recv_frame() + + if err then + ws:send_close() + core.log.error("failed to recieve frame from kafka client, err: ", err) + return + end + + -- handle client close connection + if raw_type == "close" then + ws:send_close() + core.log.info("kafka client close connection, status code: ", err) + return + end + + -- decode req + if raw_type ~= "binary" then + ws:send_close() + 
core.log.error("recieve error type message from kafka client, err: ", err) + return + end + + local data = pb.decode("PubSubReq", raw_data) + local sequence = data.sequence + + -- list offset command + if data.cmd_kafka_list_offset then + local params = data.cmd_kafka_list_offset + local timestamp = type(params.timestamp) == "string" and + C.atoll(str_sub(params.timestamp, 2, #params.timestamp)) or params.timestamp + + local offset, err = c:list_offset(params.topic, params.partition, timestamp) + if not offset then + ws:send_binary(pb.encode("PubSubResp", { + sequence = sequence, + error_resp = { + code = 0, + message = "failed to list offset, topic: " .. params.topic .. + ", partition: " .. params.partition .. ", err: " .. err, + } + })) + goto continue + end + + offset = tostring(offset) + ws:send_binary(pb.encode("PubSubResp", { + sequence = sequence, + kafka_list_offset_resp = { + offset = str_sub(offset, 1, #offset - 2) + } + })) + goto continue + end + + if data.cmd_kafka_fetch then + ngx.log(ngx.ERR, core.json.encode(data.cmd_kafka_fetch)) + local params = data.cmd_kafka_fetch + local offset = type(params.offset) == "string" and + C.atoll(str_sub(params.offset, 2, #params.offset)) or params.offset + + local ret, err = c:fetch(params.topic, params.partition, offset) + if not ret then + ws:send_binary(pb.encode("PubSubResp", { + sequence = sequence, + error_resp = { + code = 0, + message = "failed to fetch message, topic: " .. params.topic .. + ", partition: " .. params.partition .. ", err: " .. err, + } + })) + goto continue + end + + -- split into multiple messages when the amount of data in + -- a single batch is too large + local messages = ret.records + + -- special handling of int64 for luajit compatibility + for _, message in ipairs(messages) do + local timestamp = tostring(message.timestamp) + message.timestamp = str_sub(timestamp, 1, #timestamp - 2) + local offset = tostring(message.offset) + message.offset = str_sub(offset, 1, #offset - 2) + end + + ws:send_binary(pb.encode("PubSubResp", { + sequence = sequence, + kafka_fetch_resp = { + messages = messages, + }, + })) + end + + ::continue:: + end +end + function _M.http_access_phase() local ngx_ctx = ngx.ctx @@ -440,6 +601,15 @@ function _M.http_access_phase() api_ctx.upstream_scheme = "grpc" end + -- load balancer is not required by kafka upstream + if api_ctx.matched_upstream.scheme == "kafka" then + if not api_ctx.kafka_consumer_enabled then + core.log.error("need to configure the kafka-consumer plugin for kafka upstream") + return core.response.exit(501) + end + return kafka_access_phase(api_ctx) + end + local code, err = set_upstream(route, api_ctx) if code then core.log.error("failed to set upstream: ", err) diff --git a/apisix/plugins/kafka-consumer.lua b/apisix/plugins/kafka-consumer.lua new file mode 100644 index 000000000000..50cfc02de0a9 --- /dev/null +++ b/apisix/plugins/kafka-consumer.lua @@ -0,0 +1,73 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ngx_var = ngx.var + + +local schema = { + type = "object", + properties = { + enable_tls = { + type = "boolean", + default = false, + }, + ssl_verify = { + type = "boolean", + default = true, + }, + enable_sasl = { + type = "boolean", + default = false, + }, + sasl_username = { + type = "string", + default = "", + }, + sasl_password = { + type = "string", + default = "", + }, + }, +} + + +local _M = { + version = 0.1, + priority = 508, + name = "kafka-consumer", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + ctx.kafka_consumer_enabled = true + + -- write kafka-related configuration + ctx.kafka_consumer_enable_tls = conf.enable_tls + ctx.kafka_consumer_ssl_verify = conf.ssl_verify + ctx.kafka_consumer_enable_sasl = conf.enable_sasl + ctx.kafka_consumer_sasl_username = conf.sasl_username + ctx.kafka_consumer_sasl_password = conf.sasl_password +end + + +return _M \ No newline at end of file diff --git a/apisix/pubsub.proto b/apisix/pubsub.proto new file mode 100644 index 000000000000..4469b6d17785 --- /dev/null +++ b/apisix/pubsub.proto @@ -0,0 +1,54 @@ +syntax = "proto3"; + +option go_package = "pubsub"; + +// request +message CmdKafkaListOffset { + string topic = 1; + int32 partition = 2; + int64 timestamp = 3; +} + +message CmdKafkaFetch { + string topic = 1; + int32 partition = 2; + int64 offset = 3; +} + +message PubSubReq { + int64 sequence = 1; + oneof req { + CmdKafkaFetch cmd_kafka_fetch = 31; + CmdKafkaListOffset cmd_kafka_list_offset = 32; + }; +} + +// response +message KafkaMessage { + int64 offset = 1; + int64 timestamp = 2; + bytes key = 3; + bytes value = 4; +} + +message ErrorResp { + int32 code = 1; + string message = 2; +} + +message KafkaFetchResp { + repeated KafkaMessage messages = 1; +} + +message KafkaListOffsetResp { + int64 offset = 1; +} + +message PubSubResp { + int64 sequence = 1; + oneof resp { + ErrorResp error_resp = 31; + KafkaFetchResp kafka_fetch_resp = 32; + KafkaListOffsetResp kafka_list_offset_resp = 33; + }; +} \ No newline at end of file diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index 96cc11eaa715..1a1e52680a8b 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -451,7 +451,7 @@ local upstream_schema = { }, scheme = { default = "http", - enum = {"grpc", "grpcs", "http", "https", "tcp", "tls", "udp"}, + enum = {"grpc", "grpcs", "http", "https", "tcp", "tls", "udp", "kafka"}, description = "The scheme of the upstream." .. " For L7 proxy, it can be one of grpc/grpcs/http/https." .. " For L4 proxy, it can be one of tcp/tls/udp." 
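The wire format introduced above can be exercised directly with the lua-protobuf and protoc modules this patch already requires. The sketch below is illustrative only; the topic, partition, and offset values are placeholders, not part of the patch:

```lua
-- Round-trip sketch of the messages defined in apisix/pubsub.proto,
-- using the same lua-protobuf/protoc calls this patch uses.
local pb = require("pb")
local protoc = require("protoc")

local p = protoc.new()
p:addpath("apisix")
assert(pcall(p.loadfile, p, "pubsub.proto"))

local frame = pb.encode("PubSubReq", {
    sequence = 0,
    cmd_kafka_fetch = {topic = "test", partition = 0, offset = 0},
})

local req = pb.decode("PubSubReq", frame)
-- req.cmd_kafka_fetch.topic is "test" again; the oneof field name
-- ("cmd_kafka_fetch") is what the access phase dispatches on
```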
@@ -493,6 +493,7 @@ local upstream_schema = { oneOf = { {required = {"type", "nodes"}}, {required = {"type", "service_name", "discovery_type"}}, + {required = {"scheme", "nodes"}} }, } From 8a354efff25fc3eddea4d4262773363ebf1a6149 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 21:14:57 +0800 Subject: [PATCH 02/96] chore: update dependency --- rockspec/apisix-master-0.rockspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rockspec/apisix-master-0.rockspec b/rockspec/apisix-master-0.rockspec index e5c56de9f148..5ed1b40a8b35 100644 --- a/rockspec/apisix-master-0.rockspec +++ b/rockspec/apisix-master-0.rockspec @@ -53,7 +53,7 @@ dependencies = { "nginx-lua-prometheus = 0.20220127", "jsonschema = 0.9.8", "lua-resty-ipmatcher = 0.6.1", - "lua-resty-kafka = 0.07", + "lua-resty-kafka = 0.20-0", "lua-resty-logger-socket = 2.0-0", "skywalking-nginx-lua = 0.6.0", "base64 = 1.5-2", From 2f710531ec01306f1e0498de5b874ac17e43407b Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 21:30:18 +0800 Subject: [PATCH 03/96] feat: add ssl verify to upstream --- apisix/schema_def.lua | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index 1a1e52680a8b..2b300ff84009 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -406,8 +406,15 @@ local upstream_schema = { properties = { client_cert = certificate_scheme, client_key = private_key_schema, + verify = { + type = "boolean", + description = "Turn on server certificate verification", + }, + }, + anyOf = { + {required = {"client_cert", "client_key"}}, + {required = {"verify"}}, }, - required = {"client_cert", "client_key"}, }, keepalive_pool = { type = "object", From a6292fb6f6a35a4e7cf1654e231ef94327d75675 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 21:34:22 +0800 Subject: [PATCH 04/96] chore: fix --- apisix/init.lua | 1 - apisix/plugins/kafka-consumer.lua | 3 --- 2 files changed, 4 deletions(-) diff --git a/apisix/init.lua b/apisix/init.lua index 4ce9a2d7dca1..41f29ea6d1e1 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -60,7 +60,6 @@ local str_sub = string.sub local tonumber = tonumber local pairs = pairs local C = ffi.C -local ffi_new = ffi.new local control_api_router local is_http = false diff --git a/apisix/plugins/kafka-consumer.lua b/apisix/plugins/kafka-consumer.lua index 50cfc02de0a9..eca1f7b1d81c 100644 --- a/apisix/plugins/kafka-consumer.lua +++ b/apisix/plugins/kafka-consumer.lua @@ -15,7 +15,6 @@ -- limitations under the License. 
-- local core = require("apisix.core") -local ngx_var = ngx.var local schema = { @@ -62,8 +61,6 @@ function _M.access(conf, ctx) ctx.kafka_consumer_enabled = true -- write kafka-related configuration - ctx.kafka_consumer_enable_tls = conf.enable_tls - ctx.kafka_consumer_ssl_verify = conf.ssl_verify ctx.kafka_consumer_enable_sasl = conf.enable_sasl ctx.kafka_consumer_sasl_username = conf.sasl_username ctx.kafka_consumer_sasl_password = conf.sasl_password From d40d19a3d5516f6c16471491a1b7623b8263e61d Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 21:36:12 +0800 Subject: [PATCH 05/96] fix: typo --- apisix/init.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apisix/init.lua b/apisix/init.lua index 41f29ea6d1e1..2a6dd7b6d4ca 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -334,7 +334,7 @@ local function kafka_access_phase(api_ctx) if err then ws:send_close() - core.log.error("failed to recieve frame from kafka client, err: ", err) + core.log.error("failed to receive frame from kafka client, err: ", err) return end @@ -348,7 +348,7 @@ local function kafka_access_phase(api_ctx) -- decode req if raw_type ~= "binary" then ws:send_close() - core.log.error("recieve error type message from kafka client, err: ", err) + core.log.error("receive error type message from kafka client, err: ", err) return end From 32f218216b6ff1c52354333197e8300386924a10 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 21:40:03 +0800 Subject: [PATCH 06/96] chore: add license --- apisix/pubsub.proto | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/apisix/pubsub.proto b/apisix/pubsub.proto index 4469b6d17785..a8ab6cf6a5c4 100644 --- a/apisix/pubsub.proto +++ b/apisix/pubsub.proto @@ -1,3 +1,20 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one or more +// contributor license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright ownership. +// The ASF licenses this file to You under the Apache License, Version 2.0 +// (the "License"); you may not use this file except in compliance with +// the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + syntax = "proto3"; option go_package = "pubsub"; From 3b3625f88b3718408cefa920c3356ae61c3b7f93 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 23:34:22 +0800 Subject: [PATCH 07/96] feat: separate pubsub module --- apisix/core.lua | 1 + apisix/core/pubsub.lua | 128 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 129 insertions(+) create mode 100644 apisix/core/pubsub.lua diff --git a/apisix/core.lua b/apisix/core.lua index f448f9549a5f..e039969004b3 100644 --- a/apisix/core.lua +++ b/apisix/core.lua @@ -52,4 +52,5 @@ return { tablepool = require("tablepool"), resolver = require("apisix.core.resolver"), os = require("apisix.core.os"), + pubsub = require("apisix.core.pubsub"), } diff --git a/apisix/core/pubsub.lua b/apisix/core/pubsub.lua new file mode 100644 index 000000000000..de143ea43715 --- /dev/null +++ b/apisix/core/pubsub.lua @@ -0,0 +1,128 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local ws_server = require("resty.websocket.server") +local protoc = require("protoc") +local pb = require("pb") + +protoc.reload() +pb.option("int64_as_string") +local pubsub_protoc = protoc.new() + +-- This module is used to handle ws server command +-- processing in pub-sub scenarios. 
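+--
+-- Typical usage, mirrored by the kafka handler refactor in the next
+-- commit (the command name and handler body here are illustrative):
+--
+--   local pubsub, err = core.pubsub.new()
+--   pubsub:on("cmd_kafka_fetch", function (params)
+--       -- return a response table, or nil plus an error message
+--   end)
+--   local err = pubsub:wait()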
+local _M = { version = 0.1 } +local mt = { __index = _M } + + +function _M.new() + -- compile the protobuf file on initial load module + -- ensure that each worker is loaded once + if not pubsub_protoc.loaded["pubsub.proto"] then + pubsub_protoc:addpath("apisix") + local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto") + if not ok then + pubsub_protoc:reset() + return nil, "failed to load pubsub protocol: "..err + end + end + + local ws, err = ws_server:new() + if not ws then + return nil, err + end + + local obj = setmetatable({ + ws_server = ws, + cmd_handler = {}, + }, mt) + + return obj +end + + +-- add command callback function +-- handler is function(params) +-- return value is resp, err +function _M.on(self, command, handler) + self.cmd_handler[command] = handler +end + + +-- enter the message receiving loop and wait for client data +function _M.wait(self) + local ws = self.ws_server + while true do + -- read raw data frames from websocket connection + local raw_data, raw_type, err = ws:recv_frame() + if err then + ws:send_close() + return "websocket server: "..err + end + + -- handle client close connection + if raw_type == "close" then + ws:send_close() + return + end + + -- the pub-sub messages use binary, if the message is not + -- binary, skip this message + if raw_type ~= "binary" then + goto continue + end + + local data = pb.decode("PubSubReq", raw_data) + local sequence = data.sequence + + -- call command handler to generate response data + for key, value in pairs(data) do + -- There are sequence and command properties in the data, + -- select the handler according to the command value. + if key ~= "sequence" then + local handler = self.cmd_handler[key] + if not handler then + core.log.error("handler not registered for the", + " current command, command: ", key) + goto continue + end + + local resp, err = handler(value) + if not resp then + ws:send_binary(pb.encode("PubSubResp", { + sequence = sequence, + error_resp = { + code = 0, + message = err, + }, + })) + goto continue + end + + -- write back the sequence + resp.sequence = sequence + ws:send_binary(pb.encode("PubSubResp", resp)) + end + end + + ::continue:: + end +end + + +return _M From 53c4c21af2a242997ed93ec3961f0a2fa02b5304 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 23:35:09 +0800 Subject: [PATCH 08/96] refactor: kafka access phase --- apisix/init.lua | 166 ++++++++++++++++-------------------------------- 1 file changed, 56 insertions(+), 110 deletions(-) diff --git a/apisix/init.lua b/apisix/init.lua index 2a6dd7b6d4ca..96095f568833 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -40,12 +40,9 @@ local apisix_ssl = require("apisix.ssl") local upstream_util = require("apisix.utils.upstream") local xrpc = require("apisix.stream.xrpc") local ctxdump = require("resty.ctxdump") -local ws_server = require("resty.websocket.server") local ngx_balancer = require("ngx.balancer") local debug = require("apisix.debug") local kafka_bconsumer = require("resty.kafka.basic-consumer") -local protoc = require("protoc") -local pb = require("pb") local ffi = require("ffi") local ngx = ngx local get_method = ngx.req.get_method @@ -273,17 +270,16 @@ local function common_phase(phase_name) return plugin.run_plugin(phase_name, nil, api_ctx) end -protoc.reload() -local pubsub_protoc = protoc.new() -pb.option("int64_as_string") + ffi.cdef[[ int64_t atoll(const char *num); ]] local function kafka_access_phase(api_ctx) - local ws, err = ws_server:new() - if not ws then - ngx.log(ngx.ERR, 
"failed to new websocket: ", err) - return ngx.exit(444) + local pubsub, err = core.pubsub.new() + if not pubsub then + core.log.error("failed to initialize pub-sub module, err: ", err) + core.response.exit(400) + return end local up_nodes = api_ctx.matched_upstream.nodes @@ -314,116 +310,66 @@ local function kafka_access_phase(api_ctx) -- load and create the consumer instance when it is determined -- that the websocket connection was created successfully - local c = kafka_bconsumer:new(broker_list, client_config) - - -- compile the protobuf file on initial connection - -- ensure that each worker is loaded once - if not pubsub_protoc.loaded["pubsub.proto"] then - pubsub_protoc:addpath("apisix") - local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto") - if not ok then - pubsub_protoc:reset() - ngx.log(ngx.ERR, "failed to load pubsub protocol, err:", err) - return ngx.exit(444) + local consumer = kafka_bconsumer:new(broker_list, client_config) + + pubsub:on("cmd_kafka_list_offset", function (params) + -- The timestamp parameter uses a 64-bit integer, which is difficult + -- for luaKit to handle well, so the int64_as_string pattern in + -- lua-protobuf is used here. Smaller numbers will be decoded as + -- lua number, while overly long integers will be decoded as strings + -- in the format #number, where the # sign at the beginning of the + -- string will be removed and converted to int64_t with the atoll function. + local timestamp = type(params.timestamp) == "string" and + C.atoll(str_sub(params.timestamp, 2, #params.timestamp)) or params.timestamp + + local offset, err = consumer:list_offset(params.topic, params.partition, timestamp) + + if not offset then + return nil, "failed to list offset, topic: " .. params.topic .. + ", partition: " .. params.partition .. ", err: " .. err end - end - while true do - -- read raw data frames from websocket connection - local raw_data, raw_type, err = ws:recv_frame() + offset = tostring(offset) + return { + kafka_list_offset_resp = { + offset = str_sub(offset, 1, #offset - 2) + } + } + end) - if err then - ws:send_close() - core.log.error("failed to receive frame from kafka client, err: ", err) - return - end + pubsub:on("cmd_kafka_fetch", function (params) + local offset = type(params.offset) == "string" and + C.atoll(str_sub(params.offset, 2, #params.offset)) or params.offset - -- handle client close connection - if raw_type == "close" then - ws:send_close() - core.log.info("kafka client close connection, status code: ", err) - return + local ret, err = consumer:fetch(params.topic, params.partition, offset) + if not ret then + return nil, "failed to fetch message, topic: " .. params.topic .. + ", partition: " .. params.partition .. ", err: " .. 
err end - -- decode req - if raw_type ~= "binary" then - ws:send_close() - core.log.error("receive error type message from kafka client, err: ", err) - return - end + -- split into multiple messages when the amount of data in + -- a single batch is too large + local messages = ret.records - local data = pb.decode("PubSubReq", raw_data) - local sequence = data.sequence - - -- list offset command - if data.cmd_kafka_list_offset then - local params = data.cmd_kafka_list_offset - local timestamp = type(params.timestamp) == "string" and - C.atoll(str_sub(params.timestamp, 2, #params.timestamp)) or params.timestamp - - local offset, err = c:list_offset(params.topic, params.partition, timestamp) - if not offset then - ws:send_binary(pb.encode("PubSubResp", { - sequence = sequence, - error_resp = { - code = 0, - message = "failed to list offset, topic: " .. params.topic .. - ", partition: " .. params.partition .. ", err: " .. err, - } - })) - goto continue - end - - offset = tostring(offset) - ws:send_binary(pb.encode("PubSubResp", { - sequence = sequence, - kafka_list_offset_resp = { - offset = str_sub(offset, 1, #offset - 2) - } - })) - goto continue + -- special handling of int64 for luajit compatibility + for _, message in ipairs(messages) do + local timestamp = tostring(message.timestamp) + message.timestamp = str_sub(timestamp, 1, #timestamp - 2) + local offset = tostring(message.offset) + message.offset = str_sub(offset, 1, #offset - 2) end - if data.cmd_kafka_fetch then - ngx.log(ngx.ERR, core.json.encode(data.cmd_kafka_fetch)) - local params = data.cmd_kafka_fetch - local offset = type(params.offset) == "string" and - C.atoll(str_sub(params.offset, 2, #params.offset)) or params.offset - - local ret, err = c:fetch(params.topic, params.partition, offset) - if not ret then - ws:send_binary(pb.encode("PubSubResp", { - sequence = sequence, - error_resp = { - code = 0, - message = "failed to fetch message, topic: " .. params.topic .. - ", partition: " .. params.partition .. ", err: " .. 
err, - } - })) - goto continue - end - - -- split into multiple messages when the amount of data in - -- a single batch is too large - local messages = ret.records - - -- special handling of int64 for luajit compatibility - for _, message in ipairs(messages) do - local timestamp = tostring(message.timestamp) - message.timestamp = str_sub(timestamp, 1, #timestamp - 2) - local offset = tostring(message.offset) - message.offset = str_sub(offset, 1, #offset - 2) - end - - ws:send_binary(pb.encode("PubSubResp", { - sequence = sequence, - kafka_fetch_resp = { - messages = messages, - }, - })) - end + return { + kafka_fetch_resp = { + messages = messages, + }, + } + end) - ::continue:: + -- start processing client commands + local err = pubsub:wait() + if err then + core.log.error("failed to handle pub-sub command, err: ", err) end end From 3a22a263b8d6e12653cf7295273350a9ac0d37b5 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 23:37:36 +0800 Subject: [PATCH 09/96] fix: typo --- apisix/init.lua | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apisix/init.lua b/apisix/init.lua index 96095f568833..376a8dd43e44 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -314,10 +314,10 @@ local function kafka_access_phase(api_ctx) pubsub:on("cmd_kafka_list_offset", function (params) -- The timestamp parameter uses a 64-bit integer, which is difficult - -- for luaKit to handle well, so the int64_as_string pattern in + -- for luajit to handle well, so the int64_as_string option in -- lua-protobuf is used here. Smaller numbers will be decoded as - -- lua number, while overly long integers will be decoded as strings - -- in the format #number, where the # sign at the beginning of the + -- lua number, while overly larger numbers will be decoded as strings + -- in the format #number, where the # symbol at the beginning of the -- string will be removed and converted to int64_t with the atoll function. 
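         -- For example, a timestamp decoded as the string "#1652093400000"
         -- (an illustrative millisecond value) becomes
         -- C.atoll("1652093400000") in the conversion below.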
local timestamp = type(params.timestamp) == "string" and C.atoll(str_sub(params.timestamp, 2, #params.timestamp)) or params.timestamp From 705841709891ae24ec574bc9f21d63c75ca3f0c0 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 23:39:13 +0800 Subject: [PATCH 10/96] fix: locally global var --- apisix/init.lua | 2 ++ 1 file changed, 2 insertions(+) diff --git a/apisix/init.lua b/apisix/init.lua index 376a8dd43e44..223868694c37 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -55,6 +55,8 @@ local ngx_var = ngx.var local str_byte = string.byte local str_sub = string.sub local tonumber = tonumber +local tostring = tostring +local type = type local pairs = pairs local C = ffi.C local control_api_router From 6f84c48708560deac7a5b637088f163a8ec7760d Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 23:43:35 +0800 Subject: [PATCH 11/96] fix: ensure install pubsub.proto --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index 39131e20c69e..1e4bfdb9ecff 100644 --- a/Makefile +++ b/Makefile @@ -263,6 +263,7 @@ install: runtime # Lua directories listed in alphabetical order $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix $(ENV_INSTALL) apisix/*.lua $(ENV_INST_LUADIR)/apisix/ + $(ENV_INSTALL) apisix/*.proto $(ENV_INST_LUADIR)/apisix/ $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/admin $(ENV_INSTALL) apisix/admin/*.lua $(ENV_INST_LUADIR)/apisix/admin/ From 0eea32b93d6d81e9e9ee8d8e9f6ece7f0408b3d1 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 23:46:11 +0800 Subject: [PATCH 12/96] fix --- apisix/core/pubsub.lua | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/apisix/core/pubsub.lua b/apisix/core/pubsub.lua index de143ea43715..02ba3aa9fefa 100644 --- a/apisix/core/pubsub.lua +++ b/apisix/core/pubsub.lua @@ -15,10 +15,13 @@ -- limitations under the License. 
-- -local core = require("apisix.core") -local ws_server = require("resty.websocket.server") -local protoc = require("protoc") -local pb = require("pb") +local core = require("apisix.core") +local ws_server = require("resty.websocket.server") +local protoc = require("protoc") +local pb = require("pb") +local setmetatable = setmetatable +local pcall = pcall +local pairs = pairs protoc.reload() pb.option("int64_as_string") From 3cb6506c7fc52864492beaaf95c546c1dce476f7 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Fri, 6 May 2022 23:48:49 +0800 Subject: [PATCH 13/96] fix: lint --- apisix/plugins/kafka-consumer.lua | 2 +- apisix/pubsub.proto | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apisix/plugins/kafka-consumer.lua b/apisix/plugins/kafka-consumer.lua index eca1f7b1d81c..645ffc9fb78f 100644 --- a/apisix/plugins/kafka-consumer.lua +++ b/apisix/plugins/kafka-consumer.lua @@ -67,4 +67,4 @@ function _M.access(conf, ctx) end -return _M \ No newline at end of file +return _M diff --git a/apisix/pubsub.proto b/apisix/pubsub.proto index a8ab6cf6a5c4..bb3a3d14074d 100644 --- a/apisix/pubsub.proto +++ b/apisix/pubsub.proto @@ -68,4 +68,4 @@ message PubSubResp { KafkaFetchResp kafka_fetch_resp = 32; KafkaListOffsetResp kafka_list_offset_resp = 33; }; -} \ No newline at end of file +} From eacdc7a4f09bde43fafb34484bc95c4e466f7869 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 00:03:03 +0800 Subject: [PATCH 14/96] fix: compatible upstream tls verify --- apisix/upstream.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apisix/upstream.lua b/apisix/upstream.lua index da88e06e2ac4..604271b96f46 100644 --- a/apisix/upstream.lua +++ b/apisix/upstream.lua @@ -435,7 +435,7 @@ local function check_upstream_conf(in_dp, conf) end end - if conf.tls then + if conf.tls and conf.tls.client_cert and conf.tls.client_key then local cert = conf.tls.client_cert local key = conf.tls.client_key local ok, err = apisix_ssl.validate(cert, key) From 0871a900d35bf34e65eda0098601542fd7d3dbe8 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 00:51:30 +0800 Subject: [PATCH 15/96] fix: update schema check rule --- apisix/schema_def.lua | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index 2b300ff84009..c6b6ee21cb1c 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -498,9 +498,13 @@ local upstream_schema = { id = id_schema, }, oneOf = { - {required = {"type", "nodes"}}, + { + anyOf = { + {required = {"type", "nodes"}}, + {required = {"scheme", "nodes"}}, + } + }, {required = {"type", "service_name", "discovery_type"}}, - {required = {"scheme", "nodes"}} }, } From fd2dd12da01e6228c996976be56e0ede4ef1aba7 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 01:26:31 +0800 Subject: [PATCH 16/96] fix: recovery check rule --- apisix/schema_def.lua | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index c6b6ee21cb1c..cc7e8acd15db 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -498,12 +498,7 @@ local upstream_schema = { id = id_schema, }, oneOf = { - { - anyOf = { - {required = {"type", "nodes"}}, - {required = {"scheme", "nodes"}}, - } - }, + {required = {"type", "nodes"}}, {required = {"type", "service_name", "discovery_type"}}, }, } From 07162a8064e205fabe0d8e59f18a717e88ed6e92 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 02:35:31 +0800 Subject: [PATCH 17/96] 
test --- t/plugin/kafka-consumer.t | 61 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 t/plugin/kafka-consumer.t diff --git a/t/plugin/kafka-consumer.t b/t/plugin/kafka-consumer.t new file mode 100644 index 000000000000..27572276064e --- /dev/null +++ b/t/plugin/kafka-consumer.t @@ -0,0 +1,61 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +use t::APISIX 'no_plan'; + +repeat_each(1); +no_long_string(); +no_root_location(); + +add_block_preprocessor(sub { + my ($block) = @_; + + if ((!defined $block->error_log) && (!defined $block->no_error_log)) { + $block->set_value("no_error_log", "[error]"); + } + + if (!defined $block->request) { + $block->set_value("request", "GET /t"); + } +}); + +run_tests(); + +__DATA__ + +=== TEST 1: sanity +--- config + location /t { + content_by_lua_block { + local test_cases = { + {}, + {enable_sasl = true, sasl_username = "user", sasl_password = "pwd"}, + {enable_sasl = false}, + {enable_sasl = true}, + } + local plugin = require("apisix.plugins.kafka-consumer") + + for _, case in ipairs(test_cases) do + local ok, err = plugin.check_schema(case) + ngx.say(ok and "done" or err) + end + } + } +--- response_body +done +done +done +done From abf9bee44165f146e071cb9702fcef2b72b2e740 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 10:20:02 +0800 Subject: [PATCH 18/96] fix: avoid matched_upstream check errors --- apisix/init.lua | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/apisix/init.lua b/apisix/init.lua index 223868694c37..cd8154eb24ee 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -549,11 +549,7 @@ function _M.http_access_phase() end -- load balancer is not required by kafka upstream - if api_ctx.matched_upstream.scheme == "kafka" then - if not api_ctx.kafka_consumer_enabled then - core.log.error("need to configure the kafka-consumer plugin for kafka upstream") - return core.response.exit(501) - end + if api_ctx.matched_upstream and api_ctx.matched_upstream.scheme == "kafka" then return kafka_access_phase(api_ctx) end From 646a656d1b9a50e14c9badf48e7e45314299c002 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 10:22:28 +0800 Subject: [PATCH 19/96] chore: update kafka consumer config define --- apisix/plugins/kafka-consumer.lua | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/apisix/plugins/kafka-consumer.lua b/apisix/plugins/kafka-consumer.lua index 645ffc9fb78f..a80ea2042bf3 100644 --- a/apisix/plugins/kafka-consumer.lua +++ b/apisix/plugins/kafka-consumer.lua @@ -20,14 +20,6 @@ local core = require("apisix.core") local schema = { type = "object", properties = { - enable_tls = { - type = "boolean", - default = false, - }, - ssl_verify = { - type = "boolean", - default = true, - }, enable_sasl 
= { type = "boolean", default = false, @@ -58,7 +50,7 @@ end function _M.access(conf, ctx) - ctx.kafka_consumer_enabled = true + ctx.kafka_consumer_plugin_enabled = true -- write kafka-related configuration ctx.kafka_consumer_enable_sasl = conf.enable_sasl From 621be005c3c0bbc27d0f80334ab84f5f5e3e860f Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 13:00:06 +0800 Subject: [PATCH 20/96] test: support upstream tls --- t/node/upstream-mtls.t | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/t/node/upstream-mtls.t b/t/node/upstream-mtls.t index 9c0a49d9f713..7168f3816667 100644 --- a/t/node/upstream-mtls.t +++ b/t/node/upstream-mtls.t @@ -77,7 +77,7 @@ __DATA__ GET /t --- error_code: 400 --- response_body -{"error_msg":"invalid configuration: property \"upstream\" validation failed: property \"tls\" validation failed: property \"client_key\" is required"} +{"error_msg":"invalid configuration: property \"upstream\" validation failed: property \"tls\" validation failed: object matches none of the required: [\"client_cert\",\"client_key\"] or [\"verify\"]"} From f6c60d36d3d01f60e009c6aa18e661e881ec56a1 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 13:42:48 +0800 Subject: [PATCH 21/96] test: improve plugin schema check --- apisix/plugins/kafka-consumer.lua | 9 +++++++++ t/plugin/kafka-consumer.t | 8 +++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/apisix/plugins/kafka-consumer.lua b/apisix/plugins/kafka-consumer.lua index a80ea2042bf3..47dd39acad98 100644 --- a/apisix/plugins/kafka-consumer.lua +++ b/apisix/plugins/kafka-consumer.lua @@ -45,6 +45,15 @@ local _M = { function _M.check_schema(conf) + if conf.enable_sasl then + if not conf.sasl_username or conf.sasl_username == "" then + return false, "need to set sasl username when enabling kafka sasl authentication" + end + if not conf.sasl_password or conf.sasl_password == "" then + return false, "need to set sasl password when enabling kafka sasl authentication" + end + end + return core.schema.check(schema, conf) end diff --git a/t/plugin/kafka-consumer.t b/t/plugin/kafka-consumer.t index 27572276064e..01a82b809f39 100644 --- a/t/plugin/kafka-consumer.t +++ b/t/plugin/kafka-consumer.t @@ -45,6 +45,9 @@ __DATA__ {enable_sasl = true, sasl_username = "user", sasl_password = "pwd"}, {enable_sasl = false}, {enable_sasl = true}, + {enable_sasl = true, sasl_username = "user"}, + {enable_sasl = true, sasl_username = 123, sasl_password = "123"}, + {enable_sasl = true, sasl_username = "123", sasl_password = 123}, } local plugin = require("apisix.plugins.kafka-consumer") @@ -58,4 +61,7 @@ __DATA__ done done done -done +need to set sasl username when enabling kafka sasl authentication +need to set sasl password when enabling kafka sasl authentication +property "sasl_username" validation failed: wrong type: expected string, got number +property "sasl_password" validation failed: wrong type: expected string, got number From 37539f1a410f068a2c80456c05f7d6d8d43512fe Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 15:21:13 +0800 Subject: [PATCH 22/96] test: add kafka upstream base cases --- t/node/upstream-kafka.t | 104 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 t/node/upstream-kafka.t diff --git a/t/node/upstream-kafka.t b/t/node/upstream-kafka.t new file mode 100644 index 000000000000..3bdfd28342e1 --- /dev/null +++ b/t/node/upstream-kafka.t @@ -0,0 +1,104 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one 
or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +use t::APISIX 'no_plan'; + +repeat_each(1); +no_long_string(); +no_root_location(); + +add_block_preprocessor(sub { + my ($block) = @_; + + if ((!defined $block->error_log) && (!defined $block->no_error_log)) { + $block->set_value("no_error_log", "[error]"); + } + + if (!defined $block->request) { + $block->set_value("request", "GET /t"); + } +}); + +run_tests(); + +__DATA__ + +=== TEST 1: success +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin") + local code, body = t.test("/apisix/admin/upstreams/kafka", ngx.HTTP_PUT, [[{ + "nodes": { + "127.0.0.1:9092": 1 + }, + "type": "none", + "scheme": "kafka" + }]]) + + ngx.say(code..body) + } + } +--- response_body +201passed + + + +=== TEST 2: success with tls +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin") + local code, body = t.test("/apisix/admin/upstreams/kafka-tls", ngx.HTTP_PUT, [[{ + "nodes": { + "127.0.0.1:9092": 1 + }, + "type": "none", + "scheme": "kafka", + "tls": { + "verify": true + } + }]]) + + ngx.say(code..body) + } + } +--- response_body +201passed + + + +=== TEST 3: wrong tls verify type +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin") + local code, body = t.test("/apisix/admin/upstreams/kafka-tls-error-type", ngx.HTTP_PUT, [[{ + "nodes": { + "127.0.0.1:9092": 1 + }, + "type": "none", + "scheme": "kafka", + "tls": { + "verify": "none" + } + }]]) + + ngx.print(code..body) + } + } +--- response_body +400{"error_msg":"invalid configuration: property \"tls\" validation failed: property \"verify\" validation failed: wrong type: expected boolean, got string"} From 7a3ead79496190eb0bd632d8abc42fb0baf5a7e5 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:04:09 +0800 Subject: [PATCH 23/96] test: create test conusmer topic --- ci/linux-ci-init-service.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/ci/linux-ci-init-service.sh b/ci/linux-ci-init-service.sh index 5f468502304d..c719636e2861 100755 --- a/ci/linux-ci-init-service.sh +++ b/ci/linux-ci-init-service.sh @@ -19,6 +19,13 @@ docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test2 docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 3 --topic test3 docker exec -i apache-apisix_kafka-server2_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server2:2181 --replication-factor 1 --partitions 1 --topic test4 +docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 
--partitions 1 --topic test-consumer + +# create messages for test-consumer +for ((i=1; i<=135; i++)) +do + docker exec -i apache-apisix_kafka-server1_1 echo "testmsg" | kafka-console-producer --broker-list kafka-server1:9092 --topic test-consumer +done # prepare openwhisk env docker pull openwhisk/action-nodejs-v14:nightly From 5ac5b3843d3cfb691ce7d0f2b594041bb185817f Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:15:26 +0800 Subject: [PATCH 24/96] test: reduce testmsg count and add msgid --- ci/linux-ci-init-service.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/linux-ci-init-service.sh b/ci/linux-ci-init-service.sh index c719636e2861..f3a47544be1c 100755 --- a/ci/linux-ci-init-service.sh +++ b/ci/linux-ci-init-service.sh @@ -22,9 +22,9 @@ docker exec -i apache-apisix_kafka-server2_1 /opt/bitnami/kafka/bin/kafka-topics docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test-consumer # create messages for test-consumer -for ((i=1; i<=135; i++)) +for ((i=1; i<=30; i++)) do - docker exec -i apache-apisix_kafka-server1_1 echo "testmsg" | kafka-console-producer --broker-list kafka-server1:9092 --topic test-consumer + docker exec -i apache-apisix_kafka-server1_1 echo "testmsg$i" | kafka-console-producer --broker-list kafka-server1:9092 --topic test-consumer done # prepare openwhisk env From 20eedff88e8c7e816cd8690316c3106f5706fa72 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:15:59 +0800 Subject: [PATCH 25/96] test: add kafka consumer cases --- t/pubsub/kafka.t | 194 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 194 insertions(+) create mode 100644 t/pubsub/kafka.t diff --git a/t/pubsub/kafka.t b/t/pubsub/kafka.t new file mode 100644 index 000000000000..671ec5c1100a --- /dev/null +++ b/t/pubsub/kafka.t @@ -0,0 +1,194 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +use t::APISIX 'no_plan'; + +repeat_each(1); +no_long_string(); +no_root_location(); + +add_block_preprocessor(sub { + my ($block) = @_; + + if ((!defined $block->error_log) && (!defined $block->no_error_log)) { + $block->set_value("no_error_log", "[error]"); + } + + if (!defined $block->request) { + $block->set_value("request", "GET /t"); + } +}); + +run_tests(); + +__DATA__ + +=== TEST 1: setup all-in-one test +--- config + location /t { + content_by_lua_block { + local data = { + { + url = "/apisix/admin/routes/kafka", + data = [[{ + "upstream": { + "nodes": { + "127.0.0.1:9092": 1 + }, + "type": "none", + "scheme": "kafka" + }, + "uri": "/kafka" + }]], + }, + { + url = "/apisix/admin/upstreams/kafka-tls", + data = [[{ + "nodes": { + "127.0.0.1:9092": 1 + }, + "tls": { + "verify": true + }, + "type": "none", + "scheme": "kafka" + }]], + }, + } + + local t = require("lib.test_admin").test + + for _, data in ipairs(data) do + local code, body = t(data.url, ngx.HTTP_PUT, data.data) + ngx.say(code..body) + end + } + } +--- response_body eval +"201passed\n"x2 + + + +=== TEST 2: hit route (with HTTP request) +--- request +GET /kafka +--- error_code: 400 +--- error_log +failed to initialize pub-sub module, err: bad "upgrade" request header: nil + + + +=== TEST 3: hit route (with HTTP emulation websocket) +--- config + location /t { + content_by_lua_block { + local protoc = require("protoc") + local pb = require("pb") + protoc.reload() + pb.option("int64_as_string") + local pubsub_protoc = protoc.new() + pubsub_protoc:addpath("apisix") + local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto") + if not ok then + ngx.say("failed to load protocol: " .. err) + return + end + + local client = require "resty.websocket.client" + local ws, err = client:new() + local ok, err = ws:connect("ws://127.0.0.1:1984/kafka") + if not ok then + ngx.say("failed to connect: " .. err) + return + end + + local data = { + { + sequence = 0, + cmd_kafka_list_offset = { + topic = "not-exist", + partition = 0, + timestamp = -1, + }, + }, + { + sequence = 1, + cmd_kafka_fetch = { + topic = "not-exist", + partition = 0, + offset = 0, + }, + }, + { + sequence = 2, + cmd_kafka_list_offset = { + topic = "test-consumer", + partition = 0, + timestamp = -2, + }, + }, + { + sequence = 3, + cmd_kafka_list_offset = { + topic = "test-consumer", + partition = 0, + timestamp = -1, + }, + }, + { + sequence = 4, + cmd_kafka_fetch = { + topic = "test-consumer", + partition = 0, + offset = 14, + }, + } + } + + for i = 1, #data do + local _, err = ws:send_binary(pb.encode("PubSubReq", data[i])) + local raw_data, raw_type, err = ws:recv_frame() + if not raw_data then + ngx.say("failed to receive the frame: ", err) + return + end + local data, err = pb.decode("PubSubResp", raw_data) + if not data then + ngx.say("failed to decode the frame: ", err) + return + end + + if data.error_resp then + ngx.say(data.sequence..data.error_resp.message) + end + if data.kafka_list_offset_resp then + ngx.say(data.sequence.."offset: "..data.kafka_list_offset_resp.offset) + end + if data.kafka_fetch_resp then + ngx.say(data.sequence.."offset: "..data.kafka_fetch_resp.messages[1].offset.. 
+ " msg: "..data.kafka_fetch_resp.messages[1].value) + end + end + + ws:send_close() + } + } +--- response_body +0failed to list offset, topic: not-exist, partition: 0, err: not found topic +1failed to fetch message, topic: not-exist, partition: 0, err: not found topic +2offset: 0 +3offset: 30 +4offset: 14 msg: testmsg14 From be28226543f55cc0d792d1a141df4ab95e3304f0 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:20:24 +0800 Subject: [PATCH 26/96] feat: let kafka upstream obey tls --- apisix/init.lua | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/apisix/init.lua b/apisix/init.lua index cd8154eb24ee..f62ce4bac74b 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -285,6 +285,7 @@ local function kafka_access_phase(api_ctx) end local up_nodes = api_ctx.matched_upstream.nodes + local up_tls = api_ctx.matched_upstream.tls -- kafka client broker-related configuration local broker_list = {} @@ -304,11 +305,13 @@ local function kafka_access_phase(api_ctx) end -- kafka client socket-related configuration - local client_config = { - ssl = api_ctx.kafka_consumer_enable_tls, - ssl_verify = api_ctx.kafka_consumer_ssl_verify, - refresh_interval = 30 * 60 * 1000 - } + local client_config = {refresh_interval = 30 * 60 * 1000} + if up_tls and type(up_tls.verify) ~= nil then + client_config = { + ssl = up_tls, + ssl_verify = up_tls.verify, + } + end -- load and create the consumer instance when it is determined -- that the websocket connection was created successfully From ca99c68379ce1f64cfc81f3279d057d9ca87e6e5 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:20:47 +0800 Subject: [PATCH 27/96] chore: remove unused ctx var --- apisix/plugins/kafka-consumer.lua | 2 -- 1 file changed, 2 deletions(-) diff --git a/apisix/plugins/kafka-consumer.lua b/apisix/plugins/kafka-consumer.lua index 47dd39acad98..5a614e4002b9 100644 --- a/apisix/plugins/kafka-consumer.lua +++ b/apisix/plugins/kafka-consumer.lua @@ -59,8 +59,6 @@ end function _M.access(conf, ctx) - ctx.kafka_consumer_plugin_enabled = true - -- write kafka-related configuration ctx.kafka_consumer_enable_sasl = conf.enable_sasl ctx.kafka_consumer_sasl_username = conf.sasl_username From 1cf7b4cc4a52b05e8dac2a2373d7af45f6263946 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:22:28 +0800 Subject: [PATCH 28/96] fix: lint --- t/pubsub/kafka.t | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/t/pubsub/kafka.t b/t/pubsub/kafka.t index 671ec5c1100a..d440daeca3f8 100644 --- a/t/pubsub/kafka.t +++ b/t/pubsub/kafka.t @@ -170,7 +170,7 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil ngx.say("failed to decode the frame: ", err) return end - + if data.error_resp then ngx.say(data.sequence..data.error_resp.message) end From d6434c2c18125507b976f591bdcefdb080b5f4fa Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:37:34 +0800 Subject: [PATCH 29/96] fix: create test messages --- ci/linux-ci-init-service.sh | 2 +- t/pubsub/kafka.t | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/linux-ci-init-service.sh b/ci/linux-ci-init-service.sh index f3a47544be1c..82e2dcd4ad41 100755 --- a/ci/linux-ci-init-service.sh +++ b/ci/linux-ci-init-service.sh @@ -22,7 +22,7 @@ docker exec -i apache-apisix_kafka-server2_1 /opt/bitnami/kafka/bin/kafka-topics docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 
--topic test-consumer # create messages for test-consumer -for ((i=1; i<=30; i++)) +for i in `seq 30` do docker exec -i apache-apisix_kafka-server1_1 echo "testmsg$i" | kafka-console-producer --broker-list kafka-server1:9092 --topic test-consumer done diff --git a/t/pubsub/kafka.t b/t/pubsub/kafka.t index d440daeca3f8..c91172c356c2 100644 --- a/t/pubsub/kafka.t +++ b/t/pubsub/kafka.t @@ -191,4 +191,4 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil 1failed to fetch message, topic: not-exist, partition: 0, err: not found topic 2offset: 0 3offset: 30 -4offset: 14 msg: testmsg14 +4offset: 14 msg: testmsg15 From b07dc70ace21b19883c6bb1700396fe17a4680cc Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:47:06 +0800 Subject: [PATCH 30/96] feat: add pub-sub test in ci --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a47e69741b7a..502dfe0cfb79 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -27,7 +27,7 @@ jobs: - linux_openresty - linux_openresty_1_17 test_dir: - - t/plugin + - t/plugin t/pubsub - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc - t/node t/router t/script t/stream-node t/utils t/wasm t/xds-library t/xrpc From dbec6461251f6deefef1b68c332616c420a8b498 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 17:47:39 +0800 Subject: [PATCH 31/96] fix: ci --- ci/linux-ci-init-service.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/linux-ci-init-service.sh b/ci/linux-ci-init-service.sh index 82e2dcd4ad41..97895a189bad 100755 --- a/ci/linux-ci-init-service.sh +++ b/ci/linux-ci-init-service.sh @@ -24,7 +24,7 @@ docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics # create messages for test-consumer for i in `seq 30` do - docker exec -i apache-apisix_kafka-server1_1 echo "testmsg$i" | kafka-console-producer --broker-list kafka-server1:9092 --topic test-consumer + docker exec -i apache-apisix_kafka-server1_1 echo "testmsg$i" | /opt/bitnami/kafka/bin/kafka-console-producer.sh --broker-list kafka-server1:9092 --topic test-consumer done # prepare openwhisk env From 826f89a176c90a28d2cc8baf3023df32a8934b44 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 21:24:42 +0800 Subject: [PATCH 32/96] fix: ci --- ci/linux-ci-init-service.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/linux-ci-init-service.sh b/ci/linux-ci-init-service.sh index 97895a189bad..cc863add295a 100755 --- a/ci/linux-ci-init-service.sh +++ b/ci/linux-ci-init-service.sh @@ -24,7 +24,7 @@ docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics # create messages for test-consumer for i in `seq 30` do - docker exec -i apache-apisix_kafka-server1_1 echo "testmsg$i" | /opt/bitnami/kafka/bin/kafka-console-producer.sh --broker-list kafka-server1:9092 --topic test-consumer + docker exec -i apache-apisix_kafka-server1_1 bash -c "echo "testmsg$i" | /opt/bitnami/kafka/bin/kafka-console-producer.sh --broker-list kafka-server1:9092 --topic test-consumer" done # prepare openwhisk env From d6c8628cef085c67ffeb6307a380385d44beb6a6 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 22:18:54 +0800 Subject: [PATCH 33/96] test: add pubsub in centos7 --- .github/workflows/centos7-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/centos7-ci.yml 
b/.github/workflows/centos7-ci.yml index f5485dc1329c..f2c7651c2b0d 100644 --- a/.github/workflows/centos7-ci.yml +++ b/.github/workflows/centos7-ci.yml @@ -25,7 +25,7 @@ jobs: fail-fast: false matrix: test_dir: - - t/plugin + - t/plugin t/pubsub - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc - t/node t/router t/script t/stream-node t/utils t/wasm t/xds-library From 7de4bceb605d73f2a143b29f9a3bc21601740362 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 23:14:14 +0800 Subject: [PATCH 34/96] chore: change plugin to kafka-proxy --- .../{kafka-consumer.lua => kafka-proxy.lua} | 2 +- docs/en/latest/plugins/kafka-proxy.md | 82 +++++++++++++++++++ t/plugin/{kafka-consumer.t => kafka-proxy.t} | 2 +- 3 files changed, 84 insertions(+), 2 deletions(-) rename apisix/plugins/{kafka-consumer.lua => kafka-proxy.lua} (98%) create mode 100644 docs/en/latest/plugins/kafka-proxy.md rename t/plugin/{kafka-consumer.t => kafka-proxy.t} (97%) diff --git a/apisix/plugins/kafka-consumer.lua b/apisix/plugins/kafka-proxy.lua similarity index 98% rename from apisix/plugins/kafka-consumer.lua rename to apisix/plugins/kafka-proxy.lua index 5a614e4002b9..06c124585e4b 100644 --- a/apisix/plugins/kafka-consumer.lua +++ b/apisix/plugins/kafka-proxy.lua @@ -39,7 +39,7 @@ local schema = { local _M = { version = 0.1, priority = 508, - name = "kafka-consumer", + name = "kafka-proxy", schema = schema, } diff --git a/docs/en/latest/plugins/kafka-proxy.md b/docs/en/latest/plugins/kafka-proxy.md new file mode 100644 index 000000000000..7a1df202b18b --- /dev/null +++ b/docs/en/latest/plugins/kafka-proxy.md @@ -0,0 +1,82 @@ +--- +title: kafka-proxy +keywords: + - APISIX + - Plugin + - Kafka + - consumer +description: This document contains information about the Apache APISIX kafka-proxy Plugin. +--- + + + +## Description + +The `kafka-proxy` plugin can be used to configure advanced parameters for the kafka upstream of Apache APISIX, such as SASL authentication. + +## Attributes + +| Name | Type | Required | Default | Valid values | Description | +|-------------------|---------|----------|---------|---------------|------------------------------| +| enable_sasl | boolean | False | false | | enable SASL authentication | +| sasl_username | string | False | "" | | SASL authentication username | +| sasl_password | string | False | "" | | SASL authentication passwrod | + +:::note +If SASL authentication is enabled, the `sasl_username` and `sasl_password` must be set. +::: + +## Example usage + +When we use scheme as the upstream of kafka, we can add kafka authentication configuration to it through this plugin. + +```shell +curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/r1' \ + -H 'X-API-KEY: ' \ + -H 'Content-Type: application/json' \ + -d '{ + "uri": "/kafka", + "plugins": { + "kafka-proxy": { + "enable_sasl": true, + "sasl_username": "user", + "sasl_password": "pwd" + } + }, + "upstream": { + "nodes": { + "kafka-server1:9092": 1, + "kafka-server2:9092": 1, + "kafka-server3:9092": 1 + }, + "type": "none", + "scheme": "kafka", + "tls": { + "verify": true + } + } +}' +``` + +Now, we can test it by connecting to the `/kafka` endpoint via websocket. + +## Disable Plugin + +To disable the `kafka-proxy` Plugin, you can delete the corresponding JSON configuration from the Plugin configuration. APISIX will automatically reload and you do not have to restart for this to take effect. 
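The websocket test mentioned in the document above can be sketched as a minimal Lua consumer. This is illustrative only: it assumes the lua-protobuf and lua-resty-websocket libraries that the test suite below uses, and the listen address, topic, and offset are placeholders:

```lua
-- Minimal consumer sketch for a /kafka route like the one configured
-- above. Address, topic, and offset are placeholders, not part of
-- this patch.
local pb = require("pb")
local protoc = require("protoc")
local client = require("resty.websocket.client")

protoc.reload()
pb.option("int64_as_string")
local pubsub_protoc = protoc.new()
pubsub_protoc:addpath("apisix")
assert(pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto"))

local ws = client:new()
assert(ws:connect("ws://127.0.0.1:9080/kafka"))

-- fetch one batch of messages from partition 0, starting at offset 0
assert(ws:send_binary(pb.encode("PubSubReq", {
    sequence = 0,
    cmd_kafka_fetch = {topic = "test-consumer", partition = 0, offset = 0},
})))

local raw_data = assert(ws:recv_frame())
local resp = pb.decode("PubSubResp", raw_data)
if resp.kafka_fetch_resp then
    for _, msg in ipairs(resp.kafka_fetch_resp.messages) do
        -- large offset/timestamp values arrive as "#"-prefixed strings
        -- because of the int64_as_string option
        print(msg.offset, msg.value)
    end
end

ws:send_close()
```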
diff --git a/t/plugin/kafka-consumer.t b/t/plugin/kafka-proxy.t similarity index 97% rename from t/plugin/kafka-consumer.t rename to t/plugin/kafka-proxy.t index 01a82b809f39..4574d6e39fdc 100644 --- a/t/plugin/kafka-consumer.t +++ b/t/plugin/kafka-proxy.t @@ -49,7 +49,7 @@ __DATA__ {enable_sasl = true, sasl_username = 123, sasl_password = "123"}, {enable_sasl = true, sasl_username = "123", sasl_password = 123}, } - local plugin = require("apisix.plugins.kafka-consumer") + local plugin = require("apisix.plugins.kafka-proxy") for _, case in ipairs(test_cases) do local ok, err = plugin.check_schema(case) From 464a0289a6d06520dd05b9102fafa9b3f3a72d97 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 23:32:25 +0800 Subject: [PATCH 35/96] chore: recover tls config --- apisix/schema_def.lua | 5 +---- t/node/upstream-mtls.t | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index cc7e8acd15db..1a2acafb8b68 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -411,10 +411,7 @@ local upstream_schema = { description = "Turn on server certificate verification", }, }, - anyOf = { - {required = {"client_cert", "client_key"}}, - {required = {"verify"}}, - }, + required = {"client_cert", "client_key"} }, keepalive_pool = { type = "object", diff --git a/t/node/upstream-mtls.t b/t/node/upstream-mtls.t index 7168f3816667..9c0a49d9f713 100644 --- a/t/node/upstream-mtls.t +++ b/t/node/upstream-mtls.t @@ -77,7 +77,7 @@ __DATA__ GET /t --- error_code: 400 --- response_body -{"error_msg":"invalid configuration: property \"upstream\" validation failed: property \"tls\" validation failed: object matches none of the required: [\"client_cert\",\"client_key\"] or [\"verify\"]"} +{"error_msg":"invalid configuration: property \"upstream\" validation failed: property \"tls\" validation failed: property \"client_key\" is required"} From 4238e62d1ca82e5dbc94a211088e8cd35856ce1c Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 23:38:38 +0800 Subject: [PATCH 36/96] feat: move tls and verify to plugin --- apisix/init.lua | 7 +++---- apisix/plugins/kafka-proxy.lua | 11 ++++++++++- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/apisix/init.lua b/apisix/init.lua index f62ce4bac74b..89c9d8a9da4a 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -285,7 +285,6 @@ local function kafka_access_phase(api_ctx) end local up_nodes = api_ctx.matched_upstream.nodes - local up_tls = api_ctx.matched_upstream.tls -- kafka client broker-related configuration local broker_list = {} @@ -306,10 +305,10 @@ local function kafka_access_phase(api_ctx) -- kafka client socket-related configuration local client_config = {refresh_interval = 30 * 60 * 1000} - if up_tls and type(up_tls.verify) ~= nil then + if api_ctx.kafka_consumer_enable_tls then client_config = { - ssl = up_tls, - ssl_verify = up_tls.verify, + ssl = api_ctx.kafka_consumer_enable_tls, + ssl_verify = api_ctx.kafka_consumer_ssl_verify, } end diff --git a/apisix/plugins/kafka-proxy.lua b/apisix/plugins/kafka-proxy.lua index 06c124585e4b..341728604721 100644 --- a/apisix/plugins/kafka-proxy.lua +++ b/apisix/plugins/kafka-proxy.lua @@ -20,6 +20,14 @@ local core = require("apisix.core") local schema = { type = "object", properties = { + enable_tls = { + type = "boolean", + default = false, + }, + ssl_verify = { + type = "boolean", + default = true, + }, enable_sasl = { type = "boolean", default = false, @@ -59,7 +67,8 @@ end function _M.access(conf, ctx) - -- write 
kafka-related configuration + ctx.kafka_consumer_enable_tls = conf.enable_tls + ctx.kafka_consumer_ssl_verify = conf.ssl_verify ctx.kafka_consumer_enable_sasl = conf.enable_sasl ctx.kafka_consumer_sasl_username = conf.sasl_username ctx.kafka_consumer_sasl_password = conf.sasl_password From 491a16b0cfd824e3710eaad202895ddfa8d3164b Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 23:41:05 +0800 Subject: [PATCH 37/96] docs: add plugin to sidebar --- docs/en/latest/config.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/en/latest/config.json b/docs/en/latest/config.json index 6ea5784bd979..ff799b306c59 100644 --- a/docs/en/latest/config.json +++ b/docs/en/latest/config.json @@ -172,7 +172,8 @@ "label": "Other Protocols", "items": [ "plugins/dubbo-proxy", - "plugins/mqtt-proxy" + "plugins/mqtt-proxy", + "plugins/kafka-proxy" ] } ] From 0bdfe23e43905b56bc5c1d2d04869fb41b73cfc5 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sat, 7 May 2022 23:45:39 +0800 Subject: [PATCH 38/96] docs: add tls to kafka-proxy plugin --- docs/en/latest/plugins/kafka-proxy.md | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/docs/en/latest/plugins/kafka-proxy.md b/docs/en/latest/plugins/kafka-proxy.md index 7a1df202b18b..5dd792161ac9 100644 --- a/docs/en/latest/plugins/kafka-proxy.md +++ b/docs/en/latest/plugins/kafka-proxy.md @@ -33,11 +33,13 @@ The `kafka-proxy` plugin can be used to configure advanced parameters for the ka ## Attributes -| Name | Type | Required | Default | Valid values | Description | -|-------------------|---------|----------|---------|---------------|------------------------------| -| enable_sasl | boolean | False | false | | enable SASL authentication | -| sasl_username | string | False | "" | | SASL authentication username | -| sasl_password | string | False | "" | | SASL authentication passwrod | +| Name | Type | Required | Default | Valid values | Description | +|-------------------|---------|----------|---------|---------------|-----------------------------------| +| enable_tls | boolean | False | false | | Enable TLS for Kafka client | +| ssl_verify | boolean | False | true | | Enable TLS certificate validation | +| enable_sasl | boolean | False | false | | Enable SASL authentication | +| sasl_username | string | False | "" | | SASL authentication username | +| sasl_password | string | False | "" | | SASL authentication passwrod | :::note If SASL authentication is enabled, the `sasl_username` and `sasl_password` must be set. 
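For context, the attributes documented above do not configure the connection directly: the plugin only stashes them on the request context, and `kafka_access_phase` in `apisix/init.lua` (patch 36 above) turns them into the `lua-resty-kafka` client configuration. A sketch of the resulting shape; the broker address and credentials are placeholders:

```lua
-- Sketch of the client configuration built from the plugin attributes above.
local kafka_bconsumer = require("resty.kafka.basic-consumer")

local broker_list = {
    {
        host = "kafka-server1",  -- placeholder broker address
        port = 9092,
        -- attached per broker only when enable_sasl is true
        sasl_config = {
            mechanism = "PLAIN",
            user = "user",      -- from sasl_username
            password = "pwd",   -- from sasl_password
        },
    },
}

local client_config = {
    refresh_interval = 30 * 60 * 1000,  -- refresh broker metadata every 30 min
    ssl = true,                         -- from enable_tls
    ssl_verify = true,                  -- from ssl_verify
}

local consumer = kafka_bconsumer:new(broker_list, client_config)
```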
From 4eadcb7ef14f470860394d298bfb43a640d19097 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sun, 8 May 2022 12:05:10 +0800 Subject: [PATCH 39/96] fix: ci --- t/node/upstream-kafka.t | 48 ----------------------------------------- t/pubsub/kafka.t | 13 ----------- 2 files changed, 61 deletions(-) diff --git a/t/node/upstream-kafka.t b/t/node/upstream-kafka.t index 3bdfd28342e1..cf5c1dcb9e0d 100644 --- a/t/node/upstream-kafka.t +++ b/t/node/upstream-kafka.t @@ -54,51 +54,3 @@ __DATA__ } --- response_body 201passed - - - -=== TEST 2: success with tls ---- config - location /t { - content_by_lua_block { - local t = require("lib.test_admin") - local code, body = t.test("/apisix/admin/upstreams/kafka-tls", ngx.HTTP_PUT, [[{ - "nodes": { - "127.0.0.1:9092": 1 - }, - "type": "none", - "scheme": "kafka", - "tls": { - "verify": true - } - }]]) - - ngx.say(code..body) - } - } ---- response_body -201passed - - - -=== TEST 3: wrong tls verify type ---- config - location /t { - content_by_lua_block { - local t = require("lib.test_admin") - local code, body = t.test("/apisix/admin/upstreams/kafka-tls-error-type", ngx.HTTP_PUT, [[{ - "nodes": { - "127.0.0.1:9092": 1 - }, - "type": "none", - "scheme": "kafka", - "tls": { - "verify": "none" - } - }]]) - - ngx.print(code..body) - } - } ---- response_body -400{"error_msg":"invalid configuration: property \"tls\" validation failed: property \"verify\" validation failed: wrong type: expected boolean, got string"} diff --git a/t/pubsub/kafka.t b/t/pubsub/kafka.t index c91172c356c2..bf04cc8353fd 100644 --- a/t/pubsub/kafka.t +++ b/t/pubsub/kafka.t @@ -54,19 +54,6 @@ __DATA__ "uri": "/kafka" }]], }, - { - url = "/apisix/admin/upstreams/kafka-tls", - data = [[{ - "nodes": { - "127.0.0.1:9092": 1 - }, - "tls": { - "verify": true - }, - "type": "none", - "scheme": "kafka" - }]], - }, } local t = require("lib.test_admin").test From a7b4b635bc31902e7cf68a6d14ac4cd89fd0c331 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sun, 8 May 2022 12:16:19 +0800 Subject: [PATCH 40/96] fix: typo --- docs/en/latest/plugins/kafka-proxy.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/latest/plugins/kafka-proxy.md b/docs/en/latest/plugins/kafka-proxy.md index 5dd792161ac9..a281d37a1779 100644 --- a/docs/en/latest/plugins/kafka-proxy.md +++ b/docs/en/latest/plugins/kafka-proxy.md @@ -39,7 +39,7 @@ The `kafka-proxy` plugin can be used to configure advanced parameters for the ka | ssl_verify | boolean | False | true | | Enable TLS certificate validation | | enable_sasl | boolean | False | false | | Enable SASL authentication | | sasl_username | string | False | "" | | SASL authentication username | -| sasl_password | string | False | "" | | SASL authentication passwrod | +| sasl_password | string | False | "" | | SASL authentication password | :::note If SASL authentication is enabled, the `sasl_username` and `sasl_password` must be set. 
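The `kafka-proxy` schema exercised throughout these patches can also be checked directly with the plugin's `check_schema`. A condensed sketch mirroring `t/plugin/kafka-proxy.t` (run inside the APISIX test harness, where `apisix.core` is loadable; the SASL fields are still flat at this point in the series and only move into a `sasl` object in a later patch):

```lua
-- Sketch: validating kafka-proxy configurations against the plugin schema.
local plugin = require("apisix.plugins.kafka-proxy")

-- a complete SASL configuration passes
local ok, err = plugin.check_schema({
    enable_sasl = true,
    sasl_username = "user",
    sasl_password = "pwd",
})
assert(ok, err)

-- enabling SASL without credentials is rejected by the custom check
local ok2, err2 = plugin.check_schema({enable_sasl = true})
assert(not ok2)
-- err2 is "need to set sasl username when enabling kafka sasl authentication"
```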
From ef152fe1239d978711aa6dca73113bc235edc865 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Sun, 8 May 2022 17:14:15 +0800 Subject: [PATCH 41/96] fix: ci --- t/pubsub/kafka.t | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/t/pubsub/kafka.t b/t/pubsub/kafka.t index bf04cc8353fd..3feb2b1b4dd7 100644 --- a/t/pubsub/kafka.t +++ b/t/pubsub/kafka.t @@ -64,8 +64,8 @@ __DATA__ end } } ---- response_body eval -"201passed\n"x2 +--- response_body +201passed From 0efa8f601828427e80db5fa4f71ad15b6445e42f Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 02:36:46 +0800 Subject: [PATCH 42/96] docs: add pubsub module documentations --- docs/assets/images/pubsub-architecture.svg | 4 + docs/en/latest/pubsub.md | 242 +++++++++++++++++++++ docs/zh/latest/pubsub.md | 241 ++++++++++++++++++++ 3 files changed, 487 insertions(+) create mode 100644 docs/assets/images/pubsub-architecture.svg create mode 100644 docs/en/latest/pubsub.md create mode 100644 docs/zh/latest/pubsub.md diff --git a/docs/assets/images/pubsub-architecture.svg b/docs/assets/images/pubsub-architecture.svg new file mode 100644 index 000000000000..84fbc59350b4 --- /dev/null +++ b/docs/assets/images/pubsub-architecture.svg @@ -0,0 +1,4 @@ + + + +
[SVG markup not recoverable; the diagram's text labels: Client, APISIX, MQ Broker, with Subscribe, Fetch, and Push flows]
\ No newline at end of file diff --git a/docs/en/latest/pubsub.md b/docs/en/latest/pubsub.md new file mode 100644 index 000000000000..fa6cc1d38abf --- /dev/null +++ b/docs/en/latest/pubsub.md @@ -0,0 +1,242 @@ +--- +title: Pub-Sub +keywords: + - APISIX + - Pub-Sub +description: This document contains information about the Apache APISIX pub-sub framework. +--- + + + +## What is Pub-Sub + +Publish-subscribe is a messaging paradigm in which message producers do not send messages directly to message consumers, but are relayed by a specific broker that caches messages sent by producers and then actively pushes them to subscribed consumers or pulls them by consumers. This pattern is often used in system architectures for system decoupling or to handle high traffic scenarios. + +In Apache APISIX, the most common scenario is for handling north-south traffic from the server to the client. If we can combine it with a publish-subscribe scenario, we can achieve more powerful features, such as real-time collaboration on online documents, online games, etc. + +## Architecture + +![pub-sub architecture](../../assets/images/pubsub-architecture.svg) + +Currently, Apache APISIX supports WebSocket communication with the client, which can be any application that supports WebSocket, with a custom Protocol Buffer as the application layer communication protocol, see the [protocol definition](../../../apisix/pubsub.proto). + +## Supported messaging systems + +- [Aapche Kafka](pubsub/kafka.md) + +## How to support other messaging systems + +An extensible pubsub module is implemented in Apache APISIX, which is responsible for starting the WebSocket server, coding and decoding communication protocols, handling client commands, and through which new messaging system support can be simply added. + +### Basic Steps + +- Add new commands and response body definitions to `pubsub.proto` +- Add a new option to the `scheme` configuration item in upstream +- Add a new `scheme` judgment branch to `http_access_phase` +- Implement the required message system instruction processing functions +- Optional: Create plugins to support advanced configurations of this messaging system + +### the example of Apache Kafka + +#### Add new commands and response body definitions to `pubsub.proto` + +The core of the protocol definition in `pubsub.proto` is the two parts `PubSubReq` and `PubSubResp`. + +First, create the `CmdKafkaFetch` command and add the required parameters. Then, register this command in the list of commands for `req` in `PubSubReq`, which is named `cmd_kafka_fetch`. + +```protobuf +message CmdKafkaFetch { + string topic = 1; + int32 partition = 2; + int64 offset = 3; +} + +message PubSubReq { + int64 sequence = 1; + oneof req { + CmdKafkaFetch cmd_kafka_fetch = 31; + // more commands + }; +} +``` + +Then create the corresponding response body `KafkaFetchResp` and register it in the `resp` of `PubSubResp`, named `kafka_fetch_resp`. + +```protobuf +message KafkaFetchResp { + repeated KafkaMessage messages = 1; +} + +message PubSubResp { + int64 sequence = 1; + oneof resp { + ErrorResp error_resp = 31; + KafkaFetchResp kafka_fetch_resp = 32; + // more responses + }; +} +``` + +#### Add a new option to the `scheme` configuration item in upstream + +Add a new option `kafka` to the `scheme` field enumeration in the `upstream` of `apisix/schema_def.lua`. 
+ +```lua +scheme = { + enum = {"grpc", "grpcs", "http", "https", "tcp", "tls", "udp", "kafka"}, + -- other +} +``` + +#### Add a new `scheme` judgment branch to `http_access_phase` + +Add a `scheme` judgment branch to the `http_access_phase` function in `apisix/init.lua` to support the processing of `kafka` type upstreams. Because of Apache Kafka has its own clustering and partition scheme, we do not need to use the Apache APISIX built-in load balancing algorithm, so we intercept and take over the processing flow before selecting the upstream node, here using the `kafka_access_phase` function. + +```lua +-- load balancer is not required by kafka upstream +if api_ctx.matched_upstream and api_ctx.matched_upstream.scheme == "kafka" then + return kafka_access_phase(api_ctx) +end +``` + +#### Implement the required message system commands processing functions + +```lua +local function kafka_access_phase(api_ctx) + local pubsub, err = core.pubsub.new() + + -- omit kafka client initialization code here + + pubsub:on("cmd_kafka_list_offset", function (params) + -- call kafka client to get data + end) + + pubsub:wait() +end +``` + +First, create an instance of the `pubsub` module, which is provided in the `core` package. + +```lua +local pubsub, err = core.pubsub.new() +``` + +Then, an instance of the Apache Kafka client is created, and this code is omitted here. + +Next, add the command registered in the protocol definition above to the `pubsub` instance, which will provide a callback function that provides the parameters parsed from the communication protocol, in which the developer needs to call the kafka client to get the data and return it to the `pubsub` module as the function return value. + +```lua +pubsub:on("cmd_kafka_list_offset", function (params) + +end) +``` + +:::note Callback function prototype +The `params` is the data in the protocol definition; the first return value is the data, which needs to contain the fields in the response body definition, and returns the `nil` value when there is an error; the second return value is the error, and returns the error string when there is an error +```lua +function (params) + return data, err +end +``` +::: + +Finally, it enters the loop to wait for client commands and when an error occurs it returns the error and stops the processing flow. + +```lua +local err = pubsub:wait() +``` + +#### Optional: Create plugins to support advanced configurations of this messaging system + +Add the required fields to the plugin schema definition and write them to the context of the current request in the `access` function. + +```lua +local schema = { + type = "object", + properties = { + enable_tls = { + type = "boolean", + default = false, + }, + -- more properties + }, +} + +local _M = { + version = 0.1, + priority = 508, + name = "kafka-proxy", + schema = schema, +} + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + +function _M.access(conf, ctx) + ctx.kafka_consumer_enable_tls = conf.enable_tls + ctx.kafka_consumer_ssl_verify = conf.ssl_verify + ctx.kafka_consumer_enable_sasl = conf.enable_sasl + ctx.kafka_consumer_sasl_username = conf.sasl_username + ctx.kafka_consumer_sasl_password = conf.sasl_password +end +``` + +Add this plugin to the list of plugins in the APISIX configuration file. + +```yaml +# config-default.yaml +plugins: + - kafka-proxy +``` + +#### Results + +After this is done, create a route like the one below to connect to this messaging system via APISIX using the WebSocket. 
+ +```shell +curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ + -H 'X-API-KEY: ' \ + -H 'Content-Type: application/json' \ + -d '{ + "uri": "/kafka", + "plugins": { + "kafka-proxy": { + "enable_tls": true, + "ssl_verify": true, + "enable_sasl": true, + "sasl_username": "user", + "sasl_password": "pwd" + } + }, + "upstream": { + "nodes": { + "kafka-server1:9092": 1, + "kafka-server2:9092": 1, + "kafka-server3:9092": 1 + }, + "type": "none", + "scheme": "kafka", + "tls": { + "verify": true + } + } +}' +``` diff --git a/docs/zh/latest/pubsub.md b/docs/zh/latest/pubsub.md new file mode 100644 index 000000000000..1c57e0b03e46 --- /dev/null +++ b/docs/zh/latest/pubsub.md @@ -0,0 +1,241 @@ +--- +title: 发布订阅框架 +keywords: + - APISIX + - Pub-Sub +description: This document contains information about the Apache APISIX pub-sub framework. +--- + + + +## 摘要 + +发布订阅是一种消息范式,消息生产者不直接将消息发送给消息消费者,而是由特定的代理进行中转,代理会将生产者发送的消息缓存下来,之后主动推送至订阅的消费者或由消费者拉取。在系统架构中通常使用这种模式进行系统解耦,或是处理大流量场景。 + +在 Apache APISIX 中,最常用的场景是用于处理服务器至客户端的南北向流量,如果可以结合发布订阅场景,我们可以实现更为强大的功能,例如在线文档实时协作、在线游戏等。 + +## 架构 + +![pub-sub architecture](../../assets/images/pubsub-architecture.svg) + +当前,Apache APISIX 支持以 WebSocket 与客户端通信,客户端可以是任何支持 WebSocket 的程序,以自定义 Protocol Buffer 为应用层通信协议,查看[协议定义](../../../apisix/pubsub.proto)。 + +## 当前支持的消息系统 + +- [Aapche Kafka](pubsub/kafka.md) + +## 如何支持其他消息系统 + +Apache APISIX 中为此实现了一个可扩展的 pubsub 模块,它负责启动 WebSocket 服务器、通信协议编解码、处理客户端指令,通过它可以简单的添加新的消息系统支持。 + +### 基本步骤 + +- 向`pubsub.proto`中添加新的指令和响应体定义 +- 向上游中`scheme`配置项添加新的选项 +- 向`http_access_phase`中添加新的`scheme`判断分支 +- 实现所需消息系统指令处理函数 +- 可选:创建插件以支持该消息系统的高级配置 + +### 以 Apache Kafka 为例 + +#### 向`pubsub.proto`中添加新的指令和响应体定义 + +`pubsub.proto`中协议定义的核心为`PubSubReq`和`PubSubResp`这两个部分。 + +首先,创建`CmdKafkaFetch`指令,添加所需的参数。而后,在`PubSubReq`中 req 的指令列表中注册这条指令,其命名为`cmd_kafka_fetch`。 + +```protobuf +message CmdKafkaFetch { + string topic = 1; + int32 partition = 2; + int64 offset = 3; +} + +message PubSubReq { + int64 sequence = 1; + oneof req { + CmdKafkaFetch cmd_kafka_fetch = 31; + // more commands + }; +} +``` + +接着创建对应的响应体`KafkaFetchResp`并在`PubSubResp`的 resp 中注册它,其命名为`kafka_fetch_resp`。 + +```protobuf +message KafkaFetchResp { + repeated KafkaMessage messages = 1; +} + +message PubSubResp { + int64 sequence = 1; + oneof resp { + ErrorResp error_resp = 31; + KafkaFetchResp kafka_fetch_resp = 32; + // more responses + }; +} +``` + +#### 向上游中`scheme`配置项添加新的选项 + +在`apisix/schema_def.lua`的`upstream`中`scheme`字段枚举中添加新的选项`kafka`。 + +```lua +scheme = { + enum = {"grpc", "grpcs", "http", "https", "tcp", "tls", "udp", "kafka"}, + -- other +} +``` + +#### 向`http_access_phase`中添加新的`scheme`判断分支 + +在`apisix/init.lua`的`http_access_phase`函数中添加`scheme`的判断分支,以支持`kafka`类型的上游的处理。因为 Apache Kafka 有其自己的集群与分片方案,我们不需要使用 Apache APISIX 内置的负载均衡算法,因此在选择上游节点前拦截并接管处理流程,此处使用`kafka_access_phase`函数。 + +```lua +-- load balancer is not required by kafka upstream +if api_ctx.matched_upstream and api_ctx.matched_upstream.scheme == "kafka" then + return kafka_access_phase(api_ctx) +end +``` + +#### 实现所需消息系统指令处理函数 + +```lua +local function kafka_access_phase(api_ctx) + local pubsub, err = core.pubsub.new() + + -- omit kafka client initialization code here + + pubsub:on("cmd_kafka_list_offset", function (params) + -- call kafka client to get data + end) + + pubsub:wait() +end +``` + +首先,创建`pubsub`模块实例,它在`core`包中提供。 + +```lua +local pubsub, err = core.pubsub.new() +``` + +创建需要的 Apache Kafka 客户端实例,此处省略这部分代码。 + +接着,在`pubsub`实例中添加在上面协议定义中注册的指令,其中将提供一个回调函数,它的提供从通信协议中解析出的参数,开发者需要在这个回调函数中调用 kafka 
客户端获取数据,并作为函数返回值返回至`pubsub`模块。 + +```lua +pubsub:on("cmd_kafka_list_offset", function (params) +end) +``` + +:::note 回调函数原型 +params为协议定义中的数据;第一个返回值为数据,它需要包含响应体定义中的字段,当出现错误时则返回`nil`值;第二个返回值为错误,当出现错误时返回错误字符串 +```lua +function (params) + return data, err +end +``` +::: + +最终,进入循环等待客户端指令,当出现错误时它将返回错误并停止处理流程。 + +```lua +local err = pubsub:wait() +``` + +#### 可选:创建`kafka-proxy`插件以支持其鉴权配置 + +在插件 schema 定义中添加所需的字段,而后在 `access` 处理函数中将它们写入当前请求的上下文中。 + +```lua +local schema = { + type = "object", + properties = { + enable_tls = { + type = "boolean", + default = false, + }, + -- more properties + }, +} + +local _M = { + version = 0.1, + priority = 508, + name = "kafka-proxy", + schema = schema, +} + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + +function _M.access(conf, ctx) + ctx.kafka_consumer_enable_tls = conf.enable_tls + ctx.kafka_consumer_ssl_verify = conf.ssl_verify + ctx.kafka_consumer_enable_sasl = conf.enable_sasl + ctx.kafka_consumer_sasl_username = conf.sasl_username + ctx.kafka_consumer_sasl_password = conf.sasl_password +end +``` + +最后,需要将此插件注册至 APISIX 配置文件中的插件列表。 + +```yaml +# config-default.yaml +plugins: + - kafka-proxy +``` + +#### 成果 + +在完成上述工作后,创建下面这样的路由,即可通过 APISIX 以 WebSocket 连接这种消息系统。 + +```shell +curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ + -H 'X-API-KEY: ' \ + -H 'Content-Type: application/json' \ + -d '{ + "uri": "/kafka", + "plugins": { + "kafka-proxy": { + "enable_tls": true, + "ssl_verify": true, + "enable_sasl": true, + "sasl_username": "user", + "sasl_password": "pwd" + } + }, + "upstream": { + "nodes": { + "kafka-server1:9092": 1, + "kafka-server2:9092": 1, + "kafka-server3:9092": 1 + }, + "type": "none", + "scheme": "kafka", + "tls": { + "verify": true + } + } +}' +``` From 448b9bc879718dca4cfe5dc527d8bc1cfc67d485 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 02:43:30 +0800 Subject: [PATCH 43/96] docs: update typo --- docs/en/latest/plugins/kafka-proxy.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/en/latest/plugins/kafka-proxy.md b/docs/en/latest/plugins/kafka-proxy.md index a281d37a1779..44cc54e4488c 100644 --- a/docs/en/latest/plugins/kafka-proxy.md +++ b/docs/en/latest/plugins/kafka-proxy.md @@ -57,6 +57,8 @@ curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/r1' \ "uri": "/kafka", "plugins": { "kafka-proxy": { + "enable_tls": true, + "ssl_verify": true, "enable_sasl": true, "sasl_username": "user", "sasl_password": "pwd" @@ -69,10 +71,7 @@ curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/r1' \ "kafka-server3:9092": 1 }, "type": "none", - "scheme": "kafka", - "tls": { - "verify": true - } + "scheme": "kafka" } }' ``` From 0ce6fba4a7f6ed577218739e80a9ea3a40bb34f0 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 03:30:22 +0800 Subject: [PATCH 44/96] docs: add kafka pubsub documentations --- docs/en/latest/pubsub/kafka.md | 119 +++++++++++++++++++++++++++++++++ docs/zh/latest/pubsub/kafka.md | 117 ++++++++++++++++++++++++++++++++ 2 files changed, 236 insertions(+) create mode 100644 docs/en/latest/pubsub/kafka.md create mode 100644 docs/zh/latest/pubsub/kafka.md diff --git a/docs/en/latest/pubsub/kafka.md b/docs/en/latest/pubsub/kafka.md new file mode 100644 index 000000000000..91ce6ce92252 --- /dev/null +++ b/docs/en/latest/pubsub/kafka.md @@ -0,0 +1,119 @@ +--- +title: Apache Kafka +keywords: + - APISIX + - Pub-Sub + - Kafka +description: This document contains information about the Apache APISIX kafka pub-sub scenario. 
+--- + + + +## Connect to Apache Kafka + +Connecting to Apache Kafka in Apache APISIX is very simple. + +Currently, we implement a relatively simple function to list offset (ListOffsets), fetch message (Fetch) function, does not support the Apache Kafka consumer group, can not be manage offset by Kafka. + +### Limitations + +- Offsets need to be managed manually +They can be stored by a custom backend service or obtained via the list_offset command before starting to fetch the message, which can use timestamp to get the starting offset, or to get the initial and end offsets. +- Unsupport batch data acquisition +A single instruction can only obtain the data of a Topic Partition, does not support batch data acquisition through a single instruction + +### Prepare + +First, it is necessary to compile the [communication protocol](../../../../apisix/pubsub.proto) as a language-specific SDK using the `protoc`, which provides the command and response definitions to connect to Kafka via APISIX using the WebSocket. + +The `sequence` field in the protocol is used to associate the request with the response, they will correspond one to one, the client can manage it in the way they want, APISIX will not modify it, only pass it back to the client through the response body. + +The following commands are currently used by Apache Kafka connect: + +- CmdKafkaFetch +- CmdKafkaListOffset +> The `timestamp` field in the `CmdKafkaListOffset` command supports the following value: +> - `unix timestamp`: Offset of the first message after the specified timestamp +> - `-1`:Offset of the last message of the current Partition +> - `-2`:Offset of the first message of current Partition +> For more information, see [Apache Kafka Protocol Documentation](https://kafka.apache.org/protocol.html#The_Messages_ListOffsets) + +Possible response body: When an error occurs, `ErrorResp` will be returned, which includes the error string; the rest of the response will be returned after the execution of the particular command. + +- ErrorResp +- KafkaFetchResp +- KafkaListOffsetResp + +### How to use + +#### Create route + +Create a route, set the upstream `scheme` field to `kafka`, and configure `nodes` to be the address of the Kafka broker. + +```shell +curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ + -H 'X-API-KEY: ' \ + -H 'Content-Type: application/json' \ + -d '{ + "uri": "/kafka", + "upstream": { + "nodes": { + "kafka-server1:9092": 1, + "kafka-server2:9092": 1, + "kafka-server3:9092": 1 + }, + "type": "none", + "scheme": "kafka" + } +}' +``` + +After configuring the route, you can use this feature. + +#### Enabling TLS and authentication + +Simply turn on the `kafka-proxy` plugin on the created route and enable the Kafka TLS handshake and SASL authentication through the configuration, which can be found in the [plugin documentation](../../../en/latest/plugins/kafka-proxy.md). 
+ +```shell +curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ + -H 'X-API-KEY: ' \ + -H 'Content-Type: application/json' \ + -d '{ + "uri": "/kafka", + "plugins": { + "kafka-proxy": { + "enable_tls": true, + "ssl_verify": true, + "enable_sasl": true, + "sasl_username": "user", + "sasl_password": "pwd" + } + }, + "upstream": { + "nodes": { + "kafka-server1:9092": 1, + "kafka-server2:9092": 1, + "kafka-server3:9092": 1 + }, + "type": "none", + "scheme": "kafka" + } +}' +``` diff --git a/docs/zh/latest/pubsub/kafka.md b/docs/zh/latest/pubsub/kafka.md new file mode 100644 index 000000000000..58300aae754c --- /dev/null +++ b/docs/zh/latest/pubsub/kafka.md @@ -0,0 +1,117 @@ +--- +title: Apache Kafka +keywords: + - APISIX + - Pub-Sub + - Kafka +description: This document contains information about the Apache APISIX kafka pub-sub scenario. +--- + + + +## 连接至 Apache Kafka + +在 Apache APISIX 中连接 Apache Kafka 非常简单。 + +当前我们实现的功能较为简单,可以实现获取偏移量(ListOffsets)、获取消息(Fetch)的功能,暂不支持 Apache Kafka 的消费者组功能,无法由 Kafka 管理偏移量。 + +### 局限性 + +- 用户需要手动管理偏移量:可以由自定义后端服务存储,或在开始获取消息前通过 List Offset 命令获取,它可以使用时间戳获取起始偏移量,或是获取初始、末尾偏移量。 +- 单条指令仅可获取一个 Topic Partition 的数据:暂不支持通过单条指令批量获取数据 + +### 准备 + +首先,需要使用`protoc`将[通信协议](../../../../apisix/pubsub.proto)编译为特定语言SDK,它提供指令和响应定义,即可通过 APISIX 以 WebSocket 连接至 Kafka。 + +协议中`sequence`字段用来关联请求与响应,它们将一一对应,客户端可以以自己需要的方式管理它,APISIX将不会对其进行修改,仅通过响应体透传回客户端。 + +当前 Apache Kafka 使用以下指令:这些指令都是针对某个特定的 Topic 和 Partition,暂不支持 + +- CmdKafkaFetch +- CmdKafkaListOffset +> `CmdKafkaListOffset`指令中的`timestamp`字段支持以下情况: +> - 时间戳:获取指定时间戳后的首条消息偏移量 +> - `-1`:当前 Partition 最后一条消息偏移量 +> - `-2`:当前 Partition 首条消息偏移量 +> 更多信息参考 [Apache Kafka 协议文档](https://kafka.apache.org/protocol.html#The_Messages_ListOffsets) + +可能的响应体:当出现错误时,将返回`ErrorResp`,它包括错误字符串;其余响应将在执行特定命令后返回。 + +- ErrorResp +- KafkaFetchResp +- KafkaListOffsetResp + +### 使用方法 + +#### 创建路由 + +创建一个路由,将上游的`scheme`字段设置为`kafka`,并将`nodes`配置为 Kafka broker 的地址。 + +```shell +curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ + -H 'X-API-KEY: ' \ + -H 'Content-Type: application/json' \ + -d '{ + "uri": "/kafka", + "upstream": { + "nodes": { + "kafka-server1:9092": 1, + "kafka-server2:9092": 1, + "kafka-server3:9092": 1 + }, + "type": "none", + "scheme": "kafka" + } +}' +``` + +配置路由后,就可以使用这一功能了。 + +#### 开启TLS和鉴权 + +仅需在创建的路由上开启kafka-proxy插件,通过配置即可开启与 Kafka TLS 握手和 SASL 鉴权,该插件配置可以参考 [插件文档](../../../en/latest/plugins/kafka-proxy.md)。 + +```shell +curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ + -H 'X-API-KEY: ' \ + -H 'Content-Type: application/json' \ + -d '{ + "uri": "/kafka", + "plugins": { + "kafka-proxy": { + "enable_tls": true, + "ssl_verify": true, + "enable_sasl": true, + "sasl_username": "user", + "sasl_password": "pwd" + } + }, + "upstream": { + "nodes": { + "kafka-server1:9092": 1, + "kafka-server2:9092": 1, + "kafka-server3:9092": 1 + }, + "type": "none", + "scheme": "kafka" + } +}' +``` From 56bb7a60f9100201daa1dd4a067d8c480382a38e Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 03:35:05 +0800 Subject: [PATCH 45/96] fix: lint --- docs/en/latest/pubsub.md | 4 +++- docs/en/latest/pubsub/kafka.md | 3 +++ docs/zh/latest/pubsub.md | 4 +++- docs/zh/latest/pubsub/kafka.md | 11 +++++++---- 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/docs/en/latest/pubsub.md b/docs/en/latest/pubsub.md index fa6cc1d38abf..dc06c930c5c5 100644 --- a/docs/en/latest/pubsub.md +++ b/docs/en/latest/pubsub.md @@ -121,7 +121,7 @@ end ```lua local function kafka_access_phase(api_ctx) local pubsub, err = 
core.pubsub.new() - + -- omit kafka client initialization code here pubsub:on("cmd_kafka_list_offset", function (params) @@ -150,11 +150,13 @@ end) :::note Callback function prototype The `params` is the data in the protocol definition; the first return value is the data, which needs to contain the fields in the response body definition, and returns the `nil` value when there is an error; the second return value is the error, and returns the error string when there is an error + ```lua function (params) return data, err end ``` + ::: Finally, it enters the loop to wait for client commands and when an error occurs it returns the error and stops the processing flow. diff --git a/docs/en/latest/pubsub/kafka.md b/docs/en/latest/pubsub/kafka.md index 91ce6ce92252..8aba46c25361 100644 --- a/docs/en/latest/pubsub/kafka.md +++ b/docs/en/latest/pubsub/kafka.md @@ -49,10 +49,13 @@ The following commands are currently used by Apache Kafka connect: - CmdKafkaFetch - CmdKafkaListOffset + > The `timestamp` field in the `CmdKafkaListOffset` command supports the following value: +> > - `unix timestamp`: Offset of the first message after the specified timestamp > - `-1`:Offset of the last message of the current Partition > - `-2`:Offset of the first message of current Partition +> > For more information, see [Apache Kafka Protocol Documentation](https://kafka.apache.org/protocol.html#The_Messages_ListOffsets) Possible response body: When an error occurs, `ErrorResp` will be returned, which includes the error string; the rest of the response will be returned after the execution of the particular command. diff --git a/docs/zh/latest/pubsub.md b/docs/zh/latest/pubsub.md index 1c57e0b03e46..04fabfc3cac2 100644 --- a/docs/zh/latest/pubsub.md +++ b/docs/zh/latest/pubsub.md @@ -121,7 +121,7 @@ end ```lua local function kafka_access_phase(api_ctx) local pubsub, err = core.pubsub.new() - + -- omit kafka client initialization code here pubsub:on("cmd_kafka_list_offset", function (params) @@ -149,11 +149,13 @@ end) :::note 回调函数原型 params为协议定义中的数据;第一个返回值为数据,它需要包含响应体定义中的字段,当出现错误时则返回`nil`值;第二个返回值为错误,当出现错误时返回错误字符串 + ```lua function (params) return data, err end ``` + ::: 最终,进入循环等待客户端指令,当出现错误时它将返回错误并停止处理流程。 diff --git a/docs/zh/latest/pubsub/kafka.md b/docs/zh/latest/pubsub/kafka.md index 58300aae754c..07e62fc5e6e5 100644 --- a/docs/zh/latest/pubsub/kafka.md +++ b/docs/zh/latest/pubsub/kafka.md @@ -39,18 +39,21 @@ description: This document contains information about the Apache APISIX kafka pu ### 准备 -首先,需要使用`protoc`将[通信协议](../../../../apisix/pubsub.proto)编译为特定语言SDK,它提供指令和响应定义,即可通过 APISIX 以 WebSocket 连接至 Kafka。 +首先,需要使用`protoc`将[通信协议](../../../../apisix/pubsub.proto)编译为特定语言 SDK,它提供指令和响应定义,即可通过 APISIX 以 WebSocket 连接至 Kafka。 -协议中`sequence`字段用来关联请求与响应,它们将一一对应,客户端可以以自己需要的方式管理它,APISIX将不会对其进行修改,仅通过响应体透传回客户端。 +协议中`sequence`字段用来关联请求与响应,它们将一一对应,客户端可以以自己需要的方式管理它,APISIX 将不会对其进行修改,仅通过响应体透传回客户端。 当前 Apache Kafka 使用以下指令:这些指令都是针对某个特定的 Topic 和 Partition,暂不支持 - CmdKafkaFetch - CmdKafkaListOffset + > `CmdKafkaListOffset`指令中的`timestamp`字段支持以下情况: +> > - 时间戳:获取指定时间戳后的首条消息偏移量 > - `-1`:当前 Partition 最后一条消息偏移量 > - `-2`:当前 Partition 首条消息偏移量 +> > 更多信息参考 [Apache Kafka 协议文档](https://kafka.apache.org/protocol.html#The_Messages_ListOffsets) 可能的响应体:当出现错误时,将返回`ErrorResp`,它包括错误字符串;其余响应将在执行特定命令后返回。 @@ -85,9 +88,9 @@ curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ 配置路由后,就可以使用这一功能了。 -#### 开启TLS和鉴权 +#### 开启 TLS 和鉴权 -仅需在创建的路由上开启kafka-proxy插件,通过配置即可开启与 Kafka TLS 握手和 SASL 鉴权,该插件配置可以参考 [插件文档](../../../en/latest/plugins/kafka-proxy.md)。 
+仅需在创建的路由上开启`kafka-proxy`插件,通过配置即可开启与 Kafka TLS 握手和 SASL 鉴权,该插件配置可以参考 [插件文档](../../../en/latest/plugins/kafka-proxy.md)。 ```shell curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ From dce5b4f7be526c68eeff51e9cbb06694d4538f7f Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 03:37:48 +0800 Subject: [PATCH 46/96] docs: add to sidebar --- docs/en/latest/config.json | 8 ++++++++ docs/zh/latest/config.json | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/docs/en/latest/config.json b/docs/en/latest/config.json index ff799b306c59..773ce862a590 100644 --- a/docs/en/latest/config.json +++ b/docs/en/latest/config.json @@ -213,6 +213,14 @@ "discovery/kubernetes" ] }, + { + "type": "category", + "label": "Pub-Sub", + "items": [ + "pubsub", + "pubsub/kafka" + ] + }, { "type": "doc", "id": "health-check" diff --git a/docs/zh/latest/config.json b/docs/zh/latest/config.json index e5134d0ab74c..bad0ed6c6962 100644 --- a/docs/zh/latest/config.json +++ b/docs/zh/latest/config.json @@ -201,6 +201,14 @@ "discovery/kubernetes" ] }, + { + "type": "category", + "label": "发布订阅", + "items": [ + "pubsub", + "pubsub/kafka" + ] + }, { "type": "doc", "id": "external-plugin" From 92270838d9da28a7c9747a136219e289625d5b94 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 03:38:28 +0800 Subject: [PATCH 47/96] docs: fix typo --- docs/en/latest/pubsub/kafka.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/latest/pubsub/kafka.md b/docs/en/latest/pubsub/kafka.md index 8aba46c25361..c3d421302ee6 100644 --- a/docs/en/latest/pubsub/kafka.md +++ b/docs/en/latest/pubsub/kafka.md @@ -36,7 +36,7 @@ Currently, we implement a relatively simple function to list offset (ListOffsets - Offsets need to be managed manually They can be stored by a custom backend service or obtained via the list_offset command before starting to fetch the message, which can use timestamp to get the starting offset, or to get the initial and end offsets. 
-- Unsupport batch data acquisition +- Unsupported batch data acquisition A single instruction can only obtain the data of a Topic Partition, does not support batch data acquisition through a single instruction ### Prepare From 7a6854fbe6b6454587df552061c61b44b76ac0ea Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 03:44:52 +0800 Subject: [PATCH 48/96] chore: add plugin to config-default --- conf/config-default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/conf/config-default.yaml b/conf/config-default.yaml index 0888743dd84c..0abdd2d65d4a 100644 --- a/conf/config-default.yaml +++ b/conf/config-default.yaml @@ -374,6 +374,7 @@ plugins: # plugin list (sorted by priority) - traffic-split # priority: 966 - redirect # priority: 900 - response-rewrite # priority: 899 + - kafka-proxy # priority: 508 #- dubbo-proxy # priority: 507 - grpc-transcode # priority: 506 - grpc-web # priority: 505 From 5a1f388bcee718ee424e3badbb48f5ddbf2a01ce Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 03:45:15 +0800 Subject: [PATCH 49/96] fix: typo --- docs/en/latest/config.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/latest/config.json b/docs/en/latest/config.json index 42fd2ef76665..1a1b5735ae46 100644 --- a/docs/en/latest/config.json +++ b/docs/en/latest/config.json @@ -227,7 +227,7 @@ "items": [ "xrpc" ] - } + }, { "type": "doc", "id": "health-check" From 08bf6b8420ae1a8fed4ff939624412e3fc361ebe Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 03:49:25 +0800 Subject: [PATCH 50/96] docs: update architecture image --- docs/assets/images/pubsub-architecture.svg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/assets/images/pubsub-architecture.svg b/docs/assets/images/pubsub-architecture.svg index 84fbc59350b4..4c8a630f0724 100644 --- a/docs/assets/images/pubsub-architecture.svg +++ b/docs/assets/images/pubsub-architecture.svg @@ -1,4 +1,4 @@ -
[SVG markup not recoverable (previous revision); text labels: Subscribe, Client, APISIX, MQ Broker, Fetch, Push]
\ No newline at end of file +
[SVG markup not recoverable (updated revision); text labels: Client, APISIX, MQ Broker, Fetch, Push, Subscribe]
\ No newline at end of file From 0b8ed1707e1d327113da92b380894f0b52fae9ee Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 03:56:45 +0800 Subject: [PATCH 51/96] docs: update architecture image --- docs/assets/images/pubsub-architecture.svg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/assets/images/pubsub-architecture.svg b/docs/assets/images/pubsub-architecture.svg index 4c8a630f0724..9bce53bb1ed8 100644 --- a/docs/assets/images/pubsub-architecture.svg +++ b/docs/assets/images/pubsub-architecture.svg @@ -1,4 +1,4 @@ -
[SVG markup not recoverable (previous revision); text labels: Client, APISIX, MQ Broker, Fetch, Push, Subscribe]
\ No newline at end of file +
[SVG markup not recoverable (updated revision); text labels: Client, APISIX, MQ Broker, Fetch, Push, Subscribe]
\ No newline at end of file From 244a293f3123f92a44aace5292c74833513f84bf Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 04:24:53 +0800 Subject: [PATCH 52/96] test: fix plugin list --- t/admin/plugins.t | 1 + 1 file changed, 1 insertion(+) diff --git a/t/admin/plugins.t b/t/admin/plugins.t index 2557da7505c5..8a254407f0e4 100644 --- a/t/admin/plugins.t +++ b/t/admin/plugins.t @@ -102,6 +102,7 @@ server-info traffic-split redirect response-rewrite +kafka-proxy grpc-transcode grpc-web public-api From c75f91803e0143ad007d1b7f5a4f84ec8e3e8a9e Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 09:47:21 +0800 Subject: [PATCH 53/96] docs: fix review --- docs/zh/latest/pubsub.md | 30 +++++++++++++++--------------- docs/zh/latest/pubsub/kafka.md | 12 ++++++------ 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/docs/zh/latest/pubsub.md b/docs/zh/latest/pubsub.md index 04fabfc3cac2..2d0fd9d2f629 100644 --- a/docs/zh/latest/pubsub.md +++ b/docs/zh/latest/pubsub.md @@ -47,19 +47,19 @@ Apache APISIX 中为此实现了一个可扩展的 pubsub 模块,它负责启 ### 基本步骤 -- 向`pubsub.proto`中添加新的指令和响应体定义 -- 向上游中`scheme`配置项添加新的选项 -- 向`http_access_phase`中添加新的`scheme`判断分支 +- 向 `pubsub.proto` 中添加新的指令和响应体定义 +- 向上游中 `scheme` 配置项添加新的选项 +- 向 `http_access_phase` 中添加新的 `scheme` 判断分支 - 实现所需消息系统指令处理函数 - 可选:创建插件以支持该消息系统的高级配置 ### 以 Apache Kafka 为例 -#### 向`pubsub.proto`中添加新的指令和响应体定义 +#### 向 `pubsub.proto` 中添加新的指令和响应体定义 -`pubsub.proto`中协议定义的核心为`PubSubReq`和`PubSubResp`这两个部分。 +`pubsub.proto` 中协议定义的核心为 `PubSubReq` 和 `PubSubResp` 这两个部分。 -首先,创建`CmdKafkaFetch`指令,添加所需的参数。而后,在`PubSubReq`中 req 的指令列表中注册这条指令,其命名为`cmd_kafka_fetch`。 +首先,创建 `CmdKafkaFetch` 指令,添加所需的参数。而后,在 `PubSubReq` 中 req 的指令列表中注册这条指令,其命名为 `cmd_kafka_fetch`。 ```protobuf message CmdKafkaFetch { @@ -77,7 +77,7 @@ message PubSubReq { } ``` -接着创建对应的响应体`KafkaFetchResp`并在`PubSubResp`的 resp 中注册它,其命名为`kafka_fetch_resp`。 +接着创建对应的响应体 `KafkaFetchResp` 并在 `PubSubResp` 的 resp 中注册它,其命名为 `kafka_fetch_resp`。 ```protobuf message KafkaFetchResp { @@ -94,9 +94,9 @@ message PubSubResp { } ``` -#### 向上游中`scheme`配置项添加新的选项 +#### 向上游中 `scheme` 配置项添加新的选项 -在`apisix/schema_def.lua`的`upstream`中`scheme`字段枚举中添加新的选项`kafka`。 +在 `apisix/schema_def.lua` 的 `upstream` 中 `scheme` 字段枚举中添加新的选项 `kafka`。 ```lua scheme = { @@ -105,9 +105,9 @@ scheme = { } ``` -#### 向`http_access_phase`中添加新的`scheme`判断分支 +#### 向 `http_access_phase` 中添加新的 `scheme` 判断分支 -在`apisix/init.lua`的`http_access_phase`函数中添加`scheme`的判断分支,以支持`kafka`类型的上游的处理。因为 Apache Kafka 有其自己的集群与分片方案,我们不需要使用 Apache APISIX 内置的负载均衡算法,因此在选择上游节点前拦截并接管处理流程,此处使用`kafka_access_phase`函数。 +在 `apisix/init.lua` 的 `http_access_phase` 函数中添加 `scheme` 的判断分支,以支持 `kafka` 类型的上游的处理。因为 Apache Kafka 有其自己的集群与分片方案,我们不需要使用 Apache APISIX 内置的负载均衡算法,因此在选择上游节点前拦截并接管处理流程,此处使用 `kafka_access_phase` 函数。 ```lua -- load balancer is not required by kafka upstream @@ -132,7 +132,7 @@ local function kafka_access_phase(api_ctx) end ``` -首先,创建`pubsub`模块实例,它在`core`包中提供。 +首先,创建 `pubsub` 模块实例,它在 `core` 包中提供。 ```lua local pubsub, err = core.pubsub.new() @@ -140,7 +140,7 @@ local pubsub, err = core.pubsub.new() 创建需要的 Apache Kafka 客户端实例,此处省略这部分代码。 -接着,在`pubsub`实例中添加在上面协议定义中注册的指令,其中将提供一个回调函数,它的提供从通信协议中解析出的参数,开发者需要在这个回调函数中调用 kafka 客户端获取数据,并作为函数返回值返回至`pubsub`模块。 +接着,在 `pubsub` 实例中添加在上面协议定义中注册的指令,其中将提供一个回调函数,它的提供从通信协议中解析出的参数,开发者需要在这个回调函数中调用 kafka 客户端获取数据,并作为函数返回值返回至 `pubsub` 模块。 ```lua pubsub:on("cmd_kafka_list_offset", function (params) @@ -148,7 +148,7 @@ end) ``` :::note 回调函数原型 -params为协议定义中的数据;第一个返回值为数据,它需要包含响应体定义中的字段,当出现错误时则返回`nil`值;第二个返回值为错误,当出现错误时返回错误字符串 
+params为协议定义中的数据;第一个返回值为数据,它需要包含响应体定义中的字段,当出现错误时则返回 `nil` 值;第二个返回值为错误,当出现错误时返回错误字符串 ```lua function (params) @@ -164,7 +164,7 @@ end local err = pubsub:wait() ``` -#### 可选:创建`kafka-proxy`插件以支持其鉴权配置 +#### 可选:创建 `kafka-proxy` 插件以支持其鉴权配置 在插件 schema 定义中添加所需的字段,而后在 `access` 处理函数中将它们写入当前请求的上下文中。 diff --git a/docs/zh/latest/pubsub/kafka.md b/docs/zh/latest/pubsub/kafka.md index 07e62fc5e6e5..16d4057d25c8 100644 --- a/docs/zh/latest/pubsub/kafka.md +++ b/docs/zh/latest/pubsub/kafka.md @@ -39,16 +39,16 @@ description: This document contains information about the Apache APISIX kafka pu ### 准备 -首先,需要使用`protoc`将[通信协议](../../../../apisix/pubsub.proto)编译为特定语言 SDK,它提供指令和响应定义,即可通过 APISIX 以 WebSocket 连接至 Kafka。 +首先,需要使用 `protoc` 将[通信协议](../../../../apisix/pubsub.proto)编译为特定语言 SDK,它提供指令和响应定义,即可通过 APISIX 以 WebSocket 连接至 Kafka。 -协议中`sequence`字段用来关联请求与响应,它们将一一对应,客户端可以以自己需要的方式管理它,APISIX 将不会对其进行修改,仅通过响应体透传回客户端。 +协议中 `sequence` 字段用来关联请求与响应,它们将一一对应,客户端可以以自己需要的方式管理它,APISIX 将不会对其进行修改,仅通过响应体透传回客户端。 当前 Apache Kafka 使用以下指令:这些指令都是针对某个特定的 Topic 和 Partition,暂不支持 - CmdKafkaFetch - CmdKafkaListOffset -> `CmdKafkaListOffset`指令中的`timestamp`字段支持以下情况: +> `CmdKafkaListOffset` 指令中的 `timestamp` 字段支持以下情况: > > - 时间戳:获取指定时间戳后的首条消息偏移量 > - `-1`:当前 Partition 最后一条消息偏移量 @@ -56,7 +56,7 @@ description: This document contains information about the Apache APISIX kafka pu > > 更多信息参考 [Apache Kafka 协议文档](https://kafka.apache.org/protocol.html#The_Messages_ListOffsets) -可能的响应体:当出现错误时,将返回`ErrorResp`,它包括错误字符串;其余响应将在执行特定命令后返回。 +可能的响应体:当出现错误时,将返回 `ErrorResp`,它包括错误字符串;其余响应将在执行特定命令后返回。 - ErrorResp - KafkaFetchResp @@ -66,7 +66,7 @@ description: This document contains information about the Apache APISIX kafka pu #### 创建路由 -创建一个路由,将上游的`scheme`字段设置为`kafka`,并将`nodes`配置为 Kafka broker 的地址。 +创建一个路由,将上游的 `scheme` 字段设置为 `kafka`,并将 `nodes` 配置为 Kafka broker 的地址。 ```shell curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ @@ -90,7 +90,7 @@ curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ #### 开启 TLS 和鉴权 -仅需在创建的路由上开启`kafka-proxy`插件,通过配置即可开启与 Kafka TLS 握手和 SASL 鉴权,该插件配置可以参考 [插件文档](../../../en/latest/plugins/kafka-proxy.md)。 +仅需在创建的路由上开启 `kafka-proxy` 插件,通过配置即可开启与 Kafka TLS 握手和 SASL 鉴权,该插件配置可以参考 [插件文档](../../../en/latest/plugins/kafka-proxy.md)。 ```shell curl -X PUT 'http://127.0.0.1:9080/apisix/admin/routes/kafka' \ From 3eace42eda95e73438ed494ac4b3cd38a44f01b9 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 09:56:13 +0800 Subject: [PATCH 54/96] docs: adjust desc --- docs/en/latest/pubsub/kafka.md | 4 +++- docs/zh/latest/pubsub/kafka.md | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/en/latest/pubsub/kafka.md b/docs/en/latest/pubsub/kafka.md index c3d421302ee6..4a827001ac7e 100644 --- a/docs/en/latest/pubsub/kafka.md +++ b/docs/en/latest/pubsub/kafka.md @@ -30,7 +30,9 @@ description: This document contains information about the Apache APISIX kafka pu Connecting to Apache Kafka in Apache APISIX is very simple. -Currently, we implement a relatively simple function to list offset (ListOffsets), fetch message (Fetch) function, does not support the Apache Kafka consumer group, can not be manage offset by Kafka. +当前我们提供一种较为简单的方式来进行集成,通过获取偏移量(ListOffsets)与获取消息(Fetch)这两个 API 结合,可以快速实现拉取 Kafka 消息的功能,但暂不支持 Apache Kafka 的消费者组功能,无法由 Kafka 管理偏移量。 + +Currently we provide a simpler way to integrate by combining two APIs, ListOffsets and Fetch, to quickly implement the ability to pull Kafka messages, but do not support Apache Kafka's consumer group feature for now, and cannot be managed by Kafka for offsets. 
### Limitations diff --git a/docs/zh/latest/pubsub/kafka.md b/docs/zh/latest/pubsub/kafka.md index 16d4057d25c8..15343069c0c9 100644 --- a/docs/zh/latest/pubsub/kafka.md +++ b/docs/zh/latest/pubsub/kafka.md @@ -30,7 +30,7 @@ description: This document contains information about the Apache APISIX kafka pu 在 Apache APISIX 中连接 Apache Kafka 非常简单。 -当前我们实现的功能较为简单,可以实现获取偏移量(ListOffsets)、获取消息(Fetch)的功能,暂不支持 Apache Kafka 的消费者组功能,无法由 Kafka 管理偏移量。 +当前我们提供一种较为简单的方式来进行集成,通过获取偏移量(ListOffsets)与获取消息(Fetch)这两个 API 结合,可以快速实现拉取 Kafka 消息的功能,但暂不支持 Apache Kafka 的消费者组功能,无法由 Kafka 管理偏移量。 ### 局限性 From dc6832e053953d054f5ac502013ff4a5e555752e Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 10:00:31 +0800 Subject: [PATCH 55/96] docs: fix typo --- docs/en/latest/pubsub/kafka.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/en/latest/pubsub/kafka.md b/docs/en/latest/pubsub/kafka.md index 4a827001ac7e..d70fa68a6674 100644 --- a/docs/en/latest/pubsub/kafka.md +++ b/docs/en/latest/pubsub/kafka.md @@ -30,8 +30,6 @@ description: This document contains information about the Apache APISIX kafka pu Connecting to Apache Kafka in Apache APISIX is very simple. -当前我们提供一种较为简单的方式来进行集成,通过获取偏移量(ListOffsets)与获取消息(Fetch)这两个 API 结合,可以快速实现拉取 Kafka 消息的功能,但暂不支持 Apache Kafka 的消费者组功能,无法由 Kafka 管理偏移量。 - Currently we provide a simpler way to integrate by combining two APIs, ListOffsets and Fetch, to quickly implement the ability to pull Kafka messages, but do not support Apache Kafka's consumer group feature for now, and cannot be managed by Kafka for offsets. ### Limitations From 501efb6b36483fd1f61a97a548aabf08db5b5e4f Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 15:18:30 +0800 Subject: [PATCH 56/96] feat: support tls and sasl in kafka test server --- ci/pod/docker-compose.yml | 4 ++++ ci/pod/kafka/kafka-server/env/common.env | 7 ++++++- ci/pod/kafka/kafka-server/selfsigned.jks | Bin 0 -> 2559 bytes 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 ci/pod/kafka/kafka-server/selfsigned.jks diff --git a/ci/pod/docker-compose.yml b/ci/pod/docker-compose.yml index 1804fbf2ea9d..8ec0ca9eebb9 100644 --- a/ci/pod/docker-compose.yml +++ b/ci/pod/docker-compose.yml @@ -73,11 +73,15 @@ services: restart: unless-stopped ports: - "9092:9092" + - "9093:9093" + - "9094:9094" depends_on: - zookeeper-server1 - zookeeper-server2 networks: kafka_net: + volumes: + - ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro kafka-server2: image: bitnami/kafka:2.8.1 diff --git a/ci/pod/kafka/kafka-server/env/common.env b/ci/pod/kafka/kafka-server/env/common.env index 06200b9b0042..b8465521dea6 100644 --- a/ci/pod/kafka/kafka-server/env/common.env +++ b/ci/pod/kafka/kafka-server/env/common.env @@ -1,3 +1,8 @@ ALLOW_PLAINTEXT_LISTENER=yes KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true -KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092 +KAFKA_CFG_LISTENERS=PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093,SASL_PLAINTEXT://127.0.0.1:9094 +KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093,SASL_PLAINTEXT://127.0.0.1:9094 +KAFKA_CLIENT_USERS=testuser +KAFKA_CLIENT_PASSWORDS=testpwd +KAFKA_CERTIFICATE_PASSWORD=changeit +KAFKA_TLS_TYPE=JKS \ No newline at end of file diff --git a/ci/pod/kafka/kafka-server/selfsigned.jks b/ci/pod/kafka/kafka-server/selfsigned.jks new file mode 100644 index 0000000000000000000000000000000000000000..9729f406fb03c569a0af40b748a4f826a675e437 GIT binary patch literal 2559 
[GIT binary patch data for ci/pod/kafka/kafka-server/selfsigned.jks omitted: base85-encoded, 2559 bytes, not human-readable]

From: Zeping Bai
Date: Mon, 9 May 2022 15:19:05 +0800
Subject: [PATCH 57/96] fix: ci

---
 ci/pod/docker-compose.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/ci/pod/docker-compose.yml b/ci/pod/docker-compose.yml
index 8ec0ca9eebb9..6a1ec2f30d86 100644
--- a/ci/pod/docker-compose.yml
+++ b/ci/pod/docker-compose.yml
@@ -97,6 +97,8 @@ services:
       - zookeeper-server2
     networks:
       kafka_net:
+    volumes:
+      - ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro

 ## Eureka

From 4cecaf190a3da74144b81b1895d33f31b6632ce8 Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Mon, 9 May 2022 15:19:27 +0800
Subject: [PATCH 58/96] fix: ci

---
 ci/pod/docker-compose.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/ci/pod/docker-compose.yml b/ci/pod/docker-compose.yml
index 6a1ec2f30d86..54a496dcea50 100644
--- a/ci/pod/docker-compose.yml
+++ b/ci/pod/docker-compose.yml
@@ -92,6 +92,8 @@ services:
     restart: unless-stopped
     ports:
       - "19092:9092"
+      - "19093:9093"
+      - "19094:9094"
     depends_on:
       - zookeeper-server1
       - zookeeper-server2
     networks:
       kafka_net:

From 7529e7b2d19db3e46d65a6a4f0a792e69dd81652 Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Mon, 9 May 2022 15:47:31 +0800
Subject: [PATCH 59/96] fix: ci

---
 ci/pod/docker-compose.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ci/pod/docker-compose.yml b/ci/pod/docker-compose.yml
index 54a496dcea50..c93b1ec41eaa 100644
--- a/ci/pod/docker-compose.yml
+++
b/ci/pod/docker-compose.yml @@ -81,7 +81,7 @@ services: networks: kafka_net: volumes: - - ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro + - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro kafka-server2: image: bitnami/kafka:2.8.1 @@ -100,7 +100,7 @@ services: networks: kafka_net: volumes: - - ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro + - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro ## Eureka From db34949db745f847eb5017fb25cc73f6e5cddd54 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 15:50:32 +0800 Subject: [PATCH 60/96] test: add tls cases in kafka-proxy --- t/plugin/kafka-proxy.t | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/t/plugin/kafka-proxy.t b/t/plugin/kafka-proxy.t index 4574d6e39fdc..38438a18e10e 100644 --- a/t/plugin/kafka-proxy.t +++ b/t/plugin/kafka-proxy.t @@ -42,6 +42,9 @@ __DATA__ content_by_lua_block { local test_cases = { {}, + {enable_tls = true, ssl_verify = true}, + {enable_tls = "none"}, + {enable_tls = true, ssl_verify = "none"}, {enable_sasl = true, sasl_username = "user", sasl_password = "pwd"}, {enable_sasl = false}, {enable_sasl = true}, @@ -60,6 +63,9 @@ __DATA__ --- response_body done done +property "enable_tls" validation failed: wrong type: expected boolean, got string +property "ssl_verify" validation failed: wrong type: expected boolean, got string +done done need to set sasl username when enabling kafka sasl authentication need to set sasl password when enabling kafka sasl authentication From f423ae490ffc2aac1b9368bf93aeb34ed199e413 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 16:43:26 +0800 Subject: [PATCH 61/96] feat: sasl change to table --- apisix/plugins/kafka-proxy.lua | 35 ++++++++++++++++++---------------- t/plugin/kafka-proxy.t | 14 ++++++-------- 2 files changed, 25 insertions(+), 24 deletions(-) diff --git a/apisix/plugins/kafka-proxy.lua b/apisix/plugins/kafka-proxy.lua index 341728604721..2ca8c55758c2 100644 --- a/apisix/plugins/kafka-proxy.lua +++ b/apisix/plugins/kafka-proxy.lua @@ -32,13 +32,19 @@ local schema = { type = "boolean", default = false, }, - sasl_username = { - type = "string", - default = "", - }, - sasl_password = { - type = "string", - default = "", + sasl = { + type = "object", + properties = { + username = { + type = "string", + default = "", + }, + password = { + type = "string", + default = "", + }, + }, + required = {"username", "password"}, }, }, } @@ -53,13 +59,8 @@ local _M = { function _M.check_schema(conf) - if conf.enable_sasl then - if not conf.sasl_username or conf.sasl_username == "" then - return false, "need to set sasl username when enabling kafka sasl authentication" - end - if not conf.sasl_password or conf.sasl_password == "" then - return false, "need to set sasl password when enabling kafka sasl authentication" - end + if conf.enable_sasl and not conf.sasl then + return false, "need to set sasl configuration when enabling kafka sasl authentication" end return core.schema.check(schema, conf) @@ -70,8 +71,10 @@ function _M.access(conf, ctx) ctx.kafka_consumer_enable_tls = conf.enable_tls ctx.kafka_consumer_ssl_verify = conf.ssl_verify ctx.kafka_consumer_enable_sasl = conf.enable_sasl - ctx.kafka_consumer_sasl_username = conf.sasl_username - ctx.kafka_consumer_sasl_password = conf.sasl_password + if conf.enable_sasl then + ctx.kafka_consumer_sasl_username = 
+        ctx.kafka_consumer_sasl_password = conf.sasl.password
+    end
 end

diff --git a/t/plugin/kafka-proxy.t b/t/plugin/kafka-proxy.t
index 38438a18e10e..825dd385c45a 100644
--- a/t/plugin/kafka-proxy.t
+++ b/t/plugin/kafka-proxy.t
@@ -45,12 +45,11 @@ __DATA__
                 {enable_tls = true, ssl_verify = true},
                 {enable_tls = "none"},
                 {enable_tls = true, ssl_verify = "none"},
-                {enable_sasl = true, sasl_username = "user", sasl_password = "pwd"},
+                {enable_sasl = true, sasl = {username = "user", password = "pwd"}},
                 {enable_sasl = false},
                 {enable_sasl = true},
-                {enable_sasl = true, sasl_username = "user"},
-                {enable_sasl = true, sasl_username = 123, sasl_password = "123"},
-                {enable_sasl = true, sasl_username = "123", sasl_password = 123},
+                {enable_sasl = true, sasl = {username = "user"}},
+                {enable_sasl = true, sasl = {username = 1234}},
             }

             local plugin = require("apisix.plugins.kafka-proxy")
@@ -67,7 +66,6 @@ property "enable_tls" validation failed: wrong type: expected boolean, got strin
 property "ssl_verify" validation failed: wrong type: expected boolean, got string
 done
 done
-need to set sasl username when enabling kafka sasl authentication
-need to set sasl password when enabling kafka sasl authentication
-property "sasl_username" validation failed: wrong type: expected string, got number
-property "sasl_password" validation failed: wrong type: expected string, got number
+need to set sasl configuration when enabling kafka sasl authentication
+property "sasl" validation failed: property "password" is required
+property "sasl" validation failed: property "username" validation failed: wrong type: expected string, got number

From 359418b65e301ee5842efe29acc8b0a825bdd5cc Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Mon, 9 May 2022 17:18:13 +0800
Subject: [PATCH 62/96] docs: add ldoc for pubsub module

---
 apisix/core/pubsub.lua | 38 ++++++++++++++++++++++++++++++++++----
 1 file changed, 34 insertions(+), 4 deletions(-)

diff --git a/apisix/core/pubsub.lua b/apisix/core/pubsub.lua
index 02ba3aa9fefa..0c0b5c6b1959 100644
--- a/apisix/core/pubsub.lua
+++ b/apisix/core/pubsub.lua
@@ -15,6 +15,10 @@
 -- limitations under the License.
 --

+--- Extensible framework to support publish-and-subscribe scenarios
+--
+-- @module core.pubsub
+
 local core = require("apisix.core")
 local ws_server = require("resty.websocket.server")
 local protoc = require("protoc")
@@ -33,6 +37,14 @@ local _M = { version = 0.1 }
 local mt = { __index = _M }

+---
+-- Create pubsub module instance
+--
+-- @function core.pubsub.new
+-- @treturn pubsub module instance
+-- @treturn string|nil error message if present
+-- @usage
+-- local pubsub, err = core.pubsub.new()
 function _M.new()
     -- compile the protobuf file on initial load module
     -- ensure that each worker is loaded once
@@ -59,15 +71,33 @@ end


---- add command callback function
---- handler is function(params)
---- return value is resp, err
+---
+-- Add command callbacks to pubsub module instances
+--
+-- The callback function prototype: function (params)
+-- The params argument contains the request data defined by the requested command.
+-- The first return value is the response data; it must contain the fields required
+-- by the corresponding resp type, and it is nil when an error occurs.
+-- The second return value is an error message string, which only needs to be
+-- returned when an error occurs.
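+--
+-- An illustrative handler sketch (the command and response field names come from
+-- pubsub.proto; "consumer" stands for whatever backend client the caller holds):
+--
+--   pubsub:on("cmd_kafka_fetch", function (params)
+--       local ret, err = consumer:fetch(params.topic, params.partition, params.offset)
+--       if not ret then
+--           return nil, "failed to fetch message, err: " .. err
+--       end
+--       return { kafka_fetch_resp = { messages = ret.records } }
+--   end)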
+-- +-- @function core.pubsub.on +-- @usage +-- pubsub:on(command, function (params) +-- return data, err +-- end) function _M.on(self, command, handler) self.cmd_handler[command] = handler end --- enter the message receiving loop and wait for client data +--- +-- Put the pubsub instance into an event loop, waiting to process client commands +-- +-- @function core.pubsub.wait +-- @treturn string|nil error message if present, will terminate the event loop +-- @usage +-- local err = pubsub:wait() function _M.wait(self) local ws = self.ws_server while true do From 66b175939b2023e413bf0b1cb420ffcd0e6db96a Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 17:18:44 +0800 Subject: [PATCH 63/96] docs: fix typo --- apisix/core/pubsub.lua | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apisix/core/pubsub.lua b/apisix/core/pubsub.lua index 0c0b5c6b1959..44cdbe281424 100644 --- a/apisix/core/pubsub.lua +++ b/apisix/core/pubsub.lua @@ -31,8 +31,7 @@ protoc.reload() pb.option("int64_as_string") local pubsub_protoc = protoc.new() --- This module is used to handle ws server command --- processing in pub-sub scenarios. + local _M = { version = 0.1 } local mt = { __index = _M } From a467ba4d7039afd124053485ba5090eaded8f2cb Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 17:30:25 +0800 Subject: [PATCH 64/96] chore: move kafka_access to separate module --- apisix/init.lua | 113 +-------------------------------- apisix/pubsub/kafka.lua | 136 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 138 insertions(+), 111 deletions(-) create mode 100644 apisix/pubsub/kafka.lua diff --git a/apisix/init.lua b/apisix/init.lua index 930ceb09e57c..61eccd55ba39 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -42,8 +42,7 @@ local xrpc = require("apisix.stream.xrpc") local ctxdump = require("resty.ctxdump") local ngx_balancer = require("ngx.balancer") local debug = require("apisix.debug") -local kafka_bconsumer = require("resty.kafka.basic-consumer") -local ffi = require("ffi") +local pubsub_kafka = require("apisix.pubsub.kafka") local ngx = ngx local get_method = ngx.req.get_method local ngx_exit = ngx.exit @@ -56,10 +55,7 @@ local re_split = require("ngx.re").split local str_byte = string.byte local str_sub = string.sub local tonumber = tonumber -local tostring = tostring -local type = type local pairs = pairs -local C = ffi.C local control_api_router local is_http = false @@ -323,111 +319,6 @@ local function common_phase(phase_name) end -ffi.cdef[[ - int64_t atoll(const char *num); -]] -local function kafka_access_phase(api_ctx) - local pubsub, err = core.pubsub.new() - if not pubsub then - core.log.error("failed to initialize pub-sub module, err: ", err) - core.response.exit(400) - return - end - - local up_nodes = api_ctx.matched_upstream.nodes - - -- kafka client broker-related configuration - local broker_list = {} - for i, node in ipairs(up_nodes) do - broker_list[i] = { - host = node.host, - port = node.port, - } - - if api_ctx.kafka_consumer_enable_sasl then - broker_list[i].sasl_config = { - mechanism = "PLAIN", - user = api_ctx.kafka_consumer_sasl_username, - password = api_ctx.kafka_consumer_sasl_password, - } - end - end - - -- kafka client socket-related configuration - local client_config = {refresh_interval = 30 * 60 * 1000} - if api_ctx.kafka_consumer_enable_tls then - client_config = { - ssl = api_ctx.kafka_consumer_enable_tls, - ssl_verify = api_ctx.kafka_consumer_ssl_verify, - } - end - - -- load and create the consumer instance when it is 
determined - -- that the websocket connection was created successfully - local consumer = kafka_bconsumer:new(broker_list, client_config) - - pubsub:on("cmd_kafka_list_offset", function (params) - -- The timestamp parameter uses a 64-bit integer, which is difficult - -- for luajit to handle well, so the int64_as_string option in - -- lua-protobuf is used here. Smaller numbers will be decoded as - -- lua number, while overly larger numbers will be decoded as strings - -- in the format #number, where the # symbol at the beginning of the - -- string will be removed and converted to int64_t with the atoll function. - local timestamp = type(params.timestamp) == "string" and - C.atoll(str_sub(params.timestamp, 2, #params.timestamp)) or params.timestamp - - local offset, err = consumer:list_offset(params.topic, params.partition, timestamp) - - if not offset then - return nil, "failed to list offset, topic: " .. params.topic .. - ", partition: " .. params.partition .. ", err: " .. err - end - - offset = tostring(offset) - return { - kafka_list_offset_resp = { - offset = str_sub(offset, 1, #offset - 2) - } - } - end) - - pubsub:on("cmd_kafka_fetch", function (params) - local offset = type(params.offset) == "string" and - C.atoll(str_sub(params.offset, 2, #params.offset)) or params.offset - - local ret, err = consumer:fetch(params.topic, params.partition, offset) - if not ret then - return nil, "failed to fetch message, topic: " .. params.topic .. - ", partition: " .. params.partition .. ", err: " .. err - end - - -- split into multiple messages when the amount of data in - -- a single batch is too large - local messages = ret.records - - -- special handling of int64 for luajit compatibility - for _, message in ipairs(messages) do - local timestamp = tostring(message.timestamp) - message.timestamp = str_sub(timestamp, 1, #timestamp - 2) - local offset = tostring(message.offset) - message.offset = str_sub(offset, 1, #offset - 2) - end - - return { - kafka_fetch_resp = { - messages = messages, - }, - } - end) - - -- start processing client commands - local err = pubsub:wait() - if err then - core.log.error("failed to handle pub-sub command, err: ", err) - end -end - - function _M.http_access_phase() local ngx_ctx = ngx.ctx @@ -616,7 +507,7 @@ function _M.http_access_phase() -- load balancer is not required by kafka upstream if api_ctx.matched_upstream and api_ctx.matched_upstream.scheme == "kafka" then - return kafka_access_phase(api_ctx) + return pubsub_kafka.access(api_ctx) end local code, err = set_upstream(route, api_ctx) diff --git a/apisix/pubsub/kafka.lua b/apisix/pubsub/kafka.lua new file mode 100644 index 000000000000..eca66efe200b --- /dev/null +++ b/apisix/pubsub/kafka.lua @@ -0,0 +1,136 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local bconsumer = require("resty.kafka.basic-consumer") +local ffi = require("ffi") +local C = ffi.C +local tostring = tostring +local type = type +local str_sub = string.sub + +ffi.cdef[[ + int64_t atoll(const char *num); +]] + + +local _M = {} + +-- Takes over requests of type kafka upstream in the http_access phase. +function _M.access(api_ctx) + local pubsub, err = core.pubsub.new() + if not pubsub then + core.log.error("failed to initialize pub-sub module, err: ", err) + core.response.exit(400) + return + end + + local up_nodes = api_ctx.matched_upstream.nodes + + -- kafka client broker-related configuration + local broker_list = {} + for i, node in ipairs(up_nodes) do + broker_list[i] = { + host = node.host, + port = node.port, + } + + if api_ctx.kafka_consumer_enable_sasl then + broker_list[i].sasl_config = { + mechanism = "PLAIN", + user = api_ctx.kafka_consumer_sasl_username, + password = api_ctx.kafka_consumer_sasl_password, + } + end + end + + -- kafka client socket-related configuration + local client_config = {refresh_interval = 30 * 60 * 1000} + if api_ctx.kafka_consumer_enable_tls then + client_config = { + ssl = api_ctx.kafka_consumer_enable_tls, + ssl_verify = api_ctx.kafka_consumer_ssl_verify, + } + end + + -- load and create the consumer instance when it is determined + -- that the websocket connection was created successfully + local consumer = bconsumer:new(broker_list, client_config) + + pubsub:on("cmd_kafka_list_offset", function (params) + -- The timestamp parameter uses a 64-bit integer, which is difficult + -- for luajit to handle well, so the int64_as_string option in + -- lua-protobuf is used here. Smaller numbers will be decoded as + -- lua number, while overly larger numbers will be decoded as strings + -- in the format #number, where the # symbol at the beginning of the + -- string will be removed and converted to int64_t with the atoll function. + local timestamp = type(params.timestamp) == "string" and + C.atoll(str_sub(params.timestamp, 2, #params.timestamp)) or params.timestamp + + local offset, err = consumer:list_offset(params.topic, params.partition, timestamp) + + if not offset then + return nil, "failed to list offset, topic: " .. params.topic .. + ", partition: " .. params.partition .. ", err: " .. err + end + + offset = tostring(offset) + return { + kafka_list_offset_resp = { + offset = str_sub(offset, 1, #offset - 2) + } + } + end) + + pubsub:on("cmd_kafka_fetch", function (params) + local offset = type(params.offset) == "string" and + C.atoll(str_sub(params.offset, 2, #params.offset)) or params.offset + + local ret, err = consumer:fetch(params.topic, params.partition, offset) + if not ret then + return nil, "failed to fetch message, topic: " .. params.topic .. + ", partition: " .. params.partition .. ", err: " .. 
err + end + + -- split into multiple messages when the amount of data in + -- a single batch is too large + local messages = ret.records + + -- special handling of int64 for luajit compatibility + for _, message in ipairs(messages) do + local timestamp = tostring(message.timestamp) + message.timestamp = str_sub(timestamp, 1, #timestamp - 2) + local offset = tostring(message.offset) + message.offset = str_sub(offset, 1, #offset - 2) + end + + return { + kafka_fetch_resp = { + messages = messages, + }, + } + end) + + -- start processing client commands + local err = pubsub:wait() + if err then + core.log.error("failed to handle pub-sub command, err: ", err) + end +end + + +return _M From 5792c59b98949d221c3c47b0338bd452bca85d10 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 17:33:07 +0800 Subject: [PATCH 65/96] docs: change category name --- docs/en/latest/config.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/latest/config.json b/docs/en/latest/config.json index 1a1b5735ae46..c2e05bfcf58f 100644 --- a/docs/en/latest/config.json +++ b/docs/en/latest/config.json @@ -215,7 +215,7 @@ }, { "type": "category", - "label": "Pub-Sub", + "label": "PubSub", "items": [ "pubsub", "pubsub/kafka" From 73840421b329f80564ad4b935d0bf7f3247f9c0c Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 17:35:48 +0800 Subject: [PATCH 66/96] docs: update kafka-proxy keyword --- docs/en/latest/plugins/kafka-proxy.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/en/latest/plugins/kafka-proxy.md b/docs/en/latest/plugins/kafka-proxy.md index 44cc54e4488c..253c07bd4370 100644 --- a/docs/en/latest/plugins/kafka-proxy.md +++ b/docs/en/latest/plugins/kafka-proxy.md @@ -3,8 +3,7 @@ title: kafka-proxy keywords: - APISIX - Plugin - - Kafka - - consumer + - Kafka proxy description: This document contains information about the Apache APISIX kafka-proxy Plugin. --- From 58a973db7ca351f900010d4ab6443c0fb603d6cf Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 17:39:57 +0800 Subject: [PATCH 67/96] docs: update --- docs/en/latest/pubsub.md | 2 +- docs/zh/latest/pubsub.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/en/latest/pubsub.md b/docs/en/latest/pubsub.md index dc06c930c5c5..701d00d43e0b 100644 --- a/docs/en/latest/pubsub.md +++ b/docs/en/latest/pubsub.md @@ -35,7 +35,7 @@ In Apache APISIX, the most common scenario is for handling north-south traffic f ![pub-sub architecture](../../assets/images/pubsub-architecture.svg) -Currently, Apache APISIX supports WebSocket communication with the client, which can be any application that supports WebSocket, with a custom Protocol Buffer as the application layer communication protocol, see the [protocol definition](../../../apisix/pubsub.proto). +Currently, Apache APISIX supports WebSocket communication with the client, which can be any application that supports WebSocket, with Protocol Buffer as the serialization mechanism, see the [protocol definition](../../../apisix/pubsub.proto). 
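+
+A minimal Lua client sketch is shown below (for illustration only, modeled on the framework's test cases; it assumes APISIX listens on port `9080` and that a route with `uri` `/kafka` points to an upstream whose `scheme` is `kafka`):
+
+```lua
+local protoc = require("protoc")
+local pb = require("pb")
+local client = require("resty.websocket.client")
+
+-- compile the pubsub protocol definition
+protoc.reload()
+pb.option("int64_as_string")
+local pubsub_protoc = protoc.new()
+pubsub_protoc:addpath("apisix")
+assert(pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto"))
+
+-- connect to the pubsub endpoint exposed by the route
+local ws = client:new()
+assert(ws:connect("ws://127.0.0.1:9080/kafka"))
+
+-- query the latest offset of a topic partition, then read the reply
+ws:send_binary(pb.encode("PubSubReq", {
+    sequence = 0,
+    cmd_kafka_list_offset = {topic = "test-consumer", partition = 0, timestamp = -1},
+}))
+local raw_data = assert(ws:recv_frame())
+local resp = pb.decode("PubSubResp", raw_data)
+print(resp.sequence, resp.kafka_list_offset_resp.offset)
+```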
## Supported messaging systems diff --git a/docs/zh/latest/pubsub.md b/docs/zh/latest/pubsub.md index 2d0fd9d2f629..efadd7486cb5 100644 --- a/docs/zh/latest/pubsub.md +++ b/docs/zh/latest/pubsub.md @@ -35,7 +35,7 @@ description: This document contains information about the Apache APISIX pub-sub ![pub-sub architecture](../../assets/images/pubsub-architecture.svg) -当前,Apache APISIX 支持以 WebSocket 与客户端通信,客户端可以是任何支持 WebSocket 的程序,以自定义 Protocol Buffer 为应用层通信协议,查看[协议定义](../../../apisix/pubsub.proto)。 +当前,Apache APISIX 支持以 WebSocket 与客户端通信,客户端可以是任何支持 WebSocket 的程序,以 Protocol Buffer 作为序列化机制,查看[协议定义](../../../apisix/pubsub.proto)。 ## 当前支持的消息系统 From c732581d0d90ef50e3fe5db251863e261b9a6ff8 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Mon, 9 May 2022 22:55:51 +0800 Subject: [PATCH 68/96] docs: add comments to pubsub proto --- apisix/pubsub.proto | 52 ++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 49 insertions(+), 3 deletions(-) diff --git a/apisix/pubsub.proto b/apisix/pubsub.proto index bb3a3d14074d..ced5a20d6ba2 100644 --- a/apisix/pubsub.proto +++ b/apisix/pubsub.proto @@ -17,21 +17,44 @@ syntax = "proto3"; -option go_package = "pubsub"; +option java_package = "org.apache.apisix.api.pubsub"; +option java_outer_classname = "PubSubProto"; +option java_multiple_files = true; +option go_package = "github.com/apache/apisix/api/pubsub;pubsub"; -// request +/** + * Get the offset of the specified topic partition from Apache Kafka. + */ message CmdKafkaListOffset { string topic = 1; int32 partition = 2; int64 timestamp = 3; } +/** + * Fetch messages of the specified topic partition from Apache Kafka. + */ message CmdKafkaFetch { string topic = 1; int32 partition = 2; int64 offset = 3; } +/** + * Client request definition for pubsub scenarios + * + * The sequence field is used to associate requests and responses. + * Apache APISIX will set a consistent sequence for the associated + * requests and responses, and the client can explicitly know the + * response corresponding to any of the requests. + * + * The req field is the command data sent by the client, and its + * type will be chosen from any of the lists in the definition. + * + * Field numbers 1 to 30 in the definition are used to define basic + * information and future extensions, and numbers after 30 are used + * to define commands. + */ message PubSubReq { int64 sequence = 1; oneof req { @@ -40,7 +63,10 @@ message PubSubReq { }; } -// response +/** + * The definition of a message in Kafka with the current message + * offset, production timestamp, Key, and message content. + */ message KafkaMessage { int64 offset = 1; int64 timestamp = 2; @@ -48,19 +74,39 @@ message KafkaMessage { bytes value = 4; } +/** + * The response body of the service when an error occurs, + * containing the error code and the error message. + */ message ErrorResp { int32 code = 1; string message = 2; } +/** + * The response of Fetch messages from Apache Kafka. + */ message KafkaFetchResp { repeated KafkaMessage messages = 1; } +/** + * The response of list offset from Apache Kafka. + */ message KafkaListOffsetResp { int64 offset = 1; } +/** + * Server response definition for pubsub scenarios + * + * The sequence field will be the same as the value in the + * request, which is used to associate the associated request + * and response. + * + * The resp field is the response data sent by the server, and + * its type will be chosen from any of the lists in the definition. 
+ */ message PubSubResp { int64 sequence = 1; oneof resp { From 0130cf344dcfb9ded042e59fc800bafa5dc68f6c Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 00:13:12 +0800 Subject: [PATCH 69/96] fix: kafka tls and sasl --- ci/pod/docker-compose.yml | 6 +++++- ci/pod/kafka/kafka-server/kafka_jaas.conf | 6 ++++++ 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 ci/pod/kafka/kafka-server/kafka_jaas.conf diff --git a/ci/pod/docker-compose.yml b/ci/pod/docker-compose.yml index c93b1ec41eaa..e5053d7e89d7 100644 --- a/ci/pod/docker-compose.yml +++ b/ci/pod/docker-compose.yml @@ -81,8 +81,10 @@ services: networks: kafka_net: volumes: + - ./ci/pod/kafka/kafka-server/kafka_jaas.conf:/opt/bitnami/kafka/config/kafka_jaas.conf:ro - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro - + - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.truststore.jks:ro + kafka-server2: image: bitnami/kafka:2.8.1 env_file: @@ -100,7 +102,9 @@ services: networks: kafka_net: volumes: + - ./ci/pod/kafka/kafka-server/kafka_jaas.conf:/opt/bitnami/kafka/config/kafka_jaas.conf:ro - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro + - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.truststore.jks:ro ## Eureka diff --git a/ci/pod/kafka/kafka-server/kafka_jaas.conf b/ci/pod/kafka/kafka-server/kafka_jaas.conf new file mode 100644 index 000000000000..883325971701 --- /dev/null +++ b/ci/pod/kafka/kafka-server/kafka_jaas.conf @@ -0,0 +1,6 @@ +KafkaServer { + org.apache.kafka.common.security.plain.PlainLoginModule required + username="admin" + password="admin-secret" + user_admin="admin-secret"; +}; From 78b601ac552f70e56513f68d0ac6260b071940b0 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 00:18:59 +0800 Subject: [PATCH 70/96] fix: lint --- apisix/pubsub/kafka.lua | 1 + ci/pod/kafka/kafka-server/kafka_jaas.conf | 17 +++++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/apisix/pubsub/kafka.lua b/apisix/pubsub/kafka.lua index eca66efe200b..711f66da6e7d 100644 --- a/apisix/pubsub/kafka.lua +++ b/apisix/pubsub/kafka.lua @@ -21,6 +21,7 @@ local ffi = require("ffi") local C = ffi.C local tostring = tostring local type = type +local ipairs = ipairs local str_sub = string.sub ffi.cdef[[ diff --git a/ci/pod/kafka/kafka-server/kafka_jaas.conf b/ci/pod/kafka/kafka-server/kafka_jaas.conf index 883325971701..3653dd289ed7 100644 --- a/ci/pod/kafka/kafka-server/kafka_jaas.conf +++ b/ci/pod/kafka/kafka-server/kafka_jaas.conf @@ -1,3 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + KafkaServer { org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" From 0ed9d7cfe263cb2e0cee144a8177a6167bfaa81d Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 00:31:11 +0800 Subject: [PATCH 71/96] chore: install pubsub package --- Makefile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Makefile b/Makefile index 9f8a4ca327d7..25878a2f9f67 100644 --- a/Makefile +++ b/Makefile @@ -341,6 +341,9 @@ install: runtime $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/zipkin $(ENV_INSTALL) apisix/plugins/zipkin/*.lua $(ENV_INST_LUADIR)/apisix/plugins/zipkin/ + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/pubsub + $(ENV_INSTALL) apisix/pubsub/*.lua $(ENV_INST_LUADIR)/apisix/pubsub/ + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/ssl/router $(ENV_INSTALL) apisix/ssl/router/*.lua $(ENV_INST_LUADIR)/apisix/ssl/router/ From a572035a46326af12212294a6cfaaf0cc1151642 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 01:33:23 +0800 Subject: [PATCH 72/96] fix: lint --- apisix/pubsub.proto | 10 ++--- ci/pod/docker-compose.yml | 2 +- ci/pod/kafka/kafka-server/env/common.env | 2 +- ci/pod/kafka/kafka-server/kafka_jaas.conf | 46 +++++++++++------------ 4 files changed, 30 insertions(+), 30 deletions(-) diff --git a/apisix/pubsub.proto b/apisix/pubsub.proto index ced5a20d6ba2..326d4affa254 100644 --- a/apisix/pubsub.proto +++ b/apisix/pubsub.proto @@ -42,15 +42,15 @@ message CmdKafkaFetch { /** * Client request definition for pubsub scenarios - * + * * The sequence field is used to associate requests and responses. * Apache APISIX will set a consistent sequence for the associated - * requests and responses, and the client can explicitly know the + * requests and responses, and the client can explicitly know the * response corresponding to any of the requests. * * The req field is the command data sent by the client, and its * type will be chosen from any of the lists in the definition. - * + * * Field numbers 1 to 30 in the definition are used to define basic * information and future extensions, and numbers after 30 are used * to define commands. @@ -99,9 +99,9 @@ message KafkaListOffsetResp { /** * Server response definition for pubsub scenarios - * + * * The sequence field will be the same as the value in the - * request, which is used to associate the associated request + * request, which is used to associate the associated request * and response. 
* * The resp field is the response data sent by the server, and diff --git a/ci/pod/docker-compose.yml b/ci/pod/docker-compose.yml index e5053d7e89d7..68dab85c539b 100644 --- a/ci/pod/docker-compose.yml +++ b/ci/pod/docker-compose.yml @@ -84,7 +84,7 @@ services: - ./ci/pod/kafka/kafka-server/kafka_jaas.conf:/opt/bitnami/kafka/config/kafka_jaas.conf:ro - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.truststore.jks:ro - + kafka-server2: image: bitnami/kafka:2.8.1 env_file: diff --git a/ci/pod/kafka/kafka-server/env/common.env b/ci/pod/kafka/kafka-server/env/common.env index b8465521dea6..68d5508977ff 100644 --- a/ci/pod/kafka/kafka-server/env/common.env +++ b/ci/pod/kafka/kafka-server/env/common.env @@ -5,4 +5,4 @@ KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093,S KAFKA_CLIENT_USERS=testuser KAFKA_CLIENT_PASSWORDS=testpwd KAFKA_CERTIFICATE_PASSWORD=changeit -KAFKA_TLS_TYPE=JKS \ No newline at end of file +KAFKA_TLS_TYPE=JKS diff --git a/ci/pod/kafka/kafka-server/kafka_jaas.conf b/ci/pod/kafka/kafka-server/kafka_jaas.conf index 3653dd289ed7..77e1d06ba975 100644 --- a/ci/pod/kafka/kafka-server/kafka_jaas.conf +++ b/ci/pod/kafka/kafka-server/kafka_jaas.conf @@ -1,23 +1,23 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -KafkaServer { - org.apache.kafka.common.security.plain.PlainLoginModule required - username="admin" - password="admin-secret" - user_admin="admin-secret"; -}; +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+KafkaServer {
+    org.apache.kafka.common.security.plain.PlainLoginModule required
+    username="admin"
+    password="admin-secret"
+    user_admin="admin-secret";
+};

From 0a7a1f64c0042d8f0539e461842d34940e621971 Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Tue, 10 May 2022 02:07:52 +0800
Subject: [PATCH 73/96] fix: lint

---
 .github/workflows/lint.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index aa3b4c10a56b..ec453ef787dd 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -42,4 +42,4 @@ jobs:

       - name: Run eclint
         run: |
-          eclint check
+          eclint check [!]ci/pod/kafka/kafka-server/selfsigned.jks

From 181d31de4780d0251fbd3d4c86bf68dbb07757a6 Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Tue, 10 May 2022 02:12:15 +0800
Subject: [PATCH 74/96] chore: change to dynamic generate kafka cert

---
 .github/workflows/lint.yml               | 2 +-
 ci/linux_openresty_common_runner.sh      | 3 +++
 ci/pod/kafka/kafka-server/selfsigned.jks | Bin 2559 -> 0 bytes
 3 files changed, 4 insertions(+), 1 deletion(-)
 delete mode 100644 ci/pod/kafka/kafka-server/selfsigned.jks

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index ec453ef787dd..aa3b4c10a56b 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -42,4 +42,4 @@ jobs:

       - name: Run eclint
         run: |
-          eclint check [!]ci/pod/kafka/kafka-server/selfsigned.jks
+          eclint check

diff --git a/ci/linux_openresty_common_runner.sh b/ci/linux_openresty_common_runner.sh
index 3fae0797f15e..0a1fd68e7cc6 100755
--- a/ci/linux_openresty_common_runner.sh
+++ b/ci/linux_openresty_common_runner.sh
@@ -21,6 +21,9 @@ before_install() {

     sudo cpanm --notest Test::Nginx >build.log 2>&1 || (cat build.log && exit 1)

+    # generating SSL certificates for Kafka
+    keytool -genkeypair -keyalg RSA -dname "CN=127.0.0.1" -alias 127.0.0.1 -keystore ./ci/pod/kafka/kafka-server/selfsigned.jks -validity 365 -keysize 2048 -storepass changeit
+
     # launch deps env
     make ci-env-up
     ./ci/linux-ci-init-service.sh

diff --git a/ci/pod/kafka/kafka-server/selfsigned.jks b/ci/pod/kafka/kafka-server/selfsigned.jks
deleted file mode 100644
index 9729f406fb03c569a0af40b748a4f826a675e437..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 2559
[2559 bytes of base85-encoded JKS keystore data omitted]

From: Zeping Bai
Date: Tue, 10 May 2022 02:17:06 +0800
Subject: [PATCH 75/96] ci: debug

---
 ci/linux-ci-init-service.sh | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/ci/linux-ci-init-service.sh b/ci/linux-ci-init-service.sh
index cc863add295a..1f947b16385e 100755
--- a/ci/linux-ci-init-service.sh
+++ b/ci/linux-ci-init-service.sh
@@ -16,6 +16,8 @@
 # limitations under the License.
 #

+docker logs apache-apisix_kafka-server1_1
+
 docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test2
 docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 3 --topic test3
 docker exec -i apache-apisix_kafka-server2_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server2:2181 --replication-factor 1 --partitions 1 --topic test4

From b1ddf5338ddea577c80cfebd2fb58a48ce39e40e Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Tue, 10 May 2022 02:46:46 +0800
Subject: [PATCH 76/96] fix: ci

---
 ci/pod/kafka/kafka-server/kafka_jaas.conf | 32 +++++++++++------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/ci/pod/kafka/kafka-server/kafka_jaas.conf b/ci/pod/kafka/kafka-server/kafka_jaas.conf
index 77e1d06ba975..4bc193869dbf 100644
--- a/ci/pod/kafka/kafka-server/kafka_jaas.conf
+++ b/ci/pod/kafka/kafka-server/kafka_jaas.conf
@@ -1,19 +1,19 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0 +// (the "License"); you may not use this file except in compliance with +// the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// KafkaServer { org.apache.kafka.common.security.plain.PlainLoginModule required From 8d5b8f867e0115e8d9e8c6c011a0eb478664238f Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 03:14:40 +0800 Subject: [PATCH 77/96] ci: debug --- ci/linux-ci-init-service.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ci/linux-ci-init-service.sh b/ci/linux-ci-init-service.sh index 1f947b16385e..06de5ed49e94 100755 --- a/ci/linux-ci-init-service.sh +++ b/ci/linux-ci-init-service.sh @@ -16,8 +16,6 @@ # limitations under the License. # -docker logs apache-apisix_kafka-server1_1 - docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test2 docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 3 --topic test3 docker exec -i apache-apisix_kafka-server2_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server2:2181 --replication-factor 1 --partitions 1 --topic test4 @@ -26,7 +24,7 @@ docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics # create messages for test-consumer for i in `seq 30` do - docker exec -i apache-apisix_kafka-server1_1 bash -c "echo "testmsg$i" | /opt/bitnami/kafka/bin/kafka-console-producer.sh --broker-list kafka-server1:9092 --topic test-consumer" + docker exec -i apache-apisix_kafka-server1_1 bash -c "echo "testmsg$i" | /opt/bitnami/kafka/bin/kafka-console-producer.sh --bootstrap-server 127.0.0.1:9092 --topic test-consumer" done # prepare openwhisk env From 9eef1548dd345410bd5ce39f02e55f360d4c534c Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 10:34:32 +0800 Subject: [PATCH 78/96] ci: add tips --- ci/linux-ci-init-service.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ci/linux-ci-init-service.sh b/ci/linux-ci-init-service.sh index 06de5ed49e94..73477a5febca 100755 --- a/ci/linux-ci-init-service.sh +++ b/ci/linux-ci-init-service.sh @@ -25,7 +25,9 @@ docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics for i in `seq 30` do docker exec -i apache-apisix_kafka-server1_1 bash -c "echo "testmsg$i" | /opt/bitnami/kafka/bin/kafka-console-producer.sh --bootstrap-server 127.0.0.1:9092 --topic test-consumer" + echo "Produces messages to the test-consumer topic, msg: testmsg$i" done +echo "Kafka service initialization completed" # prepare openwhisk env docker pull openwhisk/action-nodejs-v14:nightly From 8e15ebdcd906e53b43a3f40c2f5663361dc069d8 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 14:24:46 +0800 Subject: [PATCH 79/96] fix: ci --- ci/pod/kafka/kafka-server/env/common.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/pod/kafka/kafka-server/env/common.env b/ci/pod/kafka/kafka-server/env/common.env index 
68d5508977ff..b7a13e9ea038 100644 --- a/ci/pod/kafka/kafka-server/env/common.env +++ b/ci/pod/kafka/kafka-server/env/common.env @@ -1,6 +1,6 @@ ALLOW_PLAINTEXT_LISTENER=yes KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true -KAFKA_CFG_LISTENERS=PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093,SASL_PLAINTEXT://127.0.0.1:9094 +KAFKA_CFG_LISTENERS=PLAINTEXT://0.0.0.0:9092,SSL://0.0.0.0:9093,SASL_PLAINTEXT://0.0.0.0:9094 KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093,SASL_PLAINTEXT://127.0.0.1:9094 KAFKA_CLIENT_USERS=testuser KAFKA_CLIENT_PASSWORDS=testpwd From b7e288d3ae194c3ee7b3dab3c73f21d186996c40 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 15:02:52 +0800 Subject: [PATCH 80/96] test: move kafka to admin case --- .../upstream-kafka.t => admin/upstream5.t} | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) rename t/{node/upstream-kafka.t => admin/upstream5.t} (79%) diff --git a/t/node/upstream-kafka.t b/t/admin/upstream5.t similarity index 79% rename from t/node/upstream-kafka.t rename to t/admin/upstream5.t index cf5c1dcb9e0d..bd88cf94b8a8 100644 --- a/t/node/upstream-kafka.t +++ b/t/admin/upstream5.t @@ -19,24 +19,26 @@ use t::APISIX 'no_plan'; repeat_each(1); no_long_string(); no_root_location(); +no_shuffle(); +log_level("info"); add_block_preprocessor(sub { my ($block) = @_; - if ((!defined $block->error_log) && (!defined $block->no_error_log)) { - $block->set_value("no_error_log", "[error]"); + if (!$block->request) { + $block->set_value("request", "GET /t"); } - if (!defined $block->request) { - $block->set_value("request", "GET /t"); + if (!$block->no_error_log && !$block->error_log) { + $block->set_value("no_error_log", "[error]\n[alert]"); } }); -run_tests(); +run_tests; __DATA__ -=== TEST 1: success +=== TEST 1: set upstream(kafka scheme) --- config location /t { content_by_lua_block { @@ -49,8 +51,12 @@ __DATA__ "scheme": "kafka" }]]) - ngx.say(code..body) + if code >= 300 then + ngx.status = code + end + ngx.say(body) } } +--- error_code: 200 --- response_body -201passed +passed From 49e30b17e5667896e028e138a4e04416d0735921 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 15:34:49 +0800 Subject: [PATCH 81/96] feat: move back tls to upstream --- apisix/schema_def.lua | 8 ++++++-- t/node/upstream-mtls.t | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index 1a2acafb8b68..60688abb2468 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -408,10 +408,14 @@ local upstream_schema = { client_key = private_key_schema, verify = { type = "boolean", - description = "Turn on server certificate verification", + description = "Turn on server certificate verification, ".. 
+ "currently only kafka upstream is supported", }, }, - required = {"client_cert", "client_key"} + anyOf = { + {required = {"client_cert", "client_key"}}, + {required = {"verify"}} + } }, keepalive_pool = { type = "object", diff --git a/t/node/upstream-mtls.t b/t/node/upstream-mtls.t index 9c0a49d9f713..7168f3816667 100644 --- a/t/node/upstream-mtls.t +++ b/t/node/upstream-mtls.t @@ -77,7 +77,7 @@ __DATA__ GET /t --- error_code: 400 --- response_body -{"error_msg":"invalid configuration: property \"upstream\" validation failed: property \"tls\" validation failed: property \"client_key\" is required"} +{"error_msg":"invalid configuration: property \"upstream\" validation failed: property \"tls\" validation failed: object matches none of the required: [\"client_cert\",\"client_key\"] or [\"verify\"]"} From c8b66967ed762f3541b1117d9a5376de92a90dd6 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 15:39:03 +0800 Subject: [PATCH 82/96] chore: remove tls in kafka-proxy --- apisix/plugins/kafka-proxy.lua | 10 ---------- t/plugin/kafka-proxy.t | 6 ------ 2 files changed, 16 deletions(-) diff --git a/apisix/plugins/kafka-proxy.lua b/apisix/plugins/kafka-proxy.lua index 2ca8c55758c2..9332eb141d34 100644 --- a/apisix/plugins/kafka-proxy.lua +++ b/apisix/plugins/kafka-proxy.lua @@ -20,14 +20,6 @@ local core = require("apisix.core") local schema = { type = "object", properties = { - enable_tls = { - type = "boolean", - default = false, - }, - ssl_verify = { - type = "boolean", - default = true, - }, enable_sasl = { type = "boolean", default = false, @@ -68,8 +60,6 @@ end function _M.access(conf, ctx) - ctx.kafka_consumer_enable_tls = conf.enable_tls - ctx.kafka_consumer_ssl_verify = conf.ssl_verify ctx.kafka_consumer_enable_sasl = conf.enable_sasl if conf.enable_sasl then ctx.kafka_consumer_sasl_username = conf.sasl.username diff --git a/t/plugin/kafka-proxy.t b/t/plugin/kafka-proxy.t index 825dd385c45a..590730fc8e17 100644 --- a/t/plugin/kafka-proxy.t +++ b/t/plugin/kafka-proxy.t @@ -42,9 +42,6 @@ __DATA__ content_by_lua_block { local test_cases = { {}, - {enable_tls = true, ssl_verify = true}, - {enable_tls = "none"}, - {enable_tls = true, ssl_verify = "none"}, {enable_sasl = true, sasl = {username = "user", password = "pwd"}}, {enable_sasl = false}, {enable_sasl = true}, @@ -62,9 +59,6 @@ __DATA__ --- response_body done done -property "enable_tls" validation failed: wrong type: expected boolean, got string -property "ssl_verify" validation failed: wrong type: expected boolean, got string -done done need to set sasl configuration when enabling kafka sasl authentication property "sasl" validation failed: property "password" is required From 73bed0abad74a970ba16c2442b19b2db48911e9e Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 15:41:55 +0800 Subject: [PATCH 83/96] ci: move pubsub cases --- .github/workflows/build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e5ff86636748..6714e427791f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -30,8 +30,8 @@ jobs: - linux_openresty - linux_openresty_1_17 test_dir: - - t/plugin t/pubsub - - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc + - t/plugin + - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc t/pubsub - t/node t/router t/script t/stream-node t/utils t/wasm t/xds-library t/xrpc runs-on: ${{ matrix.platform }} From 
ad8facfa7ba4e93e19a0e6ea5b8b29ddc6179248 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 15:43:28 +0800 Subject: [PATCH 84/96] ci: move pubsub cases --- .github/workflows/centos7-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/centos7-ci.yml b/.github/workflows/centos7-ci.yml index b03e24a10db0..03ae393b0d50 100644 --- a/.github/workflows/centos7-ci.yml +++ b/.github/workflows/centos7-ci.yml @@ -28,8 +28,8 @@ jobs: fail-fast: false matrix: test_dir: - - t/plugin t/pubsub - - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc + - t/plugin + - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc t/pubsub - t/node t/router t/script t/stream-node t/utils t/wasm t/xds-library steps: From 1d1206c399ced114b550641135f36cabf9190082 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 16:16:35 +0800 Subject: [PATCH 85/96] fix: ci --- .github/workflows/centos7-ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/centos7-ci.yml b/.github/workflows/centos7-ci.yml index 03ae393b0d50..5230acda2b20 100644 --- a/.github/workflows/centos7-ci.yml +++ b/.github/workflows/centos7-ci.yml @@ -79,6 +79,8 @@ jobs: - name: Run other docker containers for test run: | + # generating SSL certificates for Kafka + keytool -genkeypair -keyalg RSA -dname "CN=127.0.0.1" -alias 127.0.0.1 -keystore ./ci/pod/kafka/kafka-server/selfsigned.jks -validity 365 -keysize 2048 -storepass changeit make ci-env-up ./ci/linux-ci-init-service.sh From 6d72f210af54ee51da9a06c1b1d0c7caccfc707d Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 16:27:34 +0800 Subject: [PATCH 86/96] feat: config socket tls by upstream --- apisix/pubsub/kafka.lua | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/apisix/pubsub/kafka.lua b/apisix/pubsub/kafka.lua index 711f66da6e7d..85a625d763de 100644 --- a/apisix/pubsub/kafka.lua +++ b/apisix/pubsub/kafka.lua @@ -59,12 +59,12 @@ function _M.access(api_ctx) end end - -- kafka client socket-related configuration + -- kafka client socket-related configuration (TLS, ssl verify) local client_config = {refresh_interval = 30 * 60 * 1000} - if api_ctx.kafka_consumer_enable_tls then + if api_ctx.matched_upstream.tls then client_config = { - ssl = api_ctx.kafka_consumer_enable_tls, - ssl_verify = api_ctx.kafka_consumer_ssl_verify, + ssl = true, + ssl_verify = api_ctx.matched_upstream.tls.verify, } end From 53d21e7ba7961e9a2e9bf86555190d4d076cee32 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Tue, 10 May 2022 16:46:25 +0800 Subject: [PATCH 87/96] test: tls cases --- .github/workflows/build.yml | 3 +- t/pubsub/kafka.t | 125 +++++++++++++++++++++++++++++++++++- 2 files changed, 124 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6714e427791f..d73be802f5e9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -31,7 +31,8 @@ jobs: - linux_openresty_1_17 test_dir: - t/plugin - - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc t/pubsub + - t/pubsub + - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc - t/node t/router t/script t/stream-node t/utils t/wasm t/xds-library t/xrpc runs-on: ${{ matrix.platform }} diff --git a/t/pubsub/kafka.t b/t/pubsub/kafka.t index 3feb2b1b4dd7..171d09d470f7 100644 --- a/t/pubsub/kafka.t +++ b/t/pubsub/kafka.t @@ 
-54,6 +54,60 @@ __DATA__ "uri": "/kafka" }]], }, + { + url = "/apisix/admin/routes/kafka-tlsv", + data = [[{ + "upstream": { + "nodes": { + "127.0.0.1:9093": 1 + }, + "type": "none", + "scheme": "kafka", + "tls": { + "verify": true + } + }, + "uri": "/kafka-tlsv" + }]], + }, + { + url = "/apisix/admin/routes/kafka-tls", + data = [[{ + "upstream": { + "nodes": { + "127.0.0.1:9093": 1 + }, + "type": "none", + "scheme": "kafka", + "tls": { + "verify": false + } + }, + "uri": "/kafka-tls" + }]], + }, + { + url = "/apisix/admin/routes/kafka-sasl", + data = [[{ + "upstream": { + "nodes": { + "127.0.0.1:9094": 1 + }, + "type": "none", + "scheme": "kafka" + }, + "uri": "/kafka-sasl", + "plugins": { + "kafka-proxy": { + "enable_sasl": true, + "sasl": { + "username": "testuser", + "password": "testpwd" + } + } + } + }]], + } } local t = require("lib.test_admin").test @@ -64,8 +118,8 @@ __DATA__ end } } ---- response_body -201passed +--- response_body eval +"201passed\n"x4 @@ -78,7 +132,7 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil -=== TEST 3: hit route (with HTTP emulation websocket) +=== TEST 3: hit route (normal Kafka) --- config location /t { content_by_lua_block { @@ -179,3 +233,68 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil 2offset: 0 3offset: 30 4offset: 14 msg: testmsg15 + + + +=== TEST 4: hit route (TLS with ssl verify Kafka) +--- config + location /t { + content_by_lua_block { + local protoc = require("protoc") + local pb = require("pb") + protoc.reload() + pb.option("int64_as_string") + local pubsub_protoc = protoc.new() + pubsub_protoc:addpath("apisix") + local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto") + if not ok then + ngx.say("failed to load protocol: " .. err) + return + end + + local client = require "resty.websocket.client" + local ws, err = client:new() + local ok, err = ws:connect("ws://127.0.0.1:1984/kafka-tlsv") + if not ok then + ngx.say("failed to connect: " .. err) + return + end + + local data = { + { + sequence = 0, + cmd_kafka_list_offset = { + topic = "test-consumer", + partition = 0, + timestamp = -2, + }, + } + } + + for i = 1, #data do + local _, err = ws:send_binary(pb.encode("PubSubReq", data[i])) + local raw_data, raw_type, err = ws:recv_frame() + if not raw_data then + ngx.say("failed to receive the frame: ", err) + return + end + local data, err = pb.decode("PubSubResp", raw_data) + if not data then + ngx.say("failed to decode the frame: ", err) + return + end + + if data.kafka_list_offset_resp then + ngx.say(data.sequence.."offset: "..data.kafka_list_offset_resp.offset) + end + end + + ws:send_close() + } + } +--- response_body +0failed to list offset, topic: not-exist, partition: 0, err: not found topic +1failed to fetch message, topic: not-exist, partition: 0, err: not found topic +2offset: 0 +3offset: 30 +4offset: 14 msg: testmsg15 From c9c88a6de3016488925af3798b51b03b273abb0b Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Wed, 11 May 2022 02:15:57 +0800 Subject: [PATCH 88/96] feat: add tls default value --- apisix/schema_def.lua | 1 + 1 file changed, 1 insertion(+) diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index 60688abb2468..4c3558642cde 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -410,6 +410,7 @@ local upstream_schema = { type = "boolean", description = "Turn on server certificate verification, ".. 
"currently only kafka upstream is supported", + default = false, }, }, anyOf = { From 4c75286fffec652891c23b374f491e8d40166696 Mon Sep 17 00:00:00 2001 From: Zeping Bai Date: Wed, 11 May 2022 02:16:40 +0800 Subject: [PATCH 89/96] test: add tls and sasl cases --- ci/pod/kafka/kafka-server/env/common.env | 10 +- t/pubsub/kafka.t | 136 +++++++++++++++++++++-- 2 files changed, 132 insertions(+), 14 deletions(-) diff --git a/ci/pod/kafka/kafka-server/env/common.env b/ci/pod/kafka/kafka-server/env/common.env index b7a13e9ea038..9d90a2049bec 100644 --- a/ci/pod/kafka/kafka-server/env/common.env +++ b/ci/pod/kafka/kafka-server/env/common.env @@ -1,8 +1,8 @@ ALLOW_PLAINTEXT_LISTENER=yes -KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true +KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=false KAFKA_CFG_LISTENERS=PLAINTEXT://0.0.0.0:9092,SSL://0.0.0.0:9093,SASL_PLAINTEXT://0.0.0.0:9094 KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093,SASL_PLAINTEXT://127.0.0.1:9094 -KAFKA_CLIENT_USERS=testuser -KAFKA_CLIENT_PASSWORDS=testpwd -KAFKA_CERTIFICATE_PASSWORD=changeit -KAFKA_TLS_TYPE=JKS +KAFKA_CFG_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM= +KAFKA_CFG_SSL_KEYSTORE_LOCATION=/opt/bitnami/kafka/config/certs/kafka.keystore.jks +KAFKA_CFG_SSL_KEYSTORE_PASSWORD=changeit +KAFKA_CFG_SSL_KEY_PASSWORD=changeit \ No newline at end of file diff --git a/t/pubsub/kafka.t b/t/pubsub/kafka.t index 171d09d470f7..cc2a60115272 100644 --- a/t/pubsub/kafka.t +++ b/t/pubsub/kafka.t @@ -101,8 +101,8 @@ __DATA__ "kafka-proxy": { "enable_sasl": true, "sasl": { - "username": "testuser", - "password": "testpwd" + "username": "admin", + "password": "admin-secret" } } } @@ -236,7 +236,68 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil -=== TEST 4: hit route (TLS with ssl verify Kafka) +=== TEST 4: hit route (Kafka with TLS) +--- config + location /t { + content_by_lua_block { + local protoc = require("protoc") + local pb = require("pb") + protoc.reload() + pb.option("int64_as_string") + local pubsub_protoc = protoc.new() + pubsub_protoc:addpath("apisix") + local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto") + if not ok then + ngx.say("failed to load protocol: " .. err) + return + end + + local client = require "resty.websocket.client" + local ws, err = client:new() + local ok, err = ws:connect("ws://127.0.0.1:1984/kafka-tls") + if not ok then + ngx.say("failed to connect: " .. 
err) + return + end + + local data = { + { + sequence = 0, + cmd_kafka_list_offset = { + topic = "test-consumer", + partition = 0, + timestamp = -1, + }, + }, + } + + for i = 1, #data do + local _, err = ws:send_binary(pb.encode("PubSubReq", data[i])) + local raw_data, raw_type, err = ws:recv_frame() + if not raw_data then + ngx.say("failed to receive the frame: ", err) + return + end + local data, err = pb.decode("PubSubResp", raw_data) + if not data then + ngx.say("failed to decode the frame: ", err) + return + end + + if data.kafka_list_offset_resp then + ngx.say(data.sequence.."offset: "..data.kafka_list_offset_resp.offset) + end + end + + ws:send_close() + } + } +--- response_body +0offset: 30 + + + +=== TEST 5: hit route (Kafka with TLS + ssl verify) --- config location /t { content_by_lua_block { @@ -266,7 +327,68 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil cmd_kafka_list_offset = { topic = "test-consumer", partition = 0, - timestamp = -2, + timestamp = -1, + }, + }, + } + + for i = 1, #data do + local _, err = ws:send_binary(pb.encode("PubSubReq", data[i])) + local raw_data, raw_type, err = ws:recv_frame() + if not raw_data then + ngx.say("failed to receive the frame: ", err) + return + end + local data, err = pb.decode("PubSubResp", raw_data) + if not data then + ngx.say("failed to decode the frame: ", err) + return + end + + if data.kafka_list_offset_resp then + ngx.say(data.sequence.."offset: "..data.kafka_list_offset_resp.offset) + end + end + + ws:send_close() + } + } +--- error_log +self signed certificate + + + +=== TEST 6: hit route (Kafka with SASL) +--- config + location /t { + content_by_lua_block { + local protoc = require("protoc") + local pb = require("pb") + protoc.reload() + pb.option("int64_as_string") + local pubsub_protoc = protoc.new() + pubsub_protoc:addpath("apisix") + local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto") + if not ok then + ngx.say("failed to load protocol: " .. err) + return + end + + local client = require "resty.websocket.client" + local ws, err = client:new() + local ok, err = ws:connect("ws://127.0.0.1:1984/kafka-sasl") + if not ok then + ngx.say("failed to connect: " .. 
From 6fa0e26e19a1f35f9a34716cde9b8e3d1ae5ae07 Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Wed, 11 May 2022 03:04:41 +0800
Subject: [PATCH 92/96] chore: move pubsub proto

---
 apisix/core/pubsub.lua                         |  6 +++---
 apisix/{ => include/apisix/model}/pubsub.proto |  0
 t/pubsub/kafka.t                               | 12 ++++++------
 3 files changed, 9 insertions(+), 9 deletions(-)
 rename apisix/{ => include/apisix/model}/pubsub.proto (100%)

diff --git a/apisix/core/pubsub.lua b/apisix/core/pubsub.lua
index 44cdbe281424..b8870ba1807f 100644
--- a/apisix/core/pubsub.lua
+++ b/apisix/core/pubsub.lua
@@ -19,7 +19,7 @@
 --
 -- @module core.pubsub
 
-local core = require("apisix.core")
+local log = require("apisix.core.log")
 local ws_server = require("resty.websocket.server")
 local protoc = require("protoc")
 local pb = require("pb")
@@ -48,7 +48,7 @@ function _M.new()
     -- compile the protobuf file on initial load module
     -- ensure that each worker is loaded once
     if not pubsub_protoc.loaded["pubsub.proto"] then
-        pubsub_protoc:addpath("apisix")
+        pubsub_protoc:addpath("apisix/include/apisix/model")
         local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto")
         if not ok then
             pubsub_protoc:reset()
@@ -129,7 +129,7 @@ function _M.wait(self)
         if key ~= "sequence" then
             local handler = self.cmd_handler[key]
             if not handler then
-                core.log.error("handler not registered for the",
+                log.error("handler not registered for the",
                     " current command, command: ", key)
                 goto continue
             end
diff --git a/apisix/pubsub.proto b/apisix/include/apisix/model/pubsub.proto
similarity index 100%
rename from apisix/pubsub.proto
rename to apisix/include/apisix/model/pubsub.proto
diff --git a/t/pubsub/kafka.t b/t/pubsub/kafka.t
index cc2a60115272..c97df2a5169e 100644
--- a/t/pubsub/kafka.t
+++ b/t/pubsub/kafka.t
@@ -141,8 +141,8 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil
             protoc.reload()
             pb.option("int64_as_string")
             local pubsub_protoc = protoc.new()
-            pubsub_protoc:addpath("apisix")
-            local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto")
+            pubsub_protoc:addpath("apisix/include/apisix/model")
+            local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto")
             if not ok then
                 ngx.say("failed to load protocol: " .. err)
                 return
@@ -245,8 +245,8 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil
             protoc.reload()
             pb.option("int64_as_string")
             local pubsub_protoc = protoc.new()
-            pubsub_protoc:addpath("apisix")
-            local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto")
+            pubsub_protoc:addpath("apisix/include/apisix/model")
+            local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto")
             if not ok then
                 ngx.say("failed to load protocol: " .. err)
                 return
@@ -306,7 +306,7 @@ failed to initialize pub-sub module, err: bad "upgrade" request header: nil
             protoc.reload()
             pb.option("int64_as_string")
             local pubsub_protoc = protoc.new()
-            pubsub_protoc:addpath("apisix")
+            pubsub_protoc:addpath("apisix/include/apisix/model")
             local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto")
             if not ok then
                 ngx.say("failed to load protocol: " .. err)
@@ -367,7 +367,7 @@ self signed certificate
             protoc.reload()
             pb.option("int64_as_string")
             local pubsub_protoc = protoc.new()
-            pubsub_protoc:addpath("apisix")
+            pubsub_protoc:addpath("apisix/include/apisix/model")
             local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto")
             if not ok then
                 ngx.say("failed to load protocol: " .. err)
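
With the proto relocated, core.pubsub stays the single owner of protobuf loading and command dispatch. The registration side implied by the cmd_handler table that _M.wait() consults looks roughly as follows; the on() method name and the handler shape are inferred from that table and are an assumption of this sketch, not lines from the patch:

    local core = require("apisix.core")

    -- create the pub-sub context on an upgraded websocket connection
    local pubsub, err = core.pubsub.new()
    if not pubsub then
        core.log.error("failed to initialize pub-sub module, err: ", err)
        return
    end

    -- register a callback keyed by the protobuf command field name;
    -- wait() looks the decoded field up in cmd_handler and invokes it
    pubsub:on("cmd_kafka_list_offset", function (params)
        -- resolve the requested offset and build the response payload here
    end)

    -- block in the event loop until the client closes the connection
    local err = pubsub:wait()
    if err then
        core.log.error("pub-sub event loop exited, err: ", err)
    end
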
From 78cd57bfbe54d75450ae8894070fc98e3ae82e89 Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Wed, 11 May 2022 03:40:45 +0800
Subject: [PATCH 93/96] chore: adjust pubsub event loop error handle

---
 apisix/core/pubsub.lua | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/apisix/core/pubsub.lua b/apisix/core/pubsub.lua
index b8870ba1807f..6a70018fa92b 100644
--- a/apisix/core/pubsub.lua
+++ b/apisix/core/pubsub.lua
@@ -103,8 +103,15 @@ function _M.wait(self)
         -- read raw data frames from websocket connection
         local raw_data, raw_type, err = ws:recv_frame()
         if err then
-            ws:send_close()
-            return "websocket server: "..err
+            -- terminate the event loop when a fatal error occurs
+            if ws.fatal then
+                ws:send_close()
+                return "websocket server: "..err
+            end
+
+            -- skip this loop for non-fatal errors
+            log.error("failed to receive websocket frame: "..err)
+            goto continue
         end
 
         -- handle client close connection
@@ -129,8 +136,8 @@ function _M.wait(self)
         if key ~= "sequence" then
             local handler = self.cmd_handler[key]
             if not handler then
-                log.error("handler not registered for the",
-                    " current command, command: ", key)
+                log.error("callback handler not registered for",
+                    " this command, command: ", key)
                 goto continue
             end
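
The branch added above leans on a lua-resty-websocket detail: recv_frame() sets the fatal flag on the connection object for unrecoverable failures, while recoverable errors such as read timeouts leave it unset. Isolated to a minimal sketch, assuming ws comes from resty.websocket.server:new():

    while true do
        local raw_data, raw_type, err = ws:recv_frame()
        if err then
            -- unrecoverable error: close the connection and leave the loop
            if ws.fatal then
                ws:send_close()
                break
            end
            -- recoverable error (e.g. a read timeout): log it and poll again
            ngx.log(ngx.ERR, "failed to receive websocket frame: ", err)
            goto continue
        end

        -- decode and dispatch raw_data here

        ::continue::
    end

Without the fatal check, an idle client whose connection merely hit the read timeout would have its whole subscription torn down.
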
From 6f651aab3506e320722bf14fef2e04c276705b48 Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Wed, 11 May 2022 12:30:46 +0800
Subject: [PATCH 94/96] fix: install

---
 Makefile | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 25878a2f9f67..d83a9785d7aa 100644
--- a/Makefile
+++ b/Makefile
@@ -263,7 +263,9 @@ install: runtime
 	# Lua directories listed in alphabetical order
 	$(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix
 	$(ENV_INSTALL) apisix/*.lua $(ENV_INST_LUADIR)/apisix/
-	$(ENV_INSTALL) apisix/*.proto $(ENV_INST_LUADIR)/apisix/
+
+	$(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/include/apisix/model
+	$(ENV_INSTALL) apisix/include/apisix/model/*.proto $(ENV_INST_LUADIR)/apisix/include/apisix/model/
 
 	$(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/admin
 	$(ENV_INSTALL) apisix/admin/*.lua $(ENV_INST_LUADIR)/apisix/admin/

From 9e9b7e289f2a5fac63d8bf7ef9aa316975a52368 Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Wed, 11 May 2022 12:52:33 +0800
Subject: [PATCH 95/96] fix: lint

---
 ci/pod/kafka/kafka-server/env/common.env | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ci/pod/kafka/kafka-server/env/common.env b/ci/pod/kafka/kafka-server/env/common.env
index 9d90a2049bec..adc9d7cad1f8 100644
--- a/ci/pod/kafka/kafka-server/env/common.env
+++ b/ci/pod/kafka/kafka-server/env/common.env
@@ -5,4 +5,4 @@ KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093,SASL_PLAINTEXT://127.0.0.1:9094
 KAFKA_CFG_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM=
 KAFKA_CFG_SSL_KEYSTORE_LOCATION=/opt/bitnami/kafka/config/certs/kafka.keystore.jks
 KAFKA_CFG_SSL_KEYSTORE_PASSWORD=changeit
-KAFKA_CFG_SSL_KEY_PASSWORD=changeit
\ No newline at end of file
+KAFKA_CFG_SSL_KEY_PASSWORD=changeit

From f085dd9cd4e548586d6a214e67f3f1ba6371925b Mon Sep 17 00:00:00 2001
From: Zeping Bai
Date: Wed, 11 May 2022 13:37:12 +0800
Subject: [PATCH 96/96] fix

---
 apisix/schema_def.lua | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua
index 4c3558642cde..2e3cc5e5e002 100644
--- a/apisix/schema_def.lua
+++ b/apisix/schema_def.lua
@@ -415,7 +415,7 @@ local upstream_schema = {
             },
             anyOf = {
                 {required = {"client_cert", "client_key"}},
-                {required = {"verify"}}
+                {required = {"verify"}},
             }
         },
         keepalive_pool = {