feat(pubsub): support kafka #7032
Changes from 12 commits
@@ -0,0 +1,133 @@
--
-- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements.  See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License.  You may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
local core = require("apisix.core")
local bconsumer = require("resty.kafka.basic-consumer")
local ffi = require("ffi")
local C = ffi.C
local tostring = tostring
local type = type
local ipairs = ipairs
local str_sub = string.sub

ffi.cdef[[
    int64_t atoll(const char *num);
]]


local _M = {}


-- Handles the conversion of 64-bit integers from lua-protobuf.
--
-- Because of the limitations of LuaJIT, we cannot use native 64-bit
-- numbers, so pb decode converts int64 to a string in the #xxx format
-- to avoid loss of precision; this function converts such a string
-- back to an int64 cdata number.
local function pb_convert_to_int64(src)
    if type(src) == "string" then
        return C.atoll(ffi.cast("char *", src) + 1)
    else
        return src
    end
end

Review comment on the C.atoll line: Let's check the length of src to avoid an out-of-bound read. — Reply: added.
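The review thread above indicates the length check landed in a later commit than the twelve shown in this diff. A minimal, hypothetical sketch of such a guard follows; the exact condition and the fallback value of 0 are assumptions for illustration, not the change that actually merged.

-- Hypothetical sketch only: a length-guarded variant of pb_convert_to_int64
local ffi = require("ffi")
local C = ffi.C

ffi.cdef[[
    int64_t atoll(const char *num);
]]

local function pb_convert_to_int64(src)
    if type(src) == "string" then
        -- the expected format is "#<digits>"; anything shorter than 2 chars has
        -- no digit after the leading '#', so return 0 instead of reading past
        -- the end of the string buffer
        if #src < 2 then
            return 0
        end
        return C.atoll(ffi.cast("char *", src) + 1)
    end
    return src
end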
-- Takes over requests of type kafka upstream in the http_access phase.
function _M.access(api_ctx)
    local pubsub, err = core.pubsub.new()
    if not pubsub then
        core.log.error("failed to initialize pubsub module, err: ", err)
        core.response.exit(400)
        return
    end

    local up_nodes = api_ctx.matched_upstream.nodes

    -- kafka client broker-related configuration
    local broker_list = {}
    for i, node in ipairs(up_nodes) do
        broker_list[i] = {
            host = node.host,
            port = node.port,
        }
    end
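For reference, a small sketch of the node table this loop assumes: the normalized list form of APISIX upstream nodes, each entry carrying host, port, and weight. The concrete hosts, ports, and the weight field below are illustrative values, not part of this PR.

-- Illustrative only: assumed normalized shape of api_ctx.matched_upstream.nodes
local up_nodes = {
    { host = "127.0.0.1", port = 9092, weight = 1 },
    { host = "127.0.0.2", port = 9092, weight = 1 },
}
-- the loop above keeps only host and port for the kafka client's broker list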
    local client_config = {refresh_interval = 30 * 60 * 1000}

    -- load and create the consumer instance when it is determined
    -- that the websocket connection was created successfully
    local consumer = bconsumer:new(broker_list, client_config)

    pubsub:on("cmd_kafka_list_offset", function (params)
        -- The timestamp parameter uses a 64-bit integer, which is difficult
        -- for LuaJIT to handle well, so the int64_as_string option in
        -- lua-protobuf is used here. Smaller numbers will be decoded as
        -- lua numbers, while overly large numbers will be decoded as strings
        -- in the #number format; the # symbol at the beginning of the string
        -- is removed and the rest converted to int64_t with the atoll function.
        local timestamp = pb_convert_to_int64(params.timestamp)
        local offset, err = consumer:list_offset(params.topic, params.partition, timestamp)

        if not offset then
            return nil, "failed to list offset, topic: " .. params.topic ..
                ", partition: " .. params.partition .. ", err: " .. err
        end

        offset = tostring(offset)
        return {
            kafka_list_offset_resp = {
                offset = str_sub(offset, 1, #offset - 2)
            }
        }
    end)
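The str_sub(offset, 1, #offset - 2) above relies on how LuaJIT stringifies boxed int64 cdata values: tostring() appends an "LL" suffix, which must be trimmed before the value is sent back as a plain decimal string. A quick standalone illustration:

local ffi = require("ffi")

local s = tostring(ffi.new("int64_t", 42))
print(s)                          -- "42LL"
print(string.sub(s, 1, #s - 2))   -- "42", with the LL suffix stripped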
    pubsub:on("cmd_kafka_fetch", function (params)
        local offset = pb_convert_to_int64(params.offset)

        local ret, err = consumer:fetch(params.topic, params.partition, offset)
        if not ret then
            return nil, "failed to fetch message, topic: " .. params.topic ..
                ", partition: " .. params.partition .. ", err: " .. err
        end

        -- split into multiple messages when the amount of data in
        -- a single batch is too large
        local messages = ret.records

        -- special handling of int64 for LuaJIT compatibility
        for _, message in ipairs(messages) do
            local timestamp = tostring(message.timestamp)
            message.timestamp = str_sub(timestamp, 1, #timestamp - 2)
            local offset = tostring(message.offset)
            message.offset = str_sub(offset, 1, #offset - 2)
        end

        return {
            kafka_fetch_resp = {
                messages = messages,
            },
        }
    end)
    -- start processing client commands
    pubsub:wait()
end


return _M
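For context on how this module is reached, here is a hedged sketch of the kind of dispatch one would expect in APISIX's http access phase: hand the connection to this handler when the matched upstream uses the kafka scheme. The module path, the scheme check, and the wrapper function name are assumptions based on this PR, not a verbatim excerpt of the wiring that landed.

-- Hypothetical dispatch sketch (assumed module path and scheme check)
local kafka_pubsub = require("apisix.pubsub.kafka")

local function dispatch_pubsub(api_ctx)
    if api_ctx.matched_upstream and api_ctx.matched_upstream.scheme == "kafka" then
        return kafka_pubsub.access(api_ctx)
    end
end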
@@ -216,7 +216,8 @@
"type": "category", | ||
"label": "PubSub", | ||
"items": [ | ||
"pubsub" | ||
"pubsub", | ||
"pubsub/kafka" | ||
] | ||
}, | ||
{ | ||
|
@@ -42,6 +42,10 @@ In Apache APISIX, the most common scenario is handling north-south traffic from

Currently, Apache APISIX supports WebSocket communication with the client, which can be any application that supports WebSocket, with Protocol Buffer as the serialization mechanism; see the [protocol definition](../../../apisix/pubsub.proto).

Review comment: Let's update the path of the definition. We should use an absolute path, as the file is not in the website repository. — Reply: changed.
## Supported messaging systems

- [Apache Kafka](pubsub/kafka.md)

## How to support other messaging systems

Apache APISIX implements an extensible pubsub module, which is responsible for starting the WebSocket server, encoding and decoding the communication protocol, and handling client commands; support for a new messaging system is built on top of it.
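To make the extension point concrete, here is a minimal, hypothetical sketch of a handler for some other messaging system, following the same pattern as the kafka module in this PR. The command name cmd_example_fetch, the response field example_fetch_resp, and the module layout are invented for illustration; real commands would have to be defined in pubsub.proto.

-- Hypothetical sketch: apisix/pubsub/<system>.lua following the kafka pattern
local core = require("apisix.core")

local _M = {}

function _M.access(api_ctx)
    local pubsub, err = core.pubsub.new()
    if not pubsub then
        core.log.error("failed to initialize pubsub module, err: ", err)
        core.response.exit(400)
        return
    end

    -- register one handler per protocol command defined in pubsub.proto
    pubsub:on("cmd_example_fetch", function (params)
        -- talk to the messaging system here and return the response table,
        -- or nil plus an error string on failure
        return {
            example_fetch_resp = {
                messages = {},
            },
        }
    end)

    -- block and serve client commands over the WebSocket connection
    pubsub:wait()
end

return _M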
Review comment: Can we remove cmd_empty, which is test-only? Using cmd_kafka_fetch in pubsub.t is enough.

Reply: @spacewander This would make the pubsub module tests depend on kafka-related code, and I'm not sure we should do that.

Reply: What about adding a comment to show that this cmd is test-only?

Reply: After rechecking, I found that CmdEmpty already carries a test-only flag.

apisix/apisix/include/apisix/model/pubsub.proto, lines 35 to 39 in d955009