diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4137d03e7959..976b415b3dab 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,12 +2,12 @@ name: CI on: push: - branches: [master, 'release/**'] + branches: [master, 'chore/lj'] paths-ignore: - 'docs/**' - '**/*.md' pull_request: - branches: [master, 'release/**'] + branches: [master, 'chore/lj'] paths-ignore: - 'docs/**' - '**/*.md' @@ -28,7 +28,6 @@ jobs: - ubuntu-20.04 os_name: - linux_openresty - - linux_openresty_1_19 test_dir: - t/plugin/[a-k]* - t/plugin/[l-z]* @@ -40,6 +39,7 @@ jobs: env: SERVER_NAME: ${{ matrix.os_name }} OPENRESTY_VERSION: default + abt_branch: apisix-base/1.21.4.2.2 steps: - name: Check out code @@ -164,7 +164,7 @@ jobs: - name: Linux Install run: | - sudo --preserve-env=OPENRESTY_VERSION \ + sudo --preserve-env=OPENRESTY_VERSION,abt_branch \ ./ci/${{ matrix.os_name }}_runner.sh do_install - name: Linux Script diff --git a/Makefile b/Makefile index fb572714a156..ed911969fc06 100644 --- a/Makefile +++ b/Makefile @@ -36,7 +36,7 @@ ENV_TAR ?= tar ENV_INSTALL ?= install ENV_RM ?= rm -vf ENV_DOCKER ?= docker -ENV_DOCKER_COMPOSE ?= docker-compose --project-directory $(CURDIR) -p $(project_name) -f $(project_compose_ci) +ENV_DOCKER_COMPOSE ?= docker compose --project-directory $(CURDIR) -p $(project_name) -f $(project_compose_ci) ENV_NGINX ?= $(ENV_NGINX_EXEC) -p $(CURDIR) -c $(CURDIR)/conf/nginx.conf ENV_NGINX_EXEC := $(shell command -v openresty 2>/dev/null || command -v nginx 2>/dev/null) ENV_OPENSSL_PREFIX ?= $(addprefix $(ENV_NGINX_PREFIX), openssl) diff --git a/apisix/core/response.lua b/apisix/core/response.lua index cfbac1467341..baee97749598 100644 --- a/apisix/core/response.lua +++ b/apisix/core/response.lua @@ -70,7 +70,9 @@ function resp_exit(code, ...) error("failed to encode data: " .. err, -2) else idx = idx + 1 - t[idx] = body .. "\n" + t[idx] = body + idx = idx + 1 + t[idx] = "\n" end elseif v ~= nil then @@ -80,7 +82,7 @@ function resp_exit(code, ...) end if idx > 0 then - ngx_print(concat_tab(t, "", 1, idx)) + ngx_print(t) end if code then @@ -174,7 +176,7 @@ end -- final_body = transform(final_body) -- ngx.arg[1] = final_body -- ... 
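+-- max_resp_body_bytes (optional, added below): once the buffered bytes reach
+-- this cap, the held copy is returned truncated to at most that many bytes.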
-function _M.hold_body_chunk(ctx, hold_the_copy) +function _M.hold_body_chunk(ctx, hold_the_copy, max_resp_body_bytes) local body_buffer local chunk, eof = arg[1], arg[2] @@ -190,22 +192,32 @@ function _M.hold_body_chunk(ctx, hold_the_copy) n = 1 } ctx._body_buffer[ctx._plugin_name] = body_buffer + ctx._resp_body_bytes = #chunk else local n = body_buffer.n + 1 body_buffer.n = n body_buffer[n] = chunk + ctx._resp_body_bytes = ctx._resp_body_bytes + #chunk + end + if max_resp_body_bytes and ctx._resp_body_bytes >= max_resp_body_bytes then + local body_data = concat_tab(body_buffer, "", 1, body_buffer.n) + body_data = str_sub(body_data, 1, max_resp_body_bytes) + return body_data end end if eof then body_buffer = ctx._body_buffer[ctx._plugin_name] if not body_buffer then + if max_resp_body_bytes and #chunk >= max_resp_body_bytes then + chunk = str_sub(chunk, 1, max_resp_body_bytes) + end return chunk end - body_buffer = concat_tab(body_buffer, "", 1, body_buffer.n) + local body_data = concat_tab(body_buffer, "", 1, body_buffer.n) ctx._body_buffer[ctx._plugin_name] = nil - return body_buffer + return body_data end if not hold_the_copy then diff --git a/apisix/plugins/brotli.lua b/apisix/plugins/brotli.lua new file mode 100644 index 000000000000..031bd8ea9fd3 --- /dev/null +++ b/apisix/plugins/brotli.lua @@ -0,0 +1,248 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local ngx = ngx +local ngx_re_gmatch = ngx.re.gmatch +local ngx_header = ngx.header +local req_http_version = ngx.req.http_version +local str_sub = string.sub +local ipairs = ipairs +local tonumber = tonumber +local type = type +local is_loaded, brotli = pcall(require, "brotli") + + +local schema = { + type = "object", + properties = { + types = { + anyOf = { + { + type = "array", + minItems = 1, + items = { + type = "string", + minLength = 1, + }, + }, + { + enum = {"*"} + } + }, + default = {"text/html"} + }, + min_length = { + type = "integer", + minimum = 1, + default = 20, + }, + mode = { + type = "integer", + minimum = 0, + maximum = 2, + default = 0, + -- 0: MODE_GENERIC (default), + -- 1: MODE_TEXT (for UTF-8 format text input) + -- 2: MODE_FONT (for WOFF 2.0) + }, + comp_level = { + type = "integer", + minimum = 0, + maximum = 11, + default = 6, + -- follow the default value from ngx_brotli brotli_comp_level + }, + lgwin = { + type = "integer", + default = 19, + -- follow the default value from ngx_brotli brotli_window + enum = {0,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24}, + }, + lgblock = { + type = "integer", + default = 0, + enum = {0,16,17,18,19,20,21,22,23,24}, + }, + http_version = { + enum = {1.1, 1.0}, + default = 1.1, + }, + vary = { + type = "boolean", + } + }, +} + + +local _M = { + version = 0.1, + priority = 996, + name = "brotli", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function create_brotli_compressor(mode, comp_level, lgwin, lgblock) + local options = { + mode = mode, + quality = comp_level, + lgwin = lgwin, + lgblock = lgblock, + } + return brotli.compressor:new(options) +end + + +local function check_accept_encoding(ctx) + local accept_encoding = core.request.header(ctx, "Accept-Encoding") + -- no Accept-Encoding + if not accept_encoding then + return false + end + + -- single Accept-Encoding + if accept_encoding == "*" or accept_encoding == "br" then + return true + end + + -- multi Accept-Encoding + local iterator, err = ngx_re_gmatch(accept_encoding, + [[([a-z\*]+)(;q=)?([0-9.]*)?]], "jo") + if not iterator then + core.log.error("gmatch failed, error: ", err) + return false + end + + local captures + while true do + captures, err = iterator() + if not captures then + break + end + if err then + core.log.error("iterator failed, error: ", err) + return false + end + if (captures[1] == "br" or captures[1] == "*") and + (not captures[2] or captures[3] ~= "0") then + return true + end + end + + return false +end + + +function _M.header_filter(conf, ctx) + if not is_loaded then + core.log.error("please check the brotli library") + return + end + + local allow_encoding = check_accept_encoding(ctx) + if not allow_encoding then + return + end + + local content_encoded = ngx_header["Content-Encoding"] + if content_encoded then + -- Don't compress if Content-Encoding is present in upstream data + return + end + + local types = conf.types + local content_type = ngx_header["Content-Type"] + if not content_type then + -- Like Nginx, don't compress if Content-Type is missing + return + end + + if type(types) == "table" then + local matched = false + local from = core.string.find(content_type, ";") + if from then + content_type = str_sub(content_type, 1, from - 1) + end + + for _, ty in ipairs(types) do + if content_type == ty then + matched = true + break + end + end + + if not matched then + return + end + end + + local content_length = 
tonumber(ngx_header["Content-Length"])
+    if content_length then
+        local min_length = conf.min_length
+        if content_length < min_length then
+            return
+        end
+        -- Like Nginx, don't check min_length if Content-Length is missing
+    end
+
+    local http_version = req_http_version()
+    if http_version < conf.http_version then
+        return
+    end
+
+    if conf.vary then
+        core.response.add_header("Vary", "Accept-Encoding")
+    end
+
+    local compressor = create_brotli_compressor(conf.mode, conf.comp_level,
+                                                conf.lgwin, conf.lgblock)
+    if not compressor then
+        core.log.error("failed to create brotli compressor")
+        return
+    end
+
+    ctx.brotli_matched = true
+    ctx.compressor = compressor
+    core.response.clear_header_as_body_modified()
+    core.response.add_header("Content-Encoding", "br")
+end
+
+
+function _M.body_filter(conf, ctx)
+    if not ctx.brotli_matched then
+        return
+    end
+
+    local chunk, eof = ngx.arg[1], ngx.arg[2]
+    if type(chunk) == "string" and chunk ~= "" then
+        local encode_chunk = ctx.compressor:compress(chunk)
+        ngx.arg[1] = encode_chunk .. ctx.compressor:flush()
+    end
+
+    if eof then
+        -- append the final compressed frame; overwriting arg[1] alone
+        -- would truncate the response
+        ngx.arg[1] = ngx.arg[1] .. ctx.compressor:finish()
+    end
+end
+
+
+return _M
diff --git a/apisix/plugins/kafka-logger.lua b/apisix/plugins/kafka-logger.lua
index 2abbd1fce8a8..adeec2921a35 100644
--- a/apisix/plugins/kafka-logger.lua
+++ b/apisix/plugins/kafka-logger.lua
@@ -14,6 +14,7 @@
 -- See the License for the specific language governing permissions and
 -- limitations under the License.
 --
+local expr = require("resty.expr.v1")
 local core = require("apisix.core")
 local log_util = require("apisix.utils.log-util")
 local producer = require ("resty.kafka.producer")
@@ -22,6 +23,7 @@ local bp_manager_mod = require("apisix.utils.batch-processor-manager")
 local math = math
 local pairs = pairs
 local type = type
+local req_read_body = ngx.req.read_body
 
 local plugin_name = "kafka-logger"
 local batch_processor_manager = bp_manager_mod.new("kafka logger")
@@ -95,7 +97,7 @@ local schema = {
         required_acks = {
             type = "integer",
             default = 1,
-            enum = { 0, 1, -1 },
+            enum = { 1, -1 },
         },
         key = {type = "string"},
         timeout = {type = "integer", minimum = 1, default = 3},
@@ -115,6 +117,8 @@ local schema = {
                 type = "array"
             }
         },
+        max_req_body_bytes = {type = "integer", minimum = 1, default = 524288},
+        max_resp_body_bytes = {type = "integer", minimum = 1, default = 524288},
         -- in lua-resty-kafka, cluster_name is defined as number
         -- see https://github.com/doujiang24/lua-resty-kafka#new-1
         cluster_name = {type = "integer", minimum = 1, default = 1},
@@ -134,7 +138,9 @@ local schema = {
 local metadata_schema = {
     type = "object",
     properties = {
-        log_format = log_util.metadata_schema_log_format,
+        log_format = {
+            type = "object"
+        }
     },
 }
@@ -208,6 +214,32 @@ local function send_kafka_data(conf, log_message, prod)
 end
 
 
+function _M.access(conf, ctx)
+    if conf.include_req_body then
+        local should_read_body = true
+        if conf.include_req_body_expr then
+            if not conf.request_expr then
+                local request_expr, err = expr.new(conf.include_req_body_expr)
+                if not request_expr then
+                    core.log.error('generate request expr err ', err)
+                    return
+                end
+                conf.request_expr = request_expr
+            end
+
+            local result = conf.request_expr:eval(ctx.var)
+
+            if not result then
+                should_read_body = false
+            end
+        end
+        if should_read_body then
+            req_read_body()
+        end
+    end
+end
+
+
 function _M.body_filter(conf, ctx)
     log_util.collect_body(conf, ctx)
 end
diff --git a/apisix/utils/content-decode.lua
b/apisix/utils/content-decode.lua new file mode 100644 index 000000000000..c22c965fd865 --- /dev/null +++ b/apisix/utils/content-decode.lua @@ -0,0 +1,112 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local pcall = pcall +local zlib = require("ffi-zlib") +local str_buffer = require("string.buffer") +local is_br_libs_loaded, brotli = pcall(require, "brotli") +local content_decode_funcs = {} +local _M = {} + + +local function inflate_gzip(data) + local inputs = str_buffer.new():set(data) + local outputs = str_buffer.new() + + local read_inputs = function(size) + local data = inputs:get(size) + if data == "" then + return nil + end + return data + end + + local write_outputs = function(data) + return outputs:put(data) + end + + local ok, err = zlib.inflateGzip(read_inputs, write_outputs) + if not ok then + return nil, "inflate gzip err: " .. err + end + + return outputs:get() +end +content_decode_funcs.gzip = inflate_gzip + + +local function brotli_stream_decode(read_inputs, write_outputs) + -- read 64k data per times + local read_size = 64 * 1024 + local decompressor = brotli.decompressor:new() + + local chunk, ok, res + repeat + chunk = read_inputs(read_size) + if chunk then + ok, res = pcall(function() + return decompressor:decompress(chunk) + end) + else + ok, res = pcall(function() + return decompressor:finish() + end) + end + if not ok then + return false, res + end + write_outputs(res) + until not chunk + + return true, nil +end + + +local function brotli_decode(data) + local inputs = str_buffer.new():set(data) + local outputs = str_buffer.new() + + local read_inputs = function(size) + local data = inputs:get(size) + if data == "" then + return nil + end + return data + end + + local write_outputs = function(data) + return outputs:put(data) + end + + local ok, err = brotli_stream_decode(read_inputs, write_outputs) + if not ok then + return nil, "brotli decode err: " .. 
err + end + + return outputs:get() +end + +if is_br_libs_loaded then + content_decode_funcs.br = brotli_decode +end + + +function _M.dispatch_decoder(response_encoding) + return content_decode_funcs[response_encoding] +end + + +return _M diff --git a/apisix/utils/log-util.lua b/apisix/utils/log-util.lua index 1df687c7bcfd..3fd618ae1478 100644 --- a/apisix/utils/log-util.lua +++ b/apisix/utils/log-util.lua @@ -17,21 +17,29 @@ local core = require("apisix.core") local plugin = require("apisix.plugin") local expr = require("resty.expr.v1") -local ngx = ngx +local content_decode = require("apisix.utils.content-decode") +local ngx = ngx local pairs = pairs local ngx_now = ngx.now +local ngx_header = ngx.header local os_date = os.date local str_byte = string.byte +local str_sub = string.sub local math_floor = math.floor local ngx_update_time = ngx.update_time local req_get_body_data = ngx.req.get_body_data local is_http = ngx.config.subsystem == "http" +local req_get_body_file = ngx.req.get_body_file +local MAX_REQ_BODY = 524288 -- 512 KiB +local MAX_RESP_BODY = 524288 -- 512 KiB +local io = io local lru_log_format = core.lrucache.new({ ttl = 300, count = 512 }) local _M = {} + _M.metadata_schema_log_format = { type = "object", default = { @@ -42,80 +50,54 @@ _M.metadata_schema_log_format = { } -local function gen_log_format(format) - local log_format = {} - for k, var_name in pairs(format) do - if var_name:byte(1, 1) == str_byte("$") then - log_format[k] = {true, var_name:sub(2)} - else - log_format[k] = {false, var_name} +local function get_request_body(max_bytes) + local req_body = req_get_body_data() + if req_body then + if max_bytes and #req_body >= max_bytes then + req_body = str_sub(req_body, 1, max_bytes) end + return req_body end - core.log.info("log_format: ", core.json.delay_encode(log_format)) - return log_format -end - -local function get_request_body(conf, ctx) - local res = {} - - if conf.include_req_body then + local file_name = req_get_body_file() + if not file_name then + return nil + end - local log_request_body = true + core.log.info("attempt to read body from file: ", file_name) - if conf.include_req_body_expr then + local f, err = io.open(file_name, 'r') + if not f then + return nil, "fail to open file " .. err + end - if not conf.request_expr then - local request_expr, err = expr.new(conf.include_req_body_expr) - if not request_expr then - core.log.error('generate request expr err ' .. 
err) - return res - end - conf.request_expr = request_expr - end + req_body = f:read(max_bytes) + f:close() - local result = conf.request_expr:eval(ctx.var) + return req_body +end - if not result then - log_request_body = false - end - end - if log_request_body then - local body = req_get_body_data() - if body then - res.request_body = body - return res - else - local body_file = ngx.req.get_body_file() - if body_file then - res.request_body_file = body_file - return res - end - end +local function gen_log_format(format) + local log_format = {} + for k, var_name in pairs(format) do + if var_name:byte(1, 1) == str_byte("$") then + log_format[k] = {true, var_name:sub(2)} + else + log_format[k] = {false, var_name} end end - - return res + core.log.info("log_format: ", core.json.delay_encode(log_format)) + return log_format end -local function get_custom_format_log(ctx, format, conf) +local function get_custom_format_log(ctx, format) local log_format = lru_log_format(format or "", nil, gen_log_format, format) local entry = core.table.new(0, core.table.nkeys(log_format)) for k, var_attr in pairs(log_format) do if var_attr[1] then - if var_attr[2] == "response_body" then - entry[k] = ctx.resp_body - elseif var_attr[2] == "request_body" then - local request_data = get_request_body(conf, ctx) - entry[k] = request_data.request_body - elseif var_attr[2] == "request_body_file" then - local request_data = get_request_body(conf, ctx) - entry[k] = request_data.request_body_file - else - entry[k] = ctx.var[var_attr[2]] - end + entry[k] = ctx.var[var_attr[2]] else entry[k] = var_attr[2] end @@ -218,9 +200,38 @@ local function get_full_log(ngx, conf) log.response.body = ctx.resp_body end - local request_data = get_request_body(conf, ctx) - log.request.body = request_data.request_body - log.request.body_file = request_data.request_body_file + if conf.include_req_body then + + local log_request_body = true + + if conf.include_req_body_expr then + + if not conf.request_expr then + local request_expr, err = expr.new(conf.include_req_body_expr) + if not request_expr then + core.log.error('generate request expr err ' .. err) + return log + end + conf.request_expr = request_expr + end + + local result = conf.request_expr:eval(ctx.var) + + if not result then + log_request_body = false + end + end + + if log_request_body then + local max_req_body_bytes = conf.max_req_body_bytes or MAX_REQ_BODY + local body, err = get_request_body(max_req_body_bytes) + if err then + core.log.error("fail to get request body: ", err) + return + end + log.request.body = body + end + end return log end @@ -234,7 +245,27 @@ function _M.inject_get_full_log(f) end +local function is_match(match, ctx) + local match_result + for _, m in pairs(match) do + local expr, _ = expr.new(m) + match_result = expr:eval(ctx.var) + if match_result then + break + end + end + + return match_result +end + + function _M.get_log_entry(plugin_name, conf, ctx) + -- If the "match" configuration is set and the matching conditions are not met, + -- then do not log the message. 
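+    -- For example (hypothetical config), match = {{{"arg_name", "==", "jack"}}}
+    -- would keep logging only for requests whose "name" argument equals "jack".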
+ if conf.match and not is_match(conf.match, ctx) then + return + end + local metadata = plugin.plugin_metadata(plugin_name) core.log.info("metadata: ", core.json.delay_encode(metadata)) @@ -246,7 +277,7 @@ function _M.get_log_entry(plugin_name, conf, ctx) if conf.log_format or has_meta_log_format then customized = true - entry = get_custom_format_log(ctx, conf.log_format or metadata.value.log_format, conf) + entry = get_custom_format_log(ctx, conf.log_format or metadata.value.log_format) else if is_http then entry = get_full_log(ngx, conf) @@ -261,20 +292,21 @@ end function _M.get_req_original(ctx, conf) - local headers = { + local data = { ctx.var.request, "\r\n" } for k, v in pairs(ngx.req.get_headers()) do - core.table.insert_tail(headers, k, ": ", v, "\r\n") + core.table.insert_tail(data, k, ": ", v, "\r\n") end - -- core.log.error("headers: ", core.table.concat(headers, "")) - core.table.insert(headers, "\r\n") + core.table.insert(data, "\r\n") if conf.include_req_body then - core.table.insert(headers, ctx.var.request_body) + local max_req_body_bytes = conf.max_req_body_bytes or MAX_REQ_BODY + local req_body = get_request_body(max_req_body_bytes) + core.table.insert(data, req_body) end - return core.table.concat(headers, "") + return core.table.concat(data, "") end @@ -319,11 +351,38 @@ function _M.collect_body(conf, ctx) end if log_response_body then - local final_body = core.response.hold_body_chunk(ctx, true) + local max_resp_body_bytes = conf.max_resp_body_bytes or MAX_RESP_BODY + + if ctx._resp_body_bytes and ctx._resp_body_bytes >= max_resp_body_bytes then + return + end + local final_body = core.response.hold_body_chunk(ctx, true, max_resp_body_bytes) if not final_body then return end - ctx.resp_body = final_body + + local response_encoding = ngx_header["Content-Encoding"] + if not response_encoding then + ctx.resp_body = final_body + return + end + + local decoder = content_decode.dispatch_decoder(response_encoding) + if not decoder then + core.log.warn("unsupported compression encoding type: ", + response_encoding) + ctx.resp_body = final_body + return + end + + local decoded_body, err = decoder(final_body) + if err ~= nil then + core.log.warn("try decode compressed data err: ", err) + ctx.resp_body = final_body + return + end + + ctx.resp_body = decoded_body end end end diff --git a/ci/init-plugin-test-service.sh b/ci/init-plugin-test-service.sh index ae86dcde2c85..fc3e43df7d84 100755 --- a/ci/init-plugin-test-service.sh +++ b/ci/init-plugin-test-service.sh @@ -17,9 +17,9 @@ # after() { - docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test2 - docker exec -i apache-apisix_kafka-server1_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 3 --topic test3 - docker exec -i apache-apisix_kafka-server2_1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server2:2181 --replication-factor 1 --partitions 1 --topic test4 + docker exec -i apache-apisix-kafka-server1-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test2 + docker exec -i apache-apisix-kafka-server1-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 3 --topic test3 + docker exec -i apache-apisix-kafka-server2-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create 
--zookeeper zookeeper-server2:2181 --replication-factor 1 --partitions 1 --topic test4 # prepare openwhisk env docker pull openwhisk/action-nodejs-v14:nightly diff --git a/ci/linux_openresty_common_runner.sh b/ci/linux_openresty_common_runner.sh index 4029d851bd16..c572e5318d59 100755 --- a/ci/linux_openresty_common_runner.sh +++ b/ci/linux_openresty_common_runner.sh @@ -85,6 +85,8 @@ script() { sleep 1 done + make init + # APISIX_ENABLE_LUACOV=1 PERL5LIB=.:$PERL5LIB prove -Itest-nginx/lib -r t FLUSH_ETCD=1 prove --timer -Itest-nginx/lib -I./ -r $TEST_FILE_SUB_DIR | tee /tmp/test.result rerun_flaky_tests /tmp/test.result diff --git a/rockspec/apisix-3.2.2.1-0.rockspec b/rockspec/apisix-3.2.2.1-0.rockspec index e3cdc7ff79eb..a5131c7837b2 100644 --- a/rockspec/apisix-3.2.2.1-0.rockspec +++ b/rockspec/apisix-3.2.2.1-0.rockspec @@ -53,7 +53,7 @@ dependencies = { "nginx-lua-prometheus = 0.20220527", "jsonschema = 0.9.8", "lua-resty-ipmatcher = 0.6.1", - "lua-resty-kafka = 0.20-0", + "lua-resty-kafka = 0.23-0", "lua-resty-logger-socket = 2.0.1-0", "skywalking-nginx-lua = 0.6.0", "base64 = 1.5-2", @@ -78,7 +78,9 @@ dependencies = { "xml2lua = 1.5-2", "nanoid = 0.1-1", "lua-resty-mediador = 0.1.2-1", - "lua-resty-ldap = 0.1.0-0" + "lua-resty-ldap = 0.1.0-0", + "lua-ffi-zlib = 0.6-0", + "brotli-ffi = 0.3-1" } build = { diff --git a/rockspec/apisix-master-0.rockspec b/rockspec/apisix-master-0.rockspec index 9342c584afd1..4d4b4c0f1350 100644 --- a/rockspec/apisix-master-0.rockspec +++ b/rockspec/apisix-master-0.rockspec @@ -53,7 +53,7 @@ dependencies = { "nginx-lua-prometheus = 0.20220527", "jsonschema = 0.9.8", "lua-resty-ipmatcher = 0.6.1", - "lua-resty-kafka = 0.20-0", + "lua-resty-kafka = 0.23-0", "lua-resty-logger-socket = 2.0.1-0", "skywalking-nginx-lua = 0.6.0", "base64 = 1.5-2", @@ -78,7 +78,9 @@ dependencies = { "xml2lua = 1.5-2", "nanoid = 0.1-1", "lua-resty-mediador = 0.1.2-1", - "lua-resty-ldap = 0.1.0-0" + "lua-resty-ldap = 0.1.0-0", + "lua-ffi-zlib = 0.6-0", + "brotli-ffi = 0.3-1" } build = { diff --git a/t/plugin/grpc-web/package-lock.json b/t/plugin/grpc-web/package-lock.json index 01de0e7a4cf4..02c7f981fa4b 100644 --- a/t/plugin/grpc-web/package-lock.json +++ b/t/plugin/grpc-web/package-lock.json @@ -25,7 +25,7 @@ }, "node_modules/xhr2": { "version": "0.2.1", - "resolved": "https://registry.npm.taobao.org/xhr2/download/xhr2-0.2.1.tgz", + "resolved": "https://registry.npmmirror.com/xhr2/download/xhr2-0.2.1.tgz", "integrity": "sha1-TnOtxPnP7Jy9IVf3Pv3OOl8QipM=", "engines": { "node": ">= 6" @@ -45,7 +45,7 @@ }, "xhr2": { "version": "0.2.1", - "resolved": "https://registry.npm.taobao.org/xhr2/download/xhr2-0.2.1.tgz", + "resolved": "https://registry.npmmirror.com/xhr2/download/xhr2-0.2.1.tgz", "integrity": "sha1-TnOtxPnP7Jy9IVf3Pv3OOl8QipM=" } } diff --git a/t/plugin/kafka-logger-large-body.t b/t/plugin/kafka-logger-large-body.t new file mode 100644 index 000000000000..e86c645915b2 --- /dev/null +++ b/t/plugin/kafka-logger-large-body.t @@ -0,0 +1,869 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+use t::APISIX 'no_plan';
+
+repeat_each(1);
+no_long_string();
+no_root_location();
+
+add_block_preprocessor(sub {
+    my ($block) = @_;
+
+    if (!$block->request) {
+        $block->set_value("request", "GET /t");
+    }
+
+    my $http_config = $block->http_config // <<_EOC_;
+    # fake server, only for test
+    server {
+        listen 1970;
+        location /large_resp {
+            content_by_lua_block {
+                local large_body = {
+                    "h", "e", "l", "l", "o"
+                }
+
+                local size_in_bytes = 1024 * 1024 -- 1mb
+                for i = 1, size_in_bytes do
+                    large_body[i+5] = "l"
+                end
+                large_body = table.concat(large_body, "")
+
+                ngx.say(large_body)
+            }
+        }
+    }
+_EOC_
+
+    $block->set_value("http_config", $http_config);
+});
+
+run_tests;
+
+__DATA__
+
+=== TEST 1: max_req_body_bytes is not an integer
+--- config
+    location /t {
+        content_by_lua_block {
+            local plugin = require("apisix.plugins.kafka-logger")
+            local ok, err = plugin.check_schema({
+                broker_list= {
+                    ["127.0.0.1"] = 9092
+                },
+                kafka_topic = "test2",
+                key = "key1",
+                timeout = 1,
+                batch_max_size = 1,
+                max_req_body_bytes = "10",
+                include_req_body = true,
+                meta_format = "origin"
+            })
+            if not ok then
+                ngx.say(err)
+            end
+            ngx.say("done")
+        }
+    }
+--- response_body
+property "max_req_body_bytes" validation failed: wrong type: expected integer, got string
+done
+
+
+
+=== TEST 2: set route(meta_format = origin, include_req_body = true)
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                ngx.HTTP_PUT,
+                [[{
+                        "plugins": {
+                            "kafka-logger": {
+                                "broker_list" : {
+                                    "127.0.0.1":9092
+                                },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "batch_max_size": 1,
+                                "max_req_body_bytes": 5,
+                                "include_req_body": true,
+                                "meta_format": "origin"
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 3: hit route(meta_format = origin, include_req_body = true)
+--- request
+GET /hello?ab=cd
+abcdef
+--- response_body
+hello world
+--- error_log
+send data to kafka: GET /hello?ab=cd HTTP/1.1
+host: localhost
+content-length: 6
+connection: close
+abcde
+--- wait: 2
+
+
+
+=== TEST 4: set route(meta_format = default, include_req_body = true)
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                ngx.HTTP_PUT,
+                [[{
+                        "plugins": {
+                            "kafka-logger": {
+                                "broker_list" : {
+                                    "127.0.0.1":9092
+                                },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "batch_max_size": 1,
+                                "max_req_body_bytes": 5,
+                                "include_req_body": true
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 5: hit route(meta_format = default, include_req_body = true)
+--- request
+GET /hello?ab=cd
+abcdef
+--- response_body
+hello world
+--- error_log eval
+qr/"body": "abcde"/ +--- wait: 2 + + + +=== TEST 6: set route(id: 1, meta_format = default, include_resp_body = true) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "max_resp_body_bytes": 5, + "include_resp_body": true, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 7: hit route(meta_format = default, include_resp_body = true) +--- request +POST /hello?name=qwerty +abcdef +--- response_body +hello world +--- error_log eval +qr/send data to kafka: \{.*"body":"hello"/ +--- wait: 2 + + + +=== TEST 8: set route(id: 1, meta_format = origin, include_resp_body = true) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "meta_format": "origin", + "include_resp_body": true, + "max_resp_body_bytes": 5, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 9: hit route(meta_format = origin, include_resp_body = true) +--- request +POST /hello?name=qwerty +abcdef +--- response_body +hello world +--- error_log +send data to kafka: POST /hello?name=qwerty HTTP/1.1 +host: localhost +content-length: 6 +connection: close +--- wait: 2 + + + +=== TEST 10: set route(id: 1, meta_format = default, include_resp_body = true, include_req_body = true) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "meta_format": "default", + "include_req_body": true, + "max_req_body_bytes": 5, + "include_resp_body": true, + "max_resp_body_bytes": 5, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 11: hit route(meta_format = default, include_resp_body = true, include_req_body = true) +--- request +POST /hello?name=qwerty +abcdef +--- response_body +hello world +--- error_log eval +qr/send data to kafka: \{.*"body":"abcde"/ +--- error_log_like +*"body":"hello" +--- wait: 2 + + + +=== TEST 12: set route(id: 1, meta_format = default, include_resp_body = false, include_req_body = false) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "meta_format": "default", + 
"batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 13: hit route(meta_format = default, include_resp_body = false, include_req_body = false) +--- request +POST /hello?name=qwerty +abcdef +--- response_body +hello world +--- no_error_log eval +qr/send data to kafka: \{.*"body":.*/ +--- wait: 2 + + + +=== TEST 14: set route(large_body, meta_format = default, include_resp_body = true, include_req_body = true) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "meta_format": "default", + "include_req_body": true, + "max_req_body_bytes": 256, + "include_resp_body": true, + "max_resp_body_bytes": 256, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/echo" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 15: hit route(large_body, meta_format = default, include_resp_body = true, include_req_body = true) +--- config + location /t { + content_by_lua_block { + local core = require("apisix.core") + local t = require("lib.test_admin") + local http = require("resty.http") + + local large_body = { + "h", "e", "l", "l", "o" + } + + local size_in_bytes = 10 * 1024 -- 10kb + for i = 1, size_in_bytes do + large_body[i+5] = "l" + end + large_body = table.concat(large_body, "") + + local uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/echo" + + local httpc = http.new() + local res, err = httpc:request_uri(uri, + { + method = "POST", + body = large_body, + } + ) + ngx.say(res.body) + } + } +--- request +GET /t +--- error_log eval +qr/send data to kafka: \{.*"body":"hello(l{251})".*/ +--- response_body eval +qr/hello.*/ + + + +=== TEST 16: set route(large_body, meta_format = default, include_resp_body = true) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "meta_format": "default", + "include_resp_body": true, + "max_resp_body_bytes": 256, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/echo" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 17: hit route(large_body, meta_format = default, include_resp_body = true) +--- config + location /t { + content_by_lua_block { + local core = require("apisix.core") + local t = require("lib.test_admin") + local http = require("resty.http") + + local large_body = { + "h", "e", "l", "l", "o" + } + + local size_in_bytes = 10 * 1024 -- 10kb + for i = 1, size_in_bytes do + large_body[i+5] = "l" + end + large_body = table.concat(large_body, "") + + local uri = "http://127.0.0.1:" .. ngx.var.server_port .. 
"/echo" + + local httpc = http.new() + local res, err = httpc:request_uri(uri, + { + method = "POST", + body = large_body, + } + ) + ngx.say(res.body) + } + } +--- request +GET /t +--- error_log eval +qr/send data to kafka: \{.*"body":"hello(l{251})".*/ +--- response_body eval +qr/hello.*/ + + + +=== TEST 18: set route(large_body, meta_format = default, include_req_body = true) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "meta_format": "default", + "include_req_body": true, + "max_req_body_bytes": 256, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/echo" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 19: hit route(large_body, meta_format = default, include_req_body = true) +--- config + location /t { + content_by_lua_block { + local core = require("apisix.core") + local t = require("lib.test_admin") + local http = require("resty.http") + + local large_body = { + "h", "e", "l", "l", "o" + } + + local size_in_bytes = 10 * 1024 -- 10kb + for i = 1, size_in_bytes do + large_body[i+5] = "l" + end + large_body = table.concat(large_body, "") + + local uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/echo" + + local httpc = http.new() + local res, err = httpc:request_uri(uri, + { + method = "POST", + body = large_body, + } + ) + ngx.say(res.body) + } + } +--- request +GET /t +--- error_log eval +qr/send data to kafka: \{.*"body":"hello(l{251})".*/ +--- response_body eval +qr/hello.*/ + + + +=== TEST 20: set route(large_body, meta_format = default, include_resp_body = true) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "meta_format": "default", + "include_resp_body": true, + "max_resp_body_bytes": 256, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1970": 1 + }, + "type": "roundrobin" + }, + "uri": "/large_resp" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 21: truncate upstream response body 1m to 256 bytes +--- request +GET /large_resp +--- response_body eval +qr/hello.*/ +--- error_log eval +qr/send data to kafka: \{.*"body":"hello(l{251})".*/ + + + +=== TEST 22: set route(large_body, meta_format = default, include_req_body = true) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "meta_format": "default", + "include_req_body": true, + "max_req_body_bytes": 256, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 23: truncate upstream 
request body 100k to 256 bytes
+--- config
+    location /t {
+        content_by_lua_block {
+            local core = require("apisix.core")
+            local t = require("lib.test_admin")
+            local http = require("resty.http")
+
+            local large_body = {
+                "h", "e", "l", "l", "o"
+            }
+
+            local size_in_bytes = 100 * 1024 -- 100kb
+            for i = 1, size_in_bytes do
+                large_body[i+5] = "l"
+            end
+            large_body = table.concat(large_body, "")
+
+            local uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/hello"
+
+            local httpc = http.new()
+            local res, err = httpc:request_uri(uri,
+                {
+                    method = "POST",
+                    body = large_body,
+                }
+            )
+
+            if err then
+                ngx.say(err)
+            end
+
+            ngx.say(res.body)
+        }
+    }
+--- request
+GET /t
+--- response_body_like
+hello world
+--- error_log eval
+qr/send data to kafka: \{.*"body":"hello(l{251})".*/
+
+
+
+=== TEST 24: set route(meta_format = default, include_req_body = true)
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                ngx.HTTP_PUT,
+                [[{
+                        "plugins": {
+                            "kafka-logger": {
+                                "broker_list" : {
+                                    "127.0.0.1":9092
+                                },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "batch_max_size": 1,
+                                "max_req_body_bytes": 5,
+                                "include_req_body": true,
+                                "meta_format": "default"
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 25: empty request body
+--- request
+GET /hello?ab=cd
+--- response_body
+hello world
+--- error_log eval
+qr/send data to kafka/
+--- wait: 2
diff --git a/t/plugin/kafka-logger2.t b/t/plugin/kafka-logger2.t
index 4aabf8756869..84b6f90cd4cb 100644
--- a/t/plugin/kafka-logger2.t
+++ b/t/plugin/kafka-logger2.t
@@ -57,7 +57,7 @@ done
 
 
 
-=== TEST 2: report log to kafka, with required_acks(1, 0, -1)
+=== TEST 2: report log to kafka, with required_acks(1, -1)
 --- config
 location /t {
     content_by_lua_block {
@@ -110,30 +110,6 @@ location /t {
                     uri = "/hello",
                 },
             },
-            {
-                input = {
-                    plugins = {
-                        ["kafka-logger"] = {
-                            broker_list = {
-                                ["127.0.0.1"] = 9092
-                            },
-                            kafka_topic = "test2",
-                            producer_type = "sync",
-                            timeout = 1,
-                            batch_max_size = 1,
-                            required_acks = 0,
-                            meta_format = "origin",
-                        }
-                    },
-                    upstream = {
-                        nodes = {
-                            ["127.0.0.1:1980"] = 1
-                        },
-                        type = "roundrobin"
-                    },
-                    uri = "/hello",
-                },
-            },
         }
 
         local t = require("lib.test_admin").test
@@ -657,7 +633,166 @@ done
 
 
 
-=== TEST 12: set route(id: 1,include_resp_body = true,include_resp_body_expr = array)
+=== TEST 12: set route include_resp_body - gzip
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                ngx.HTTP_PUT,
+                [=[{
+                        "plugins": {
+                            "kafka-logger": {
+                                "broker_list" :
+                                  {
+                                    "127.0.0.1":9092
+                                  },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "include_resp_body": true,
+                                "batch_max_size": 1
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:11451": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/gzip_hello"
+                }]=]
+                )
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+
+--- response_body
+passed
+
+
+
+=== TEST 13: hit
+--- http_config
+server {
+    listen 11451;
+    gzip on;
+    gzip_types *;
+    gzip_min_length 1;
+    location /gzip_hello {
+        content_by_lua_block {
+            ngx.req.read_body()
+            local s = "gzip hello world"
+            ngx.header['Content-Length'] = #s + 1
+            ngx.say(s)
+        }
+    }
+}
+--- request
+GET /gzip_hello
+---
more_headers +Accept-Encoding: gzip +--- error_log eval +qr/send data to kafka: \{.*"body":"gzip hello world\\n"/ +--- wait: 2 + + + +=== TEST 14: set route include_resp_body - brotli +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [=[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "include_resp_body": true, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:11452": 1 + }, + "type": "roundrobin" + }, + "uri": "/brotli_hello" + }]=] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } + +--- response_body +passed + + + +=== TEST 15: hit +--- http_config +server { + listen 11452; + location /brotli_hello { + content_by_lua_block { + ngx.req.read_body() + local s = "brotli hello world" + ngx.header['Content-Length'] = #s + 1 + ngx.say(s) + } + header_filter_by_lua_block { + local conf = { + comp_level = 6, + http_version = 1.1, + lgblock = 0, + lgwin = 19, + min_length = 1, + mode = 0, + types = "*", + } + local brotli = require("apisix.plugins.brotli") + brotli.header_filter(conf, ngx.ctx) + } + body_filter_by_lua_block { + local conf = { + comp_level = 6, + http_version = 1.1, + lgblock = 0, + lgwin = 19, + min_length = 1, + mode = 0, + types = "*", + } + local brotli = require("apisix.plugins.brotli") + brotli.body_filter(conf, ngx.ctx) + } + } +} +--- request +GET /brotli_hello +--- more_headers +Accept-Encoding: br +--- error_log eval +qr/send data to kafka: \{.*"body":"brotli hello world\\n"/ +--- wait: 2 + + + +=== TEST 16: set route(id: 1,include_resp_body = true,include_resp_body_expr = array) --- config location /t { content_by_lua_block { @@ -706,7 +841,7 @@ passed -=== TEST 13: hit route, expr eval success +=== TEST 17: hit route, expr eval success --- request POST /hello?name=qwerty abcdef @@ -718,7 +853,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/ -=== TEST 14: hit route,expr eval fail +=== TEST 18: hit route,expr eval fail --- request POST /hello?name=zcxv abcdef @@ -730,7 +865,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/ -=== TEST 15: multi level nested expr conditions +=== TEST 19: multi level nested expr conditions --- config location /t { content_by_lua_block { @@ -782,7 +917,7 @@ passed -=== TEST 16: hit route, req_body_expr and resp_body_expr both eval success +=== TEST 20: hit route, req_body_expr and resp_body_expr both eval success --- request POST /hello?name=qwerty abcdef @@ -795,7 +930,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/] -=== TEST 17: hit route, req_body_expr eval success, resp_body_expr both eval failed +=== TEST 21: hit route, req_body_expr eval success, resp_body_expr both eval failed --- request POST /hello?name=asdfgh abcdef @@ -809,7 +944,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/ -=== TEST 18: hit route, req_body_expr eval failed, resp_body_expr both eval success +=== TEST 22: hit route, req_body_expr eval failed, resp_body_expr both eval success --- request POST /hello?name=zxcvbn abcdef @@ -823,7 +958,7 @@ qr/send data to kafka: \{.*"body":"abcdef"/ -=== TEST 19: hit route, req_body_expr eval success, resp_body_expr both eval failed +=== TEST 23: hit route, req_body_expr eval success, resp_body_expr both eval failed --- request POST /hello?name=xxxxxx abcdef @@ -836,7 +971,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/] -=== TEST 20: 
update route(id: 1,include_req_body = true,include_req_body_expr = array) +=== TEST 24: update route(id: 1,include_req_body = true,include_req_body_expr = array) --- config location /t { content_by_lua_block { @@ -886,7 +1021,7 @@ passed -=== TEST 21: hit route, expr eval success +=== TEST 25: hit route, expr eval success --- request POST /hello?name=qwerty abcdef @@ -898,7 +1033,7 @@ qr/send data to kafka: \{.*"body":"abcdef"/ -=== TEST 22: setup route with meta_refresh_interval +=== TEST 26: setup route with meta_refresh_interval --- config location /t { content_by_lua_block { @@ -942,7 +1077,7 @@ passed -=== TEST 23: hit route, send data to kafka successfully +=== TEST 27: hit route, send data to kafka successfully --- request POST /hello abcdef
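The truncation rule that get_request_body, hold_body_chunk, and the tests above all rely on reduces to a short, dependency-free Lua sketch; clip_body and the sample chunks are hypothetical names used only for illustration:

local function clip_body(chunks, max_bytes)
    -- accumulate chunks until the cap is reached, then clip the concatenation
    local buf, total = {}, 0
    for i, chunk in ipairs(chunks) do
        buf[i] = chunk
        total = total + #chunk
        if max_bytes and total >= max_bytes then
            return string.sub(table.concat(buf, "", 1, i), 1, max_bytes)
        end
    end
    return table.concat(buf)
end

assert(clip_body({"hello", "lllll"}, 5) == "hello")  -- cap reached: clipped
assert(clip_body({"ab", "cd"}, 5) == "abcd")         -- under the cap: unchanged

This is why the large-body tests expect exactly "hello" followed by 251 "l" characters whenever max_req_body_bytes or max_resp_body_bytes is 256.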