From 0b38ea2226de808ccfe208f3e9bd4401e854e2ba Mon Sep 17 00:00:00 2001 From: Sarasa Kisaragi Date: Sun, 15 Oct 2023 20:43:12 +0800 Subject: [PATCH 01/23] feat: support config stream_route upstream in service (#10298) * feat: support config stream_route upstream ain service Signed-off-by: Ling Samuel (WSL) --- apisix/admin/services.lua | 16 ++ apisix/admin/stream_routes.lua | 17 ++ apisix/constants.lua | 1 + apisix/http/service.lua | 2 +- apisix/init.lua | 29 +++ apisix/plugin.lua | 46 +++++ apisix/schema_def.lua | 1 + apisix/stream/router/ip_port.lua | 4 + docs/en/latest/admin-api.md | 1 + docs/zh/latest/admin-api.md | 1 + t/stream-node/sanity-with-service.t | 294 ++++++++++++++++++++++++++++ 11 files changed, 411 insertions(+), 1 deletion(-) create mode 100644 t/stream-node/sanity-with-service.t diff --git a/apisix/admin/services.lua b/apisix/admin/services.lua index dc14bda44ec6..4218b77f22dd 100644 --- a/apisix/admin/services.lua +++ b/apisix/admin/services.lua @@ -16,6 +16,7 @@ -- local core = require("apisix.core") local get_routes = require("apisix.router").http_routes +local get_stream_routes = require("apisix.router").stream_routes local apisix_upstream = require("apisix.upstream") local resource = require("apisix.admin.resource") local schema_plugin = require("apisix.admin.plugins").check_schema @@ -99,6 +100,21 @@ local function delete_checker(id) end end + local stream_routes, stream_routes_ver = get_stream_routes() + core.log.info("stream_routes: ", core.json.delay_encode(stream_routes, true)) + core.log.info("stream_routes_ver: ", stream_routes_ver) + if stream_routes_ver and stream_routes then + for _, route in ipairs(stream_routes) do + if type(route) == "table" and route.value + and route.value.service_id + and tostring(route.value.service_id) == id then + return 400, {error_msg = "can not delete this service directly," + .. " stream_route [" .. route.value.id + .. "] is still using it now"} + end + end + end + return nil, nil end diff --git a/apisix/admin/stream_routes.lua b/apisix/admin/stream_routes.lua index c16a9a7938c3..6e1c6e6385c3 100644 --- a/apisix/admin/stream_routes.lua +++ b/apisix/admin/stream_routes.lua @@ -42,6 +42,23 @@ local function check_conf(id, conf, need_id, schema) end end + local service_id = conf.service_id + if service_id then + local key = "/services/" .. service_id + local res, err = core.etcd.get(key) + if not res then + return nil, {error_msg = "failed to fetch service info by " + .. "service id [" .. service_id .. "]: " + .. err} + end + + if res.status ~= 200 then + return nil, {error_msg = "failed to fetch service info by " + .. "service id [" .. service_id .. "], " + .. "response code: " .. res.status} + end + end + local ok, err = stream_route_checker(conf, true) if not ok then return nil, {error_msg = err} diff --git a/apisix/constants.lua b/apisix/constants.lua index 72209aa4d905..0b3ec160b53d 100644 --- a/apisix/constants.lua +++ b/apisix/constants.lua @@ -37,6 +37,7 @@ return { }, STREAM_ETCD_DIRECTORY = { ["/upstreams"] = true, + ["/services"] = true, ["/plugins"] = true, ["/ssls"] = true, ["/stream_routes"] = true, diff --git a/apisix/http/service.lua b/apisix/http/service.lua index 83bcb9b9d341..97b224d622c8 100644 --- a/apisix/http/service.lua +++ b/apisix/http/service.lua @@ -61,7 +61,7 @@ function _M.init_worker() filter = filter, }) if not services then - error("failed to create etcd instance for fetching upstream: " .. err) + error("failed to create etcd instance for fetching /services: " .. 
err) return end end diff --git a/apisix/init.lua b/apisix/init.lua index 86b68cf62208..da9fe915a665 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -1021,6 +1021,7 @@ function _M.stream_init_worker() plugin.init_worker() xrpc.init_worker() router.stream_init_worker() + require("apisix.http.service").init_worker() apisix_upstream.init_worker() local we = require("resty.worker.events") @@ -1078,6 +1079,34 @@ function _M.stream_preread_phase() api_ctx.matched_upstream = upstream + elseif matched_route.value.service_id then + local service = service_fetch(matched_route.value.service_id) + if not service then + core.log.error("failed to fetch service configuration by ", + "id: ", matched_route.value.service_id) + return core.response.exit(404) + end + + matched_route = plugin.merge_service_stream_route(service, matched_route) + api_ctx.matched_route = matched_route + api_ctx.conf_type = "stream_route&service" + api_ctx.conf_version = matched_route.modifiedIndex .. "&" .. service.modifiedIndex + api_ctx.conf_id = matched_route.value.id .. "&" .. service.value.id + api_ctx.service_id = service.value.id + api_ctx.service_name = service.value.name + api_ctx.matched_upstream = matched_route.value.upstream + if matched_route.value.upstream_id and not matched_route.value.upstream then + local upstream = apisix_upstream.get_by_id(matched_route.value.upstream_id) + if not upstream then + if is_http then + return core.response.exit(502) + end + + return ngx_exit(1) + end + + api_ctx.matched_upstream = upstream + end else if matched_route.has_domain then local err diff --git a/apisix/plugin.lua b/apisix/plugin.lua index bde2b89a5393..fa1d814b290a 100644 --- a/apisix/plugin.lua +++ b/apisix/plugin.lua @@ -43,6 +43,9 @@ local stream_local_plugins_hash = core.table.new(0, 32) local merged_route = core.lrucache.new({ ttl = 300, count = 512 }) +local merged_stream_route = core.lrucache.new({ + ttl = 300, count = 512 +}) local expr_lrucache = core.lrucache.new({ ttl = 300, count = 512 }) @@ -637,6 +640,49 @@ function _M.merge_service_route(service_conf, route_conf) end +local function merge_service_stream_route(service_conf, route_conf) + -- because many fields in Service are not supported by stream route, + -- so we copy the stream route as base object + local new_conf = core.table.deepcopy(route_conf) + if service_conf.value.plugins then + for name, conf in pairs(service_conf.value.plugins) do + if not new_conf.value.plugins then + new_conf.value.plugins = {} + end + + if not new_conf.value.plugins[name] then + new_conf.value.plugins[name] = conf + end + end + end + + new_conf.value.service_id = nil + + if not new_conf.value.upstream and service_conf.value.upstream then + new_conf.value.upstream = service_conf.value.upstream + end + + if not new_conf.value.upstream_id and service_conf.value.upstream_id then + new_conf.value.upstream_id = service_conf.value.upstream_id + end + + return new_conf +end + + +function _M.merge_service_stream_route(service_conf, route_conf) + core.log.info("service conf: ", core.json.delay_encode(service_conf, true)) + core.log.info(" stream route conf: ", core.json.delay_encode(route_conf, true)) + + local version = route_conf.modifiedIndex .. "#" .. service_conf.modifiedIndex + local route_service_key = route_conf.value.id .. "#" + .. 
version + return merged_stream_route(route_service_key, version, + merge_service_stream_route, + service_conf, route_conf) +end + + local function merge_consumer_route(route_conf, consumer_conf, consumer_group_conf) if not consumer_conf.plugins or core.table.nkeys(consumer_conf.plugins) == 0 diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index a958492c3fb1..e3e9a05aca26 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -908,6 +908,7 @@ _M.stream_route = { }, upstream = upstream_schema, upstream_id = id_schema, + service_id = id_schema, plugins = plugins_schema, protocol = xrpc_protocol_schema, }, diff --git a/apisix/stream/router/ip_port.lua b/apisix/stream/router/ip_port.lua index 977bcb2d3a4e..284cc456edbc 100644 --- a/apisix/stream/router/ip_port.lua +++ b/apisix/stream/router/ip_port.lua @@ -110,6 +110,8 @@ do for _, route in ipairs(items) do local hit = match_addrs(route, vars) if hit then + route.value.remote_addr_matcher = nil + route.value.server_addr_matcher = nil ctx.matched_route = route return true end @@ -175,6 +177,8 @@ do for _, route in ipairs(other_routes) do local hit = match_addrs(route, api_ctx.var) if hit then + route.value.remote_addr_matcher = nil + route.value.server_addr_matcher = nil api_ctx.matched_route = route return true end diff --git a/docs/en/latest/admin-api.md b/docs/en/latest/admin-api.md index 1491694d09fa..77c2141336c8 100644 --- a/docs/en/latest/admin-api.md +++ b/docs/en/latest/admin-api.md @@ -1419,6 +1419,7 @@ Stream Route resource request address: /apisix/admin/stream_routes/{id} | ----------- | -------- | -------- | ------------------------------------------------------------------- | ----------------------------- | | upstream | False | Upstream | Configuration of the [Upstream](./terminology/upstream.md). | | | upstream_id | False | Upstream | Id of the [Upstream](terminology/upstream.md) service. | | +| service_id | False | String | Id of the [Service](terminology/service.md) service. | | | remote_addr | False | IPv4, IPv4 CIDR, IPv6 | Filters Upstream forwards by matching with client IP. | "127.0.0.1" or "127.0.0.1/32" or "::1" | | server_addr | False | IPv4, IPv4 CIDR, IPv6 | Filters Upstream forwards by matching with APISIX Server IP. | "127.0.0.1" or "127.0.0.1/32" or "::1" | | server_port | False | Integer | Filters Upstream forwards by matching with APISIX Server port. | 9090 | diff --git a/docs/zh/latest/admin-api.md b/docs/zh/latest/admin-api.md index dd2d257a4043..899fb4c44deb 100644 --- a/docs/zh/latest/admin-api.md +++ b/docs/zh/latest/admin-api.md @@ -1428,6 +1428,7 @@ Plugin 资源请求地址:/apisix/admin/stream_routes/{id} | ---------------- | ------| -------- | ------------------------------------------------------------------------------| ------ | | upstream | 否 | Upstream | Upstream 配置,详细信息请参考 [Upstream](terminology/upstream.md)。 | | | upstream_id | 否 | Upstream | 需要使用的 Upstream id,详细信息请 [Upstream](terminology/upstream.md)。 | | +| service_id | 否 | String | 需要使用的 [Service](terminology/service.md) id. 
| | | remote_addr | 否 | IPv4, IPv4 CIDR, IPv6 | 过滤选项:如果客户端 IP 匹配,则转发到上游 | "127.0.0.1" 或 "127.0.0.1/32" 或 "::1" | | server_addr | 否 | IPv4, IPv4 CIDR, IPv6 | 过滤选项:如果 APISIX 服务器的 IP 与 `server_addr` 匹配,则转发到上游。 | "127.0.0.1" 或 "127.0.0.1/32" 或 "::1" | | server_port | 否 | 整数 | 过滤选项:如果 APISIX 服务器的端口 与 `server_port` 匹配,则转发到上游。 | 9090 | diff --git a/t/stream-node/sanity-with-service.t b/t/stream-node/sanity-with-service.t new file mode 100644 index 000000000000..799a96aa8434 --- /dev/null +++ b/t/stream-node/sanity-with-service.t @@ -0,0 +1,294 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +use t::APISIX 'no_plan'; + +log_level('info'); +no_root_location(); + +run_tests(); + +__DATA__ + +=== TEST 1: set stream route(id: 1) -> service(id: 1) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/services/1', + ngx.HTTP_PUT, + [[{ + "upstream": { + "nodes": { + "127.0.0.1:1995": 1 + }, + "type": "roundrobin" + } + }]] + ) + + if code >= 300 then + ngx.status = code + end + + code, body = t('/apisix/admin/stream_routes/1', + ngx.HTTP_PUT, + [[{ + "remote_addr": "127.0.0.1", + "service_id": 1 + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 2: hit route +--- stream_request eval +mmm +--- stream_response +hello world + + + +=== TEST 3: set stream route(id: 1) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/stream_routes/1', + ngx.HTTP_PUT, + [[{ + "remote_addr": "127.0.0.2", + "service_id": 1 + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 4: not hit route +--- stream_enable +--- stream_response + + + +=== TEST 5: delete route(id: 1) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/stream_routes/1', + ngx.HTTP_DELETE + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 6: set service upstream (id: 1) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/upstreams/1', + ngx.HTTP_PUT, + [[{ + "nodes": { + "127.0.0.1:1995": 1 + }, + "type": "roundrobin" + }]] + ) + if code >= 300 then + ngx.status = code + end + + code, body = t('/apisix/admin/services/1', + ngx.HTTP_PUT, + [[{ + "upstream_id": 1 + }]] + ) + + if code >= 300 then + ngx.status = code + end + + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 7: set stream 
route (id: 1) with service (id: 1) which uses upstream_id +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/stream_routes/1', + ngx.HTTP_PUT, + [[{ + "remote_addr": "127.0.0.1", + "service_id": 1 + }]] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 8: hit route +--- stream_request eval +mmm +--- stream_response +hello world + + + +=== TEST 9: set stream route (id: 1) which uses upstream_id and remote address with IP CIDR +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/stream_routes/1', + ngx.HTTP_PUT, + [[{ + "remote_addr": "127.0.0.1/26", + "service_id": "1" + }]] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 10: hit route +--- stream_request eval +mmm +--- stream_response +hello world + + + +=== TEST 11: reject bad CIDR +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/stream_routes/1', + ngx.HTTP_PUT, + [[{ + "remote_addr": ":/8", + "service_id": "1" + }]] + ) + if code >= 300 then + ngx.status = code + end + ngx.print(body) + } + } +--- request +GET /t +--- error_code: 400 +--- response_body +{"error_msg":"invalid remote_addr: :/8"} + + + +=== TEST 12: skip upstream http host check in stream subsystem +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/upstreams/1', + ngx.HTTP_PUT, + [[{ + "nodes": { + "127.0.0.1:1995": 1, + "127.0.0.2:1995": 1 + }, + "pass_host": "node", + "type": "roundrobin" + }]] + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 13: hit route +--- stream_request eval +mmm +--- stream_response +hello world From 3a87611f93fb2de9fb36b3f87d3bc7a530ca5f52 Mon Sep 17 00:00:00 2001 From: vuittont60 <81072379+vuittont60@users.noreply.github.com> Date: Mon, 16 Oct 2023 15:17:07 +0800 Subject: [PATCH 02/23] fix: typos in comments (#10330) --- apisix/cli/ngx_tpl.lua | 2 +- apisix/init.lua | 2 +- t/APISIX.pm | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/apisix/cli/ngx_tpl.lua b/apisix/cli/ngx_tpl.lua index 76c1832f3b32..cd652a9b1415 100644 --- a/apisix/cli/ngx_tpl.lua +++ b/apisix/cli/ngx_tpl.lua @@ -469,7 +469,7 @@ http { } apisix.http_init(args) - -- set apisix_lua_home into constans module + -- set apisix_lua_home into constants module -- it may be used by plugins to determine the work path of apisix local constants = require("apisix.constants") constants.apisix_lua_home = "{*apisix_lua_home*}" diff --git a/apisix/init.lua b/apisix/init.lua index da9fe915a665..4cfd179d25a6 100644 --- a/apisix/init.lua +++ b/apisix/init.lua @@ -170,7 +170,7 @@ end function _M.http_exit_worker() - -- TODO: we can support stream plugin later - currently there is not `destory` method + -- TODO: we can support stream plugin later - currently there is not `destroy` method -- in stream plugins plugin.exit_worker() require("apisix.plugins.ext-plugin.init").exit_worker() diff --git a/t/APISIX.pm b/t/APISIX.pm index b8e288abc881..a8c49348f563 100644 --- a/t/APISIX.pm +++ b/t/APISIX.pm @@ -524,7 +524,7 @@ _EOC_ } apisix.http_init(args) - -- set apisix_lua_home into constans 
module + -- set apisix_lua_home into constants module -- it may be used by plugins to determine the work path of apisix local constants = require("apisix.constants") constants.apisix_lua_home = "$apisix_home" From 3127e2dd9c38540c99328e8d8596a06ccc9efbb8 Mon Sep 17 00:00:00 2001 From: Traky Deng Date: Mon, 16 Oct 2023 23:36:11 -0700 Subject: [PATCH 03/23] docs: update underscore to hyphen in HTTP headers in `response-rewrite` plugin (#10347) --- docs/en/latest/plugins/response-rewrite.md | 8 ++++---- docs/zh/latest/plugins/response-rewrite.md | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/en/latest/plugins/response-rewrite.md b/docs/en/latest/plugins/response-rewrite.md index 392d367254f2..9f1312e0bed1 100644 --- a/docs/en/latest/plugins/response-rewrite.md +++ b/docs/en/latest/plugins/response-rewrite.md @@ -83,7 +83,7 @@ curl http://127.0.0.1:9180/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f1 "set": { "X-Server-id": 3, "X-Server-status": "on", - "X-Server-balancer_addr": "$balancer_ip:$balancer_port" + "X-Server-balancer-addr": "$balancer_ip:$balancer_port" } }, "vars":[ @@ -107,7 +107,7 @@ Besides `set` operation, you can also `add` or `remove` response header like: ```json "headers": { "add": [ - "X-Server-balancer_addr: $balancer_ip:$balancer_port" + "X-Server-balancer-addr: $balancer_ip:$balancer_port" ], "remove": [ "X-TO-BE-REMOVED" @@ -137,7 +137,7 @@ Transfer-Encoding: chunked Connection: keep-alive X-Server-id: 3 X-Server-status: on -X-Server-balancer_addr: 127.0.0.1:80 +X-Server-balancer-addr: 127.0.0.1:80 {"code":"ok","message":"new json body"} ``` @@ -170,7 +170,7 @@ curl http://127.0.0.1:9180/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f13 "set": { "X-Server-id":3, "X-Server-status":"on", - "X-Server-balancer_addr":"$balancer_ip:$balancer_port" + "X-Server-balancer-addr":"$balancer_ip:$balancer_port" } }, "filters":[ diff --git a/docs/zh/latest/plugins/response-rewrite.md b/docs/zh/latest/plugins/response-rewrite.md index a59c34f54806..be409411b346 100644 --- a/docs/zh/latest/plugins/response-rewrite.md +++ b/docs/zh/latest/plugins/response-rewrite.md @@ -84,7 +84,7 @@ curl http://127.0.0.1:9180/apisix/admin/routes/1 \ "set": { "X-Server-id": 3, "X-Server-status": "on", - "X-Server-balancer_addr": "$balancer_ip:$balancer_port" + "X-Server-balancer-addr": "$balancer_ip:$balancer_port" } }, "vars":[ @@ -108,7 +108,7 @@ curl http://127.0.0.1:9180/apisix/admin/routes/1 \ ```json "headers": { "add": [ - "X-Server-balancer_addr: $balancer_ip:$balancer_port" + "X-Server-balancer-addr: $balancer_ip:$balancer_port" ], "remove": [ "X-TO-BE-REMOVED" @@ -138,7 +138,7 @@ Transfer-Encoding: chunked Connection: keep-alive X-Server-id: 3 X-Server-status: on -X-Server-balancer_addr: 127.0.0.1:80 +X-Server-balancer-addr: 127.0.0.1:80 {"code":"ok","message":"new json body"} ``` @@ -169,7 +169,7 @@ curl http://127.0.0.1:9180/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f13 "set": { "X-Server-id":3, "X-Server-status":"on", - "X-Server-balancer_addr":"$balancer_ip:$balancer_port" + "X-Server-balancer-addr":"$balancer_ip:$balancer_port" } }, "filters":[ From 33ee8d6083d41d3f997d306ea7dcfa663cbd25f3 Mon Sep 17 00:00:00 2001 From: Liu Wei Date: Tue, 17 Oct 2023 14:38:37 +0800 Subject: [PATCH 04/23] chore: remove stream_proxy.only in config-default.yaml (#10337) --- conf/config-default.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/conf/config-default.yaml b/conf/config-default.yaml index 09c459677fcc..3267480f0cdb 100755 --- 
a/conf/config-default.yaml +++ b/conf/config-default.yaml @@ -76,7 +76,6 @@ apisix: # http is the default proxy mode. proxy_mode can be one of `http`, `stream`, or `http&stream` proxy_mode: http # stream_proxy: # TCP/UDP L4 proxy - # only: true # Enable L4 proxy only without L7 proxy. # tcp: # - addr: 9100 # Set the TCP proxy listening ports. # tls: true From d0085f28537bfeccf061baddabc2aae7db9917fb Mon Sep 17 00:00:00 2001 From: Peter Zhu Date: Wed, 18 Oct 2023 11:14:48 +0800 Subject: [PATCH 05/23] test: use http2 to test limit-conn plugin (#10332) --- t/plugin/limit-conn3.t | 126 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 126 insertions(+) create mode 100644 t/plugin/limit-conn3.t diff --git a/t/plugin/limit-conn3.t b/t/plugin/limit-conn3.t new file mode 100644 index 000000000000..a2fe36955800 --- /dev/null +++ b/t/plugin/limit-conn3.t @@ -0,0 +1,126 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +BEGIN { + if ($ENV{TEST_NGINX_CHECK_LEAK}) { + $SkipReason = "unavailable for the check leak tests"; + + } else { + $ENV{TEST_NGINX_USE_HUP} = 1; + undef $ENV{TEST_NGINX_USE_STAP}; + } +} + +use t::APISIX 'no_plan'; + +repeat_each(1); +no_long_string(); +no_shuffle(); +no_root_location(); + + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!$block->request) { + $block->set_value("request", "GET /t"); + } + + if (!$block->error_log && !$block->no_error_log) { + $block->set_value("no_error_log", "[error]\n[alert]"); + } +}); + +run_tests; + +__DATA__ + +=== TEST 1: create route with limit-conn plugin +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "limit-conn": { + "conn": 1, + "burst": 0, + "default_conn_delay": 0.1, + "rejected_code": 503, + "key": "$remote_addr", + "key_type": "var_combination" + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/limit_conn", + "host": "www.test.com" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- response_body +passed + + + +=== TEST 2: create ssl(sni: www.test.com) +--- config +location /t { + content_by_lua_block { + local core = require("apisix.core") + local t = require("lib.test_admin") + + local ssl_cert = t.read_file("t/certs/apisix.crt") + local ssl_key = t.read_file("t/certs/apisix.key") + local data = {cert = ssl_cert, key = ssl_key, sni = "www.test.com"} + + local code, body = t.test('/apisix/admin/ssls/1', + ngx.HTTP_PUT, + core.json.encode(data), + [[{ + "value": { + "sni": "www.test.com" + }, + "key": "/apisix/ssls/1" + }]] + ) + + ngx.status = code + ngx.say(body) + } +} +--- response_body +passed + + + +=== TEST 3: use HTTP version 2 to request +--- exec 
+curl --http2 --parallel -k https://www.test.com:1994/limit_conn https://www.test.com:1994/limit_conn --resolve www.test.com:1994:127.0.0.1 +--- response_body_like +503 Service Temporarily Unavailable.*.hello world From acfd07e7217427b8fc4095bbeef10db9f0d6613e Mon Sep 17 00:00:00 2001 From: Peter Zhu Date: Wed, 18 Oct 2023 11:21:21 +0800 Subject: [PATCH 06/23] test: use http2 to test limit-req plugin (#10334) --- t/plugin/limit-req3.t | 114 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 114 insertions(+) create mode 100644 t/plugin/limit-req3.t diff --git a/t/plugin/limit-req3.t b/t/plugin/limit-req3.t new file mode 100644 index 000000000000..684eb9d1b405 --- /dev/null +++ b/t/plugin/limit-req3.t @@ -0,0 +1,114 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +use t::APISIX 'no_plan'; + +log_level('info'); +repeat_each(1); +no_long_string(); +no_shuffle(); +no_root_location(); + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!$block->request) { + $block->set_value("request", "GET /t"); + } + + if (!$block->error_log && !$block->no_error_log) { + $block->set_value("no_error_log", "[error]\n[alert]"); + } +}); + +run_tests; + +__DATA__ + +=== TEST 1: create route with limit-req plugin +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "limit-req": { + "rate": 0.1, + "burst": 0.1, + "rejected_code": 503, + "key": "$remote_addr", + "key_type": "var_combination" + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello", + "host": "www.test.com" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 2: create ssl(sni: www.test.com) +--- config +location /t { + content_by_lua_block { + local core = require("apisix.core") + local t = require("lib.test_admin") + local ssl_cert = t.read_file("t/certs/apisix.crt") + local ssl_key = t.read_file("t/certs/apisix.key") + local data = {cert = ssl_cert, key = ssl_key, sni = "www.test.com"} + local code, body = t.test('/apisix/admin/ssls/1', + ngx.HTTP_PUT, + core.json.encode(data), + [[{ + "value": { + "sni": "www.test.com" + }, + "key": "/apisix/ssls/1" + }]] + ) + ngx.status = code + ngx.say(body) + } +} +--- response_body +passed + + + +=== TEST 3: use HTTP version 2 to request +--- exec +curl --http2 --parallel -k https://www.test.com:1994/hello https://www.test.com:1994/hello --resolve www.test.com:1994:127.0.0.1 +--- response_body_like +503 Service Temporarily Unavailable.*.hello world From 796cfffc7e670b6ede23a7a74368063d69127b99 Mon Sep 17 00:00:00 2001 From: Sylvia <39793568+SylviaBABY@users.noreply.github.com> Date: 
Wed, 18 Oct 2023 22:48:18 -0500 Subject: [PATCH 07/23] docs: Update APISIX usecases in README (#10358) --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 9d304c5fed92..46e630743d61 100644 --- a/README.md +++ b/README.md @@ -217,6 +217,7 @@ A wide variety of companies and organizations use APISIX API Gateway for researc - HONOR - Horizon Robotics - iQIYI +- Lenovo - NASA JPL - Nayuki - OPPO @@ -226,6 +227,7 @@ A wide variety of companies and organizations use APISIX API Gateway for researc - Travelsky - vivo - Sina Weibo +- WeCity - WPS - XPENG - Zoom From 2c0449793fe81cdf98b0e7952354f2ce45d2cec3 Mon Sep 17 00:00:00 2001 From: Abhishek Choudhary Date: Fri, 20 Oct 2023 11:25:28 +0545 Subject: [PATCH 08/23] chore: update rules for unresponded issues (#10354) --- .github/workflows/close-unresponded.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/close-unresponded.yml b/.github/workflows/close-unresponded.yml index 52e81228eba2..9508af7ded1c 100644 --- a/.github/workflows/close-unresponded.yml +++ b/.github/workflows/close-unresponded.yml @@ -20,7 +20,7 @@ jobs: - name: Prune Stale uses: actions/stale@v8 with: - days-before-issue-stale: 14 + days-before-issue-stale: 60 days-before-issue-close: 3 stale-issue-message: > Due to lack of the reporter's response this issue has been labeled with "no response". @@ -35,4 +35,5 @@ jobs: # Issues with these labels will never be considered stale. only-labels: 'wait for update' stale-issue-label: 'no response' + exempt-issue-labels: "don't close" ascending: true From 3b16fce060fbac39ee054784213a6bc81df526fc Mon Sep 17 00:00:00 2001 From: theweakgod <1031205858@qq.com> Date: Fri, 20 Oct 2023 15:49:29 +0800 Subject: [PATCH 09/23] fix(consul): worker will not exit while reload or quit (#10342) --- apisix/discovery/consul/init.lua | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apisix/discovery/consul/init.lua b/apisix/discovery/consul/init.lua index 253d0cb8cebd..32e306709e95 100644 --- a/apisix/discovery/consul/init.lua +++ b/apisix/discovery/consul/init.lua @@ -32,6 +32,7 @@ local ngx_timer_every = ngx.timer.every local log = core.log local json_delay_encode = core.json.delay_encode local ngx_worker_id = ngx.worker.id +local exiting = ngx.worker.exiting local thread_spawn = ngx.thread.spawn local thread_wait = ngx.thread.wait local thread_kill = ngx.thread.kill @@ -276,7 +277,7 @@ end local function check_keepalive(consul_server, retry_delay) - if consul_server.keepalive then + if consul_server.keepalive and not exiting() then local ok, err = ngx_timer_at(0, _M.connect, consul_server, retry_delay) if not ok then log.error("create ngx_timer_at got error: ", err) From d57936545899e4919fdcf771cc60444ce350a6af Mon Sep 17 00:00:00 2001 From: Sn0rt Date: Mon, 23 Oct 2023 16:13:13 +0800 Subject: [PATCH 10/23] fix(traffic-split): post_arg match fails because content-type contains charset (#10372) --- apisix/core/ctx.lua | 4 ++- t/core/ctx2.t | 25 +++++++++---- t/plugin/traffic-split5.t | 75 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 97 insertions(+), 7 deletions(-) diff --git a/apisix/core/ctx.lua b/apisix/core/ctx.lua index 5128061d58fe..6d77b43811ca 100644 --- a/apisix/core/ctx.lua +++ b/apisix/core/ctx.lua @@ -260,7 +260,9 @@ do elseif core_str.has_prefix(key, "post_arg_") then -- only match default post form - if request.header(nil, "Content-Type") == "application/x-www-form-urlencoded" then + local content_type = request.header(nil, 
"Content-Type") + if content_type ~= nil and core_str.has_prefix(content_type, + "application/x-www-form-urlencoded") then local arg_key = sub_str(key, 10) local args = request.get_post_args()[arg_key] if args then diff --git a/t/core/ctx2.t b/t/core/ctx2.t index a99844ffd0d2..7782ac9125cd 100644 --- a/t/core/ctx2.t +++ b/t/core/ctx2.t @@ -292,7 +292,20 @@ find ctx.req_post_args.test: true -=== TEST 13: missed (post_arg_test is missing) +=== TEST 13: hit with charset +--- request +POST /hello +test=test +--- more_headers +Content-Type: application/x-www-form-urlencoded;charset=utf-8 +--- response_body +hello world +--- error_log +find ctx.req_post_args.test: true + + + +=== TEST 14: missed (post_arg_test is missing) --- request POST /hello --- more_headers @@ -303,7 +316,7 @@ Content-Type: application/x-www-form-urlencoded -=== TEST 14: missed (post_arg_test is mismatch) +=== TEST 15: missed (post_arg_test is mismatch) --- request POST /hello test=tesy @@ -315,7 +328,7 @@ Content-Type: application/x-www-form-urlencoded -=== TEST 15: register custom variable +=== TEST 16: register custom variable --- config location /t { content_by_lua_block { @@ -351,7 +364,7 @@ Content-Type: application/x-www-form-urlencoded -=== TEST 16: hit +=== TEST 17: hit --- config location /t { content_by_lua_block { @@ -375,7 +388,7 @@ find ctx.var.a6_labels_zone: Singapore -=== TEST 17: register custom variable with no cacheable +=== TEST 18: register custom variable with no cacheable --- config location /t { content_by_lua_block { @@ -412,7 +425,7 @@ find ctx.var.a6_labels_zone: Singapore -=== TEST 18: hit +=== TEST 19: hit --- config location /t { content_by_lua_block { diff --git a/t/plugin/traffic-split5.t b/t/plugin/traffic-split5.t index b627d38418ba..96e284d5bf06 100644 --- a/t/plugin/traffic-split5.t +++ b/t/plugin/traffic-split5.t @@ -460,3 +460,78 @@ GET /server_port?name=jack --- error_log eval qr/event timer add: \d+: 12345000:\d+/ --- error_code: 502 + + + +=== TEST 9: set upstream for post_arg_id test case +--- config + location /t { + content_by_lua_block { + local json = require("toolkit.json") + local t = require("lib.test_admin").test + local data = { + uri = "/hello", + plugins = { + ["traffic-split"] = { + rules = { + { + match = { { + vars = { { "post_arg_id", "==", "1" } } + } }, + weighted_upstreams = { + { + upstream = { + name = "upstream_A", + type = "roundrobin", + nodes = { + ["127.0.0.1:1970"] = 1 + } + }, + weight = 1 + } + } + } + } + } + }, + upstream = { + type = "roundrobin", + nodes = { + ["127.0.0.1:1974"] = 1 + } + } + } + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + json.encode(data) + ) + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- response_body +passed + + + +=== TEST 10: post_arg_id = 1 without content-type charset +--- request +POST /hello +id=1 +--- more_headers +Content-Type: application/x-www-form-urlencoded +--- response_body +1970 + + + +=== TEST 11: post_arg_id = 1 with content-type charset +--- request +POST /hello +id=1 +--- more_headers +Content-Type: application/x-www-form-urlencoded;charset=UTF-8 +--- response_body +1970 From d3bf3611775f16eb2bb880cce76ed4a6b2da7dfb Mon Sep 17 00:00:00 2001 From: Ashish Tiwari Date: Mon, 23 Oct 2023 13:45:34 +0530 Subject: [PATCH 11/23] feat: integrate authz-keycloak with secrets resource (#10353) --- apisix/plugins/authz-keycloak.lua | 3 + docs/en/latest/plugins/authz-keycloak.md | 2 +- t/plugin/authz-keycloak4.t | 245 +++++++++++++++++++++++ 3 files changed, 249 
insertions(+), 1 deletion(-) create mode 100644 t/plugin/authz-keycloak4.t diff --git a/apisix/plugins/authz-keycloak.lua b/apisix/plugins/authz-keycloak.lua index f2c02727c0ce..99fe96cb06e7 100644 --- a/apisix/plugins/authz-keycloak.lua +++ b/apisix/plugins/authz-keycloak.lua @@ -20,6 +20,7 @@ local sub_str = string.sub local type = type local ngx = ngx local plugin_name = "authz-keycloak" +local fetch_secrets = require("apisix.secret").fetch_secrets local log = core.log local pairs = pairs @@ -757,6 +758,8 @@ local function generate_token_using_password_grant(conf,ctx) end function _M.access(conf, ctx) + -- resolve secrets + conf = fetch_secrets(conf) local headers = core.request.headers(ctx) local need_grant_token = conf.password_grant_token_generation_incoming_uri and ctx.var.request_uri == conf.password_grant_token_generation_incoming_uri and diff --git a/docs/en/latest/plugins/authz-keycloak.md b/docs/en/latest/plugins/authz-keycloak.md index d656e7095ea3..21ac21b80edd 100644 --- a/docs/en/latest/plugins/authz-keycloak.md +++ b/docs/en/latest/plugins/authz-keycloak.md @@ -48,7 +48,7 @@ Refer to [Authorization Services Guide](https://www.keycloak.org/docs/latest/aut | token_endpoint | string | False | | https://host.domain/auth/realms/foo/protocol/openid-connect/token | An OAuth2-compliant token endpoint that supports the `urn:ietf:params:oauth:grant-type:uma-ticket` grant type. If provided, overrides the value from discovery. | | resource_registration_endpoint | string | False | | https://host.domain/auth/realms/foo/authz/protection/resource_set | A UMA-compliant resource registration endpoint. If provided, overrides the value from discovery. | | client_id | string | True | | | The identifier of the resource server to which the client is seeking access. | -| client_secret | string | False | | | The client secret, if required. | +| client_secret | string | False | | | The client secret, if required. You can use APISIX secret to store and reference this value. APISIX currently supports storing secrets in two ways. [Environment Variables and HashiCorp Vault](../terminology/secret.md) | | grant_type | string | False | "urn:ietf:params:oauth:grant-type:uma-ticket" | ["urn:ietf:params:oauth:grant-type:uma-ticket"] | | | policy_enforcement_mode | string | False | "ENFORCING" | ["ENFORCING", "PERMISSIVE"] | | | permissions | array[string] | False | | | An array of strings, each representing a set of one or more resources and scopes the client is seeking access. | diff --git a/t/plugin/authz-keycloak4.t b/t/plugin/authz-keycloak4.t new file mode 100644 index 000000000000..60aea47f05c4 --- /dev/null +++ b/t/plugin/authz-keycloak4.t @@ -0,0 +1,245 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +BEGIN { + $ENV{VAULT_TOKEN} = "root"; + $ENV{CLIENT_SECRET} = "d1ec69e9-55d2-4109-a3ea-befa071579d5"; +} + +use t::APISIX 'no_plan'; + +log_level('debug'); +repeat_each(1); +no_long_string(); +no_root_location(); +run_tests; + +__DATA__ + +=== TEST 1: store secret into vault +--- exec +VAULT_TOKEN='root' VAULT_ADDR='http://0.0.0.0:8200' vault kv put kv/apisix/foo client_secret=d1ec69e9-55d2-4109-a3ea-befa071579d5 +--- response_body +Success! Data written to: kv/apisix/foo + + + +=== TEST 2: set client_secret as a reference to secret +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + -- put secret vault config + local code, body = t('/apisix/admin/secrets/vault/test1', + ngx.HTTP_PUT, + [[{ + "uri": "http://127.0.0.1:8200", + "prefix" : "kv/apisix", + "token" : "root" + }]] + ) + + if code >= 300 then + ngx.status = code + return ngx.say(body) + end + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "authz-keycloak": { + "token_endpoint": "https://127.0.0.1:8443/realms/University/protocol/openid-connect/token", + "permissions": ["course_resource"], + "client_id": "course_management", + "client_secret": "$secret://vault/test1/foo/client_secret", + "grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket", + "timeout": 3000, + "ssl_verify": false, + "password_grant_token_generation_incoming_uri": "/api/token" + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1982": 1 + }, + "type": "roundrobin" + }, + "uri": "/api/token" + }]] + ) + + if code >= 300 then + ngx.status = code + return ngx.say(body) + end + + local json_decode = require("toolkit.json").decode + local http = require "resty.http" + local httpc = http.new() + local uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/api/token" + local headers = { + ["Content-Type"] = "application/x-www-form-urlencoded", + } + + -- no username + local res, err = httpc:request_uri(uri, { + method = "POST", + headers = headers, + body = ngx.encode_args({ + username = "teacher@gmail.com", + password = "123456", + }), + }) + if res.status == 200 then + ngx.print("success\n") + end + } + } +--- request +GET /t +--- response_body +success + + + +=== TEST 3: set client_secret as a reference to env variable +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "authz-keycloak": { + "token_endpoint": "https://127.0.0.1:8443/realms/University/protocol/openid-connect/token", + "permissions": ["course_resource"], + "client_id": "course_management", + "client_secret": "$env://CLIENT_SECRET", + "grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket", + "timeout": 3000, + "ssl_verify": false, + "password_grant_token_generation_incoming_uri": "/api/token" + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1982": 1 + }, + "type": "roundrobin" + }, + "uri": "/api/token" + }]] + ) + + if code >= 300 then + ngx.status = code + return + end + + local json_decode = require("toolkit.json").decode + local http = require "resty.http" + local httpc = http.new() + local uri = "http://127.0.0.1:" .. ngx.var.server_port .. 
"/api/token" + local headers = { + ["Content-Type"] = "application/x-www-form-urlencoded", + } + + -- no username + local res, err = httpc:request_uri(uri, { + method = "POST", + headers = headers, + body = ngx.encode_args({ + username = "teacher@gmail.com", + password = "123456", + }), + }) + if res.status == 200 then + ngx.print("success\n") + end + } + } +--- request +GET /t +--- response_body +success + + + +=== TEST 4: set invalid client_secret as a reference to env variable +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "authz-keycloak": { + "token_endpoint": "https://127.0.0.1:8443/realms/University/protocol/openid-connect/token", + "permissions": ["course_resource"], + "client_id": "course_management", + "client_secret": "$env://INVALID_CLIENT_SECRET", + "grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket", + "timeout": 3000, + "ssl_verify": false, + "password_grant_token_generation_incoming_uri": "/api/token" + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1982": 1 + }, + "type": "roundrobin" + }, + "uri": "/api/token" + }]] + ) + + if code >= 300 then + ngx.status = code + return + end + + local json_decode = require("toolkit.json").decode + local http = require "resty.http" + local httpc = http.new() + local uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/api/token" + local headers = { + ["Content-Type"] = "application/x-www-form-urlencoded", + } + + -- no username + local res, err = httpc:request_uri(uri, { + method = "POST", + headers = headers, + body = ngx.encode_args({ + username = "teacher@gmail.com", + password = "123456", + }), + }) + if res.status == 200 then + ngx.print("success\n") + end + } + } +--- request +GET /t +--- grep_error_log eval +qr/Invalid client secret/ +--- grep_error_log_out +Invalid client secret +Invalid client secret From 88406dceab26ed7f25b663088b0b5e760bd45a38 Mon Sep 17 00:00:00 2001 From: Trevor Smith <105453067+TrevorSmith-msr@users.noreply.github.com> Date: Mon, 23 Oct 2023 03:19:35 -0500 Subject: [PATCH 12/23] feat: Add authorization params to openid-connect plugin (#10058) --- apisix/plugins/openid-connect.lua | 4 + docs/en/latest/plugins/openid-connect.md | 1 + docs/zh/latest/plugins/openid-connect.md | 1 + t/plugin/openid-connect4.t | 111 +++++++++++++++++++++++ 4 files changed, 117 insertions(+) create mode 100644 t/plugin/openid-connect4.t diff --git a/apisix/plugins/openid-connect.lua b/apisix/plugins/openid-connect.lua index 927e4ddbd8aa..0bd39f20d2cb 100644 --- a/apisix/plugins/openid-connect.lua +++ b/apisix/plugins/openid-connect.lua @@ -156,6 +156,10 @@ local schema = { description = "Comma separated list of hosts that should not be proxied.", } }, + }, + authorization_params = { + description = "Extra authorization params to the authorize endpoint", + type = "object" } }, encrypt_fields = {"client_secret"}, diff --git a/docs/en/latest/plugins/openid-connect.md b/docs/en/latest/plugins/openid-connect.md index 493370240362..0130d192113d 100644 --- a/docs/en/latest/plugins/openid-connect.md +++ b/docs/en/latest/plugins/openid-connect.md @@ -67,6 +67,7 @@ description: OpenID Connect allows the client to obtain user information from th | proxy_opts.http_proxy_authorization | string | False | | Basic [base64 username:password] | Default `Proxy-Authorization` header value to be used with `http_proxy`. 
| | proxy_opts.https_proxy_authorization | string | False | | Basic [base64 username:password] | As `http_proxy_authorization` but for use with `https_proxy` (since with HTTPS the authorisation is done when connecting, this one cannot be overridden by passing the `Proxy-Authorization` request header). | | proxy_opts.no_proxy | string | False | | | Comma separated list of hosts that should not be proxied. | +| authorization_params | object | False | | | Additional parameters to send in the in the request to the authorization endpoint. | NOTE: `encrypt_fields = {"client_secret"}` is also defined in the schema, which means that the field will be stored encrypted in etcd. See [encrypted storage fields](../plugin-develop.md#encrypted-storage-fields). diff --git a/docs/zh/latest/plugins/openid-connect.md b/docs/zh/latest/plugins/openid-connect.md index aececc4986e0..9355f951ebec 100644 --- a/docs/zh/latest/plugins/openid-connect.md +++ b/docs/zh/latest/plugins/openid-connect.md @@ -67,6 +67,7 @@ description: OpenID Connect(OIDC)是基于 OAuth 2.0 的身份认证协议 | proxy_opts.http_proxy_authorization | string | 否 | | Basic [base64 username:password] | `http_proxy` 默认的 `Proxy-Authorization` 请求头参数值。 | | proxy_opts.https_proxy_authorization | string | 否 | | Basic [base64 username:password] | 与`http_proxy_authorization`相同,但与`https_proxy`一起使用(因为使用 HTTPS 时,授权是在连接时完成的,因此不能通过传递 Proxy-Authorization 请求头来覆盖此授权)。 | | proxy_opts.no_proxy | string | 否 | | | 不应被代理的主机的逗号分隔列表。 | +| authorization_params | object | false | | | 在请求中发送到授权端点的附加参数 | 注意:schema 中还定义了 `encrypt_fields = {"client_secret"}`,这意味着该字段将会被加密存储在 etcd 中。具体参考 [加密存储字段](../plugin-develop.md#加密存储字段)。 diff --git a/t/plugin/openid-connect4.t b/t/plugin/openid-connect4.t new file mode 100644 index 000000000000..35f33acbe25e --- /dev/null +++ b/t/plugin/openid-connect4.t @@ -0,0 +1,111 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +use t::APISIX 'no_plan'; + +log_level('debug'); +repeat_each(1); +no_long_string(); +no_root_location(); +no_shuffle(); + +add_block_preprocessor(sub { + my ($block) = @_; + + if ((!defined $block->error_log) && (!defined $block->no_error_log)) { + $block->set_value("no_error_log", "[error]"); + } + + if (!defined $block->request) { + $block->set_value("request", "GET /t"); + } +}); + +run_tests(); + +__DATA__ + +=== TEST 1: Set up new route access the auth server with header test +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "openid-connect": { + "client_id": "kbyuFDidLLm280LIwVFiazOqjO3ty8KH", + "client_secret": "60Op4HFM0I8ajz0WdiStAbziZ-VFQttXuxixHHs2R7r7-CW8GR79l-mmLqMhc-Sa", + "discovery": "https://samples.auth0.com/.well-known/openid-configuration", + "redirect_uri": "https://iresty.com", + "authorization_params":{ + "test":"abc" + }, + "ssl_verify": false, + "timeout": 10, + "scope": "apisix", + "proxy_opts": { + "http_proxy": "http://127.0.0.1:8080", + "http_proxy_authorization": "Basic dXNlcm5hbWU6cGFzc3dvcmQK" + }, + "use_pkce": false + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + + } + } +--- response_body +passed + + + +=== TEST 2: Check the uri of the authorization endpoint for passed headers +--- config + location /t { + content_by_lua_block { + local http = require "resty.http" + local httpc = http.new() + local uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/hello" + local res, err = httpc:request_uri(uri, {method = "GET"}) + ngx.status = res.status + local location = res.headers['Location'] + if location and string.find(location, 'https://samples.auth0.com/authorize') ~= -1 and + string.find(location, 'test=abc') ~= -1 then + ngx.say(true) + end + } + } +--- timeout: 10s +--- response_body +true +--- error_code: 302 +--- error_log +use http proxy From 9bf0167ed0812f33aa3ab5ace69b9b7a97b4fbbc Mon Sep 17 00:00:00 2001 From: Abhishek Choudhary Date: Wed, 25 Oct 2023 12:20:59 +0545 Subject: [PATCH 13/23] docs: fix usage of incorrect default admin api port (#10391) --- docs/en/latest/plugins/degraphql.md | 4 ++-- docs/en/latest/plugins/grpc-transcode.md | 8 ++++---- docs/en/latest/tutorials/add-multiple-api-versions.md | 10 +++++----- docs/zh/latest/plugins/grpc-transcode.md | 8 ++++---- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/en/latest/plugins/degraphql.md b/docs/en/latest/plugins/degraphql.md index b0eaaf83bf05..7407a435c531 100644 --- a/docs/en/latest/plugins/degraphql.md +++ b/docs/en/latest/plugins/degraphql.md @@ -97,7 +97,7 @@ Now we can use RESTful API to query the same data that is proxy by APISIX. First, we need to create a route in APISIX, and enable the degreaph plugin on the route, we need to define the GraphQL query in the plugin's config. ```bash -curl --location --request PUT 'http://localhost:9080/apisix/admin/routes/1' \ +curl --location --request PUT 'http://localhost:9180/apisix/admin/routes/1' \ --header 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' \ --header 'Content-Type: application/json' \ --data-raw '{ @@ -210,7 +210,7 @@ we can execute it on `http://localhost:8080/playground`, and get the data as bel We convert the GraphQL query to JSON string like `"query($name: String!, $githubAccount: String!) 
{\n persons(filter: { name: $name, githubAccount: $githubAccount }) {\n id\n name\n blog\n githubAccount\n talks {\n id\n title\n }\n }\n}"`, so we create a route like this: ```bash -curl --location --request PUT 'http://localhost:9080/apisix/admin/routes/1' \ +curl --location --request PUT 'http://localhost:9180/apisix/admin/routes/1' \ --header 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' \ --header 'Content-Type: application/json' \ --data-raw '{ diff --git a/docs/en/latest/plugins/grpc-transcode.md b/docs/en/latest/plugins/grpc-transcode.md index 56680946dff5..9d0fdb46f77c 100644 --- a/docs/en/latest/plugins/grpc-transcode.md +++ b/docs/en/latest/plugins/grpc-transcode.md @@ -238,7 +238,7 @@ If the gRPC service returns an error, there may be a `grpc-status-details-bin` f Upload the proto file: ```shell -curl http://127.0.0.1:9080/apisix/admin/protos/1 \ +curl http://127.0.0.1:9180/apisix/admin/protos/1 \ -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "content" : "syntax = \"proto3\"; @@ -260,7 +260,7 @@ curl http://127.0.0.1:9080/apisix/admin/protos/1 \ Enable the `grpc-transcode` plugin,and set the option `show_status_in_body` to `true`: ```shell -curl http://127.0.0.1:9080/apisix/admin/routes/1 \ +curl http://127.0.0.1:9180/apisix/admin/routes/1 \ -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "methods": ["GET"], @@ -308,7 +308,7 @@ Server: APISIX web server Note that there is an undecoded field in the return body. If you need to decode the field, you need to add the `message type` of the field in the uploaded proto file. ```shell -curl http://127.0.0.1:9080/apisix/admin/protos/1 \ +curl http://127.0.0.1:9180/apisix/admin/protos/1 \ -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "content" : "syntax = \"proto3\"; @@ -335,7 +335,7 @@ curl http://127.0.0.1:9080/apisix/admin/protos/1 \ Also configure the option `status_detail_type` to `helloworld.ErrorDetail`. ```shell -curl http://127.0.0.1:9080/apisix/admin/routes/1 \ +curl http://127.0.0.1:9180/apisix/admin/routes/1 \ -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "methods": ["GET"], diff --git a/docs/en/latest/tutorials/add-multiple-api-versions.md b/docs/en/latest/tutorials/add-multiple-api-versions.md index e48c0c581433..f125a542f09c 100644 --- a/docs/en/latest/tutorials/add-multiple-api-versions.md +++ b/docs/en/latest/tutorials/add-multiple-api-versions.md @@ -105,7 +105,7 @@ docker compose up -d You first need to [Route](https://apisix.apache.org/docs/apisix/terminology/route/) your HTTP requests from the gateway to an [Upstream](https://apisix.apache.org/docs/apisix/terminology/upstream/) (your API). With APISIX, you can create a route by sending an HTTP request to the gateway. 
```shell -curl http://apisix:9080/apisix/admin/routes/1 -H 'X-API-KEY: xyz' -X PUT -d ' +curl http://apisix:9180/apisix/admin/routes/1 -H 'X-API-KEY: xyz' -X PUT -d ' { "name": "Direct Route to Old API", "methods": ["GET"], @@ -142,7 +142,7 @@ In the previous step, we created a route that wrapped an upstream inside its con Let's create the shared upstream by running below curl cmd: ```shell -curl http://apisix:9080/apisix/admin/upstreams/1 -H 'X-API-KEY: xyz' -X PUT -d ' +curl http://apisix:9180/apisix/admin/upstreams/1 -H 'X-API-KEY: xyz' -X PUT -d ' { "name": "Old API", "type": "roundrobin", @@ -161,7 +161,7 @@ In the scope of this tutorial, we will use _URI path-based versioning_ because i Before introducing the new version, we also need to rewrite the query that comes to the API gateway before forwarding it to the upstream. Because both the old and new versions should point to the same upstream and the upstream exposes endpoint `/hello`, not `/v1/hello`. Let’s create a plugin configuration to rewrite the path: ```shell -curl http://apisix:9080/apisix/admin/plugin_configs/1 -H 'X-API-KEY: xyz' -X PUT -d ' +curl http://apisix:9180/apisix/admin/plugin_configs/1 -H 'X-API-KEY: xyz' -X PUT -d ' { "plugins": { "proxy-rewrite": { @@ -176,7 +176,7 @@ We can now create the second versioned route that references the existing upstr > Note that we can create routes for different API versions. ```shell -curl http://apisix:9080/apisix/admin/routes/2 -H 'X-API-KEY: xyz' -X PUT -d ' +curl http://apisix:9180/apisix/admin/routes/2 -H 'X-API-KEY: xyz' -X PUT -d ' { "name": "Versioned Route to Old API", "methods": ["GET"], @@ -209,7 +209,7 @@ Hello world We have versioned our API, but our API consumers probably still use the legacy non-versioned API. We want them to migrate, but we cannot just delete the legacy route as our users are unaware of it. Fortunately, the `301 HTTP` status code is our friend: we can let users know that the resource has moved from `http://apisix.org/hello` to `http://apisix.org/v1/hello`. 
It requires configuring the [redirect plugin](https://apisix.apache.org/docs/apisix/plugins/redirect/) on the initial route: ```shell -curl http://apisix:9080/apisix/admin/routes/1 -H 'X-API-KEY: xyz' -X PATCH -d ' +curl http://apisix:9180/apisix/admin/routes/1 -H 'X-API-KEY: xyz' -X PATCH -d ' { "plugins": { "redirect": { diff --git a/docs/zh/latest/plugins/grpc-transcode.md b/docs/zh/latest/plugins/grpc-transcode.md index 4c03f4cf9735..44445ab859e1 100644 --- a/docs/zh/latest/plugins/grpc-transcode.md +++ b/docs/zh/latest/plugins/grpc-transcode.md @@ -239,7 +239,7 @@ Trailer: grpc-message 上传 proto 文件: ```shell -curl http://127.0.0.1:9080/apisix/admin/protos/1 \ +curl http://127.0.0.1:9180/apisix/admin/protos/1 \ -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "content" : "syntax = \"proto3\"; @@ -261,7 +261,7 @@ curl http://127.0.0.1:9080/apisix/admin/protos/1 \ 启用 `grpc-transcode` 插件,并设置选项 `show_status_in_body` 为 `true`: ```shell -curl http://127.0.0.1:9080/apisix/admin/routes/1 \ +curl http://127.0.0.1:9180/apisix/admin/routes/1 \ -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "methods": ["GET"], @@ -309,7 +309,7 @@ Server: APISIX web server 注意返回体中还存在未解码的字段,如果需要解码该字段,需要在上传的 proto 文件中加上该字段对应的 `message type`。 ```shell -curl http://127.0.0.1:9080/apisix/admin/protos/1 \ +curl http://127.0.0.1:9180/apisix/admin/protos/1 \ -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "content" : "syntax = \"proto3\"; @@ -336,7 +336,7 @@ curl http://127.0.0.1:9080/apisix/admin/protos/1 \ 同时配置选项 `status_detail_type` 为 `helloworld.ErrorDetail`: ```shell -curl http://127.0.0.1:9080/apisix/admin/routes/1 \ +curl http://127.0.0.1:9180/apisix/admin/routes/1 \ -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "methods": ["GET"], From fb007ca1785dc109d59384441e05044b6f79ad43 Mon Sep 17 00:00:00 2001 From: Fucheng Jiang Date: Wed, 25 Oct 2023 16:34:45 +0800 Subject: [PATCH 14/23] ci: trigger ci when doc-lint.yml changes (#10382) --- .github/workflows/doc-lint.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/doc-lint.yml b/.github/workflows/doc-lint.yml index 25e371162fbf..4e61aeff1036 100644 --- a/.github/workflows/doc-lint.yml +++ b/.github/workflows/doc-lint.yml @@ -5,11 +5,13 @@ on: paths: - "docs/**" - "**/*.md" + - ".github/workflows/doc-lint.yml" pull_request: branches: [master, "release/**"] paths: - "docs/**" - "**/*.md" + - ".github/workflows/doc-lint.yml" permissions: contents: read From fc39dd8c2bd2069514dcfe878c788a22e8c96bf2 Mon Sep 17 00:00:00 2001 From: Ruidong-X Date: Wed, 25 Oct 2023 16:50:14 +0800 Subject: [PATCH 15/23] ci: fix the grpc test error (#10388) --- ci/centos7-ci.sh | 9 ++------- ci/common.sh | 21 +++++++++++++++++++++ ci/linux_openresty_common_runner.sh | 15 +-------------- ci/redhat-ci.sh | 10 +++------- t/admin/protos-force-delete.t | 2 +- t/core/grpc.t | 8 ++++---- t/grpc_server_example/main.go | 4 ++-- t/node/grpc-proxy-mtls.t | 4 ++-- t/node/grpc-proxy-stream.t | 6 +++--- t/node/grpc-proxy-unary.t | 8 ++++---- t/node/grpc-proxy.t | 8 ++++---- t/node/upstream-keepalive-pool.t | 2 +- t/plugin/batch-requests-grpc.t | 2 +- t/plugin/grpc-transcode.t | 14 +++++++------- t/plugin/grpc-transcode2.t | 22 +++++++++++----------- t/plugin/grpc-transcode3.t | 12 ++++++------ t/plugin/proxy-mirror3.t | 2 +- 17 files changed, 74 insertions(+), 75 deletions(-) diff --git a/ci/centos7-ci.sh b/ci/centos7-ci.sh index cf2d0387bbc2..cf506ef54e55 100755 --- a/ci/centos7-ci.sh +++ b/ci/centos7-ci.sh @@ -58,14 +58,9 @@ 
install_dependencies() { cd t/grpc_server_example CGO_ENABLED=0 go build - ./grpc_server_example \ - -grpc-address :50051 -grpcs-address :50052 -grpcs-mtls-address :50053 -grpc-http-address :50054 \ - -crt ../certs/apisix.crt -key ../certs/apisix.key -ca ../certs/mtls_ca.crt \ - > grpc_server_example.log 2>&1 || (cat grpc_server_example.log && exit 1)& - cd ../../ - # wait for grpc_server_example to fully start - sleep 3 + + start_grpc_server_example # installing grpcurl install_grpcurl diff --git a/ci/common.sh b/ci/common.sh index 3ad1e651cf30..0aa9f9e85bda 100644 --- a/ci/common.sh +++ b/ci/common.sh @@ -151,3 +151,24 @@ linux_get_dependencies () { apt update apt install -y cpanminus build-essential libncurses5-dev libreadline-dev libssl-dev perl libpcre3 libpcre3-dev libldap2-dev } + +function start_grpc_server_example() { + ./t/grpc_server_example/grpc_server_example \ + -grpc-address :10051 -grpcs-address :10052 -grpcs-mtls-address :10053 -grpc-http-address :10054 \ + -crt ./t/certs/apisix.crt -key ./t/certs/apisix.key -ca ./t/certs/mtls_ca.crt \ + > grpc_server_example.log 2>&1 & + + for (( i = 0; i <= 10; i++ )); do + sleep 0.5 + GRPC_PROC=`ps -ef | grep grpc_server_example | grep -v grep || echo "none"` + if [[ $GRPC_PROC == "none" || "$i" -eq 10 ]]; then + echo "failed to start grpc_server_example" + ss -antp | grep 1005 || echo "no proc listen port 1005x" + cat grpc_server_example.log + + exit 1 + fi + + ss -lntp | grep 10051 | grep grpc_server && break + done +} diff --git a/ci/linux_openresty_common_runner.sh b/ci/linux_openresty_common_runner.sh index e4ed00905c86..466fe8b69651 100755 --- a/ci/linux_openresty_common_runner.sh +++ b/ci/linux_openresty_common_runner.sh @@ -72,20 +72,7 @@ script() { set_coredns - ./t/grpc_server_example/grpc_server_example \ - -grpc-address :50051 -grpcs-address :50052 -grpcs-mtls-address :50053 -grpc-http-address :50054 \ - -crt ./t/certs/apisix.crt -key ./t/certs/apisix.key -ca ./t/certs/mtls_ca.crt \ - & - - # ensure grpc server example is already started - for (( i = 0; i <= 100; i++ )); do - if [[ "$i" -eq 100 ]]; then - echo "failed to start grpc_server_example in time" - exit 1 - fi - nc -zv 127.0.0.1 50051 && break - sleep 1 - done + start_grpc_server_example # APISIX_ENABLE_LUACOV=1 PERL5LIB=.:$PERL5LIB prove -Itest-nginx/lib -r t FLUSH_ETCD=1 prove --timer -Itest-nginx/lib -I./ -r $TEST_FILE_SUB_DIR | tee /tmp/test.result diff --git a/ci/redhat-ci.sh b/ci/redhat-ci.sh index 18fed3208290..ff867fb71622 100755 --- a/ci/redhat-ci.sh +++ b/ci/redhat-ci.sh @@ -58,14 +58,10 @@ install_dependencies() { pushd t/grpc_server_example CGO_ENABLED=0 go build - ./grpc_server_example \ - -grpc-address :50051 -grpcs-address :50052 -grpcs-mtls-address :50053 -grpc-http-address :50054 \ - -crt ../certs/apisix.crt -key ../certs/apisix.key -ca ../certs/mtls_ca.crt \ - > grpc_server_example.log 2>&1 || (cat grpc_server_example.log && exit 1)& - popd - # wait for grpc_server_example to fully start - sleep 3 + + yum install -y iproute procps + start_grpc_server_example # installing grpcurl install_grpcurl diff --git a/t/admin/protos-force-delete.t b/t/admin/protos-force-delete.t index 909128924bfe..db0e5d8ae5ea 100644 --- a/t/admin/protos-force-delete.t +++ b/t/admin/protos-force-delete.t @@ -90,7 +90,7 @@ passed "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] diff --git a/t/core/grpc.t b/t/core/grpc.t index 3be331a54245..bd52d9e13409 100644 --- a/t/core/grpc.t +++ b/t/core/grpc.t @@ -44,7 +44,7 @@ 
__DATA__ local core = require "apisix.core" local gcli = core.grpc assert(gcli.load("t/grpc_server_example/proto/helloworld.proto")) - local conn = assert(gcli.connect("127.0.0.1:50051")) + local conn = assert(gcli.connect("127.0.0.1:10051")) local res, err = conn:call("helloworld.Greeter", "SayHello", { name = "apisix" }) conn:close() @@ -68,7 +68,7 @@ Hello apisix local core = require "apisix.core" local gcli = core.grpc assert(gcli.load("t/grpc_server_example/proto/helloworld.proto")) - local conn = assert(gcli.connect("127.0.0.1:50051")) + local conn = assert(gcli.connect("127.0.0.1:10051")) local st, err = conn:new_server_stream("helloworld.Greeter", "SayHelloServerStream", { name = "apisix" }) if not st then @@ -100,7 +100,7 @@ Hello apisix local core = require "apisix.core" local gcli = core.grpc assert(gcli.load("t/grpc_server_example/proto/helloworld.proto")) - local conn = assert(gcli.connect("127.0.0.1:50051")) + local conn = assert(gcli.connect("127.0.0.1:10051")) local st, err = conn:new_client_stream("helloworld.Greeter", "SayHelloClientStream", { name = "apisix" }) if not st then @@ -139,7 +139,7 @@ Hello apisix!Hello apisix!Hello apisix!Hello apisix! local core = require "apisix.core" local gcli = core.grpc assert(gcli.load("t/grpc_server_example/proto/helloworld.proto")) - local conn = assert(gcli.connect("127.0.0.1:50051")) + local conn = assert(gcli.connect("127.0.0.1:10051")) local st, err = conn:new_bidirectional_stream("helloworld.Greeter", "SayHelloBidirectionalStream", { name = "apisix" }) if not st then diff --git a/t/grpc_server_example/main.go b/t/grpc_server_example/main.go index 6eb4a9b32ae0..f6253df3a729 100644 --- a/t/grpc_server_example/main.go +++ b/t/grpc_server_example/main.go @@ -53,8 +53,8 @@ import ( ) var ( - grpcAddr = ":50051" - grpcsAddr = ":50052" + grpcAddr = ":10051" + grpcsAddr = ":10052" grpcsMtlsAddr string grpcHTTPAddr string diff --git a/t/node/grpc-proxy-mtls.t b/t/node/grpc-proxy-mtls.t index b238431e252c..b4d31b9d6698 100644 --- a/t/node/grpc-proxy-mtls.t +++ b/t/node/grpc-proxy-mtls.t @@ -59,7 +59,7 @@ routes: client_cert: "-----BEGIN CERTIFICATE-----\nMIIDUzCCAjugAwIBAgIURw+Rc5FSNUQWdJD+quORtr9KaE8wDQYJKoZIhvcNAQEN\nBQAwWDELMAkGA1UEBhMCY24xEjAQBgNVBAgMCUd1YW5nRG9uZzEPMA0GA1UEBwwG\nWmh1SGFpMRYwFAYDVQQDDA1jYS5hcGlzaXguZGV2MQwwCgYDVQQLDANvcHMwHhcN\nMjIxMjAxMTAxOTU3WhcNNDIwODE4MTAxOTU3WjBOMQswCQYDVQQGEwJjbjESMBAG\nA1UECAwJR3VhbmdEb25nMQ8wDQYDVQQHDAZaaHVIYWkxGjAYBgNVBAMMEWNsaWVu\ndC5hcGlzaXguZGV2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzypq\nkrsJ8MaqpS0kr2SboE9aRKOJzd6mY3AZLq3tFpio5cK5oIHkQLfeaaLcd4ycFcZw\nFTpxc+Eth6I0X9on+j4tEibc5IpDnRSAQlzHZzlrOG6WxcOza4VmfcrKqj27oodr\noqXv05r/5yIoRrEN9ZXfA8n2OnjhkP+C3Q68L6dBtPpv+e6HaAuw8MvcsEo+MQwu\ncTZyWqWT2UzKVzToW29dHRW+yZGuYNWRh15X09VSvx+E0s+uYKzN0Cyef2C6VtBJ\nKmJ3NtypAiPqw7Ebfov2Ym/zzU9pyWPi3P1mYPMKQqUT/FpZSXm4iSy0a5qTYhkF\nrFdV1YuYYZL5YGl9aQIDAQABox8wHTAbBgNVHREEFDASghBhZG1pbi5hcGlzaXgu\nZGV2MA0GCSqGSIb3DQEBDQUAA4IBAQBepRpwWdckZ6QdL5EuufYwU7p5SIqkVL/+\nN4/l5YSjPoAZf/M6XkZu/PsLI9/kPZN/PX4oxjZSDH14dU9ON3JjxtSrebizcT8V\naQ13TeW9KSv/i5oT6qBmj+V+RF2YCUhyzXdYokOfsSVtSlA1qMdm+cv0vkjYcImV\nl3L9nVHRPq15dY9sbmWEtFBWvOzqNSuQYax+iYG+XEuL9SPaYlwKRC6eS/dbXa1T\nPPWDQad2X/WmhxPzEHvjSl2bsZF1u0GEdKyhXWMOLCLiYIJo15G7bMz8cTUvkDN3\n6WaWBd6bd2g13Ho/OOceARpkR/ND8PU78Y8cq+zHoOSqH+1aly5H\n-----END CERTIFICATE-----\n", client_key: "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEpAIBAAKCAQEAzypqkrsJ8MaqpS0kr2SboE9aRKOJzd6mY3AZLq3tFpio5cK5\noIHkQLfeaaLcd4ycFcZwFTpxc+Eth6I0X9on+j4tEibc5IpDnRSAQlzHZzlrOG6W\nxcOza4VmfcrKqj27oodroqXv05r/5yIoRrEN9ZXfA8n2OnjhkP+C3Q68L6dBtPpv\n+e6HaAuw8MvcsEo+MQwucTZyWqWT2UzKVzToW29dHRW+yZGuYNWRh15X09VSvx+E\n0s+uYKzN0Cyef2C6VtBJKmJ3NtypAiPqw7Ebfov2Ym/zzU9pyWPi3P1mYPMKQqUT\n/FpZSXm4iSy0a5qTYhkFrFdV1YuYYZL5YGl9aQIDAQABAoIBAD7tUG//lnZnsj/4\nJXONaORaFj5ROrOpFPuRemS+egzqFCuuaXpC2lV6RHnr+XHq6SKII1WfagTb+lt/\nvs760jfmGQSxf1mAUidtqcP+sKc/Pr1mgi/SUTawz8AYEFWD6PHmlqBSLTYml+La\nckd+0pGtk49wEnYSb9n+cv640hra9AYpm9LXUFaypiFEu+xJhtyKKWkmiVGrt/X9\n3aG6MuYeZplW8Xq1L6jcHsieTOB3T+UBfG3O0bELBgTVexOQYI9O4Ejl9/n5/8WP\nAbIw7PaAYc7fBkwOGh7/qYUdHnrm5o9MiRT6dPxrVSf0PZVACmA+JoNjCPv0Typf\n3MMkHoECgYEA9+3LYzdP8j9iv1fP5hn5K6XZAobCD1mnzv3my0KmoSMC26XuS71f\nvyBhjL7zMxGEComvVTF9SaNMfMYTU4CwOJQxLAuT69PEzW6oVEeBoscE5hwhjj6o\n/lr5jMbt807J9HnldSpwllfj7JeiTuqRcCu/cwqKQQ1aB3YBZ7h5pZkCgYEA1ejo\nKrR1hN2FMhp4pj0nZ5+Ry2lyIVbN4kIcoteaPhyQ0AQ0zNoi27EBRnleRwVDYECi\nXAFrgJU+laKsg1iPjvinHibrB9G2p1uv3BEh6lPl9wPFlENTOjPkqjR6eVVZGP8e\nVzxYxIo2x/QLDUeOpxySdG4pdhEHGfvmdGmr2FECgYBeknedzhCR4HnjcTSdmlTA\nwI+p9gt6XYG0ZIewCymSl89UR9RBUeh++HQdgw0z8r+CYYjfH3SiLUdU5R2kIZeW\nzXiAS55OO8Z7cnWFSI17sRz+RcbLAr3l4IAGoi9MO0awGftcGSc/QiFwM1s3bSSz\nPAzYbjHUpKot5Gae0PCeKQKBgQCHfkfRBQ2LY2WDHxFc+0+Ca6jF17zbMUioEIhi\n/X5N6XowyPlI6MM7tRrBsQ7unX7X8Rjmfl/ByschsTDk4avNO+NfTfeBtGymBYWX\nN6Lr8sivdkwoZZzKOSSWSzdos48ELlThnO/9Ti706Lg3aSQK5iY+aakJiC+fXdfT\n1TtsgQKBgQDRYvtK/Cpaq0W6wO3I4R75lHGa7zjEr4HA0Kk/FlwS0YveuTh5xqBj\nwQz2YyuQQfJfJs7kbWOITBT3vuBJ8F+pktL2Xq5p7/ooIXOGS8Ib4/JAS1C/wb+t\nuJHGva12bZ4uizxdL2Q0/n9ziYTiMc/MMh/56o4Je8RMdOMT5lTsRQ==\n-----END RSA PRIVATE KEY-----\n" nodes: - "127.0.0.1:50053": 1 + "127.0.0.1:10053": 1 type: roundrobin #END --- exec @@ -88,7 +88,7 @@ routes: client_cert: "-----BEGIN CERTIFICATE-----\nMIIDUzCCAjugAwIBAgIURw+Rc5FSNUQWdJD+quORtr9KaE8wDQYJKoZIhvcNAQEN\nBQAwWDELMAkGA1UEBhMCY24xEjAQBgNVBAgMCUd1YW5nRG9uZzEPMA0GA1UEBwwG\nWmh1SGFpMRYwFAYDVQQDDA1jYS5hcGlzaXguZGV2MQwwCgYDVQQLDANvcHMwHhcN\nMjIxMjAxMTAxOTU3WhcNNDIwODE4MTAxOTU3WjBOMQswCQYDVQQGEwJjbjESMBAG\nA1UECAwJR3VhbmdEb25nMQ8wDQYDVQQHDAZaaHVIYWkxGjAYBgNVBAMMEWNsaWVu\ndC5hcGlzaXguZGV2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzypq\nkrsJ8MaqpS0kr2SboE9aRKOJzd6mY3AZLq3tFpio5cK5oIHkQLfeaaLcd4ycFcZw\nFTpxc+Eth6I0X9on+j4tEibc5IpDnRSAQlzHZzlrOG6WxcOza4VmfcrKqj27oodr\noqXv05r/5yIoRrEN9ZXfA8n2OnjhkP+C3Q68L6dBtPpv+e6HaAuw8MvcsEo+MQwu\ncTZyWqWT2UzKVzToW29dHRW+yZGuYNWRh15X09VSvx+E0s+uYKzN0Cyef2C6VtBJ\nKmJ3NtypAiPqw7Ebfov2Ym/zzU9pyWPi3P1mYPMKQqUT/FpZSXm4iSy0a5qTYhkF\nrFdV1YuYYZL5YGl9aQIDAQABox8wHTAbBgNVHREEFDASghBhZG1pbi5hcGlzaXgu\nZGV2MA0GCSqGSIb3DQEBDQUAA4IBAQBepRpwWdckZ6QdL5EuufYwU7p5SIqkVL/+\nN4/l5YSjPoAZf/M6XkZu/PsLI9/kPZN/PX4oxjZSDH14dU9ON3JjxtSrebizcT8V\naQ13TeW9KSv/i5oT6qBmj+V+RF2YCUhyzXdYokOfsSVtSlA1qMdm+cv0vkjYcImV\nl3L9nVHRPq15dY9sbmWEtFBWvOzqNSuQYax+iYG+XEuL9SPaYlwKRC6eS/dbXa1T\nPPWDQad2X/WmhxPzEHvjSl2bsZF1u0GEdKyhXWMOLCLiYIJo15G7bMz8cTUvkDN3\n6WaWBd6bd2g13Ho/OOceARpkR/ND8PU78Y8cq+zHoOSqH+1aly5H\n-----END CERTIFICATE-----\n", client_key: "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEpAIBAAKCAQEAzypqkrsJ8MaqpS0kr2SboE9aRKOJzd6mY3AZLq3tFpio5cK5\noIHkQLfeaaLcd4ycFcZwFTpxc+Eth6I0X9on+j4tEibc5IpDnRSAQlzHZzlrOG6W\nxcOza4VmfcrKqj27oodroqXv05r/5yIoRrEN9ZXfA8n2OnjhkP+C3Q68L6dBtPpv\n+e6HaAuw8MvcsEo+MQwucTZyWqWT2UzKVzToW29dHRW+yZGuYNWRh15X09VSvx+E\n0s+uYKzN0Cyef2C6VtBJKmJ3NtypAiPqw7Ebfov2Ym/zzU9pyWPi3P1mYPMKQqUT\n/FpZSXm4iSy0a5qTYhkFrFdV1YuYYZL5YGl9aQIDAQABAoIBAD7tUG//lnZnsj/4\nJXONaORaFj5ROrOpFPuRemS+egzqFCuuaXpC2lV6RHnr+XHq6SKII1WfagTb+lt/\nvs760jfmGQSxf1mAUidtqcP+sKc/Pr1mgi/SUTawz8AYEFWD6PHmlqBSLTYml+La\nckd+0pGtk49wEnYSb9n+cv640hra9AYpm9LXUFaypiFEu+xJhtyKKWkmiVGrt/X9\n3aG6MuYeZplW8Xq1L6jcHsieTOB3T+UBfG3O0bELBgTVexOQYI9O4Ejl9/n5/8WP\nAbIw7PaAYc7fBkwOGh7/qYUdHnrm5o9MiRT6dPxrVSf0PZVACmA+JoNjCPv0Typf\n3MMkHoECgYEA9+3LYzdP8j9iv1fP5hn5K6XZAobCD1mnzv3my0KmoSMC26XuS71f\nvyBhjL7zMxGEComvVTF9SaNMfMYTU4CwOJQxLAuT69PEzW6oVEeBoscE5hwhjj6o\n/lr5jMbt807J9HnldSpwllfj7JeiTuqRcCu/cwqKQQ1aB3YBZ7h5pZkCgYEA1ejo\nKrR1hN2FMhp4pj0nZ5+Ry2lyIVbN4kIcoteaPhyQ0AQ0zNoi27EBRnleRwVDYECi\nXAFrgJU+laKsg1iPjvinHibrB9G2p1uv3BEh6lPl9wPFlENTOjPkqjR6eVVZGP8e\nVzxYxIo2x/QLDUeOpxySdG4pdhEHGfvmdGmr2FECgYBeknedzhCR4HnjcTSdmlTA\nwI+p9gt6XYG0ZIewCymSl89UR9RBUeh++HQdgw0z8r+CYYjfH3SiLUdU5R2kIZeW\nzXiAS55OO8Z7cnWFSI17sRz+RcbLAr3l4IAGoi9MO0awGftcGSc/QiFwM1s3bSSz\nPAzYbjHUpKot5Gae0PCeKQKBgQCHfkfRBQ2LY2WDHxFc+0+Ca6jF17zbMUioEIhi\n/X5N6XowyPlI6MM7tRrBsQ7unX7X8Rjmfl/ByschsTDk4avNO+NfTfeBtGymBYWX\nN6Lr8sivdkwoZZzKOSSWSzdos48ELlThnO/9Ti706Lg3aSQK5iY+aakJiC+fXdfT\n1TtsgQKBgQDRYvtK/Cpaq0W6wO3I4R75lHGa7zjEr4HA0Kk/FlwS0YveuTh5xqBj\nwQz2YyuQQfJfJs7kbWOITBT3vuBJ8F+pktL2Xq5p7/ooIXOGS8Ib4/JAS1C/wb+t\nuJHGva12bZ4uizxdL2Q0/n9ziYTiMc/MMh/56o4Je8RMdOMT5lTsRQ==\n-----END RSA PRIVATE KEY-----\n" nodes: - "127.0.0.1:50053": 1 + "127.0.0.1:10053": 1 type: roundrobin #END --- exec diff --git a/t/node/grpc-proxy-stream.t b/t/node/grpc-proxy-stream.t index 1f10b9aad592..2e3da185d2d7 100644 --- a/t/node/grpc-proxy-stream.t +++ b/t/node/grpc-proxy-stream.t @@ -47,7 +47,7 @@ routes: upstream: scheme: grpc nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec @@ -85,7 +85,7 @@ routes: upstream: scheme: grpc nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec @@ -111,7 +111,7 @@ routes: upstream: scheme: grpc nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec diff --git a/t/node/grpc-proxy-unary.t b/t/node/grpc-proxy-unary.t index f1a063c54a15..62870dc70bd5 100644 --- a/t/node/grpc-proxy-unary.t +++ b/t/node/grpc-proxy-unary.t @@ -47,7 +47,7 @@ routes: upstream: scheme: grpc nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec @@ -73,7 +73,7 @@ routes: upstream: scheme: grpc nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec @@ -99,7 +99,7 @@ routes: upstream: scheme: grpcs nodes: - "127.0.0.1:50052": 1 + "127.0.0.1:10052": 1 type: roundrobin #END --- exec @@ -131,7 +131,7 @@ routes: upstream: scheme: grpc nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec diff --git a/t/node/grpc-proxy.t b/t/node/grpc-proxy.t index 01ba461f09b4..c4f0dd036f2d 100644 --- a/t/node/grpc-proxy.t +++ b/t/node/grpc-proxy.t @@ -207,7 +207,7 @@ routes: upstream: scheme: grpc nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec @@ -239,7 +239,7 @@ routes: scheme: grpc pass_host: node nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec @@ -251,7 +251,7 @@ grpcurl -import-path ./t/grpc_server_example/proto -proto 
helloworld.proto -plai --- grep_error_log eval qr/grpc header: "(:authority|host): [^"]+"/ --- grep_error_log_out eval -qr/grpc header: "(:authority|host): 127.0.0.1:50051"/ +qr/grpc header: "(:authority|host): 127.0.0.1:10051"/ @@ -272,7 +272,7 @@ routes: pass_host: rewrite upstream_host: hello.world nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec diff --git a/t/node/upstream-keepalive-pool.t b/t/node/upstream-keepalive-pool.t index a8a625ad9f08..4fc4a1ae1ceb 100644 --- a/t/node/upstream-keepalive-pool.t +++ b/t/node/upstream-keepalive-pool.t @@ -751,7 +751,7 @@ $/ scheme = "", type = "roundrobin", nodes = { - ["127.0.0.1:50054"] = 1, + ["127.0.0.1:10054"] = 1, }, keepalive_pool = { size = 4 diff --git a/t/plugin/batch-requests-grpc.t b/t/plugin/batch-requests-grpc.t index 4acd5005434c..28b9d39d4016 100644 --- a/t/plugin/batch-requests-grpc.t +++ b/t/plugin/batch-requests-grpc.t @@ -128,7 +128,7 @@ passed "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] diff --git a/t/plugin/grpc-transcode.t b/t/plugin/grpc-transcode.t index 53f676a0ca86..e261bf7bd554 100644 --- a/t/plugin/grpc-transcode.t +++ b/t/plugin/grpc-transcode.t @@ -157,7 +157,7 @@ passed "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -225,7 +225,7 @@ qr/\{"message":"Hello world"\}/ "scheme": "asf", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -361,7 +361,7 @@ passed "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -418,7 +418,7 @@ qr/\{"result":"#2251799813685261"\}/ "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -466,7 +466,7 @@ qr/\{"message":"Hello apisix"\}/ "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -581,7 +581,7 @@ passed "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -734,7 +734,7 @@ failed to encode request data to protobuf "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] diff --git a/t/plugin/grpc-transcode2.t b/t/plugin/grpc-transcode2.t index da91d3ceb265..66baf9a2bac7 100644 --- a/t/plugin/grpc-transcode2.t +++ b/t/plugin/grpc-transcode2.t @@ -87,7 +87,7 @@ __DATA__ "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -171,7 +171,7 @@ Content-Type: application/json "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -262,7 +262,7 @@ failed to encode request data to protobuf "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -311,7 +311,7 @@ Content-Type: application/json "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -338,7 +338,7 @@ Content-Type: application/json "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -444,7 +444,7 @@ passed "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -533,7 +533,7 @@ qr/request log: \{.*body":\"\\u0000\\u0000\\u0000\\u0000\\u0002\\b\\u0003"/ "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ 
-617,7 +617,7 @@ qr/request log: \{.*body":\"\{\\"result\\":3}/ "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -644,7 +644,7 @@ qr/request log: \{.*body":\"\{\\"result\\":3}/ "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -737,7 +737,7 @@ set protobuf option: enum_as_name "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -763,7 +763,7 @@ set protobuf option: enum_as_name "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] diff --git a/t/plugin/grpc-transcode3.t b/t/plugin/grpc-transcode3.t index 0cdbe6e16bdd..bd4164d3b5ae 100644 --- a/t/plugin/grpc-transcode3.t +++ b/t/plugin/grpc-transcode3.t @@ -92,7 +92,7 @@ __DATA__ "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -172,7 +172,7 @@ Content-Type: application/json "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -243,7 +243,7 @@ qr/error/ "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -316,7 +316,7 @@ grpc-status-details-bin: CA4SDk91dCBvZiBzZXJ2aWNlGlcKKnR5cGUuZ29vZ2xlYXBpcy5jb20 "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -389,7 +389,7 @@ grpc-status-details-bin: CA4SDk91dCBvZiBzZXJ2aWNlGlcKKnR5cGUuZ29vZ2xlYXBpcy5jb20 "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] @@ -473,7 +473,7 @@ transform response error: failed to call pb.decode to decode details in grpc-sta "scheme": "grpc", "type": "roundrobin", "nodes": { - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 } } }]] diff --git a/t/plugin/proxy-mirror3.t b/t/plugin/proxy-mirror3.t index 65a23dc823d9..967ef0c11680 100644 --- a/t/plugin/proxy-mirror3.t +++ b/t/plugin/proxy-mirror3.t @@ -63,7 +63,7 @@ routes: upstream: scheme: grpc nodes: - "127.0.0.1:50051": 1 + "127.0.0.1:10051": 1 type: roundrobin #END --- exec From 5911211d2292b4c96df0c666fcc74fabeb10a5d8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Oct 2023 11:09:06 +0800 Subject: [PATCH 16/23] chore(deps): bump actions/setup-node from 3.8.1 to 4.0.0 (#10381) --- .github/workflows/doc-lint.yml | 2 +- .github/workflows/lint.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/doc-lint.yml b/.github/workflows/doc-lint.yml index 4e61aeff1036..f644f59ce03e 100644 --- a/.github/workflows/doc-lint.yml +++ b/.github/workflows/doc-lint.yml @@ -24,7 +24,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: 🚀 Use Node.js - uses: actions/setup-node@v3.8.1 + uses: actions/setup-node@v4.0.0 with: node-version: "12.x" - run: npm install -g markdownlint-cli@0.25.0 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index aa8c2a11f574..56cd00c02c8b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -33,7 +33,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Nodejs env - uses: actions/setup-node@v3.8.1 + uses: actions/setup-node@v4.0.0 with: node-version: '12' From f1d4168e33a5e2de26055c004c35b8284e5517d2 Mon Sep 17 00:00:00 2001 From: xiaoxuanzi Date: Fri, 27 Oct 2023 12:02:25 +0800 Subject: [PATCH 17/23] test(clickhouse-logger): to show that different endpoints will be chosen 
randomly (#8777) --- t/plugin/clickhouse-logger.t | 41 ++++++++++++++++++++++++++++++++---- 1 file changed, 37 insertions(+), 4 deletions(-) diff --git a/t/plugin/clickhouse-logger.t b/t/plugin/clickhouse-logger.t index 43ce54cd8a6d..4efcf11e3d70 100644 --- a/t/plugin/clickhouse-logger.t +++ b/t/plugin/clickhouse-logger.t @@ -187,7 +187,9 @@ passed "database": "default", "logtable": "test", "endpoint_addrs": ["http://127.0.0.1:8123", - "http://127.0.0.1:8124"] + "http://127.0.0.1:8124"], + "batch_max_size":1, + "inactive_timeout":1 } }, "upstream": { @@ -229,7 +231,38 @@ echo "select * from default.test" | curl 'http://localhost:8124/' --data-binary -=== TEST 8: use single clickhouse server +=== TEST 8: to show that different endpoints will be chosen randomly +--- config + location /t { + content_by_lua_block { + local code_count = {} + local t = require("lib.test_admin").test + for i = 1, 12 do + local code, body = t('/opentracing', ngx.HTTP_GET) + if code ~= 200 then + ngx.say("code: ", code, " body: ", body) + end + code_count[code] = (code_count[code] or 0) + 1 + end + + local code_arr = {} + for code, count in pairs(code_count) do + table.insert(code_arr, {code = code, count = count}) + end + + ngx.say(require("toolkit.json").encode(code_arr)) + ngx.exit(200) + } + } +--- response_body +[{"code":200,"count":12}] +--- error_log +sending a batch logs to http://127.0.0.1:8123 +sending a batch logs to http://127.0.0.1:8124 + + + +=== TEST 9: use single clickhouse server --- config location /t { content_by_lua_block { @@ -267,7 +300,7 @@ passed -=== TEST 9: hit route +=== TEST 10: hit route --- request GET /opentracing --- error_code: 200 @@ -275,7 +308,7 @@ GET /opentracing -=== TEST 10: get log +=== TEST 11: get log --- exec echo "select * from default.test" | curl 'http://localhost:8123/' --data-binary @- --- response_body_like From 595672c8dfb4de188bfcd67af0892437270a35f3 Mon Sep 17 00:00:00 2001 From: wizhuo <46399833+wizhuo@users.noreply.github.com> Date: Fri, 27 Oct 2023 12:23:03 +0800 Subject: [PATCH 18/23] feat(zipkin): add variable (#10361) --- apisix/cli/ngx_tpl.lua | 8 ++ apisix/cli/ops.lua | 9 ++- apisix/plugins/zipkin.lua | 22 +++++- conf/config-default.yaml | 2 + docs/en/latest/plugins/zipkin.md | 29 +++++++ docs/zh/latest/plugins/zipkin.md | 29 +++++++ t/cli/test_zipkin_set_ngx_var.sh | 48 ++++++++++++ t/plugin/zipkin3.t | 129 +++++++++++++++++++++++++++++++ 8 files changed, 274 insertions(+), 2 deletions(-) create mode 100755 t/cli/test_zipkin_set_ngx_var.sh create mode 100644 t/plugin/zipkin3.t diff --git a/apisix/cli/ngx_tpl.lua b/apisix/cli/ngx_tpl.lua index cd652a9b1415..3e1aadd9b543 100644 --- a/apisix/cli/ngx_tpl.lua +++ b/apisix/cli/ngx_tpl.lua @@ -643,6 +643,14 @@ http { {% end %} # opentelemetry_set_ngx_var ends + # zipkin_set_ngx_var starts + {% if zipkin_set_ngx_var then %} + set $zipkin_context_traceparent ''; + set $zipkin_trace_id ''; + set $zipkin_span_id ''; + {% end %} + # zipkin_set_ngx_var ends + # http server configuration snippet starts {% if http_server_configuration_snippet then %} {* http_server_configuration_snippet *} diff --git a/apisix/cli/ops.lua b/apisix/cli/ops.lua index a5a6975d80bd..0eaebae56c43 100644 --- a/apisix/cli/ops.lua +++ b/apisix/cli/ops.lua @@ -546,6 +546,12 @@ Please modify "admin_key" in conf/config.yaml . 
if enabled_plugins["opentelemetry"] and yaml_conf.plugin_attr["opentelemetry"] then opentelemetry_set_ngx_var = yaml_conf.plugin_attr["opentelemetry"].set_ngx_var end + + local zipkin_set_ngx_var + if enabled_plugins["zipkin"] and yaml_conf.plugin_attr["zipkin"] then + zipkin_set_ngx_var = yaml_conf.plugin_attr["zipkin"].set_ngx_var + end + -- Using template.render local sys_conf = { lua_path = env.pkg_path_org, @@ -566,7 +572,8 @@ Please modify "admin_key" in conf/config.yaml . control_server_addr = control_server_addr, prometheus_server_addr = prometheus_server_addr, proxy_mirror_timeouts = proxy_mirror_timeouts, - opentelemetry_set_ngx_var = opentelemetry_set_ngx_var + opentelemetry_set_ngx_var = opentelemetry_set_ngx_var, + zipkin_set_ngx_var = zipkin_set_ngx_var } if not yaml_conf.apisix then diff --git a/apisix/plugins/zipkin.lua b/apisix/plugins/zipkin.lua index 0c0c4748daff..efebd5115035 100644 --- a/apisix/plugins/zipkin.lua +++ b/apisix/plugins/zipkin.lua @@ -20,13 +20,17 @@ local zipkin_codec = require("apisix.plugins.zipkin.codec") local new_random_sampler = require("apisix.plugins.zipkin.random_sampler").new local new_reporter = require("apisix.plugins.zipkin.reporter").new local ngx = ngx +local ngx_var = ngx.var local ngx_re = require("ngx.re") local pairs = pairs local tonumber = tonumber +local to_hex = require "resty.string".to_hex local plugin_name = "zipkin" local ZIPKIN_SPAN_VER_1 = 1 local ZIPKIN_SPAN_VER_2 = 2 +local plugin = require("apisix.plugin") +local string_format = string.format local lrucache = core.lrucache.new({ @@ -69,6 +73,8 @@ function _M.check_schema(conf) return core.schema.check(schema, conf) end +local plugin_info = plugin.plugin_attr(plugin_name) or {} + local function create_tracer(conf,ctx) conf.route_id = ctx.route_id @@ -205,9 +211,23 @@ function _M.rewrite(plugin_conf, ctx) ctx.opentracing_sample = tracer.sampler:sample(per_req_sample_ratio or conf.sample_ratio) if not ctx.opentracing_sample then request_span:set_baggage_item("x-b3-sampled","0") + else + request_span:set_baggage_item("x-b3-sampled","1") + end + + if plugin_info.set_ngx_var then + local span_context = request_span:context() + ngx_var.zipkin_context_traceparent = string_format("00-%s-%s-%02x", + to_hex(span_context.trace_id), + to_hex(span_context.span_id), + span_context:get_baggage_item("x-b3-sampled")) + ngx_var.zipkin_trace_id = span_context.trace_id + ngx_var.zipkin_span_id = span_context.span_id + end + + if not ctx.opentracing_sample then return end - request_span:set_baggage_item("x-b3-sampled","1") local request_span = ctx.opentracing.request_span if conf.span_version == ZIPKIN_SPAN_VER_1 then diff --git a/conf/config-default.yaml b/conf/config-default.yaml index 3267480f0cdb..38d67823e5a9 100755 --- a/conf/config-default.yaml +++ b/conf/config-default.yaml @@ -615,6 +615,8 @@ plugin_attr: # Plugin attributes hooks_file: "/usr/local/apisix/plugin_inspect_hooks.lua" # Set the path to the Lua file that defines # hooks. Only administrators should have # write access to this file for security. + zipkin: # Plugin: zipkin + set_ngx_var: false # export zipkin variables to nginx variables deployment: # Deployment configurations role: traditional # Set deployment mode: traditional, control_plane, or data_plane. 
diff --git a/docs/en/latest/plugins/zipkin.md b/docs/en/latest/plugins/zipkin.md index 2a772e608f0d..16d89bec8e30 100644 --- a/docs/en/latest/plugins/zipkin.md +++ b/docs/en/latest/plugins/zipkin.md @@ -235,3 +235,32 @@ curl http://127.0.0.1:9180/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f13 } }' ``` + +## Variables + +The following nginx variables are set by zipkin: + +- `zipkin_context_traceparent` - [W3C trace context](https://www.w3.org/TR/trace-context/#trace-context-http-headers-format), e.g.: `00-0af7651916cd43dd8448eb211c80319c-b9c7c989f97918e1-01` +- `zipkin_trace_id` - Trace Id of the current span +- `zipkin_span_id` - Span Id of the current span + +How to use variables? you have to add it to your configuration file (`conf/config.yaml`): + +```yaml title="./conf/config.yaml" +http: + enable_access_log: true + access_log: "/dev/stdout" + access_log_format: '{"time": "$time_iso8601","zipkin_context_traceparent": "$zipkin_context_traceparent","zipkin_trace_id": "$zipkin_trace_id","zipkin_span_id": "$zipkin_span_id","remote_addr": "$remote_addr","uri": "$uri"}' + access_log_format_escape: json +plugins: + - zipkin +plugin_attr: + zipkin: + set_ngx_var: true +``` + +You can also include a trace_id when printing logs + +```print error log +log.error(ngx.ERR,ngx_var.zipkin_trace_id,"error message") +``` diff --git a/docs/zh/latest/plugins/zipkin.md b/docs/zh/latest/plugins/zipkin.md index 6f6e0d2b8148..c43321f5f472 100644 --- a/docs/zh/latest/plugins/zipkin.md +++ b/docs/zh/latest/plugins/zipkin.md @@ -235,3 +235,32 @@ curl http://127.0.0.1:9180/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f13 } }' ``` + +## 如何使用变量 + +以下`nginx`变量是由`zipkin` 设置的。 + +- `zipkin_context_traceparent` - [W3C trace context](https://www.w3.org/TR/trace-context/#trace-context-http-headers-format), 例如:`00-0af7651916cd43dd8448eb211c80319c-b9c7c989f97918e1-01` +- `zipkin_trace_id` - 当前 span 的 trace_id +- `zipkin_span_id` - 当前 span 的 span_id + +如何使用?你需要在配置文件(`./conf/config.yaml`)设置如下: + +```yaml title="./conf/config.yaml" +http: + enable_access_log: true + access_log: "/dev/stdout" + access_log_format: '{"time": "$time_iso8601","zipkin_context_traceparent": "$zipkin_context_traceparent","zipkin_trace_id": "$zipkin_trace_id","zipkin_span_id": "$zipkin_span_id","remote_addr": "$remote_addr","uri": "$uri"}' + access_log_format_escape: json +plugins: + - zipkin +plugin_attr: + zipkin: + set_ngx_var: true +``` + +你也可以在打印日志的时候带上 `trace_id` + +```print error log +log.error(ngx.ERR,ngx_var.zipkin_trace_id,"error message") +``` diff --git a/t/cli/test_zipkin_set_ngx_var.sh b/t/cli/test_zipkin_set_ngx_var.sh new file mode 100755 index 000000000000..3ddd0215524f --- /dev/null +++ b/t/cli/test_zipkin_set_ngx_var.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +. ./t/cli/common.sh + +echo ' +plugins: + - zipkin +plugin_attr: + zipkin: + set_ngx_var: true +' > conf/config.yaml + +make init + +if ! grep "set \$zipkin_context_traceparent '';" conf/nginx.conf > /dev/null; then + echo "failed: zipkin_context_traceparent not found in nginx.conf" + exit 1 +fi + +if ! grep "set \$zipkin_trace_id '';" conf/nginx.conf > /dev/null; then + echo "failed: zipkin_trace_id not found in nginx.conf" + exit 1 +fi + +if ! grep "set \$zipkin_span_id '';" conf/nginx.conf > /dev/null; then + echo "failed: zipkin_span_id not found in nginx.conf" + exit 1 +fi + + +echo "passed: zipkin_set_ngx_var configuration is validated" diff --git a/t/plugin/zipkin3.t b/t/plugin/zipkin3.t new file mode 100644 index 000000000000..f3aef6b5d8fe --- /dev/null +++ b/t/plugin/zipkin3.t @@ -0,0 +1,129 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +use t::APISIX 'no_plan'; + +repeat_each(1); +no_long_string(); +no_root_location(); +log_level("info"); + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!$block->extra_yaml_config) { + my $extra_yaml_config = <<_EOC_; +plugins: + - zipkin +plugin_attr: + zipkin: + set_ngx_var: true +_EOC_ + $block->set_value("extra_yaml_config", $extra_yaml_config); + } + + my $upstream_server_config = $block->upstream_server_config // <<_EOC_; + set \$zipkin_context_traceparent ""; + set \$zipkin_trace_id ""; + set \$zipkin_span_id ""; +_EOC_ + + $block->set_value("upstream_server_config", $upstream_server_config); + + my $extra_init_by_lua = <<_EOC_; + local zipkin = require("apisix.plugins.zipkin") + local orig_func = zipkin.access + zipkin.access = function (...) + local traceparent = ngx.var.zipkin_context_traceparent + if traceparent == nil or traceparent == '' then + ngx.log(ngx.ERR,"ngx_var.zipkin_context_traceparent is empty") + else + ngx.log(ngx.ERR,"ngx_var.zipkin_context_traceparent:",ngx.var.zipkin_context_traceparent) + end + + local orig = orig_func(...) 
+ return orig + end +_EOC_ + + $block->set_value("extra_init_by_lua", $extra_init_by_lua); + + if (!$block->request) { + $block->set_value("request", "GET /t"); + } + + $block; +}); + +run_tests; + +__DATA__ + +=== TEST 1: add plugin metadata +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "zipkin": { + "endpoint": "http://127.0.0.1:9999/mock_zipkin", + "sample_ratio": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/echo" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed + + + +=== TEST 2: trigger zipkin with open set variables +--- request +GET /echo +--- error_log eval +qr/ngx_var.zipkin_context_traceparent:00-\w{32}-\w{16}-01*/ + + + +=== TEST 3: trigger zipkin with disable set variables +--- yaml_config +plugin_attr: + zipkin: + set_ngx_var: false +--- request +GET /echo +--- error_log +ngx_var.zipkin_context_traceparent is empty From a00e75147aa8c7ed288475e422a4f51fdc6b82fd Mon Sep 17 00:00:00 2001 From: allen Date: Mon, 30 Oct 2023 14:57:18 +0800 Subject: [PATCH 19/23] fix: make install failed on mac (#10403) --- Makefile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Makefile b/Makefile index f3cc375e061e..4031e314300c 100644 --- a/Makefile +++ b/Makefile @@ -69,6 +69,8 @@ endif ifeq ($(ENV_OS_NAME), darwin) ifeq ($(ENV_OS_ARCH), arm64) ENV_HOMEBREW_PREFIX := /opt/homebrew + ENV_INST_BINDIR := $(ENV_INST_PREFIX)/local/bin + ENV_INST_LUADIR := $(shell which lua | xargs realpath | sed 's/bin\/lua//g') endif # OSX archive `._` cache file From 455804771124c15ff909e49ad8dfdd961f0e4a88 Mon Sep 17 00:00:00 2001 From: Sn0rt Date: Wed, 1 Nov 2023 11:23:02 +0800 Subject: [PATCH 20/23] feat: new ci base on apisix-runtime with specified version (#10415) --- .github/workflows/centos7-ci.yml | 10 +++++++++- .github/workflows/redhat-ci.yaml | 10 +++++++++- .requirements | 20 ++++++++++++++++++++ ci/centos7-ci.sh | 13 ++++++++++++- ci/common.sh | 14 +++++++++----- ci/linux-install-openresty.sh | 28 ++++++++++------------------ ci/performance_test.sh | 2 +- ci/redhat-ci.sh | 13 +++++++++++-- 8 files changed, 81 insertions(+), 29 deletions(-) create mode 100644 .requirements diff --git a/.github/workflows/centos7-ci.yml b/.github/workflows/centos7-ci.yml index 2be0c39cbb94..d2c447ef37ff 100644 --- a/.github/workflows/centos7-ci.yml +++ b/.github/workflows/centos7-ci.yml @@ -31,7 +31,7 @@ jobs: - t/plugin/[a-k]* - t/plugin/[l-z]* - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc - - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/wasm t/xds-library + - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/xds-library steps: - name: Check out code @@ -94,6 +94,14 @@ jobs: cd .. 
rm -rf $(ls -1 --ignore=apisix-build-tools --ignore=t --ignore=utils --ignore=ci --ignore=Makefile --ignore=rockspec) + - name: Start Dubbo Backend + run: | + sudo apt install -y maven + cd t/lib/dubbo-backend + mvn package + cd dubbo-backend-provider/target + java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & + - name: Build xDS library if: steps.test_env.outputs.type == 'last' run: | diff --git a/.github/workflows/redhat-ci.yaml b/.github/workflows/redhat-ci.yaml index 9bd8d39e35aa..cc9d819bebe4 100644 --- a/.github/workflows/redhat-ci.yaml +++ b/.github/workflows/redhat-ci.yaml @@ -27,7 +27,7 @@ jobs: - t/plugin/[a-k]* - t/plugin/[l-z]* - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc - - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/wasm t/xds-library + - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/xds-library steps: - name: Check out code @@ -90,6 +90,14 @@ jobs: cd .. rm -rf $(ls -1 --ignore=apisix-build-tools --ignore=t --ignore=utils --ignore=ci --ignore=Makefile --ignore=rockspec) + - name: Start Dubbo Backend + run: | + sudo apt install -y maven + cd t/lib/dubbo-backend + mvn package + cd dubbo-backend-provider/target + java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & + - name: Build xDS library if: steps.test_env.outputs.type == 'last' run: | diff --git a/.requirements b/.requirements new file mode 100644 index 000000000000..3155762a7d50 --- /dev/null +++ b/.requirements @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +APISIX_PACKAGE_NAME=apisix + +APISIX_RUNTIME=1.0.1 diff --git a/ci/centos7-ci.sh b/ci/centos7-ci.sh index cf506ef54e55..beb3750a1f56 100755 --- a/ci/centos7-ci.sh +++ b/ci/centos7-ci.sh @@ -19,6 +19,7 @@ . 
./ci/common.sh install_dependencies() { + export_version_info export_or_prefix # install build & runtime deps @@ -31,9 +32,19 @@ install_dependencies() { yum install -y libnghttp2-devel install_curl + yum -y install centos-release-scl + yum -y install devtoolset-9 patch wget git make sudo + set +eu + source scl_source enable devtoolset-9 + set -eu + # install openresty to make apisix's rpm test work yum install -y yum-utils && yum-config-manager --add-repo https://openresty.org/package/centos/openresty.repo - yum install -y openresty-1.21.4.2 openresty-debug-1.21.4.2 openresty-openssl111-debug-devel pcre pcre-devel + wget "https://raw.githubusercontent.com/api7/apisix-build-tools/apisix-runtime/${APISIX_RUNTIME}/build-apisix-runtime-debug-centos7.sh" + wget "https://raw.githubusercontent.com/api7/apisix-build-tools/apisix-runtime/${APISIX_RUNTIME}/build-apisix-runtime.sh" + chmod +x build-apisix-runtime-debug-centos7.sh + chmod +x build-apisix-runtime.sh + ./build-apisix-runtime-debug-centos7.sh # install luarocks ./utils/linux-install-luarocks.sh diff --git a/ci/common.sh b/ci/common.sh index 0aa9f9e85bda..087c729562e8 100644 --- a/ci/common.sh +++ b/ci/common.sh @@ -17,9 +17,13 @@ set -ex +export_version_info() { + source ./.requirements +} + export_or_prefix() { - export OPENRESTY_PREFIX="/usr/local/openresty-debug" - export APISIX_MAIN="https://raw.githubusercontent.com/apache/incubator-apisix/master/rockspec/apisix-master-0.rockspec" + export OPENRESTY_PREFIX="/usr/local/openresty" + export APISIX_MAIN="https://raw.githubusercontent.com/apache/apisix/master/rockspec/apisix-master-0.rockspec" export PATH=$OPENRESTY_PREFIX/nginx/sbin:$OPENRESTY_PREFIX/luajit/bin:$OPENRESTY_PREFIX/bin:$PATH export OPENSSL111_BIN=$OPENRESTY_PREFIX/openssl111/bin/openssl } @@ -59,8 +63,8 @@ rerun_flaky_tests() { install_curl () { CURL_VERSION="7.88.0" - wget https://curl.se/download/curl-${CURL_VERSION}.tar.gz - tar -xzvf curl-${CURL_VERSION}.tar.gz + wget -q https://curl.se/download/curl-${CURL_VERSION}.tar.gz + tar -xzf curl-${CURL_VERSION}.tar.gz cd curl-${CURL_VERSION} ./configure --prefix=/usr/local --with-openssl --with-nghttp2 make @@ -90,7 +94,7 @@ install_nodejs () { NODEJS_PREFIX="/usr/local/node" NODEJS_VERSION="16.13.1" wget -q https://nodejs.org/dist/v${NODEJS_VERSION}/node-v${NODEJS_VERSION}-linux-x64.tar.xz - tar -xvf node-v${NODEJS_VERSION}-linux-x64.tar.xz + tar -xf node-v${NODEJS_VERSION}-linux-x64.tar.xz rm -f /usr/local/bin/node rm -f /usr/local/bin/npm mv node-v${NODEJS_VERSION}-linux-x64 ${NODEJS_PREFIX} diff --git a/ci/linux-install-openresty.sh b/ci/linux-install-openresty.sh index 8d4b6f87224a..59807fbc8bff 100755 --- a/ci/linux-install-openresty.sh +++ b/ci/linux-install-openresty.sh @@ -17,6 +17,10 @@ # set -euo pipefail +source ./ci/common.sh + +export_version_info + ARCH=${ARCH:-`(uname -m | tr '[:upper:]' '[:lower:]')`} arch_path="" if [[ $ARCH == "arm64" ]] || [[ $ARCH == "aarch64" ]]; then @@ -24,13 +28,14 @@ if [[ $ARCH == "arm64" ]] || [[ $ARCH == "aarch64" ]]; then fi wget -qO - https://openresty.org/package/pubkey.gpg | sudo apt-key add - +wget -qO - http://repos.apiseven.com/pubkey.gpg | sudo apt-key add - sudo apt-get -y update --fix-missing sudo apt-get -y install software-properties-common sudo add-apt-repository -y "deb https://openresty.org/package/${arch_path}ubuntu $(lsb_release -sc) main" +sudo add-apt-repository -y "deb http://repos.apiseven.com/packages/${arch_path}debian bullseye main" sudo apt-get update - -abt_branch=${abt_branch:="master"} +sudo apt-get 
install -y openresty-openssl111 openresty-openssl111-dev libldap2-dev openresty-pcre openresty-zlib COMPILE_OPENSSL3=${COMPILE_OPENSSL3-no} USE_OPENSSL3=${USE_OPENSSL3-no} @@ -73,21 +78,8 @@ if [ "$OPENRESTY_VERSION" == "source" ]; then export cc_opt="-DNGX_LUA_ABORT_AT_PANIC -I${zlib_prefix}/include -I${pcre_prefix}/include -I${openssl_prefix}/include" export ld_opt="-L${zlib_prefix}/lib -L${pcre_prefix}/lib -L${openssl_prefix}/lib64 -Wl,-rpath,${zlib_prefix}/lib:${pcre_prefix}/lib:${openssl_prefix}/lib64" fi - - cd .. - wget -q https://raw.githubusercontent.com/api7/apisix-build-tools/$abt_branch/build-apisix-base.sh - chmod +x build-apisix-base.sh - ./build-apisix-base.sh latest - - sudo apt-get install -y openresty-openssl111 openresty-openssl111-debug-dev libldap2-dev openresty-pcre openresty-zlib - - exit 0 -fi - -if [ "$OPENRESTY_VERSION" == "default" ]; then - openresty='openresty-debug' -else - openresty="openresty-debug=$OPENRESTY_VERSION*" fi -sudo apt-get install "$openresty" openresty-openssl111-debug-dev libldap2-dev +wget "https://raw.githubusercontent.com/api7/apisix-build-tools/apisix-runtime/${APISIX_RUNTIME}/build-apisix-runtime.sh" +chmod +x build-apisix-runtime.sh +./build-apisix-runtime.sh latest diff --git a/ci/performance_test.sh b/ci/performance_test.sh index 94966b5d627c..0ec2f1e20fec 100755 --- a/ci/performance_test.sh +++ b/ci/performance_test.sh @@ -78,7 +78,7 @@ run_performance_test() { pip3 install -r t/perf/requirements.txt --user #openresty-debug - export OPENRESTY_PREFIX="/usr/local/openresty-debug" + export OPENRESTY_PREFIX="/usr/local/openresty" export PATH=$OPENRESTY_PREFIX/nginx/sbin:$OPENRESTY_PREFIX/bin:$OPENRESTY_PREFIX/luajit/bin:$PATH mkdir output diff --git a/ci/redhat-ci.sh b/ci/redhat-ci.sh index ff867fb71622..d40ccbfeb495 100755 --- a/ci/redhat-ci.sh +++ b/ci/redhat-ci.sh @@ -18,6 +18,7 @@ . 
./ci/common.sh install_dependencies() { + export_version_info export_or_prefix # install build & runtime deps @@ -30,9 +31,17 @@ install_dependencies() { yum install -y libnghttp2-devel install_curl - # install openresty to make apisix's rpm test work + # install apisix-runtime to make apisix's rpm test work yum install -y yum-utils && yum-config-manager --add-repo https://openresty.org/package/centos/openresty.repo - yum install -y openresty-1.21.4.2 openresty-debug-1.21.4.2 openresty-openssl111-debug-devel pcre pcre-devel xz + rpm --import https://repos.apiseven.com/KEYS + yum install -y openresty-openssl111 openresty-openssl111-devel pcre pcre pcre-devel xz + yum -y install https://repos.apiseven.com/packages/centos/apache-apisix-repo-1.0-1.noarch.rpm + + wget "https://raw.githubusercontent.com/api7/apisix-build-tools/apisix-runtime/${APISIX_RUNTIME}/build-apisix-runtime-debug-centos7.sh" + wget "https://raw.githubusercontent.com/api7/apisix-build-tools/apisix-runtime/${APISIX_RUNTIME}/build-apisix-runtime.sh" + chmod +x build-apisix-runtime.sh + chmod +x build-apisix-runtime-debug-centos7.sh + ./build-apisix-runtime-debug-centos7.sh # install luarocks ./utils/linux-install-luarocks.sh From 27629a5593204dfcd180fcf1c6499f6ee9370bcf Mon Sep 17 00:00:00 2001 From: Sn0rt Date: Thu, 2 Nov 2023 09:10:27 +0800 Subject: [PATCH 21/23] feat: switch apisix-base to apisix-runtime install-dependencies.sh (#10427) --- utils/install-dependencies.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/install-dependencies.sh b/utils/install-dependencies.sh index 85cfa7483081..530f19a54025 100755 --- a/utils/install-dependencies.sh +++ b/utils/install-dependencies.sh @@ -52,9 +52,9 @@ function install_dependencies_with_yum() { local apisix_pkg=apache-apisix-repo-1.0-1.noarch rpm -q --quiet ${apisix_pkg} || sudo yum install -y https://repos.apiseven.com/packages/centos/${apisix_pkg}.rpm - # install apisix-base and some compilation tools + # install apisix-runtime and some compilation tools # shellcheck disable=SC2086 - sudo yum install -y apisix-base $common_dep + sudo yum install -y apisix-runtime $common_dep else # add OpenResty source sudo yum-config-manager --add-repo "https://openresty.org/package/${1}/openresty.repo" From 1eaad271ecc78ae2001e9da79b347406bd9aa6be Mon Sep 17 00:00:00 2001 From: Abhishek Choudhary Date: Thu, 2 Nov 2023 09:53:45 +0545 Subject: [PATCH 22/23] chore: add comment for clarity (#10430) --- ci/init-plugin-test-service.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/ci/init-plugin-test-service.sh b/ci/init-plugin-test-service.sh index d670b7fa39d4..aa0ccf19021a 100755 --- a/ci/init-plugin-test-service.sh +++ b/ci/init-plugin-test-service.sh @@ -52,6 +52,7 @@ after() { docker exec apisix_keycloak bash /tmp/kcadm_configure_cas.sh docker exec apisix_keycloak bash /tmp/kcadm_configure_university.sh + # configure clickhouse echo 'CREATE TABLE default.test (`host` String, `client_ip` String, `route_id` String, `service_id` String, `@timestamp` String, PRIMARY KEY(`@timestamp`)) ENGINE = MergeTree()' | curl 'http://localhost:8123/' --data-binary @- echo 'CREATE TABLE default.test (`host` String, `client_ip` String, `route_id` String, `service_id` String, `@timestamp` String, PRIMARY KEY(`@timestamp`)) ENGINE = MergeTree()' | curl 'http://localhost:8124/' --data-binary @- } From 7edf6618dcb853c061a18c3602ea50196f3ff1fe Mon Sep 17 00:00:00 2001 From: alptugay Date: Fri, 3 Nov 2023 05:09:08 +0300 Subject: [PATCH 23/23] fix: compression in log-rotate plugin 
exceeds the default timeout of shell.run (#8620) --- apisix/plugins/log-rotate.lua | 15 +++++++++------ conf/config-default.yaml | 1 + 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/apisix/plugins/log-rotate.lua b/apisix/plugins/log-rotate.lua index db3360e448b4..4b0f32753836 100644 --- a/apisix/plugins/log-rotate.lua +++ b/apisix/plugins/log-rotate.lua @@ -156,7 +156,7 @@ local function rename_file(log, date_str) end -local function compression_file(new_file) +local function compression_file(new_file, timeout) if not new_file or type(new_file) ~= "string" then core.log.info("compression file: ", new_file, " invalid") return @@ -170,7 +170,7 @@ local function compression_file(new_file) com_filename, new_filename) core.log.info("log file compress command: " .. cmd) - local ok, stdout, stderr, reason, status = shell.run(cmd) + local ok, stdout, stderr, reason, status = shell.run(cmd, nil, timeout, nil) if not ok then core.log.error("compress log file from ", new_filename, " to ", com_filename, " fail, stdout: ", stdout, " stderr: ", stderr, " reason: ", reason, @@ -205,7 +205,7 @@ local function file_size(file) end -local function rotate_file(files, now_time, max_kept) +local function rotate_file(files, now_time, max_kept, timeout) if core.table.isempty(files) then return end @@ -236,7 +236,7 @@ local function rotate_file(files, now_time, max_kept) ngx_sleep(0.5) for _, new_file in ipairs(new_files) do - compression_file(new_file) + compression_file(new_file, timeout) end end @@ -259,16 +259,19 @@ local function rotate() local max_kept = MAX_KEPT local max_size = MAX_SIZE local attr = plugin.plugin_attr(plugin_name) + local timeout = 10000 -- default timeout 10 seconds if attr then interval = attr.interval or interval max_kept = attr.max_kept or max_kept max_size = attr.max_size or max_size + timeout = attr.timeout or timeout enable_compression = attr.enable_compression or enable_compression end core.log.info("rotate interval:", interval) core.log.info("rotate max keep:", max_kept) core.log.info("rotate max size:", max_size) + core.log.info("rotate timeout:", timeout) if not default_logs then -- first init default log filepath and filename @@ -288,7 +291,7 @@ local function rotate() if now_time >= rotate_time then local files = {DEFAULT_ACCESS_LOG_FILENAME, DEFAULT_ERROR_LOG_FILENAME} - rotate_file(files, now_time, max_kept) + rotate_file(files, now_time, max_kept, timeout) -- reset rotate time rotate_time = rotate_time + interval @@ -306,7 +309,7 @@ local function rotate() core.table.insert(files, DEFAULT_ERROR_LOG_FILENAME) end - rotate_file(files, now_time, max_kept) + rotate_file(files, now_time, max_kept, timeout) end end diff --git a/conf/config-default.yaml b/conf/config-default.yaml index 38d67823e5a9..b435533ca02d 100755 --- a/conf/config-default.yaml +++ b/conf/config-default.yaml @@ -543,6 +543,7 @@ stream_plugins: # stream plugin list (sorted by priority) # - name: pingpong plugin_attr: # Plugin attributes log-rotate: # Plugin: log-rotate + timeout: 10000 # maximum wait time for a log rotation(unit: millisecond) interval: 3600 # Set the log rotate interval in seconds. max_kept: 168 # Set the maximum number of log files to keep. If exceeded, historic logs are deleted. max_size: -1 # Set the maximum size of log files in bytes before a rotation.
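For reference, a minimal configuration sketch (not part of the patch) showing how the new `timeout` attribute could be raised when compressing very large log files takes longer than the 10 s default added above; the 30000 ms value is purely illustrative.

```yaml
# conf/config.yaml -- illustrative values only; the shipped defaults are the
# ones shown in the config-default.yaml hunk above (timeout: 10000 ms).
plugin_attr:
  log-rotate:
    timeout: 30000            # allow the gzip command up to 30 s
    interval: 3600            # rotate hourly
    max_kept: 168             # keep one week of hourly files
    max_size: -1              # rotate on interval only, not on file size
    enable_compression: true  # compress rotated files (what the timeout guards)
```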