Skip to content

Commit

Permalink
Merge branch 'master' into revolyssup/feat/pre-func
Browse files Browse the repository at this point in the history
  • Loading branch information
Revolyssup authored Dec 30, 2024
2 parents 4174c6b + b89c1a0 commit cfafc54
Show file tree
Hide file tree
Showing 23 changed files with 859 additions and 166 deletions.
1 change: 1 addition & 0 deletions .licenserc.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -55,5 +55,6 @@ header:
- '.luacheckrc'
# Exclude file contains certificate revocation information
- 't/certs/ocsp/index.txt'
- 'utils/lj-releng'

comment: on-failure
10 changes: 10 additions & 0 deletions apisix/control/v1.lua
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,11 @@ local function iter_add_get_routes_info(values, route_id)
if new_route.value.upstream and new_route.value.upstream.parent then
new_route.value.upstream.parent = nil
end
-- remove healthcheck info
new_route.checker = nil
new_route.checker_idx = nil
new_route.checker_upstream = nil
new_route.clean_handlers = nil
core.table.insert(infos, new_route)
-- check the route id
if route_id and route.value.id == route_id then
Expand Down Expand Up @@ -352,6 +357,11 @@ local function iter_add_get_services_info(values, svc_id)
if new_svc.value.upstream and new_svc.value.upstream.parent then
new_svc.value.upstream.parent = nil
end
-- remove healthcheck info
new_svc.checker = nil
new_svc.checker_idx = nil
new_svc.checker_upstream = nil
new_svc.clean_handlers = nil
core.table.insert(infos, new_svc)
-- check the service id
if svc_id and svc.value.id == svc_id then
Expand Down
8 changes: 7 additions & 1 deletion apisix/core/config_etcd.lua
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,11 @@ local function do_run_watch(premature)
end

local rev = tonumber(res.result.header.revision)
if rev == nil then
log.warn("receive a invalid revision header, header: ", inspect(res.result.header))
cancel_watch(http_cli)
break
end
if rev > watch_ctx.rev then
watch_ctx.rev = rev + 1
end
Expand Down Expand Up @@ -284,7 +289,8 @@ local function run_watch(premature)

local ok, err = ngx_thread_wait(run_watch_th, check_worker_th)
if not ok then
log.error("check_worker thread terminates failed, retart checker, error: " .. err)
log.error("run_watch or check_worker thread terminates failed",
" restart those threads, error: ", inspect(err))
end

ngx_thread_kill(run_watch_th)
Expand Down
29 changes: 20 additions & 9 deletions apisix/core/table.lua
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ local newproxy = newproxy
local getmetatable = getmetatable
local setmetatable = setmetatable
local select = select
local tostring = tostring
local new_tab = require("table.new")
local nkeys = require("table.nkeys")
local ipairs = ipairs
Expand Down Expand Up @@ -91,7 +92,7 @@ end
-- @usage
-- local arr = {"a", "b", "c"}
-- local idx = core.table.array_find(arr, "b") -- idx = 2
function _M.array_find(array, val)
local function array_find(array, val)
if type(array) ~= "table" then
return nil
end
Expand All @@ -104,6 +105,7 @@ function _M.array_find(array, val)

return nil
end
_M.array_find = array_find


-- only work under lua51 or luajit
Expand All @@ -117,19 +119,28 @@ end

local deepcopy
do
local function _deepcopy(orig, copied)
-- prevent infinite loop when a field refers its parent
copied[orig] = true
local function _deepcopy(orig, copied, parent, opts)
-- If the array-like table contains nil in the middle,
-- the len might be smaller than the expected.
-- But it doesn't affect the correctness.
local len = #orig
local copy = new_tab(len, nkeys(orig) - len)
-- prevent infinite loop when a field refers its parent
copied[orig] = copy
for orig_key, orig_value in pairs(orig) do
if type(orig_value) == "table" and not copied[orig_value] then
copy[orig_key] = _deepcopy(orig_value, copied)
else
local path = parent .. "." .. tostring(orig_key)
if opts and array_find(opts.shallows, path) then
copy[orig_key] = orig_value
else
if type(orig_value) == "table" then
if copied[orig_value] then
copy[orig_key] = copied[orig_value]
else
copy[orig_key] = _deepcopy(orig_value, copied, path, opts)
end
else
copy[orig_key] = orig_value
end
end
end

Expand All @@ -144,13 +155,13 @@ do

local copied_recorder = {}

function deepcopy(orig)
function deepcopy(orig, opts)
local orig_type = type(orig)
if orig_type ~= 'table' then
return orig
end

local res = _deepcopy(orig, copied_recorder)
local res = _deepcopy(orig, copied_recorder, "self", opts)
_M.clear(copied_recorder)
return res
end
Expand Down
10 changes: 1 addition & 9 deletions apisix/init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -246,15 +246,7 @@ local function parse_domain_in_route(route)
-- don't modify the modifiedIndex to avoid plugin cache miss because of DNS resolve result
-- has changed

local parent = route.value.upstream.parent
if parent then
route.value.upstream.parent = nil
end
route.dns_value = core.table.deepcopy(route.value)
if parent then
route.value.upstream.parent = parent
route.dns_value.upstream.parent = parent
end
route.dns_value = core.table.deepcopy(route.value, { shallows = { "self.upstream.parent"}})
route.dns_value.upstream.nodes = new_nodes
core.log.info("parse route which contain domain: ",
core.json.delay_encode(route, true))
Expand Down
11 changes: 8 additions & 3 deletions apisix/plugin.lua
Original file line number Diff line number Diff line change
Expand Up @@ -191,6 +191,10 @@ local function load_plugin(name, plugins_list, plugin_type)
plugin.init()
end

if plugin.workflow_handler then
plugin.workflow_handler()
end

return
end

Expand Down Expand Up @@ -586,7 +590,7 @@ end


local function merge_service_route(service_conf, route_conf)
local new_conf = core.table.deepcopy(service_conf)
local new_conf = core.table.deepcopy(service_conf, { shallows = {"self.value.upstream.parent"}})
new_conf.value.service_id = new_conf.value.id
new_conf.value.id = route_conf.value.id
new_conf.modifiedIndex = route_conf.modifiedIndex
Expand Down Expand Up @@ -660,7 +664,7 @@ end
local function merge_service_stream_route(service_conf, route_conf)
-- because many fields in Service are not supported by stream route,
-- so we copy the stream route as base object
local new_conf = core.table.deepcopy(route_conf)
local new_conf = core.table.deepcopy(route_conf, { shallows = {"self.value.upstream.parent"}})
if service_conf.value.plugins then
for name, conf in pairs(service_conf.value.plugins) do
if not new_conf.value.plugins then
Expand Down Expand Up @@ -708,7 +712,8 @@ local function merge_consumer_route(route_conf, consumer_conf, consumer_group_co
return route_conf
end

local new_route_conf = core.table.deepcopy(route_conf)
local new_route_conf = core.table.deepcopy(route_conf,
{ shallows = {"self.value.upstream.parent"}})

if consumer_group_conf then
for name, conf in pairs(consumer_group_conf.value.plugins) do
Expand Down
12 changes: 11 additions & 1 deletion apisix/plugins/ai-proxy/drivers/openai.lua
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ local http = require("resty.http")
local url = require("socket.url")

local pairs = pairs
local type = type

-- globals
local DEFAULT_HOST = "api.openai.com"
Expand Down Expand Up @@ -54,6 +55,15 @@ function _M.request(conf, request_table, ctx)
return nil, "failed to connect to LLM server: " .. err
end

local query_params = conf.auth.query or {}

if type(parsed_url) == "table" and parsed_url.query and #parsed_url.query > 0 then
local args_tab = core.string.decode_args(parsed_url.query)
if type(args_tab) == "table" then
core.table.merge(query_params, args_tab)
end
end

local path = (endpoint and parsed_url.path or DEFAULT_PATH)

local headers = (conf.auth.header or {})
Expand All @@ -64,7 +74,7 @@ function _M.request(conf, request_table, ctx)
keepalive = conf.keepalive,
ssl_verify = conf.ssl_verify,
path = path,
query = conf.auth.query
query = query_params
}

if conf.model.options then
Expand Down
4 changes: 3 additions & 1 deletion apisix/plugins/ai.lua
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,9 @@ local default_keepalive_pool = {}

local function create_router_matching_cache(api_ctx)
orig_router_http_matching(api_ctx)
return core.table.deepcopy(api_ctx)
return core.table.deepcopy(api_ctx, {
shallows = { "self.matched_route.value.upstream.parent" }
})
end


Expand Down
10 changes: 8 additions & 2 deletions apisix/plugins/jwt-auth.lua
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,14 @@ local consumer_schema = {
type = "object",
-- can't use additionalProperties with dependencies
properties = {
key = {type = "string"},
secret = {type = "string"},
key = {
type = "string",
minLength = 1,
},
secret = {
type = "string",
minLength = 1,
},
algorithm = {
type = "string",
enum = {"HS256", "HS512", "RS256", "ES256"},
Expand Down
15 changes: 13 additions & 2 deletions apisix/plugins/limit-count.lua
Original file line number Diff line number Diff line change
Expand Up @@ -16,18 +16,20 @@
--
local fetch_secrets = require("apisix.secret").fetch_secrets
local limit_count = require("apisix.plugins.limit-count.init")
local workflow = require("apisix.plugins.workflow")

local plugin_name = "limit-count"
local _M = {
version = 0.5,
priority = 1002,
name = plugin_name,
schema = limit_count.schema,
metadata_schema = limit_count.metadata_schema,
}


function _M.check_schema(conf)
return limit_count.check_schema(conf)
-- Validate a limit-count configuration by delegating to the shared
-- implementation in apisix.plugins.limit-count.init; when schema_type
-- is core.schema.TYPE_METADATA it validates against the metadata schema.
function _M.check_schema(conf, schema_type)
-- tail call so all results (ok[, err]) are forwarded unchanged
return limit_count.check_schema(conf, schema_type)
end


Expand All @@ -36,5 +38,14 @@ function _M.access(conf, ctx)
return limit_count.rate_limit(conf, ctx, plugin_name, 1)
end

-- Hook invoked at plugin load time: expose limit-count to the workflow
-- plugin by registering a rate-limiting action together with the config
-- validator for it, both keyed under this plugin's name.
function _M.workflow_handler()
    -- action executed by workflow: apply the standard cost-1 rate limit
    local function do_rate_limit(conf, ctx)
        return limit_count.rate_limit(conf, ctx, plugin_name, 1)
    end

    -- validator workflow uses to check the embedded limit-count config
    local function validate_conf(conf)
        return limit_count.check_schema(conf)
    end

    workflow.register(plugin_name, do_rate_limit, validate_conf)
end

return _M
54 changes: 46 additions & 8 deletions apisix/plugins/limit-count/init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,30 @@ local group_conf_lru = core.lrucache.new({
type = 'plugin',
})

-- Fallback response-header names used when no limit-count plugin
-- metadata has been configured (see the metadata lookup in rate_limit).
local metadata_defaults = {
limit_header = "X-RateLimit-Limit",
remaining_header = "X-RateLimit-Remaining",
reset_header = "X-RateLimit-Reset",
}

-- Plugin metadata schema: lets operators rename each of the rate-limit
-- response headers, defaulting to the conventional X-RateLimit-* names
-- defined in metadata_defaults above.
local metadata_schema = {
type = "object",
properties = {
limit_header = {
type = "string",
default = metadata_defaults.limit_header,
},
remaining_header = {
type = "string",
default = metadata_defaults.remaining_header,
},
reset_header = {
type = "string",
default = metadata_defaults.reset_header,
},
},
}

local schema = {
type = "object",
properties = {
Expand Down Expand Up @@ -91,7 +115,8 @@ local schema = {
local schema_copy = core.table.deepcopy(schema)

local _M = {
schema = schema
schema = schema,
metadata_schema = metadata_schema,
}


Expand All @@ -100,7 +125,12 @@ local function group_conf(conf)
end


function _M.check_schema(conf)

function _M.check_schema(conf, schema_type)
if schema_type == core.schema.TYPE_METADATA then
return core.schema.check(metadata_schema, conf)
end

local ok, err = core.schema.check(schema, conf)
if not ok then
return false, err
Expand Down Expand Up @@ -250,14 +280,22 @@ function _M.rate_limit(conf, ctx, name, cost)
delay, remaining, reset = lim:incoming(key, cost)
end

local metadata = apisix_plugin.plugin_metadata("limit-count")
if metadata then
metadata = metadata.value
else
metadata = metadata_defaults
end
core.log.info("limit-count plugin-metadata: ", core.json.delay_encode(metadata))

if not delay then
local err = remaining
if err == "rejected" then
-- show count limit header when rejected
if conf.show_limit_quota_header then
core.response.set_header("X-RateLimit-Limit", conf.count,
"X-RateLimit-Remaining", 0,
"X-RateLimit-Reset", reset)
core.response.set_header(metadata.limit_header, conf.count,
metadata.remaining_header, 0,
metadata.reset_header, reset)
end

if conf.rejected_msg then
Expand All @@ -274,9 +312,9 @@ function _M.rate_limit(conf, ctx, name, cost)
end

if conf.show_limit_quota_header then
core.response.set_header("X-RateLimit-Limit", conf.count,
"X-RateLimit-Remaining", remaining,
"X-RateLimit-Reset", reset)
core.response.set_header(metadata.limit_header, conf.count,
metadata.remaining_header, remaining,
metadata.reset_header, reset)
end
end

Expand Down
Loading

0 comments on commit cfafc54

Please sign in to comment.