Skip to content

Commit

Permalink
feat(plugins): ai-prompt-decorator plugin
Browse files Browse the repository at this point in the history
  • Loading branch information
tysoekong committed Dec 21, 2023
1 parent f7e6eee commit 1576e00
Show file tree
Hide file tree
Showing 9 changed files with 558 additions and 0 deletions.
4 changes: 4 additions & 0 deletions .github/labeler.yml
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,10 @@ plugins/acme:
- changed-files:
- any-glob-to-any-file: kong/plugins/acme/**/*

plugins/ai-prompt-decorator:
- changed-files:
- any-glob-to-any-file: kong/plugins/ai-prompt-decorator/**/*

plugins/aws-lambda:
- changed-files:
- any-glob-to-any-file: kong/plugins/aws-lambda/**/*
Expand Down
3 changes: 3 additions & 0 deletions changelog/unreleased/kong/add-ai-prompt-decorator-plugin.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
message: Introduced the new **AI Prompt Decorator** plugin that allows Kong administrators to inject chat messages into LLM consumer prompts.
type: feature
scope: Plugin
3 changes: 3 additions & 0 deletions kong-3.6.0-0.rockspec
Original file line number Diff line number Diff line change
Expand Up @@ -550,6 +550,9 @@ build = {
["kong.plugins.opentelemetry.proto"] = "kong/plugins/opentelemetry/proto.lua",
["kong.plugins.opentelemetry.otlp"] = "kong/plugins/opentelemetry/otlp.lua",

["kong.plugins.ai-prompt-decorator.handler"] = "kong/plugins/ai-prompt-decorator/handler.lua",
["kong.plugins.ai-prompt-decorator.schema"] = "kong/plugins/ai-prompt-decorator/schema.lua",

["kong.vaults.env"] = "kong/vaults/env/init.lua",
["kong.vaults.env.schema"] = "kong/vaults/env/schema.lua",

Expand Down
42 changes: 42 additions & 0 deletions kong/plugins/ai-prompt-decorator/access.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
local _M = {}

-- imports
local kong_meta = require "kong.meta"
local table_insert = table.insert
--

_M.PRIORITY = 772
_M.VERSION = kong_meta.version


--- Build a single chat message record.
-- Only the "v1" chat format is currently supported; any other version
-- yields nil so callers can detect an unsupported format.
-- @tparam string version chat format version, currently only "v1"
-- @tparam string role one of "system", "assistant", "user"
-- @tparam string content the message text
-- @treturn table|nil `{ role = ..., content = ... }`, or nil for unknown versions
local function to_chat_prompt(version, role, content)
  if version == "v1" then
    return { role = role, content = content }
  else
    return nil
  end
end


--- True when the value is a real, non-empty array.
-- Unset optional array fields arrive from the DAO as nil or ngx.null
-- (a userdata, on which `#` raises), so a plain truthiness check is unsafe.
local function has_items(arr)
  return type(arr) == "table" and #arr > 0
end


--- Decorate the parsed chat request in-place.
-- Inserts conf.prompts.prepend (preserving order) at the head of
-- request.messages, then conf.prompts.append at the tail.
-- @tparam table request parsed request body containing a `messages` array
-- @tparam table conf plugin configuration (`conf.prompts.prepend/append`)
-- @treturn nil,nil always succeeds; kept as (code, err) for the caller's convention
function _M.execute(request, conf)
  local prompts = conf.prompts

  if type(prompts) ~= "table" then
    return nil, nil
  end

  -- 1. add in-order to the head of the chat
  if has_items(prompts.prepend) then
    for i, v in ipairs(prompts.prepend) do
      table_insert(request.messages, i, to_chat_prompt("v1", v.role, v.content))
    end
  end

  -- 2. add in-order to the tail of the chat
  if has_items(prompts.append) then
    local messages_length = #request.messages

    for i, v in ipairs(prompts.append) do
      request.messages[i + messages_length] = to_chat_prompt("v1", v.role, v.content)
    end
  end

  return nil, nil
end

return _M
67 changes: 67 additions & 0 deletions kong/plugins/ai-prompt-decorator/handler.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
local _M = {}

-- imports
local kong_meta = require "kong.meta"
local access_handler = require("kong.plugins.ai-prompt-decorator.access")
--

_M.PRIORITY = 772
_M.VERSION = kong_meta.version


--- Log and reject a malformed client request with HTTP 400.
-- kong.response.exit terminates request processing; nothing after it runs.
local function do_bad_request(msg)
  kong.log.warn(msg)
  kong.response.exit(400, { error = true, message = msg })
end


--- Log and reject with HTTP 500.
-- Currently unreferenced in this handler; kept for parity with the
-- sibling AI plugins' error-handling helpers.
local function do_internal_server_error(msg)
  kong.log.err(msg)
  kong.response.exit(500, { error = true, message = msg })
end


--- Access phase: decorate the incoming llm/chat request body.
-- Parses the JSON body (or re-uses an already-decorated request when
-- plugin ordering was altered), runs the access handler to inject the
-- configured prompts, and stashes the result for ai-proxy to pick up.
function _M:access(conf)
  kong.log.debug("IN: ai-prompt-decorator/access")
  kong.service.request.enable_buffering()
  kong.ctx.shared.prompt_decorated = true

  -- if plugin ordering was altered, receive the "decorated" request.
  -- NOTE: fixed to read from kong.ctx.shared — the request is stashed in
  -- kong.ctx.shared.replacement_request below; reading the unshared
  -- kong.ctx.replacement_request would always miss it.
  local request, err
  if not kong.ctx.shared.replacement_request then
    request, err = kong.request.get_body("application/json")

    if err then
      -- terminal: exits with 400
      do_bad_request("ai-prompt-decorator only supports application/json requests")
    end
  else
    request = kong.ctx.shared.replacement_request
  end

  if not request.messages or #request.messages < 1 then
    -- terminal: exits with 400
    do_bad_request("ai-prompt-decorator only support llm/chat type requests")
  end

  -- run access handler to decorate the messages[] block
  local code, err = access_handler.execute(request, conf)
  if err then
    -- don't run header_filter and body_filter from ai-proxy plugin
    kong.ctx.shared.skip_response_transformer = true

    if code == 500 then kong.log.err(err) end
    kong.response.exit(code, err)
  end

  -- stash the result for parsing later (in ai-proxy)
  kong.ctx.shared.replacement_request = request

  -- all good
  kong.log.debug("OUT: ai-prompt-decorator/access")
end


return _M
80 changes: 80 additions & 0 deletions kong/plugins/ai-prompt-decorator/schema.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
local typedefs = require "kong.db.schema.typedefs"

-- A single chat message to inject into the request.
local prompt_record = {
  type = "record",
  required = false,
  fields = {
    { role = { type = "string", required = true, one_of = { "system", "assistant", "user" }, default = "system" }},
    { content = { type = "string", required = true } },
  }
}

-- Ordered arrays of messages added at the head and/or tail of the chat.
local prompts_record = {
  type = "record",
  required = false,
  fields = {
    { prepend = {
      type = "array",
      description = [[Insert chat messages at the beginning of the chat message array.
                      This array preserves exact order when adding messages.]],
      elements = prompt_record,
      required = false,
    }},
    { append = {
      type = "array",
      description = [[Insert chat messages at the end of the chat message array.
                      This array preserves exact order when adding messages.]],
      elements = prompt_record,
      required = false,
    }},
  }
}

-- True when the value is a real, non-empty array.
-- Unset optional fields may be nil or ngx.null (a userdata, on which
-- `#` raises), so both must be guarded before taking the length.
local function has_items(arr)
  return type(arr) == "table" and #arr > 0
end

return {
  -- fixed: the schema name must match the plugin directory
  -- (kong/plugins/ai-prompt-decorator) or Kong cannot load the plugin;
  -- it was "ai-prompt-injector", a leftover from an earlier working title.
  name = "ai-prompt-decorator",
  fields = {
    { protocols = typedefs.protocols_http },
    { config = {
      type = "record",
      fields = {
        { prompts = prompts_record }
      }
    }
    }
  },
  entity_checks = {
    {
      custom_entity_check = {
        field_sources = { "config" },
        -- Require at least one message in prepend or append: a decorator
        -- with nothing to inject is a misconfiguration.
        fn = function(entity)
          local config = entity.config

          if not config or config.prompts == nil or config.prompts == ngx.null then
            return nil, "must specify one or more [prompts.prepend] or [prompts.append] to add to requests"
          end

          local head_prompts_set = has_items(config.prompts.prepend)
          local tail_prompts_set = has_items(config.prompts.append)

          if (not head_prompts_set) and (not tail_prompts_set) then
            return nil, "must set one array item in either [prompts.prepend] or [prompts.append]"
          end

          return true
        end
      }
    }
  }
}
52 changes: 52 additions & 0 deletions spec/03-plugins/40-ai-prompt-decorator/00-config_spec.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
local PLUGIN_NAME = "ai-prompt-decorator"


-- helper function to validate data against a schema
local validate do
  local validate_entity = require("spec.helpers").validate_plugin_config_schema
  local plugin_schema = require("kong.plugins."..PLUGIN_NAME..".schema")

  function validate(data)
    return validate_entity(data, plugin_schema)
  end
end

describe(PLUGIN_NAME .. ": (schema)", function()
  it("won't allow empty config object", function()
    local config = {
    }

    local ok, err = validate(config)

    assert.is_falsy(ok)
    assert.not_nil(err)
    assert.equal("must specify one or more [prompts.prepend] or [prompts.append] to add to requests", err["@entity"][1])
  end)

  it("won't allow both head and tail to be unset", function()
    local config = {
      prompts = {},
    }

    local ok, err = validate(config)

    assert.is_falsy(ok)
    assert.not_nil(err)
    assert.equal("must set one array item in either [prompts.prepend] or [prompts.append]", err["@entity"][1])
  end)

  -- fixed description: it previously referenced allow_patterns/deny_patterns,
  -- a copy-paste from another plugin's spec; this test checks prepend/append.
  it("won't allow both prompts.prepend and prompts.append to be empty arrays", function()
    local config = {
      prompts = {
        prepend = {},
        append = {},
      },
    }

    local ok, err = validate(config)

    assert.is_falsy(ok)
    assert.not_nil(err)
    assert.equal("must set one array item in either [prompts.prepend] or [prompts.append]", err["@entity"][1])
  end)
end)
Loading

0 comments on commit 1576e00

Please sign in to comment.