path: root/lualib/lua_verdict.lua
author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-10 21:30:40 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-10 21:30:40 +0000
commit    133a45c109da5310add55824db21af5239951f93 (patch)
tree      ba6ac4c0a950a0dda56451944315d66409923918  /lualib/lua_verdict.lua
parent    Initial commit. (diff)
download  rspamd-133a45c109da5310add55824db21af5239951f93.tar.xz
          rspamd-133a45c109da5310add55824db21af5239951f93.zip

Adding upstream version 3.8.1. (upstream/3.8.1, upstream)

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'lualib/lua_verdict.lua')
-rw-r--r--  lualib/lua_verdict.lua  208
1 file changed, 208 insertions(+), 0 deletions(-)
diff --git a/lualib/lua_verdict.lua b/lualib/lua_verdict.lua
new file mode 100644
index 0000000..6ce99e6
--- /dev/null
+++ b/lualib/lua_verdict.lua
@@ -0,0 +1,208 @@
+--[[
+Copyright (c) 2022, Vsevolod Stakhov <vsevolod@rspamd.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+]]--
+
+local exports = {}
+
+---[[[
+-- @function lua_verdict.get_default_verdict(task)
+-- Returns the verdict for a task plus its score when certain; must be called from idempotent filters only
+-- Returns one of the following strings:
+-- * `spam`: if a message has a score above the reject threshold and more than one positive rule
+-- * `junk`: if a message has a score between the [add_header/rewrite subject] and reject thresholds and more than two positive rules
+-- * `passthrough`: if a message has been passed through some short-circuit rule
+-- * `ham`: if a message has an overall score below the junk level **and** more than three negative rules, or a negative total score
+-- * `uncertain`: all other cases
+--]]
+local function default_verdict_function(task)
+  local result = task:get_metric_result()
+
+  if result then
+
+    if result.passthrough then
+      return 'passthrough', nil
+    end
+
+    local score = result.score
+
+    local action = result.action
+
+    if action == 'reject' and result.npositive > 1 then
+      return 'spam', score
+    elseif action == 'no action' then
+      if score < 0 or result.nnegative > 3 then
+        return 'ham', score
+      end
+    else
+      -- All colors of junk
+      if action == 'add header' or action == 'rewrite subject' then
+        if result.npositive > 2 then
+          return 'junk', score
+        end
+      end
+    end
+
+    return 'uncertain', score
+  end
+end
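+
+-- Illustrative sketch (editorial addition, not part of the upstream commit):
+-- an idempotent symbol that only logs the default verdict. The symbol name
+-- 'MY_VERDICT_LOG' is a placeholder; only get_default_verdict() comes from this module.
+--
+--   local lua_verdict = require "lua_verdict"
+--   local rspamd_logger = require "rspamd_logger"
+--
+--   rspamd_config:register_symbol{
+--     name = 'MY_VERDICT_LOG',
+--     type = 'idempotent',
+--     callback = function(task)
+--       local verdict, score = lua_verdict.get_default_verdict(task)
+--       rspamd_logger.infox(task, 'verdict: %s (score: %s)', verdict, score)
+--     end,
+--   }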
+
+local default_possible_verdicts = {
+  passthrough = {
+    can_learn = false,
+    description = 'message has passthrough result',
+  },
+  spam = {
+    can_learn = 'spam',
+    description = 'message is likely spam',
+  },
+  junk = {
+    can_learn = 'spam',
+    description = 'message is possibly spam',
+  },
+  ham = {
+    can_learn = 'ham',
+    description = 'message is likely ham',
+  },
+  uncertain = {
+    can_learn = false,
+    description = 'no certainty in verdict'
+  }
+}
+
+-- Verdict functions specific for modules
+local specific_verdicts = {
+  default = {
+    callback = default_verdict_function,
+    possible_verdicts = default_possible_verdicts
+  }
+}
+
+local default_verdict = specific_verdicts.default
+
+exports.get_default_verdict = default_verdict.callback
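+
+---[[[
+-- @function lua_verdict.set_verdict_function(func, what)
+-- Replaces the verdict callback for the module `what` (or the default callback
+-- when `what` is nil) and returns the previously installed callback, if any
+--]]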
+exports.set_verdict_function = function(func, what)
+  assert(type(func) == 'function')
+  if not what then
+    -- Default verdict
+    local existing = specific_verdicts.default.callback
+    specific_verdicts.default.callback = func
+    exports.get_default_verdict = func
+
+    return existing
+  else
+    local existing = specific_verdicts[what]
+
+    if not existing then
+      specific_verdicts[what] = {
+        callback = func,
+        possible_verdicts = default_possible_verdicts
+      }
+    else
+      existing = existing.callback
+    end
+
+    specific_verdicts[what].callback = func
+    return existing
+  end
+end
+
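+---[[[
+-- @function lua_verdict.set_verdict_table(verdict_tbl, what)
+-- Replaces the whole verdict definition (both `callback` and `possible_verdicts`)
+-- for the module `what`, or the default one when `what` is nil; returns the
+-- previous definition
+--]]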
+exports.set_verdict_table = function(verdict_tbl, what)
+  assert(type(verdict_tbl) == 'table' and
+      type(verdict_tbl.callback) == 'function' and
+      type(verdict_tbl.possible_verdicts) == 'table')
+
+  if not what then
+    -- Default verdict
+    local existing = specific_verdicts.default
+    specific_verdicts.default = verdict_tbl
+    exports.get_default_verdict = specific_verdicts.default.callback
+
+    return existing
+  else
+    local existing = specific_verdicts[what]
+    specific_verdicts[what] = verdict_tbl
+    return existing
+  end
+end
+
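+---[[[
+-- @function lua_verdict.get_specific_verdict(what, task)
+-- Returns the verdict for `task` using the callback registered for the module
+-- `what`, falling back to the default verdict function when none is registered
+--]]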
+exports.get_specific_verdict = function(what, task)
+  if specific_verdicts[what] then
+    return specific_verdicts[what].callback(task)
+  end
+
+  return exports.get_default_verdict(task)
+end
+
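+---[[[
+-- @function lua_verdict.get_possible_verdicts(what)
+-- Returns the list of verdict names that the module `what` (or the default
+-- verdict function when `what` is nil) can produce, or nil for an unknown module
+--]]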
+exports.get_possible_verdicts = function(what)
+  local lua_util = require "lua_util"
+  if what then
+    if specific_verdicts[what] then
+      return lua_util.keys(specific_verdicts[what].possible_verdicts)
+    end
+  else
+    return lua_util.keys(specific_verdicts.default.possible_verdicts)
+  end
+
+  return nil
+end
+
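+---[[[
+-- @function lua_verdict.can_learn(verdict, what)
+-- Returns the learn class ('spam' or 'ham') for a verdict, `false` when the
+-- verdict cannot be learned, or nil when the verdict is unknown
+--]]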
+exports.can_learn = function(verdict, what)
+  if what then
+    if specific_verdicts[what] and specific_verdicts[what].possible_verdicts[verdict] then
+      return specific_verdicts[what].possible_verdicts[verdict].can_learn
+    end
+  else
+    if specific_verdicts.default.possible_verdicts[verdict] then
+      return specific_verdicts.default.possible_verdicts[verdict].can_learn
+    end
+  end
+
+  return nil -- To distinguish an unknown verdict from `false`, which is a valid can_learn value
+end
+
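+---[[[
+-- @function lua_verdict.describe(verdict, what)
+-- Returns a human-readable description of a verdict for the module `what`
+-- (or the default set when `what` is nil), or nil when the verdict is unknown
+--]]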
+exports.describe = function(verdict, what)
+  if what then
+    if specific_verdicts[what] and specific_verdicts[what].possible_verdicts[verdict] then
+      return specific_verdicts[what].possible_verdicts[verdict].description
+    end
+  else
+    if specific_verdicts.default.possible_verdicts[verdict] then
+      return specific_verdicts.default.possible_verdicts[verdict].description
+    end
+  end
+
+  return nil
+end
+
+---[[[
+-- @function lua_verdict.adjust_passthrough_action(task)
+-- If the action is `soft reject`, this function extracts the module that set this action
+-- and returns it as the adjusted action (e.g. 'greylist' or 'ratelimit').
+-- Otherwise, the action is returned as is.
+--]]
+exports.adjust_passthrough_action = function(task)
+  local action = task:get_metric_action()
+  if action == 'soft reject' then
+    local has_pr, _, _, module = task:has_pre_result()
+
+    if has_pr and module then
+      action = module
+    end
+  end
+
+  return action
+end
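+
+-- Illustrative sketch (editorial addition, not part of the upstream commit): how a
+-- caller inside a task callback might log the effective action; the logger calls
+-- are assumptions, only adjust_passthrough_action() comes from this module.
+--
+--   local lua_verdict = require "lua_verdict"
+--   local rspamd_logger = require "rspamd_logger"
+--
+--   local action = lua_verdict.adjust_passthrough_action(task)
+--   -- `action` is now e.g. 'greylist' or 'ratelimit' instead of 'soft reject'
+--   rspamd_logger.infox(task, 'effective action: %s', action)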
+
+return exports
\ No newline at end of file