Merge pull request #5672 from agile6v/master

feat: enable lj-releng tool to lint lua code.
This commit is contained in:
Kubernetes Prow Robot 2020-06-09 11:15:19 -07:00 committed by GitHub
commit 0549d9b132
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
36 changed files with 213 additions and 93 deletions

View file

@ -1,7 +1,5 @@
std = 'ngx_lua'
globals = {
'_TEST'
}
max_line_length = 100
exclude_files = {'./rootfs/etc/nginx/lua/test/**/*.lua', './rootfs/etc/nginx/lua/plugins/**/test/**/*.lua'}
files["rootfs/etc/nginx/lua/lua_ingress.lua"] = {
ignore = { "122" },

View file

@ -19,3 +19,5 @@ set -o nounset
set -o pipefail
luacheck --codes -q rootfs/etc/nginx/lua/
find rootfs/etc/nginx/lua/ -name "*.lua" -not -path "*/test/*" -exec lj-releng -L -s {} + && echo "lj-releng validation is success!"

View file

@ -16,7 +16,7 @@ local getmetatable = getmetatable
local tostring = tostring
local pairs = pairs
local math = math
local ngx = ngx
-- measured in seconds
-- for an Nginx worker to pick up the new list of upstream peers
@ -305,11 +305,11 @@ function _M.log()
balancer:after_balance()
end
if _TEST then
_M.get_implementation = get_implementation
_M.sync_backend = sync_backend
_M.route_to_alternative_balancer = route_to_alternative_balancer
_M.get_balancer = get_balancer
end
setmetatable(_M, {__index = {
get_implementation = get_implementation,
sync_backend = sync_backend,
route_to_alternative_balancer = route_to_alternative_balancer,
get_balancer = get_balancer,
}})
return _M

View file

@ -3,12 +3,14 @@ local resty_chash = require("resty.chash")
local util = require("util")
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local setmetatable = setmetatable
local _M = balancer_resty:new({ factory = resty_chash, name = "chash" })
function _M.new(self, backend)
local nodes = util.get_nodes(backend.endpoints)
local complex_val, err = util.parse_complex_value(backend["upstreamHashByConfig"]["upstream-hash-by"])
local complex_val, err =
util.parse_complex_value(backend["upstreamHashByConfig"]["upstream-hash-by"])
if err ~= nil then
ngx_log(ngx_ERR, "could not parse the value of the upstream-hash-by: ", err)
end

View file

@ -5,6 +5,11 @@ local resty_chash = require("resty.chash")
local util = require("util")
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local setmetatable = setmetatable
local tostring = tostring
local math = math
local table = table
local pairs = pairs
local _M = { name = "chashsubset" }
@ -46,7 +51,8 @@ end
function _M.new(self, backend)
local subset_map, subsets = build_subset_map(backend)
local complex_val, err = util.parse_complex_value(backend["upstreamHashByConfig"]["upstream-hash-by"])
local complex_val, err =
util.parse_complex_value(backend["upstreamHashByConfig"]["upstream-hash-by"])
if err ~= nil then
ngx_log(ngx_ERR, "could not parse the value of the upstream-hash-by: ", err)
end

View file

@ -9,6 +9,14 @@ local resty_lock = require("resty.lock")
local util = require("util")
local split = require("util.split")
local ngx = ngx
local math = math
local pairs = pairs
local ipairs = ipairs
local tostring = tostring
local string = string
local tonumber = tonumber
local setmetatable = setmetatable
local string_format = string.format
local ngx_log = ngx.log
local INFO = ngx.INFO
@ -185,7 +193,8 @@ function _M.after_balance(_)
end
function _M.sync(self, backend)
local normalized_endpoints_added, normalized_endpoints_removed = util.diff_endpoints(self.peers, backend.endpoints)
local normalized_endpoints_added, normalized_endpoints_removed =
util.diff_endpoints(self.peers, backend.endpoints)
if #normalized_endpoints_added == 0 and #normalized_endpoints_removed == 0 then
ngx.log(ngx.INFO, "endpoints did not change for backend " .. tostring(backend.name))

View file

@ -3,6 +3,7 @@ local util = require("util")
local string_format = string.format
local ngx_log = ngx.log
local INFO = ngx.INFO
local setmetatable = setmetatable
local _M = {}

View file

@ -2,6 +2,8 @@ local balancer_resty = require("balancer.resty")
local resty_roundrobin = require("resty.roundrobin")
local util = require("util")
local setmetatable = setmetatable
local _M = balancer_resty:new({ factory = resty_roundrobin, name = "round_robin" })
function _M.new(self, backend)

View file

@ -4,6 +4,13 @@ local ngx_balancer = require("ngx.balancer")
local split = require("util.split")
local same_site = require("util.same_site")
local ngx = ngx
local pairs = pairs
local ipairs = ipairs
local string = string
local tonumber = tonumber
local setmetatable = setmetatable
local _M = balancer_resty:new()
local DEFAULT_COOKIE_NAME = "route"
@ -64,7 +71,8 @@ function _M.set_cookie(self, value)
}
if self.cookie_session_affinity.expires and self.cookie_session_affinity.expires ~= "" then
cookie_data.expires = ngx.cookie_time(ngx.time() + tonumber(self.cookie_session_affinity.expires))
cookie_data.expires = ngx.cookie_time(ngx.time() +
tonumber(self.cookie_session_affinity.expires))
end
if self.cookie_session_affinity.maxage and self.cookie_session_affinity.maxage ~= "" then
@ -132,8 +140,8 @@ function _M.balance(self)
end
local last_failure = self.get_last_failure()
local should_pick_new_upstream = last_failure ~= nil and self.cookie_session_affinity.change_on_failure or
upstream_from_cookie == nil
local should_pick_new_upstream = last_failure ~= nil and
self.cookie_session_affinity.change_on_failure or upstream_from_cookie == nil
if not should_pick_new_upstream then
return upstream_from_cookie

View file

@ -9,6 +9,10 @@ local math_random = require("math").random
local resty_chash = require("resty.chash")
local util_get_nodes = require("util").get_nodes
local ngx = ngx
local string = string
local setmetatable = setmetatable
local _M = balancer_sticky:new()
-- Consider the situation of N upstreams one of which is failing.

View file

@ -6,6 +6,7 @@
local balancer_sticky = require("balancer.sticky")
local util_get_nodes = require("util").get_nodes
local util_nodemap = require("util.nodemap")
local setmetatable = setmetatable
local _M = balancer_sticky:new()

View file

@ -1,7 +1,11 @@
local http = require("resty.http")
local ssl = require("ngx.ssl")
local ocsp = require("ngx.ocsp")
local ngx = ngx
local string = string
local tostring = tostring
local re_sub = ngx.re.sub
local unpack = unpack
local dns_lookup = require("util.dns").lookup
@ -215,8 +219,8 @@ function _M.call()
ngx.log(ngx.ERR, "error while obtaining hostname: " .. hostname_err)
end
if not hostname then
ngx.log(ngx.INFO,
"obtained hostname is nil (the client does not support SNI?), falling back to default certificate")
ngx.log(ngx.INFO, "obtained hostname is nil (the client does "
.. "not support SNI?), falling back to default certificate")
hostname = DEFAULT_CERT_HOSTNAME
end
@ -229,7 +233,8 @@ function _M.call()
pem_cert = certificate_data:get(pem_cert_uid)
end
if not pem_cert then
ngx.log(ngx.ERR, "certificate not found, falling back to fake certificate for hostname: " .. tostring(hostname))
ngx.log(ngx.ERR, "certificate not found, falling back to fake certificate for hostname: "
.. tostring(hostname))
return
end

View file

@ -1,5 +1,12 @@
local cjson = require("cjson.safe")
local io = io
local ngx = ngx
local tostring = tostring
local string = string
local table = table
local pairs = pairs
-- this is the Lua representation of Configuration struct in internal/ingress/types.go
local configuration_data = ngx.shared.configuration_data
local certificate_data = ngx.shared.certificate_data
@ -72,12 +79,13 @@ local function handle_servers()
else
local success, set_err, forcible = certificate_servers:set(server, uid)
if not success then
local err_msg = string.format("error setting certificate for %s: %s\n", server, tostring(set_err))
local err_msg = string.format("error setting certificate for %s: %s\n",
server, tostring(set_err))
table.insert(err_buf, err_msg)
end
if forcible then
local msg = string.format("certificate_servers dictionary is full, LRU entry has been removed to store %s",
server)
local msg = string.format("certificate_servers dictionary is full, "
.. "LRU entry has been removed to store %s", server)
ngx.log(ngx.WARN, msg)
end
end
@ -86,11 +94,13 @@ local function handle_servers()
for uid, cert in pairs(configuration.certificates) do
local success, set_err, forcible = certificate_data:set(uid, cert)
if not success then
local err_msg = string.format("error setting certificate for %s: %s\n", uid, tostring(set_err))
local err_msg = string.format("error setting certificate for %s: %s\n",
uid, tostring(set_err))
table.insert(err_buf, err_msg)
end
if forcible then
local msg = string.format("certificate_data dictionary is full, LRU entry has been removed to store %s", uid)
local msg = string.format("certificate_data dictionary is full, "
.. "LRU entry has been removed to store %s", uid)
ngx.log(ngx.WARN, msg)
end
end
@ -211,8 +221,6 @@ function _M.call()
ngx.print("Not found!")
end
if _TEST then
_M.handle_servers = handle_servers
end
setmetatable(_M, {__index = { handle_servers = handle_servers }})
return _M

View file

@ -1,7 +1,12 @@
local ngx_re_split = require("ngx.re").split
local certificate_configured_for_current_request = require("certificate").configured_for_current_request
local certificate_configured_for_current_request =
require("certificate").configured_for_current_request
local ngx = ngx
local io = io
local math = math
local string = string
local original_randomseed = math.randomseed
local string_format = string.format
local ngx_redirect = ngx.redirect
@ -38,8 +43,8 @@ end
math.randomseed = function(seed)
local pid = ngx.worker.pid()
if seeds[pid] then
ngx.log(ngx.WARN,
string.format("ignoring math.randomseed(%d) since PRNG is already seeded for worker %d", seed, pid))
ngx.log(ngx.WARN, string.format("ignoring math.randomseed(%d) since PRNG "
.. "is already seeded for worker %d", seed, pid))
return
end
@ -143,7 +148,8 @@ function _M.rewrite(location_config)
local uri = string_format("https://%s%s", redirect_host(), ngx.var.request_uri)
if location_config.use_port_in_redirects then
uri = string_format("https://%s:%s%s", redirect_host(), config.listen_ports.https, ngx.var.request_uri)
uri = string_format("https://%s:%s%s", redirect_host(),
config.listen_ports.https, ngx.var.request_uri)
end
ngx_redirect(uri, config.http_redirect_code)

View file

@ -6,7 +6,13 @@ local clear_tab = require "table.clear"
local clone_tab = require "table.clone"
local nkeys = require "table.nkeys"
-- if an Nginx worker processes more than (MAX_BATCH_SIZE/FLUSH_INTERVAL) RPS then it will start dropping metrics
local ngx = ngx
local tonumber = tonumber
local string = string
local tostring = tostring
-- if an Nginx worker processes more than (MAX_BATCH_SIZE/FLUSH_INTERVAL) RPS
-- then it will start dropping metrics
local MAX_BATCH_SIZE = 10000
local FLUSH_INTERVAL = 1 -- second
@ -80,9 +86,9 @@ function _M.call()
metrics_batch[metrics_size + 1] = metrics()
end
if _TEST then
_M.flush = flush
_M.get_metrics_batch = function() return metrics_batch end
end
setmetatable(_M, {__index = {
flush = flush,
get_metrics_batch = function() return metrics_batch end,
}})
return _M

View file

@ -1,8 +1,13 @@
local require = require
local ngx = ngx
local pairs = pairs
local ipairs = ipairs
local string_format = string.format
local new_tab = require "table.new"
local ngx_log = ngx.log
local INFO = ngx.INFO
local ERR = ngx.ERR
local pcall = pcall
local _M = {}
local MAX_NUMBER_OF_PLUGINS = 10000
@ -36,10 +41,12 @@ function _M.run()
-- TODO: consider sandboxing this, should we?
-- probably yes, at least prohibit plugin from accessing env vars etc
-- but since the plugins are going to be installed by ingress-nginx operator they can be assumed to be safe also
-- but since the plugins are going to be installed by ingress-nginx
-- operator they can be assumed to be safe also
local ok, err = pcall(plugin[phase])
if not ok then
ngx_log(ERR, string_format("error while running plugin \"%s\" in phase \"%s\": %s", name, phase, err))
ngx_log(ERR, string_format("error while running plugin \"%s\" in phase \"%s\": %s",
name, phase, err))
end
end
end

View file

@ -1,3 +1,5 @@
local ngx = ngx
local _M = {}
function _M.rewrite()

View file

@ -1,4 +1,3 @@
_G._TEST = true
local main = require("plugins.hello_world.main")

View file

@ -5,9 +5,18 @@ local dns_lookup = require("util.dns").lookup
local configuration = require("tcp_udp_configuration")
local round_robin = require("balancer.round_robin")
local ngx = ngx
local table = table
local ipairs = ipairs
local pairs = pairs
local tostring = tostring
local string = string
local getmetatable = getmetatable
-- measured in seconds
-- for an Nginx worker to pick up the new list of upstream peers
-- it will take <the delay until controller POSTed the backend object to the Nginx endpoint> + BACKENDS_SYNC_INTERVAL
-- it will take <the delay until controller POSTed the backend object
-- to the Nginx endpoint> + BACKENDS_SYNC_INTERVAL
local BACKENDS_SYNC_INTERVAL = 1
local DEFAULT_LB_ALG = "round_robin"
@ -23,7 +32,8 @@ local function get_implementation(backend)
local implementation = IMPLEMENTATIONS[name]
if not implementation then
ngx.log(ngx.WARN, string.format("%s is not supported, falling back to %s", backend["load-balance"], DEFAULT_LB_ALG))
ngx.log(ngx.WARN, string.format("%s is not supported, falling back to %s",
backend["load-balance"], DEFAULT_LB_ALG))
implementation = IMPLEMENTATIONS[DEFAULT_LB_ALG]
end
@ -73,15 +83,14 @@ local function sync_backend(backend)
-- here we check if `balancer` is the instance of `implementation`
-- if it is not then we deduce LB algorithm has changed for the backend
if getmetatable(balancer) ~= implementation then
ngx.log(
ngx.INFO,
string.format("LB algorithm changed from %s to %s, resetting the instance", balancer.name, implementation.name)
)
ngx.log(ngx.INFO, string.format("LB algorithm changed from %s to %s, "
.. "resetting the instance", balancer.name, implementation.name))
balancers[backend.name] = implementation:new(backend)
return
end
local service_type = backend.service and backend.service.spec and backend.service.spec["type"]
local service_type = backend.service and backend.service.spec and
backend.service.spec["type"]
if service_type == "ExternalName" then
backend = resolve_external_names(backend)
end
@ -131,7 +140,8 @@ function _M.init_worker()
sync_backends() -- when worker starts, sync backends without delay
local _, err = ngx.timer.every(BACKENDS_SYNC_INTERVAL, sync_backends)
if err then
ngx.log(ngx.ERR, string.format("error when setting up timer.every for sync_backends: %s", tostring(err)))
ngx.log(ngx.ERR, string.format("error when setting up timer.every "
.. "for sync_backends: %s", tostring(err)))
end
end
@ -168,9 +178,9 @@ function _M.log()
balancer:after_balance()
end
if _TEST then
_M.get_implementation = get_implementation
_M.sync_backend = sync_backend
end
setmetatable(_M, {__index = {
get_implementation = get_implementation,
sync_backend = sync_backend,
}})
return _M

View file

@ -1,3 +1,5 @@
local ngx = ngx
local tostring = tostring
-- this is the Lua representation of TCP/UDP Configuration
local tcp_udp_configuration_data = ngx.shared.tcp_udp_configuration_data

View file

@ -1,9 +1,15 @@
-- Installs `mock` as the global `ngx` object for the duration of a test.
-- Fields absent from the mock fall through (via __index) to whatever the
-- real `ngx` was at call time, so only the overridden pieces need stubbing.
function mock_ngx(mock)
  setmetatable(mock, { __index = _G.ngx })
  _G.ngx = mock
end
describe("Balancer chash", function()
local balancer_chash = require("balancer.chash")
describe("balance()", function()
it("uses correct key for given backend", function()
_G.ngx = { var = { request_uri = "/alma/armud" }}
mock_ngx({var = { request_uri = "/alma/armud"}})
local balancer_chash = require("balancer.chash")
local resty_chash = package.loaded["resty.chash"]
resty_chash.new = function(self, nodes)

View file

@ -1,3 +1,8 @@
-- Swaps the global `ngx` for the given mock table. Lookups that the mock
-- does not define are delegated to the previous `ngx` through __index,
-- so tests only stub the fields they actually exercise.
function mock_ngx(mock)
  setmetatable(mock, { __index = _G.ngx })
  _G.ngx = mock
end
local function get_test_backend(n_endpoints)
local backend = {
@ -18,11 +23,15 @@ local function get_test_backend(n_endpoints)
end
describe("Balancer chash subset", function()
local balancer_chashsubset = require("balancer.chashsubset")
local balancer_chashsubset
before_each(function()
mock_ngx({ var = { request_uri = "/alma/armud" }})
balancer_chashsubset = require("balancer.chashsubset")
end)
describe("balance()", function()
it("returns peers from the same subset", function()
_G.ngx = { var = { request_uri = "/alma/armud" }}
local backend = get_test_backend(9)
@ -67,7 +76,6 @@ describe("Balancer chash subset", function()
end)
describe("new(backend)", function()
it("fills last subset correctly", function()
_G.ngx = { var = { request_uri = "/alma/armud" }}
local backend = get_test_backend(7)

View file

@ -34,6 +34,8 @@ describe("Balancer ewma", function()
before_each(function()
mock_ngx({ now = function() return ngx_now end, var = { balancer_ewma_score = -1 } })
package.loaded["balancer.ewma"] = nil
balancer_ewma = require("balancer.ewma")
backend = {
name = "namespace-service-port", ["load-balance"] = "ewma",

View file

@ -1,5 +1,5 @@
local sticky_balanced = require("balancer.sticky_balanced")
local sticky_persistent = require("balancer.sticky_persistent")
local sticky_balanced
local sticky_persistent
local cookie = require("resty.cookie")
local util = require("util")
@ -15,6 +15,14 @@ local function reset_ngx()
_G.ngx = original_ngx
end
-- Forces a clean re-require of the sticky balancer modules so each test
-- sees module state built against the currently mocked `ngx`.
-- Evicting "balancer.sticky" first matters: both concrete balancers
-- require it, so a stale cached base module must not be reused.
local function reset_sticky_balancer()
  local modules = { "balancer.sticky", "balancer.sticky_balanced", "balancer.sticky_persistent" }
  for _, name in ipairs(modules) do
    package.loaded[name] = nil
  end
  sticky_balanced = require("balancer.sticky_balanced")
  sticky_persistent = require("balancer.sticky_persistent")
end
function get_mocked_cookie_new()
local o = { value = nil }
local mock = {
@ -47,6 +55,7 @@ end
describe("Sticky", function()
before_each(function()
mock_ngx({ var = { location_path = "/", host = "test.com" } })
reset_sticky_balancer()
end)
after_each(function()
@ -302,11 +311,8 @@ describe("Sticky", function()
local mocked_cookie_new = cookie.new
before_each(function()
package.loaded["balancer.sticky_balanced"] = nil
package.loaded["balancer.sticky_persistent"] = nil
sticky_balanced = require("balancer.sticky_balanced")
sticky_persistent = require("balancer.sticky_persistent")
mock_ngx({ var = { location_path = "/", host = "test.com" } })
reset_sticky_balancer()
end)
after_each(function()
@ -459,6 +465,7 @@ describe("Sticky", function()
end)
it("returns a cookie without SameSite=None when user specifies samesite None and conditional samesite none with unsupported user agent", function()
mock_ngx({ var = { location_path = "/", host = "test.com" , http_user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36"} })
reset_sticky_balancer()
test_set_cookie(sticky_balanced, "None", true, "/", nil)
end)
end)

View file

@ -1,4 +1,3 @@
_G._TEST = true
local balancer, expected_implementations, backends
local original_ngx = ngx
@ -110,11 +109,12 @@ describe("Balancer", function()
},
}
mock_ngx({ var = { proxy_upstream_name = backend.name } })
reset_balancer()
balancer.sync_backend(backend)
balancer.sync_backend(canary_backend)
mock_ngx({ var = { proxy_upstream_name = backend.name } })
local expected = balancer.get_balancer()
for i = 1,50,1 do
@ -134,6 +134,7 @@ describe("Balancer", function()
}
}
mock_ngx({ var = { request_uri = "/" } })
reset_balancer()
end)
it("returns false when no trafficShapingPolicy is set", function()
@ -171,8 +172,6 @@ describe("Balancer", function()
context("canary by cookie", function()
it("returns correct result for given cookies", function()
backend.trafficShapingPolicy.cookie = "canaryCookie"
balancer.sync_backend(backend)
local test_patterns = {
{
case_title = "cookie_value is 'always'",
@ -204,6 +203,9 @@ describe("Balancer", function()
["cookie_" .. test_pattern.request_cookie_name] = test_pattern.request_cookie_value,
request_uri = "/"
}})
reset_balancer()
backend.trafficShapingPolicy.cookie = "canaryCookie"
balancer.sync_backend(backend)
assert.message("\nTest data pattern: " .. test_pattern.case_title)
.equal(test_pattern.expected_result, balancer.route_to_alternative_balancer(_balancer))
reset_ngx()
@ -275,14 +277,14 @@ describe("Balancer", function()
}
for _, test_pattern in pairs(test_patterns) do
reset_balancer()
backend.trafficShapingPolicy.header = test_pattern.header_name
backend.trafficShapingPolicy.headerValue = test_pattern.header_value
balancer.sync_backend(backend)
mock_ngx({ var = {
["http_" .. test_pattern.request_header_name] = test_pattern.request_header_value,
request_uri = "/"
}})
reset_balancer()
backend.trafficShapingPolicy.header = test_pattern.header_name
backend.trafficShapingPolicy.headerValue = test_pattern.header_value
balancer.sync_backend(backend)
assert.message("\nTest data pattern: " .. test_pattern.case_title)
.equal(test_pattern.expected_result, balancer.route_to_alternative_balancer(_balancer))
reset_ngx()

View file

@ -165,6 +165,9 @@ describe("Certificate", function()
_G.ngx = _ngx
ngx.ctx.cert_configured_for_current_request = nil
package.loaded["certificate"] = nil
certificate = require("certificate")
set_certificate("hostname", EXAMPLE_CERT, UUID)
end)

View file

@ -1,4 +1,3 @@
_G._TEST = true
local cjson = require("cjson")
local configuration = require("configuration")
@ -48,12 +47,12 @@ end
describe("Configuration", function()
before_each(function()
_G.ngx = get_mocked_ngx_env()
package.loaded["configuration"] = nil
configuration = require("configuration")
end)
after_each(function()
_G.ngx = unmocked_ngx
package.loaded["configuration"] = nil
configuration = require("configuration")
end)
describe("Backends", function()

View file

@ -1,4 +1,3 @@
_G._TEST = true
local original_ngx = ngx
local function reset_ngx()
@ -31,8 +30,8 @@ describe("Monitor", function()
end)
it("batches metrics", function()
local monitor = require("monitor")
mock_ngx({ var = {} })
local monitor = require("monitor")
for i = 1,10,1 do
monitor.call()
@ -44,8 +43,8 @@ describe("Monitor", function()
describe("flush", function()
it("short circuits when premmature is true (when worker is shutting down)", function()
local tcp_mock = mock_ngx_socket_tcp()
local monitor = require("monitor")
mock_ngx({ var = {} })
local monitor = require("monitor")
for i = 1,10,1 do
monitor.call()
@ -64,7 +63,6 @@ describe("Monitor", function()
it("JSON encodes and sends the batched metrics", function()
local tcp_mock = mock_ngx_socket_tcp()
local monitor = require("monitor")
local ngx_var_mock = {
host = "example.com",
@ -86,6 +84,7 @@ describe("Monitor", function()
upstream_status = "200",
}
mock_ngx({ var = ngx_var_mock })
local monitor = require("monitor")
monitor.call()
local ngx_var_mock1 = ngx_var_mock

View file

@ -10,7 +10,6 @@ do
-- if there's more constants need to be whitelisted for test runs, add here.
local GLOBALS_ALLOWED_IN_TEST = {
_TEST = true,
helpers = true,
}
local newindex = function(table, key, value)
@ -35,7 +34,6 @@ do
end
_G.helpers = require("test.helpers")
_G._TEST = true
local ffi = require("ffi")
local lua_ingress = require("lua_ingress")

View file

@ -1,5 +1,5 @@
local original_ngx = ngx
local util = require("util")
local util
local function reset_ngx()
_G.ngx = original_ngx
@ -20,6 +20,7 @@ describe("utility", function()
describe("ngx_complex_value", function()
before_each(function()
mock_ngx({ var = { remote_addr = "192.168.1.1", [1] = "nginx/regexp/1/group/capturing" } })
util = require("util")
end)
local ngx_complex_value = function(data)

View file

@ -1,3 +1,4 @@
local ngx = ngx
local string = string
local string_len = string.len
local string_format = string.format
@ -117,7 +118,8 @@ function _M.diff_endpoints(old, new)
end
-- this implementation is taken from
-- https://web.archive.org/web/20131225070434/http://snippets.luacode.org/snippets/Deep_Comparison_of_Two_Values_3
-- https://web.archive.org/web/20131225070434/http://snippets.
-- luacode.org/snippets/Deep_Comparison_of_Two_Values_3
-- and modified for use in this project
local function deep_compare(t1, t2, ignore_mt)
local ty1 = type(t1)

View file

@ -13,7 +13,8 @@ local tostring = tostring
local _M = {}
local CACHE_SIZE = 10000
local MAXIMUM_TTL_VALUE = 2147483647 -- maximum value according to https://tools.ietf.org/html/rfc2181
-- maximum value according to https://tools.ietf.org/html/rfc2181
local MAXIMUM_TTL_VALUE = 2147483647
-- for every host we will try two queries for the following types with the order set here
local QTYPES_TO_CHECK = { resolver.TYPE_A, resolver.TYPE_AAAA }
@ -59,7 +60,8 @@ local function resolve_host_for_qtype(r, host, qtype)
end
if answers.errcode then
return nil, -1, string_format("server returned error code: %s: %s", answers.errcode, answers.errstr)
return nil, -1, string_format("server returned error code: %s: %s",
answers.errcode, answers.errstr)
end
local addresses, ttl = a_records_and_min_ttl(answers)
@ -116,7 +118,8 @@ function _M.lookup(host)
return addresses
end
ngx_log(ngx_ERR, "failed to query the DNS server for ", host, ":\n", table_concat(dns_errors, "\n"))
ngx_log(ngx_ERR, "failed to query the DNS server for ",
host, ":\n", table_concat(dns_errors, "\n"))
return { host }
end
@ -135,7 +138,8 @@ function _M.lookup(host)
end
for i = search_start, search_end, 1 do
local new_host = resolv_conf.search[i] and string_format("%s.%s", host, resolv_conf.search[i]) or host
local new_host = resolv_conf.search[i] and
string_format("%s.%s", host, resolv_conf.search[i]) or host
addresses, ttl, dns_errors = resolve_host(r, new_host)
if addresses then
@ -145,14 +149,13 @@ function _M.lookup(host)
end
if #dns_errors > 0 then
ngx_log(ngx_ERR, "failed to query the DNS server for ", host, ":\n", table_concat(dns_errors, "\n"))
ngx_log(ngx_ERR, "failed to query the DNS server for ",
host, ":\n", table_concat(dns_errors, "\n"))
end
return { host }
end
if _TEST then
_M._cache = cache
end
setmetatable(_M, {__index = { _cache = cache }})
return _M

View file

@ -1,5 +1,9 @@
local math_random = require("math").random
local util_tablelength = require("util").tablelength
local ngx = ngx
local pairs = pairs
local string = string
local setmetatable = setmetatable
local _M = {}
@ -41,7 +45,8 @@ local function get_random_node(map)
count = count + 1
end
ngx.log(ngx.ERR, string.format("Failed to find node %d of %d! This is a bug, please report!", index, size))
ngx.log(ngx.ERR, string.format("Failed to find node %d of %d! "
.. "This is a bug, please report!", index, size))
return nil, nil
end
@ -55,7 +60,8 @@ end
-- To make sure hash keys are reproducible on different ingress controller instances the salt
-- needs to be shared and therefore is not simply generated randomly.
--
-- @tparam {[string]=number} endpoints A table with the node endpoint as a key and its weight as a value.
-- @tparam {[string]=number} endpoints A table with the node endpoint
-- as a key and its weight as a value.
-- @tparam[opt] string hash_salt A optional hash salt that will be used to obfuscate the hash key.
function _M.new(self, endpoints, hash_salt)
if hash_salt == nil then

View file

@ -1,5 +1,6 @@
local ngx_re_split = require("ngx.re").split
local string_format = string.format
local tonumber = tonumber
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR

View file

@ -1,3 +1,5 @@
local string = string
local _M = {}
-- determines whether to apply a SameSite=None attribute

View file

@ -1,3 +1,5 @@
local ipairs = ipairs
local _M = {}
-- splits strings into host and port
@ -34,7 +36,8 @@ function _M.split_upstream_var(var)
return t
end
-- Splits an NGINX $upstream_addr and returns an array of tables with a `host` and `port` key-value pair.
-- Splits an NGINX $upstream_addr and returns an array of tables
-- with a `host` and `port` key-value pair.
function _M.split_upstream_addr(addrs_str)
if not addrs_str then
return nil, nil