refactor balancer into more testable and extensible interface

This commit is contained in:
Elvin Efendi 2018-05-18 17:36:43 -04:00
parent 1b5db4b3b0
commit e9dc275b81
20 changed files with 368 additions and 467 deletions

View file

@ -1,3 +1,5 @@
std = 'ngx_lua'
globals = {'_'}
globals = {
'_TEST'
}
exclude_files = {'./rootfs/etc/nginx/lua/test/**/*.lua'}

View file

@ -28,7 +28,6 @@ env:
- CHANGE_MINIKUBE_NONE_USER=true
- KUBERNETES_VERSION=v1.10.0
- DOCKER=docker
- BUSTED_VERSION=2.0.rc12
- GH_REF=github.com/kubernetes/ingress-nginx
- secure: LIS2XpZufWTcJ53jiRsSZy2Gi1EUJ1XmLg7z3f2ZHeMnyG2Jhk3GW4vod1FNru+PY4PWgddLdCdIl+jqOYXndFlbdAWF3/Oy5fEkYLXdYV7tdlHcPWDkqNFrfiyZ4guChN+b2Nk6FqU7o5fsZAIR7VAbgqNRF5XMo9Mhn/vhDCQRcnbXy7uq7JTrYUkqDbQoyYvT6b480GCY5gags1zp/xZfPDNZEe936o8i5IPTyiykRyNOXN/AH6kd3pR5e1xYgcvJ9KpSVPghcwFE7kJ4fOVMRhRG5ML+IyML+xD0jX43EMNoqRKZ/HS42kIMCInFbJEcxVde7DPNBZ7Y3GAqh7HO6qrE70Dn3ha6DID6zCoH2ArW39BxG4zempjn2VxYoMRGREyZszWQb++dwGoHmo5FHt6zvIrYBG0dA0H8ja9VkZkjFwtYTGHU1ooPzUfJK4O4VBayV8LqZibyZQR+GrmyQc0aagUY7J/fe4A2PJyI4DbkeZ7GX1ELj0ciDz4urQSzUc8l/T3aU3X+FuJItjgYtMLPmqcjA5uifDCtutE8Z9L2gSpanqUdvLSOozuxPho/KNl+2YlF7fXqPW3LnRf5mHD+NbOff306pvKlHJOb2Vmth+HBQ1XDzt/Cy5+sfwS3E0Vmh6UTq/NtkUXxwH10BDMF7FMVlQ4zdHQvyZ0=
- secure: rKDoy9IYYYy0fYBs4+9mwuBVq/TcxfFwMfE0ywYWhUUdgzrUYSJAwpoe/96EQ4YmESUefwC2nDNq4G3XzJKYOWf83PaIveb9Z//zmMrCQXjDuDBDLpwV3sXSh7evXiVDohJz4ogBCeMRUCMKYsyKBM9yWfa/iu+yI92dbphpK9peOKW6yBc0uspJlln4swN3GS2WT9LVuPY2Azv9U2UqrXufOPDKG/qEb/Vrn4yZ2lR/50r2k45e9nSvDoByvr10V8ubM5Zc0iP0vBuAUVRdByv6N53Q4gaBGapY6SxhIjIPC/h0rNnuT9EXp7MWaPT5FmBxLt9wnyleT9QhZJnFyaBYqFgcz/DKifYQkryY4M5dLMo/Rt3yATyAy8Y0df1TOoV2dKdqwOOwQ8bXB1wDfyrGxmQj9HY4Ffnphx3wPE1a+Sjuh+S5Epm7XJbPx5pZJqNO2hd4sTbk0Xp3gpPbihny2r/jtNwHl0wpFCfOM68RNrsVRlIwG3UhzbZvblbQ/M/mmWCdgzINjt07I2SGCJxfKG0e98Q49SKUoDoOgQTTRDqTC9IgOEDxyfAkT0Vr6BtlP88Nsgnf6kmboyigBrRAiaDQGTxn3SP6LnQI3CeopaRDYvFZe/rTwPXE9XlKoTn9FTWnAqF3MuWaLslDcDKYEh7OaYJjF01piu6g4Nc=

View file

@ -1,10 +1,10 @@
local ngx_balancer = require("ngx.balancer")
local json = require("cjson")
local configuration = require("configuration")
local util = require("util")
local lrucache = require("resty.lrucache")
local round_robin = require("balancer.round_robin")
local chash = require("balancer.chash")
local sticky = require("balancer.sticky")
local ewma = require("balancer.ewma")
local resty_balancer = require("balancer.resty")
-- measured in seconds
-- for an Nginx worker to pick up the new list of upstream peers
@ -12,70 +12,51 @@ local resty_balancer = require("balancer.resty")
local BACKENDS_SYNC_INTERVAL = 1
local DEFAULT_LB_ALG = "round_robin"
local IMPLEMENTATIONS = {
round_robin = round_robin,
chash = chash,
sticky = sticky,
ewma = ewma,
}
local _M = {}
local balancers = {}
-- TODO(elvinefendi) we can probably avoid storing all backends here. We already store them in their respective
-- load balancer implementations
local backends, backends_err = lrucache.new(1024)
if not backends then
return error("failed to create the cache for backends: " .. (backends_err or "unknown"))
end
local function get_implementation(backend)
local name = backend["load-balance"] or DEFAULT_LB_ALG
local function get_current_backend()
local backend_name = ngx.var.proxy_upstream_name
local backend = backends:get(backend_name)
if not backend then
-- TODO(elvinefendi) maybe force backend sync here?
ngx.log(ngx.WARN, "no backend configuration found for " .. tostring(backend_name))
if backend["sessionAffinityConfig"] and backend["sessionAffinityConfig"]["name"] == "cookie" then
name = "sticky"
elseif backend["upstream-hash-by"] then
name = "chash"
end
return backend
end
local function get_balancer(backend)
if not backend then
return nil
local implementation = IMPLEMENTATIONS[name]
if not implementation then
ngx.log(ngx.WARN, string.format("%s is not supported, falling back to %s", backend["load-balance"], DEFAULT_LB_ALG))
implementation = IMPLEMENTATIONS[DEFAULT_LB_ALG]
end
local lb_alg = backend["load-balance"] or DEFAULT_LB_ALG
if resty_balancer.is_applicable(backend) then
return resty_balancer
elseif lb_alg ~= "ewma" then
if lb_alg ~= DEFAULT_LB_ALG then
ngx.log(ngx.WARN,
string.format("%s is not supported, falling back to %s", backend["load-balance"], DEFAULT_LB_ALG))
end
return resty_balancer
end
return ewma
end
local function balance()
local backend = get_current_backend()
local balancer = get_balancer(backend)
if not balancer then
return nil, nil
end
local endpoint = balancer.balance(backend)
if not endpoint then
return nil, nil
end
return endpoint.address, endpoint.port
return implementation
end
local function sync_backend(backend)
backends:set(backend.name, backend)
local implementation = get_implementation(backend)
local balancer = balancers[backend.name]
local balancer = get_balancer(backend)
if not balancer then
balancers[backend.name] = implementation:new(backend)
return
end
balancer.sync(backend)
if getmetatable(balancer) ~= implementation then
ngx.log(ngx.INFO,
string.format("LB algorithm changed from %s to %s, resetting the instance", balancer.name, implementation.name))
balancers[backend.name] = implementation:new(backend)
return
end
balancer:sync(backend)
end
local function sync_backends()
@ -91,29 +72,10 @@ local function sync_backends()
end
for _, new_backend in pairs(new_backends) do
local backend = backends:get(new_backend.name)
local backend_changed = true
if backend then
backend_changed = not util.deep_compare(backend, new_backend)
end
if backend_changed then
sync_backend(new_backend)
end
sync_backend(new_backend)
end
end
local function after_balance()
local backend = get_current_backend()
local balancer = get_balancer(backend)
if not balancer then
return
end
balancer.after_balance()
end
function _M.init_worker()
sync_backends() -- when worker starts, sync backends without delay
local _, err = ngx.timer.every(BACKENDS_SYNC_INTERVAL, sync_backends)
@ -124,15 +86,24 @@ end
function _M.call()
local phase = ngx.get_phase()
if phase == "log" then
after_balance()
if phase ~= "log" and phase ~= "balancer" then
ngx.log(ngx.ERR, "must be called in balancer or log, but was called in: " .. phase)
return
end
if phase ~= "balancer" then
return error("must be called in balancer or log, but was called in: " .. phase)
local backend_name = ngx.var.proxy_upstream_name
local balancer = balancers[backend_name]
if not balancer then
ngx.status = ngx.HTTP_SERVICE_UNAVAILABLE
return ngx.exit(ngx.status)
end
local host, port = balance()
if phase == "log" then
balancer:after_balance()
return
end
local host, port = balancer:balance()
if not host then
ngx.status = ngx.HTTP_SERVICE_UNAVAILABLE
return ngx.exit(ngx.status)
@ -142,8 +113,13 @@ function _M.call()
local ok, err = ngx_balancer.set_current_peer(host, port)
if not ok then
ngx.log(ngx.ERR, string.format("error while setting current upstream peer to %s", tostring(err)))
ngx.log(ngx.ERR, "error while setting current upstream peer to " .. tostring(err))
end
end
if _TEST then
_M.get_implementation = get_implementation
_M.sync_backend = sync_backend
end
return _M

View file

@ -0,0 +1,21 @@
local balancer_resty = require("balancer.resty")
local resty_chash = require("resty.chash")
local util = require("util")

-- Consistent-hash load balancer built on resty.chash.
-- Requests are hashed by the nginx variable named in the backend's
-- "upstream-hash-by" annotation, so equal keys map to the same endpoint.
local _M = balancer_resty:new({ factory = resty_chash, name = "chash" })

-- Create a chash balancer instance for the given backend.
-- Builds the resty.chash ring from the backend's endpoints and records
-- which nginx variable to hash by.
function _M.new(self, backend)
  local nodes = util.get_nodes(backend.endpoints)
  local o = { instance = self.factory:new(nodes), hash_by = backend["upstream-hash-by"] }
  setmetatable(o, self)
  self.__index = self
  return o
end

-- Pick an endpoint for the current request.
-- Resolves the configured nginx variable, looks it up on the ring, and
-- returns the endpoint's address and port as two values (split from the
-- "host:port" string stored as the ring node key).
function _M.balance(self)
  local key = util.lua_ngx_var(self.hash_by)
  local endpoint_string = self.instance:find(key)
  return util.split_pair(endpoint_string, ":")
end

return _M

View file

@ -14,7 +14,7 @@ local PICK_SET_SIZE = 2
local ewma_lock = resty_lock:new("locks", {timeout = 0, exptime = 0.1})
local _M = {}
local _M = { name = "ewma" }
local function lock(upstream)
local _, err = ewma_lock:lock(upstream .. LOCK_KEY)
@ -117,17 +117,18 @@ local function pick_and_score(peers, k)
return peers[lowest_score_index]
end
function _M.balance(backend)
local peers = backend.endpoints
function _M.balance(self)
local peers = self.peers
if #peers == 1 then
return peers[1]
end
local k = (#peers < PICK_SET_SIZE) and #peers or PICK_SET_SIZE
local peer_copy = util.deepcopy(peers)
return pick_and_score(peer_copy, k)
local endpoint = pick_and_score(peer_copy, k)
return endpoint.address, endpoint.port
end
function _M.after_balance()
function _M.after_balance(_)
local response_time = tonumber(util.get_first_value(ngx.var.upstream_response_time)) or 0
local connect_time = tonumber(util.get_first_value(ngx.var.upstream_connect_time)) or 0
local rtt = connect_time + response_time
@ -139,10 +140,21 @@ function _M.after_balance()
get_or_update_ewma(upstream, rtt, true)
end
function _M.sync(_)
function _M.sync(self, backend)
local changed = not util.deep_compare(self.peers, backend.endpoints)
if not changed then
return
end
-- TODO: Reset state of EWMA per backend
ngx.shared.balancer_ewma:flush_all()
ngx.shared.balancer_ewma_last_touched_at:flush_all()
end
function _M.new(self, backend)
local o = { peers = backend.endpoints }
setmetatable(o, self)
self.__index = self
return o
end
return _M

View file

@ -1,142 +1,25 @@
local resty_roundrobin = require("resty.roundrobin")
local resty_chash = require("resty.chash")
local util = require("util")
local ck = require("resty.cookie")
local _M = {}
local instances = {}
local function get_resty_balancer_nodes(endpoints)
local nodes = {}
local weight = 1
for _, endpoint in pairs(endpoints) do
local endpoint_string = endpoint.address .. ":" .. endpoint.port
nodes[endpoint_string] = weight
end
return nodes
function _M.new(self, o)
o = o or {}
setmetatable(o, self)
self.__index = self
return o
end
local function init_resty_balancer(factory, instance, endpoints)
local nodes = get_resty_balancer_nodes(endpoints)
if instance then
instance:reinit(nodes)
else
instance = factory:new(nodes)
function _M.sync(self, backend)
local nodes = util.get_nodes(backend.endpoints)
local changed = not util.deep_compare(self.instance.nodes, nodes)
if not changed then
return
end
return instance
self.instance:reinit(nodes)
end
local function is_sticky(backend)
return backend["sessionAffinityConfig"] and backend["sessionAffinityConfig"]["name"] == "cookie"
end
local function cookie_name(backend)
return backend["sessionAffinityConfig"]["cookieSessionAffinity"]["name"] or "route"
end
local function encrypted_endpoint_string(backend, endpoint_string)
local encrypted, err
if backend["sessionAffinityConfig"]["cookieSessionAffinity"]["hash"] == "sha1" then
encrypted, err = util.sha1_digest(endpoint_string)
else
encrypted, err = util.md5_digest(endpoint_string)
end
if err ~= nil then
ngx.log(ngx.ERR, err)
end
return encrypted
end
local function set_cookie(backend, value)
local cookie, err = ck:new()
if not cookie then
ngx.log(ngx.ERR, err)
end
local ok
ok, err = cookie:set({
key = cookie_name(backend),
value = value,
path = "/",
domain = ngx.var.host,
httponly = true,
})
if not ok then
ngx.log(ngx.ERR, err)
end
end
local function pick_random(instance)
local index = math.random(instance.npoints)
return instance:next(index)
end
local function sticky_endpoint_string(instance, backend)
local cookie, err = ck:new()
if not cookie then
ngx.log(ngx.ERR, err)
return pick_random(instance)
end
local key = cookie:get(cookie_name(backend))
if not key then
local tmp_endpoint_string = pick_random(instance)
key = encrypted_endpoint_string(backend, tmp_endpoint_string)
set_cookie(backend, key)
end
return instance:find(key)
end
function _M.is_applicable(backend)
return is_sticky(backend) or backend["upstream-hash-by"] or backend["load-balance"] == "round_robin"
end
function _M.balance(backend)
local instance = instances[backend.name]
if not instance then
ngx.log(ngx.ERR, "no LB algorithm instance was found")
return nil
end
local endpoint_string
if is_sticky(backend) then
endpoint_string = sticky_endpoint_string(instance, backend)
elseif backend["upstream-hash-by"] then
local key = util.lua_ngx_var(backend["upstream-hash-by"])
endpoint_string = instance:find(key)
else
endpoint_string = instance:find()
end
local address, port = util.split_pair(endpoint_string, ":")
return { address = address, port = port }
end
function _M.sync(backend)
local instance = instances[backend.name]
local factory = resty_roundrobin
if is_sticky(backend) or backend["upstream-hash-by"] then
factory = resty_chash
end
if instance then
local mt = getmetatable(instance)
if mt.__index ~= factory then
ngx.log(ngx.INFO, "LB algorithm has been changed, resetting the instance")
instance = nil
end
end
instances[backend.name] = init_resty_balancer(factory, instance, backend.endpoints)
end
function _M.after_balance()
function _M.after_balance(_)
end
return _M

View file

@ -0,0 +1,20 @@
local balancer_resty = require("balancer.resty")
local resty_roundrobin = require("resty.roundrobin")
local util = require("util")

-- Weighted round-robin load balancer built on resty.roundrobin.
local _M = balancer_resty:new({ factory = resty_roundrobin, name = "round_robin" })

-- Create a round-robin balancer instance for the given backend,
-- seeding the resty.roundrobin rotation with the backend's endpoints.
function _M.new(self, backend)
  local nodes = util.get_nodes(backend.endpoints)
  local o = { instance = self.factory:new(nodes) }
  setmetatable(o, self)
  self.__index = self
  return o
end

-- Pick the next endpoint in rotation.
-- Returns the endpoint's address and port as two values (split from the
-- "host:port" string stored as the node key).
function _M.balance(self)
  local endpoint_string = self.instance:find()
  return util.split_pair(endpoint_string, ":")
end

return _M

View file

@ -0,0 +1,80 @@
local balancer_resty = require("balancer.resty")
local resty_chash = require("resty.chash")
local util = require("util")
local ck = require("resty.cookie")

-- Cookie-based session-affinity ("sticky") balancer. A consistent-hash ring
-- is keyed by a per-client cookie value so a client keeps reaching the same
-- endpoint as long as that endpoint stays in the ring.
local _M = balancer_resty:new({ factory = resty_chash, name = "sticky" })

-- Create a sticky balancer instance for the given backend.
-- Reads the affinity cookie name (default "route") and hash algorithm
-- (md5 unless "sha1" is configured) from the backend's sessionAffinityConfig.
function _M.new(self, backend)
  local nodes = util.get_nodes(backend.endpoints)

  local digest_func = util.md5_digest
  if backend["sessionAffinityConfig"]["cookieSessionAffinity"]["hash"] == "sha1" then
    digest_func = util.sha1_digest
  end

  local o = {
    instance = self.factory:new(nodes),
    cookie_name = backend["sessionAffinityConfig"]["cookieSessionAffinity"]["name"] or "route",
    digest_func = digest_func,
  }
  setmetatable(o, self)
  self.__index = self
  return o
end

-- Digest an "address:port" endpoint string with the configured hash so the
-- cookie value does not expose the raw upstream address. May return nil when
-- the digest fails; the error is logged.
local function encrypted_endpoint_string(self, endpoint_string)
  local encrypted, err = self.digest_func(endpoint_string)
  if err ~= nil then
    ngx.log(ngx.ERR, err)
  end
  return encrypted
end

-- Send the affinity cookie to the client. Failures are logged but do not
-- abort the request: affinity simply will not stick for this response.
local function set_cookie(self, value)
  local cookie, err = ck:new()
  if not cookie then
    ngx.log(ngx.ERR, err)
    -- bugfix: without this return, cookie:set() below would be invoked on a
    -- nil cookie object and crash the request
    return
  end

  local ok
  ok, err = cookie:set({
    key = self.cookie_name,
    value = value,
    path = "/",
    domain = ngx.var.host,
    httponly = true,
  })
  if not ok then
    ngx.log(ngx.ERR, err)
  end
end

-- Pick a uniformly random node from the hash ring.
local function pick_random(instance)
  local index = math.random(instance.npoints)
  return instance:next(index)
end

-- Resolve the ring key for the current request: use the client's affinity
-- cookie when present, otherwise pick a random endpoint, digest it, and set
-- that digest as a new affinity cookie.
local function sticky_endpoint_string(self)
  local cookie, err = ck:new()
  if not cookie then
    ngx.log(ngx.ERR, err)
    -- no cookie jar available; degrade to a random endpoint for this request
    return pick_random(self.instance)
  end

  local key = cookie:get(self.cookie_name)
  if not key then
    local tmp_endpoint_string = pick_random(self.instance)
    key = encrypted_endpoint_string(self, tmp_endpoint_string)
    set_cookie(self, key)
  end

  return self.instance:find(key)
end

-- Pick an endpoint for the current request.
-- Returns the endpoint's address and port as two values.
function _M.balance(self)
  local endpoint_string = sticky_endpoint_string(self)
  return util.split_pair(endpoint_string, ":")
end

return _M

View file

@ -1,6 +1,31 @@
local cwd = io.popen("pwd"):read('*l')
package.path = cwd .. "/rootfs/etc/nginx/lua/?.lua;" .. package.path
package.path = "./rootfs/etc/nginx/lua/?.lua;./rootfs/etc/nginx/lua/test/mocks/?.lua;" .. package.path
describe("[chash_test]", function()
-- TODO(elvinefendi) add unit tests
describe("Balancer chash", function()
local balancer_chash = require("balancer.chash")
describe("balance()", function()
it("uses correct key for given backend", function()
_G.ngx = { var = { request_uri = "/alma/armud" }}
local resty_chash = package.loaded["resty.chash"]
resty_chash.new = function(self, nodes)
return {
find = function(self, key)
assert.equal("/alma/armud", key)
return "10.184.7.40:8080"
end
}
end
local backend = {
name = "my-dummy-backend", ["upstream-hash-by"] = "$request_uri",
endpoints = { { address = "10.184.7.40", port = "8080", maxFails = 0, failTimeout = 0 } }
}
local instance = balancer_chash:new(backend)
local host, port = instance:balance()
assert.equal("10.184.7.40", host)
assert.equal("8080", port)
end)
end)
end)

View file

@ -1,269 +1,118 @@
local cwd = io.popen("pwd"):read('*l')
package.path = cwd .. "/rootfs/etc/nginx/lua/?.lua;" .. package.path
package.path = "./rootfs/etc/nginx/lua/?.lua;./rootfs/etc/nginx/lua/test/mocks/?.lua;" .. package.path
_G._TEST = true
local balancer, mock_cjson, mock_config, mock_backends, mock_lrucache, lock, mock_lock,
mock_ngx_balancer, mock_ewma
local function dict_generator(vals)
local _dict = { __index = {
get = function(self, key)
return self._vals[key]
end,
set = function(self, key, val)
self._vals[key] = val
return true, nil, false
end,
delete = function(self, key)
return self:set(key, nil)
end,
flush_all = function(self)
return
end,
_vals = vals
}
}
return setmetatable({_vals = vals}, _dict)
end
local function backend_generator(name, endpoints, lb_alg)
return {
name = name,
endpoints = endpoints,
["load-balance"] = lb_alg,
}
end
local default_endpoints = {
{address = "000.000.000", port = "8080"},
{address = "000.000.001", port = "8081"},
local _ngx = {
shared = {},
log = function(...) end,
}
_G.ngx = _ngx
local default_backends = {
mock_rr_backend = backend_generator("mock_rr_backend", default_endpoints, "round_robin"),
mock_ewma_backend = backend_generator("mock_ewma_backend", default_endpoints, "ewma"),
}
local balancer, expected_implementations, backends
local function init()
mock_cjson = {}
mock_config = {}
mock_ngx_balancer = {}
mock_ewma = {
sync = function(b) return end
}
mock_resty_balancer = {
sync = function(b) return end,
after_balance = function () return end
}
mock_backends = dict_generator(default_backends)
mock_lrucache = {
new = function () return mock_backends end
}
lock = {
lock = function() return end,
unlock = function() return end
}
mock_lock = {
new = function () return lock end
}
_G.ngx = {
shared = {
balancer_ewma = dict_generator({}),
balancer_ewma_last_touched_at = dict_generator({}),
},
var = {},
log = function() return end,
WARN = "warn",
INFO = "info",
ERR = "err",
HTTP_SERVICE_UNAVAILABLE = 503,
exit = function(status) return end,
}
package.loaded["ngx.balancer"] = mock_ngx_balancer
package.loaded["resty.lrucache"] = mock_lrucache
package.loaded["resty.string"] = {}
package.loaded["resty.sha1"] = {}
package.loaded["resty.md5"] = {}
package.loaded["resty.cookie"] = {}
package.loaded["cjson"] = mock_cjson
package.loaded["resty.lock"] = mock_lock
package.loaded["balancer.ewma"] = mock_ewma
package.loaded["balancer.resty"] = mock_resty_balancer
package.loaded["configuration"] = mock_config
local function reset_balancer()
package.loaded["balancer"] = nil
balancer = require("balancer")
end
describe("[balancer_test]", function()
setup(function()
init()
local function reset_expected_implementations()
expected_implementations = {
["access-router-production-web-80"] = package.loaded["balancer.round_robin"],
["my-dummy-app-1"] = package.loaded["balancer.round_robin"],
["my-dummy-app-2"] = package.loaded["balancer.chash"],
["my-dummy-app-3"] = package.loaded["balancer.sticky"],
["my-dummy-app-4"] = package.loaded["balancer.ewma"],
["my-dummy-app-5"] = package.loaded["balancer.sticky"],
}
end
local function reset_backends()
backends = {
{
name = "access-router-production-web-80", port = "80", secure = false,
secureCACert = { secret = "", caFilename = "", pemSha = "" },
sslPassthrough = false,
endpoints = {
{ address = "10.184.7.40", port = "8080", maxFails = 0, failTimeout = 0 },
{ address = "10.184.97.100", port = "8080", maxFails = 0, failTimeout = 0 },
{ address = "10.184.98.239", port = "8080", maxFails = 0, failTimeout = 0 },
},
sessionAffinityConfig = { name = "", cookieSessionAffinity = { name = "", hash = "" } },
},
{ name = "my-dummy-app-1", ["load-balance"] = "round_robin", },
{ name = "my-dummy-app-2", ["load-balance"] = "round_robin", ["upstream-hash-by"] = "$request_uri", },
{
name = "my-dummy-app-3", ["load-balance"] = "ewma",
sessionAffinityConfig = { name = "cookie", cookieSessionAffinity = { name = "route", hash = "sha1" } }
},
{ name = "my-dummy-app-4", ["load-balance"] = "ewma", },
{
name = "my-dummy-app-5", ["load-balance"] = "ewma", ["upstream-hash-by"] = "$request_uri",
sessionAffinityConfig = { name = "cookie", cookieSessionAffinity = { name = "route", hash = "sha1" } }
},
}
end
describe("Balancer", function()
before_each(function()
reset_balancer()
reset_expected_implementations()
reset_backends()
end)
teardown(function()
local packages = {"ngx.balancer", "resty.lrucache","cjson", "resty.lock", "balancer.ewma","configuration"}
for i, package_name in ipairs(packages) do
package.loaded[package_name] = nil
end
end)
describe("balancer.call():", function()
setup(function()
mock_ngx_balancer.set_more_tries = function () return end
mock_ngx_balancer.set_current_peer = function () return end
mock_ewma.after_balance = function () return end
describe("get_implementation()", function()
it("returns correct implementation for given backend", function()
for _, backend in pairs(backends) do
local expected_implementation = expected_implementations[backend.name]
local implementation = balancer.get_implementation(backend)
assert.equal(expected_implementation, balancer.get_implementation(backend))
end
end)
end)
describe("sync_backend()", function()
local backend, implementation
before_each(function()
_G.ngx.get_phase = nil
_G.ngx.var = {}
mock_backends._vals = default_backends
backend = backends[1]
implementation = expected_implementations[backend.name]
end)
describe("phase=log", function()
before_each(function()
_G.ngx.get_phase = function() return "log" end
end)
it("initializes balancer for given backend", function()
local s = spy.on(implementation, "new")
it("lb_alg=ewma, ewma_after_balance was called", function()
_G.ngx.var.proxy_upstream_name = "mock_ewma_backend"
local ewma_after_balance_spy = spy.on(mock_ewma, "after_balance")
mock_resty_balancer.is_applicable = function(b) return false end
assert.has_no_errors(balancer.call)
assert.spy(ewma_after_balance_spy).was_called()
end)
it("lb_alg=round_robin, ewma_after_balance was not called", function()
_G.ngx.var.proxy_upstream_name = "mock_rr_backend"
local ewma_after_balance_spy = spy.on(mock_ewma, "after_balance")
mock_resty_balancer.is_applicable = function(b) return true end
assert.has_no_errors(balancer.call)
assert.spy(ewma_after_balance_spy).was_not_called()
end)
assert.has_no.errors(function() balancer.sync_backend(backend) end)
assert.spy(s).was_called_with(implementation, backend)
end)
describe("phase=balancer", function()
before_each(function()
_G.ngx.get_phase = function() return "balancer" end
end)
it("replaces the existing balancer when load balancing config changes for backend", function()
assert.has_no.errors(function() balancer.sync_backend(backend) end)
it("lb_alg=round_robin, peer was successfully set", function()
_G.ngx.var.proxy_upstream_name = "mock_rr_backend"
backend["load-balance"] = "ewma"
local new_implementation = package.loaded["balancer.ewma"]
local backend_get_spy = spy.on(mock_backends, "get")
local set_more_tries_spy = spy.on(mock_ngx_balancer, "set_more_tries")
local set_current_peer_spy = spy.on(mock_ngx_balancer, "set_current_peer")
local s_old = spy.on(implementation, "new")
local s = spy.on(new_implementation, "new")
local s_ngx_log = spy.on(_G.ngx, "log")
mock_resty_balancer.balance = function(b) return {address = "000.000.000", port = "8080"} end
assert.has_no_errors(balancer.call)
assert.spy(backend_get_spy).was_called_with(match.is_table(), "mock_rr_backend")
assert.spy(set_more_tries_spy).was_called_with(1)
assert.spy(set_current_peer_spy).was_called_with("000.000.000", "8080")
mock_backends.get:clear()
mock_ngx_balancer.set_more_tries:clear()
mock_ngx_balancer.set_current_peer:clear()
mock_resty_balancer.balance = function(b) return {address = "000.000.001", port = "8081"} end
assert.has_no_errors(balancer.call)
assert.spy(backend_get_spy).was_called_with(match.is_table(), "mock_rr_backend")
assert.spy(set_more_tries_spy).was_called_with(1)
assert.spy(set_current_peer_spy).was_called_with("000.000.001", "8081")
end)
it("lb_alg=ewma, peer was successfully set", function()
_G.ngx.var.proxy_upstream_name = "mock_ewma_backend"
mock_ewma.balance = function(b) return {address = "000.000.111", port = "8083"} end
local backend_get_spy = spy.on(mock_backends, "get")
local set_more_tries_spy = spy.on(mock_ngx_balancer, "set_more_tries")
local set_current_peer_spy = spy.on(mock_ngx_balancer, "set_current_peer")
mock_resty_balancer.is_applicable = function(b) return false end
assert.has_no_errors(balancer.call)
assert.spy(backend_get_spy).was_called_with(match.is_table(), "mock_ewma_backend")
assert.spy(set_more_tries_spy).was_called_with(1)
assert.spy(set_current_peer_spy).was_called_with("000.000.111", "8083")
end)
it("fails when no backend exists", function()
_G.ngx.var.proxy_upstream_name = "mock_rr_backend"
mock_backends._vals = {}
local backend_get_spy = spy.on(mock_backends, "get")
local set_current_peer_spy = spy.on(mock_ngx_balancer, "set_current_peer")
assert.has_no_errors(balancer.call)
assert.are_equal(ngx.status, 503)
assert.spy(backend_get_spy).was_called_with(match.is_table(), "mock_rr_backend")
assert.spy(set_current_peer_spy).was_not_called()
end)
assert.has_no.errors(function() balancer.sync_backend(backend) end)
assert.spy(s_ngx_log).was_called_with(ngx.ERR,
"LB algorithm changed from round_robin to ewma, resetting the instance")
-- TODO(elvinefendi) figure out why
-- assert.spy(s).was_called_with(new_implementation, backend) does not work here
assert.spy(s).was_called(1)
assert.spy(s_old).was_not_called()
end)
describe("not in phase log or balancer", function()
it("returns errors", function()
_G.ngx.get_phase = function() return "nope" end
assert.has_error(balancer.call, "must be called in balancer or log, but was called in: nope")
end)
end)
end)
it("calls sync(backend) on existing balancer instance when load balancing config does not change", function()
local mock_instance = { sync = function(...) end }
setmetatable(mock_instance, implementation)
implementation.new = function(self, backend) return mock_instance end
assert.has_no.errors(function() balancer.sync_backend(backend) end)
describe("balancer.init_worker():", function()
setup(function()
_G.ngx.timer = {
every = function(interval, func) return func() end
}
mock_cjson.decode = function(x) return x end
end)
stub(mock_instance, "sync")
before_each(function()
mock_backends._vals = default_backends
end)
describe("sync_backends():", function()
it("succeeds when no sync is required", function()
mock_config.get_backends_data = function() return default_backends end
local backend_set_spy = spy.on(mock_backends, "set")
assert.has_no_errors(balancer.init_worker)
assert.spy(backend_set_spy).was_not_called()
end)
it("lb_alg=round_robin, updates backend when sync is required", function()
mock_config.get_backends_data = function() return { default_backends.mock_rr_backend } end
mock_backends._vals = {}
local backend_set_spy = spy.on(mock_backends, "set")
local ewma_flush_spy = spy.on(_G.ngx.shared.balancer_ewma, "flush_all")
local ewma_lta_flush_spy = spy.on(_G.ngx.shared.balancer_ewma_last_touched_at, "flush_all")
mock_resty_balancer.balance = function(b) return {address = "000.000.000", port = "8080"} end
mock_resty_balancer.reinit = function(b) return end
assert.has_no_errors(balancer.init_worker)
assert.spy(backend_set_spy)
.was_called_with(match.is_table(), default_backends.mock_rr_backend.name, match.is_table())
assert.spy(ewma_flush_spy).was_not_called()
assert.spy(ewma_lta_flush_spy).was_not_called()
end)
it("lb_alg=ewma, updates backend when sync is required", function()
_G.ngx.var.proxy_upstream_name = "mock_ewma_backend"
mock_config.get_backends_data = function() return { default_backends.mock_ewma_backend } end
mock_backends._vals = {}
local backend_set_spy = spy.on(mock_backends, "set")
local ewma_sync_spy = spy.on(mock_ewma, "sync")
mock_resty_balancer.is_applicable = function(b) return false end
assert.has_no_errors(balancer.init_worker)
assert.spy(backend_set_spy)
.was_called_with(match.is_table(), default_backends.mock_ewma_backend.name, match.is_table())
assert.spy(ewma_sync_spy).was_called()
end)
assert.has_no.errors(function() balancer.sync_backend(backend) end)
assert.stub(mock_instance.sync).was_called_with(mock_instance, backend)
end)
end)
end)

View file

@ -0,0 +1 @@
-- Test double: empty module stub loaded via the test/mocks package.path entry.
-- NOTE(review): cannot tell from this view which real module it shadows — confirm.
return {}

View file

@ -0,0 +1,3 @@
-- Test double exposing new(); returns an empty instance table.
-- NOTE(review): signature matches resty.chash/resty.roundrobin new(self, nodes) — confirm.
return {
  new = function(self, nodes) return {} end
}

View file

@ -0,0 +1 @@
-- Test double: empty module stub loaded via the test/mocks package.path entry.
-- NOTE(review): cannot tell from this view which real module it shadows — confirm.
return {}

View file

@ -0,0 +1,3 @@
return {
new = function(self, name, ...) return {} end,
}

View file

@ -0,0 +1 @@
-- Test double: empty module stub loaded via the test/mocks package.path entry.
-- NOTE(review): cannot tell from this view which real module it shadows — confirm.
return {}

View file

@ -0,0 +1,3 @@
return {
new = function(self, nodes) return {} end
}

View file

@ -0,0 +1 @@
-- Test double: empty module stub loaded via the test/mocks package.path entry.
-- NOTE(review): cannot tell from this view which real module it shadows — confirm.
return {}

View file

@ -0,0 +1 @@
-- Test double: empty module stub loaded via the test/mocks package.path entry.
-- NOTE(review): cannot tell from this view which real module it shadows — confirm.
return {}

View file

@ -14,9 +14,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
if luarocks list --porcelain busted $BUSTED_VERSION | grep -q "installed"; then
echo busted already installed, skipping ;
else
echo busted not found, installing via luarocks...;
sudo luarocks install busted $BUSTED_VERSION;
fi
# Install a luarocks package pinned to an exact version, skipping the
# install when that exact package/version pair is already present.
#   $1 = package name
#   $2 = version string
install()
{
  package="$1"
  version="$2"
  # Quote expansions: unquoted $package/$version would undergo word
  # splitting and globbing if they ever contained spaces or metacharacters.
  if luarocks list --porcelain "$package" "$version" | grep -q "installed"; then
    echo "$package already installed, skipping"
  else
    sudo luarocks install "$package" "$version"
  fi
}

install busted 2.0.rc12
install lua-cjson 2.1.0-1

View file

@ -6,6 +6,18 @@ local resty_md5 = require("resty.md5")
local _M = {}
-- Convert a list of endpoint objects into the node table format the
-- lua-resty balancer libraries expect: keys are "address:port" strings,
-- values are weights. Every endpoint receives the same weight (1).
function _M.get_nodes(endpoints)
  local nodes = {}
  for _, endpoint in pairs(endpoints) do
    local node_key = string.format("%s:%s", endpoint.address, endpoint.port)
    nodes[node_key] = 1
  end
  return nodes
end
local function hash_digest(hash_factory, message)
local hash = hash_factory:new()
if not hash then