diff --git a/docs/examples/affinity/cookie/README.md b/docs/examples/affinity/cookie/README.md index 5ee8855d0..ee6684bde 100644 --- a/docs/examples/affinity/cookie/README.md +++ b/docs/examples/affinity/cookie/README.md @@ -9,6 +9,7 @@ Session affinity can be configured using the following annotations: |Name|Description|Value| | --- | --- | --- | |nginx.ingress.kubernetes.io/affinity|Type of the affinity, set this to `cookie` to enable session affinity|string (NGINX only supports `cookie`)| +|nginx.ingress.kubernetes.io/affinity-mode|The affinity mode defines how sticky a session is. Use `balanced` to redistribute some sessions when scaling pods or `persistent` for maximum stickyness.|`balanced` (default) or `persistent`| |nginx.ingress.kubernetes.io/session-cookie-name|Name of the cookie that will be created|string (defaults to `INGRESSCOOKIE`)| |nginx.ingress.kubernetes.io/session-cookie-path|Path that will be set on the cookie (required if your [Ingress paths][ingress-paths] use regular expressions)|string (defaults to the currently [matched path][ingress-paths])| |nginx.ingress.kubernetes.io/session-cookie-max-age|Time until the cookie expires, corresponds to the `Max-Age` cookie directive|number of seconds| diff --git a/docs/user-guide/nginx-configuration/annotations.md b/docs/user-guide/nginx-configuration/annotations.md index c240a114c..16a1a7603 100755 --- a/docs/user-guide/nginx-configuration/annotations.md +++ b/docs/user-guide/nginx-configuration/annotations.md @@ -17,6 +17,7 @@ You can add these Kubernetes annotations to specific Ingress objects to customiz |---------------------------|------| |[nginx.ingress.kubernetes.io/app-root](#rewrite)|string| |[nginx.ingress.kubernetes.io/affinity](#session-affinity)|cookie| +|[nginx.ingress.kubernetes.io/affinity-mode](#session-affinity)|"balanced" or "persistent"| |[nginx.ingress.kubernetes.io/auth-realm](#authentication)|string| |[nginx.ingress.kubernetes.io/auth-secret](#authentication)|string| 
|[nginx.ingress.kubernetes.io/auth-secret-type](#authentication)|string| @@ -152,6 +153,8 @@ If the Application Root is exposed in a different path and needs to be redirecte The annotation `nginx.ingress.kubernetes.io/affinity` enables and sets the affinity type in all Upstreams of an Ingress. This way, a request will always be directed to the same upstream server. The only affinity type available for NGINX is `cookie`. +The annotation `nginx.ingress.kubernetes.io/affinity-mode` defines the stickyness of a session. Setting this to `balanced` (default) will redistribute some sessions if a deployment gets scaled up, therefore rebalancing the load on the servers. Setting this to `persistent` will not rebalance sessions to new servers, therefore providing maximum stickyness. + !!! attention If more than one Ingress is defined for a host and at least one Ingress uses `nginx.ingress.kubernetes.io/affinity: cookie`, then only paths on the Ingress using `nginx.ingress.kubernetes.io/affinity` will use session cookie affinity. All paths defined on other Ingresses for the host will be load balanced through the random selection of a backend server. 
diff --git a/internal/ingress/annotations/annotations_test.go b/internal/ingress/annotations/annotations_test.go index 69d90fd0c..a496cd9ba 100644 --- a/internal/ingress/annotations/annotations_test.go +++ b/internal/ingress/annotations/annotations_test.go @@ -33,6 +33,7 @@ var ( annotationSecureVerifyCACert = parser.GetAnnotationWithPrefix("secure-verify-ca-secret") annotationPassthrough = parser.GetAnnotationWithPrefix("ssl-passthrough") annotationAffinityType = parser.GetAnnotationWithPrefix("affinity") + annotationAffinityMode = parser.GetAnnotationWithPrefix("affinity-mode") annotationCorsEnabled = parser.GetAnnotationWithPrefix("enable-cors") annotationCorsAllowMethods = parser.GetAnnotationWithPrefix("cors-allow-methods") annotationCorsAllowHeaders = parser.GetAnnotationWithPrefix("cors-allow-headers") @@ -199,13 +200,14 @@ func TestAffinitySession(t *testing.T) { fooAnns := []struct { annotations map[string]string affinitytype string + affinitymode string name string }{ - {map[string]string{annotationAffinityType: "cookie", annotationAffinityCookieName: "route"}, "cookie", "route"}, - {map[string]string{annotationAffinityType: "cookie", annotationAffinityCookieName: "route1"}, "cookie", "route1"}, - {map[string]string{annotationAffinityType: "cookie", annotationAffinityCookieName: ""}, "cookie", "INGRESSCOOKIE"}, - {map[string]string{}, "", ""}, - {nil, "", ""}, + {map[string]string{annotationAffinityType: "cookie", annotationAffinityMode: "balanced", annotationAffinityCookieName: "route"}, "cookie", "balanced", "route"}, + {map[string]string{annotationAffinityType: "cookie", annotationAffinityMode: "persistent", annotationAffinityCookieName: "route1"}, "cookie", "persistent", "route1"}, + {map[string]string{annotationAffinityType: "cookie", annotationAffinityMode: "balanced", annotationAffinityCookieName: ""}, "cookie", "balanced", "INGRESSCOOKIE"}, + {map[string]string{}, "", "", ""}, + {nil, "", "", ""}, } for _, foo := range fooAnns { @@ -213,6 +215,10 
@@ func TestAffinitySession(t *testing.T) { r := ec.Extract(ing).SessionAffinity t.Logf("Testing pass %v %v", foo.affinitytype, foo.name) + if r.Mode != foo.affinitymode { + t.Errorf("Returned %v but expected %v for Name", r.Mode, foo.affinitymode) + } + if r.Cookie.Name != foo.name { t.Errorf("Returned %v but expected %v for Name", r.Cookie.Name, foo.name) } diff --git a/internal/ingress/annotations/sessionaffinity/main.go b/internal/ingress/annotations/sessionaffinity/main.go index 417f625b0..d6ce54f5f 100644 --- a/internal/ingress/annotations/sessionaffinity/main.go +++ b/internal/ingress/annotations/sessionaffinity/main.go @@ -28,6 +28,7 @@ import ( const ( annotationAffinityType = "affinity" + annotationAffinityMode = "affinity-mode" // If a cookie with this name exists, // its value is used as an index into the list of available backends. annotationAffinityCookieName = "session-cookie-name" @@ -57,6 +58,8 @@ var ( type Config struct { // The type of affinity that will be used Type string `json:"type"` + // The affinity mode, i.e. 
how sticky a session is + Mode string `json:"mode"` Cookie } @@ -136,6 +139,12 @@ func (a affinity) Parse(ing *networking.Ingress) (interface{}, error) { at = "" } + // Check the afinity mode that will be used + am, err := parser.GetStringAnnotation(annotationAffinityMode, ing) + if err != nil { + am = "" + } + switch at { case "cookie": cookie = a.cookieAffinityParse(ing) @@ -146,6 +155,7 @@ func (a affinity) Parse(ing *networking.Ingress) (interface{}, error) { return &Config{ Type: at, + Mode: am, Cookie: *cookie, }, nil } diff --git a/internal/ingress/annotations/sessionaffinity/main_test.go b/internal/ingress/annotations/sessionaffinity/main_test.go index bdb038e0d..29c514047 100644 --- a/internal/ingress/annotations/sessionaffinity/main_test.go +++ b/internal/ingress/annotations/sessionaffinity/main_test.go @@ -67,6 +67,7 @@ func TestIngressAffinityCookieConfig(t *testing.T) { data := map[string]string{} data[parser.GetAnnotationWithPrefix(annotationAffinityType)] = "cookie" + data[parser.GetAnnotationWithPrefix(annotationAffinityMode)] = "balanced" data[parser.GetAnnotationWithPrefix(annotationAffinityCookieName)] = "INGRESSCOOKIE" data[parser.GetAnnotationWithPrefix(annotationAffinityCookieExpires)] = "4500" data[parser.GetAnnotationWithPrefix(annotationAffinityCookieMaxAge)] = "3000" @@ -84,6 +85,10 @@ func TestIngressAffinityCookieConfig(t *testing.T) { t.Errorf("expected cookie as affinity but returned %v", nginxAffinity.Type) } + if nginxAffinity.Mode != "balanced" { + t.Errorf("expected balanced as affinity mode but returned %v", nginxAffinity.Mode) + } + if nginxAffinity.Cookie.Name != "INGRESSCOOKIE" { t.Errorf("expected INGRESSCOOKIE as session-cookie-name but returned %v", nginxAffinity.Cookie.Name) } diff --git a/internal/ingress/controller/controller.go b/internal/ingress/controller/controller.go index 34889226e..a3ce13a6f 100644 --- a/internal/ingress/controller/controller.go +++ b/internal/ingress/controller/controller.go @@ -571,6 +571,10 @@ 
func (n *NGINXController) getBackendServers(ingresses []*ingress.Ingress) ([]*in ups.SessionAffinity.AffinityType = anns.SessionAffinity.Type } + if ups.SessionAffinity.AffinityMode == "" { + ups.SessionAffinity.AffinityMode = anns.SessionAffinity.Mode + } + if anns.SessionAffinity.Type == "cookie" { cookiePath := anns.SessionAffinity.Cookie.Path if anns.Rewrite.UseRegex && cookiePath == "" { diff --git a/internal/ingress/types.go b/internal/ingress/types.go index c87beb49d..9dd58a4f7 100644 --- a/internal/ingress/types.go +++ b/internal/ingress/types.go @@ -141,6 +141,7 @@ func (s Backend) HashInclude(field string, v interface{}) (bool, error) { // +k8s:deepcopy-gen=true type SessionAffinityConfig struct { AffinityType string `json:"name"` + AffinityMode string `json:"mode"` CookieSessionAffinity CookieSessionAffinity `json:"cookieSessionAffinity"` } diff --git a/internal/ingress/types_equals.go b/internal/ingress/types_equals.go index d13dbe080..88815c1a4 100644 --- a/internal/ingress/types_equals.go +++ b/internal/ingress/types_equals.go @@ -152,6 +152,9 @@ func (sac1 *SessionAffinityConfig) Equal(sac2 *SessionAffinityConfig) bool { if sac1.AffinityType != sac2.AffinityType { return false } + if sac1.AffinityMode != sac2.AffinityMode { + return false + } if !(&sac1.CookieSessionAffinity).Equal(&sac2.CookieSessionAffinity) { return false } diff --git a/rootfs/etc/nginx/lua/balancer.lua b/rootfs/etc/nginx/lua/balancer.lua index ec8eb194b..b23ee4e15 100644 --- a/rootfs/etc/nginx/lua/balancer.lua +++ b/rootfs/etc/nginx/lua/balancer.lua @@ -6,7 +6,8 @@ local configuration = require("configuration") local round_robin = require("balancer.round_robin") local chash = require("balancer.chash") local chashsubset = require("balancer.chashsubset") -local sticky = require("balancer.sticky") +local sticky_balanced = require("balancer.sticky_balanced") +local sticky_persistent = require("balancer.sticky_persistent") local ewma = require("balancer.ewma") -- measured in seconds 
@@ -19,7 +20,8 @@ local IMPLEMENTATIONS = { round_robin = round_robin, chash = chash, chashsubset = chashsubset, - sticky = sticky, + sticky_balanced = sticky_balanced, + sticky_persistent = sticky_persistent, ewma = ewma, } @@ -30,7 +32,11 @@ local function get_implementation(backend) local name = backend["load-balance"] or DEFAULT_LB_ALG if backend["sessionAffinityConfig"] and backend["sessionAffinityConfig"]["name"] == "cookie" then - name = "sticky" + if backend["sessionAffinityConfig"]["mode"] == 'persistent' then + name = "sticky_persistent" + else + name = "sticky_balanced" + end elseif backend["upstreamHashByConfig"] and backend["upstreamHashByConfig"]["upstream-hash-by"] then if backend["upstreamHashByConfig"]["upstream-hash-by-subset"] then name = "chashsubset" diff --git a/rootfs/etc/nginx/lua/balancer/sticky.lua b/rootfs/etc/nginx/lua/balancer/sticky.lua index 366ee6d32..a5570911c 100644 --- a/rootfs/etc/nginx/lua/balancer/sticky.lua +++ b/rootfs/etc/nginx/lua/balancer/sticky.lua @@ -1,43 +1,38 @@ local balancer_resty = require("balancer.resty") -local resty_chash = require("resty.chash") -local util = require("util") local ck = require("resty.cookie") -local math = require("math") local ngx_balancer = require("ngx.balancer") local split = require("util.split") -local string_format = string.format -local ngx_log = ngx.log -local INFO = ngx.INFO - -local _M = balancer_resty:new({ factory = resty_chash, name = "sticky" }) +local _M = balancer_resty:new() local DEFAULT_COOKIE_NAME = "route" --- Consider the situation of N upstreams one of which is failing. --- Then the probability to obtain failing upstream after M iterations would be close to (1/N)**M. --- For the worst case (2 upstreams; 20 iterations) it would be ~10**(-6) --- which is much better then ~10**(-3) for 10 iterations. 
-local MAX_UPSTREAM_CHECKS_COUNT = 20 - function _M.cookie_name(self) return self.cookie_session_affinity.name or DEFAULT_COOKIE_NAME end -function _M.new(self, backend) - local nodes = util.get_nodes(backend.endpoints) - +function _M.new(self) local o = { - instance = self.factory:new(nodes), - traffic_shaping_policy = backend.trafficShapingPolicy, - alternative_backends = backend.alternativeBackends, - cookie_session_affinity = backend["sessionAffinityConfig"]["cookieSessionAffinity"] + alternative_backends = nil, + cookie_session_affinity = nil, + traffic_shaping_policy = nil } + setmetatable(o, self) self.__index = self + return o end -local function set_cookie(self, value) +function _M.get_cookie(self) + local cookie, err = ck:new() + if not cookie then + ngx.log(ngx.ERR, err) + end + + return cookie:get(self:cookie_name()) +end + +function _M.set_cookie(self, value) local cookie, err = ck:new() if not cookie then ngx.log(ngx.ERR, err) @@ -86,22 +81,6 @@ local function get_failed_upstreams() return indexed_upstream_addrs end -local function pick_new_upstream(self) - local failed_upstreams = get_failed_upstreams() - - for i = 1, MAX_UPSTREAM_CHECKS_COUNT do - local key = string.format("%s.%s.%s", ngx.now() + i, ngx.worker.pid(), math.random(999999)) - - local new_upstream = self.instance:find(key) - - if not failed_upstreams[new_upstream] then - return new_upstream, key - end - end - - return nil, nil -end - local function should_set_cookie(self) if self.cookie_session_affinity.locations and ngx.var.host then local locs = self.cookie_session_affinity.locations[ngx.var.host] @@ -128,15 +107,9 @@ local function should_set_cookie(self) end function _M.balance(self) - local cookie, err = ck:new() - if not cookie then - ngx.log(ngx.ERR, "error while initializing cookie: " .. 
tostring(err)) - return - end - local upstream_from_cookie - local key = cookie:get(self:cookie_name()) + local key = self:get_cookie() if key then upstream_from_cookie = self.instance:find(key) end @@ -151,30 +124,22 @@ function _M.balance(self) local new_upstream - new_upstream, key = pick_new_upstream(self) + new_upstream, key = self:pick_new_upstream(get_failed_upstreams()) if not new_upstream then ngx.log(ngx.WARN, string.format("failed to get new upstream; using upstream %s", new_upstream)) elseif should_set_cookie(self) then - set_cookie(self, key) + self:set_cookie(key) end return new_upstream end function _M.sync(self, backend) + -- reload balancer nodes balancer_resty.sync(self, backend) - -- Reload the balancer if any of the annotations have changed. - local changed = not util.deep_compare( - self.cookie_session_affinity, - backend.sessionAffinityConfig.cookieSessionAffinity - ) - if not changed then - return - end - - ngx_log(INFO, string_format("[%s] nodes have changed for backend %s", self.name, backend.name)) - + self.traffic_shaping_policy = backend.trafficShapingPolicy + self.alternative_backends = backend.alternativeBackends self.cookie_session_affinity = backend.sessionAffinityConfig.cookieSessionAffinity end diff --git a/rootfs/etc/nginx/lua/balancer/sticky_balanced.lua b/rootfs/etc/nginx/lua/balancer/sticky_balanced.lua new file mode 100644 index 000000000..ce0018158 --- /dev/null +++ b/rootfs/etc/nginx/lua/balancer/sticky_balanced.lua @@ -0,0 +1,49 @@ +-- An affinity mode which makes sure connections are rebalanced when a deployment is scaled. +-- The advantage of this mode is that the load on the pods will be redistributed. +-- The drawback of this mode is that, when scaling up a deployment, roughly (n-c)/n users +-- will lose their session, where c is the current number of pods and n is the new number of +-- pods. 
+-- +local balancer_sticky = require("balancer.sticky") +local math_random = require("math").random +local resty_chash = require("resty.chash") +local util_get_nodes = require("util").get_nodes + +local _M = balancer_sticky:new() + +-- Consider the situation of N upstreams one of which is failing. +-- Then the probability to obtain failing upstream after M iterations would be close to (1/N)**M. +-- For the worst case (2 upstreams; 20 iterations) it would be ~10**(-6) +-- which is much better then ~10**(-3) for 10 iterations. +local MAX_UPSTREAM_CHECKS_COUNT = 20 + +function _M.new(self, backend) + local nodes = util_get_nodes(backend.endpoints) + + local o = { + name = "sticky_balanced", + instance = resty_chash:new(nodes) + } + + setmetatable(o, self) + self.__index = self + + balancer_sticky.sync(o, backend) + + return o +end + +function _M.pick_new_upstream(self, failed_upstreams) + for i = 1, MAX_UPSTREAM_CHECKS_COUNT do + local key = string.format("%s.%s.%s", ngx.now() + i, ngx.worker.pid(), math_random(999999)) + local new_upstream = self.instance:find(key) + + if not failed_upstreams[new_upstream] then + return new_upstream, key + end + end + + return nil, nil +end + +return _M diff --git a/rootfs/etc/nginx/lua/balancer/sticky_persistent.lua b/rootfs/etc/nginx/lua/balancer/sticky_persistent.lua new file mode 100644 index 000000000..a4a6f0da2 --- /dev/null +++ b/rootfs/etc/nginx/lua/balancer/sticky_persistent.lua @@ -0,0 +1,33 @@ +-- An affinity mode which makes sure a session is always routed to the same endpoint. +-- The advantage of this mode is that a user will never lose his session. +-- The drawback of this mode is that when scaling up a deployment, sessions will not +-- be rebalanced. 
+-- +local balancer_sticky = require("balancer.sticky") +local util_get_nodes = require("util").get_nodes +local util_nodemap = require("util.nodemap") + +local _M = balancer_sticky:new() + +function _M.new(self, backend) + local nodes = util_get_nodes(backend.endpoints) + local hash_salt = backend["name"] + + local o = { + name = "sticky_persistent", + instance = util_nodemap:new(nodes, hash_salt) + } + + setmetatable(o, self) + self.__index = self + + balancer_sticky.sync(o, backend) + + return o +end + +function _M.pick_new_upstream(self, failed_upstreams) + return self.instance:random_except(failed_upstreams) +end + +return _M diff --git a/rootfs/etc/nginx/lua/test/balancer/sticky_test.lua b/rootfs/etc/nginx/lua/test/balancer/sticky_test.lua index 0c8b3297d..53f1c1ea7 100644 --- a/rootfs/etc/nginx/lua/test/balancer/sticky_test.lua +++ b/rootfs/etc/nginx/lua/test/balancer/sticky_test.lua @@ -1,4 +1,5 @@ -local sticky = require("balancer.sticky") +local sticky_balanced = require("balancer.sticky_balanced") +local sticky_persistent = require("balancer.sticky_persistent") local cookie = require("resty.cookie") local util = require("util") @@ -15,11 +16,16 @@ local function reset_ngx() end function get_mocked_cookie_new() + local o = { value = nil } + local mock = { + get = function(self, n) return self.value end, + set = function(self, c) self.value = c.value ; return true, nil end + } + setmetatable(o, mock) + mock.__index = mock + return function(self) - return { - get = function(self, n) return nil, "error" end, - set = function(self, n) return true, "" end - } + return o; end end @@ -52,21 +58,27 @@ describe("Sticky", function() describe("new(backend)", function() context("when backend specifies cookie name", function() - it("returns an instance containing the corresponding cookie name", function() + local function test(sticky) local sticky_balancer_instance = sticky:new(test_backend) local test_backend_cookie_name = 
test_backend.sessionAffinityConfig.cookieSessionAffinity.name assert.equal(sticky_balancer_instance:cookie_name(), test_backend_cookie_name) - end) + end + + it("returns an instance containing the corresponding cookie name", function() test(sticky_balanced) end) + it("returns an instance containing the corresponding cookie name", function() test(sticky_persistent) end) end) context("when backend does not specify cookie name", function() - it("returns an instance with 'route' as cookie name", function() + local function test(sticky) local temp_backend = util.deepcopy(test_backend) temp_backend.sessionAffinityConfig.cookieSessionAffinity.name = nil local sticky_balancer_instance = sticky:new(temp_backend) local default_cookie_name = "route" assert.equal(sticky_balancer_instance:cookie_name(), default_cookie_name) - end) + end + + it("returns an instance with 'route' as cookie name", function() test(sticky_balanced) end) + it("returns an instance with 'route' as cookie name", function() test(sticky_persistent) end) end) end) @@ -74,8 +86,10 @@ describe("Sticky", function() local mocked_cookie_new = cookie.new before_each(function() - package.loaded["balancer.sticky"] = nil - sticky = require("balancer.sticky") + package.loaded["balancer.sticky_balanced"] = nil + package.loaded["balancer.sticky_persistent"] = nil + sticky_balanced = require("balancer.sticky_balanced") + sticky_persistent = require("balancer.sticky_persistent") end) after_each(function() @@ -83,13 +97,17 @@ describe("Sticky", function() end) context("when client doesn't have a cookie set and location is in cookie_locations", function() - it("picks an endpoint for the client", function() + + local function test_pick_endpoint(sticky) local sticky_balancer_instance = sticky:new(test_backend) local peer = sticky_balancer_instance:balance() - assert.equal(peer, test_backend_endpoint) - end) + assert.equal(test_backend_endpoint, peer) + end - it("sets a cookie on the client", function() + it("picks an 
endpoint for the client", function() test_pick_endpoint(sticky_balanced) end) + it("picks an endpoint for the client", function() test_pick_endpoint(sticky_persistent) end) + + local function test_set_cookie(sticky) local s = {} cookie.new = function(self) local cookie_instance = { @@ -112,9 +130,12 @@ describe("Sticky", function() local sticky_balancer_instance = sticky:new(b) assert.has_no.errors(function() sticky_balancer_instance:balance() end) assert.spy(s).was_called() - end) + end - it("sets a secure cookie on the client when being in ssl mode", function() + it("sets a cookie on the client", function() test_set_cookie(sticky_balanced) end) + it("sets a cookie on the client", function() test_set_cookie(sticky_persistent) end) + + local function test_set_ssl_cookie(sticky) ngx.var.https = "on" local s = {} cookie.new = function(self) @@ -138,10 +159,18 @@ describe("Sticky", function() local sticky_balancer_instance = sticky:new(b) assert.has_no.errors(function() sticky_balancer_instance:balance() end) assert.spy(s).was_called() + end + + it("sets a secure cookie on the client when being in ssl mode", function() + test_set_ssl_cookie(sticky_balanced) + end) + it("sets a secure cookie on the client when being in ssl mode", function() + test_set_ssl_cookie(sticky_persistent) end) end) - context("when client doesn't have a cookie set and cookie_locations contains a matching wildcard location", function() + context("when client doesn't have a cookie set and cookie_locations contains a matching wildcard location", + function() before_each(function () ngx.var.host = "dev.test.com" end) @@ -149,7 +178,7 @@ describe("Sticky", function() ngx.var.host = "test.com" end) - it("sets a cookie on the client", function() + local function test(sticky) local s = {} cookie.new = function(self) local cookie_instance = { @@ -173,17 +202,24 @@ describe("Sticky", function() local sticky_balancer_instance = sticky:new(b) assert.has_no.errors(function() 
sticky_balancer_instance:balance() end) assert.spy(s).was_called() - end) + end + + it("sets a cookie on the client", function() test(sticky_balanced) end) + it("sets a cookie on the client", function() test(sticky_persistent) end) end) context("when client doesn't have a cookie set and location not in cookie_locations", function() - it("picks an endpoint for the client", function() + + local function test_pick_endpoint(sticky) local sticky_balancer_instance = sticky:new(test_backend) local peer = sticky_balancer_instance:balance() assert.equal(peer, test_backend_endpoint) - end) + end - it("does not set a cookie on the client", function() + it("picks an endpoint for the client", function() test_pick_endpoint(sticky_balanced) end) + it("picks an endpoint for the client", function() test_pick_endpoint(sticky_persistent) end) + + local function test_no_cookie(sticky) local s = {} cookie.new = function(self) local cookie_instance = { @@ -202,11 +238,15 @@ describe("Sticky", function() local sticky_balancer_instance = sticky:new(get_test_backend()) assert.has_no.errors(function() sticky_balancer_instance:balance() end) assert.spy(s).was_not_called() - end) + end + + it("does not set a cookie on the client", function() test_no_cookie(sticky_balanced) end) + it("does not set a cookie on the client", function() test_no_cookie(sticky_persistent) end) end) context("when client has a cookie set", function() - it("does not set a cookie", function() + + local function test_no_cookie(sticky) local s = {} cookie.new = function(self) local return_obj = { @@ -219,13 +259,19 @@ describe("Sticky", function() local sticky_balancer_instance = sticky:new(test_backend) assert.has_no.errors(function() sticky_balancer_instance:balance() end) assert.spy(s).was_not_called() - end) + end - it("returns the correct endpoint for the client", function() + it("does not set a cookie", function() test_no_cookie(sticky_balanced) end) + it("does not set a cookie", function() 
test_no_cookie(sticky_persistent) end) + + local function test_correct_endpoint(sticky) local sticky_balancer_instance = sticky:new(test_backend) local peer = sticky_balancer_instance:balance() assert.equal(peer, test_backend_endpoint) - end) + end + + it("returns the correct endpoint for the client", function() test_correct_endpoint(sticky_balanced) end) + it("returns the correct endpoint for the client", function() test_correct_endpoint(sticky_persistent) end) end) end) @@ -238,7 +284,12 @@ describe("Sticky", function() }, sessionAffinityConfig = { name = "cookie", - cookieSessionAffinity = { name = "test_name", hash = "sha1", change_on_failure = change_on_failure } + cookieSessionAffinity = { + name = "test_name", + hash = "sha1", + change_on_failure = change_on_failure, + locations = { ['test.com'] = {'/'} } + } }, } end @@ -247,53 +298,58 @@ describe("Sticky", function() local mocked_cookie_new = cookie.new before_each(function() - package.loaded["balancer.sticky"] = nil - sticky = require("balancer.sticky") + package.loaded["balancer.sticky_balanced"] = nil + package.loaded["balancer.sticky_persistent"] = nil + sticky_balanced = require("balancer.sticky_balanced") + sticky_persistent = require("balancer.sticky_persistent") + mock_ngx({ var = { location_path = "/", host = "test.com" } }) end) after_each(function() - cookie.new = mocked_cookie_new + reset_ngx() end) context("when request to upstream fails", function() + + local function test(sticky, change_on_failure) + local sticky_balancer_instance = sticky:new(get_several_test_backends(change_on_failure)) + + local old_upstream = sticky_balancer_instance:balance() + assert.is.Not.Nil(old_upstream) + for _ = 1, 100 do + -- make sure upstream doesn't change on subsequent calls of balance() + assert.equal(old_upstream, sticky_balancer_instance:balance()) + end + + -- simulate request failure + sticky_balancer_instance.get_last_failure = function() + return "failed" + end + _G.ngx.var.upstream_addr = 
old_upstream + + for _ = 1, 100 do + local new_upstream = sticky_balancer_instance:balance() + if change_on_failure == false then + -- upstream should be the same inspite of error, if change_on_failure option is false + assert.equal(new_upstream, old_upstream) + else + -- upstream should change after error, if change_on_failure option is true + assert.not_equal(new_upstream, old_upstream) + end + end + end + it("changes upstream when change_on_failure option is true", function() - -- create sticky cookie - cookie.new = function(self) - local return_obj = { - set = function(v) return false, nil end, - get = function(k) return "" end, - } - return return_obj, false - end - - local options = {false, true} - - for _, option in ipairs(options) do - local sticky_balancer_instance = sticky:new(get_several_test_backends(option)) - - local old_upstream = sticky_balancer_instance:balance() - for _ = 1, 100 do - -- make sure upstream doesn't change on subsequent calls of balance() - assert.equal(old_upstream, sticky_balancer_instance:balance()) - end - - -- simulate request failure - sticky_balancer_instance.get_last_failure = function() - return "failed" - end - _G.ngx.var = { upstream_addr = old_upstream } - - for _ = 1, 100 do - local new_upstream = sticky_balancer_instance:balance() - if option == false then - -- upstream should be the same inspite of error, if change_on_failure option is false - assert.equal(new_upstream, old_upstream) - else - -- upstream should change after error, if change_on_failure option is true - assert.not_equal(new_upstream, old_upstream) - end - end - end + test(sticky_balanced, true) + end) + it("changes upstream when change_on_failure option is true", function() + test(sticky_balanced, false) + end) + it("changes upstream when change_on_failure option is true", function() + test(sticky_persistent, true) + end) + it("changes upstream when change_on_failure option is true", function() + test(sticky_persistent, false) end) end) end) diff --git 
a/rootfs/etc/nginx/lua/test/balancer_test.lua b/rootfs/etc/nginx/lua/test/balancer_test.lua index 2851f5853..a3a004896 100644 --- a/rootfs/etc/nginx/lua/test/balancer_test.lua +++ b/rootfs/etc/nginx/lua/test/balancer_test.lua @@ -23,9 +23,9 @@ local function reset_expected_implementations() ["access-router-production-web-80"] = package.loaded["balancer.round_robin"], ["my-dummy-app-1"] = package.loaded["balancer.round_robin"], ["my-dummy-app-2"] = package.loaded["balancer.chash"], - ["my-dummy-app-3"] = package.loaded["balancer.sticky"], + ["my-dummy-app-3"] = package.loaded["balancer.sticky_persistent"], ["my-dummy-app-4"] = package.loaded["balancer.ewma"], - ["my-dummy-app-5"] = package.loaded["balancer.sticky"], + ["my-dummy-app-5"] = package.loaded["balancer.sticky_balanced"] } end @@ -55,7 +55,7 @@ local function reset_backends() }, { name = "my-dummy-app-3", ["load-balance"] = "ewma", - sessionAffinityConfig = { name = "cookie", cookieSessionAffinity = { name = "route" } } + sessionAffinityConfig = { name = "cookie", mode = 'persistent', cookieSessionAffinity = { name = "route" } } }, { name = "my-dummy-app-4", ["load-balance"] = "ewma", }, { diff --git a/rootfs/etc/nginx/lua/test/util/nodemap_test.lua b/rootfs/etc/nginx/lua/test/util/nodemap_test.lua new file mode 100644 index 000000000..f012bb7ee --- /dev/null +++ b/rootfs/etc/nginx/lua/test/util/nodemap_test.lua @@ -0,0 +1,167 @@ +local util = require("util") +local nodemap = require("util.nodemap") + +local function get_test_backend_single() + return { + name = "access-router-production-web-80", + endpoints = { + { address = "10.184.7.40", port = "8080", maxFails = 0, failTimeout = 0 } + } + } +end + +local function get_test_backend_multi() + return { + name = "access-router-production-web-80", + endpoints = { + { address = "10.184.7.40", port = "8080", maxFails = 0, failTimeout = 0 }, + { address = "10.184.7.41", port = "8080", maxFails = 0, failTimeout = 0 } + } + } +end + +local function 
get_test_nodes_ignore(endpoint) + local ignore = {} + ignore[endpoint] = true + return ignore +end + +describe("Node Map", function() + + local test_backend_single = get_test_backend_single() + local test_backend_multi = get_test_backend_multi() + local test_salt = test_backend_single.name + local test_nodes_single = util.get_nodes(test_backend_single.endpoints) + local test_nodes_multi = util.get_nodes(test_backend_multi.endpoints) + local test_endpoint1 = test_backend_multi.endpoints[1].address .. ":" .. test_backend_multi.endpoints[1].port + local test_endpoint2 = test_backend_multi.endpoints[2].address .. ":" .. test_backend_multi.endpoints[2].port + local test_nodes_ignore = get_test_nodes_ignore(test_endpoint1) + + describe("new()", function() + context("when no salt has been provided", function() + it("random() returns an unsalted key", function() + local nodemap_instance = nodemap:new(test_nodes_single, nil) + local expected_endpoint = test_endpoint1 + local expected_hash_key = ngx.md5(expected_endpoint) + local actual_endpoint + local actual_hash_key + + actual_endpoint, actual_hash_key = nodemap_instance:random() + + assert.equal(actual_endpoint, expected_endpoint) + assert.equal(expected_hash_key, actual_hash_key) + end) + end) + + context("when a salt has been provided", function() + it("random() returns a salted key", function() + local nodemap_instance = nodemap:new(test_nodes_single, test_salt) + local expected_endpoint = test_endpoint1 + local expected_hash_key = ngx.md5(test_salt .. 
expected_endpoint) + local actual_endpoint + local actual_hash_key + + actual_endpoint, actual_hash_key = nodemap_instance:random() + + assert.equal(actual_endpoint, expected_endpoint) + assert.equal(expected_hash_key, actual_hash_key) + end) + end) + + context("when no nodes have been provided", function() + it("random() returns nil", function() + local nodemap_instance = nodemap:new({}, test_salt) + local actual_endpoint + local actual_hash_key + + actual_endpoint, actual_hash_key = nodemap_instance:random() + + assert.equal(actual_endpoint, nil) + assert.equal(actual_hash_key, nil) + end) + end) + end) + + describe("find()", function() + before_each(function() + package.loaded["util.nodemap"] = nil + nodemap = require("util.nodemap") + end) + + context("when a hash key is valid", function() + it("find() returns the correct endpoint", function() + local nodemap_instance = nodemap:new(test_nodes_single, test_salt) + local test_hash_key + local expected_endpoint + local actual_endpoint + + expected_endpoint, test_hash_key = nodemap_instance:random() + assert.not_equal(expected_endpoint, nil) + assert.not_equal(test_hash_key, nil) + + actual_endpoint = nodemap_instance:find(test_hash_key) + assert.equal(actual_endpoint, expected_endpoint) + end) + end) + + context("when a hash key is invalid", function() + it("find() returns nil", function() + local nodemap_instance = nodemap:new(test_nodes_single, test_salt) + local test_hash_key = "invalid or nonexistent hash key" + local actual_endpoint + + actual_endpoint = nodemap_instance:find(test_hash_key) + + assert.equal(actual_endpoint, nil) + end) + end) + end) + + + describe("random_except()", function() + before_each(function() + package.loaded["util.nodemap"] = nil + nodemap = require("util.nodemap") + end) + + context("when nothing has been excluded", function() + it("random_except() returns the correct endpoint", function() + local nodemap_instance = nodemap:new(test_nodes_single, test_salt) + local 
expected_endpoint = test_endpoint1 + local test_hash_key + local actual_endpoint + + actual_endpoint, test_hash_key = nodemap_instance:random_except({}) + assert.equal(expected_endpoint, actual_endpoint) + assert.not_equal(test_hash_key, nil) + end) + end) + + context("when everything has been excluded", function() + it("random_except() returns nil", function() + local nodemap_instance = nodemap:new(test_nodes_single, test_salt) + local actual_hash_key + local actual_endpoint + + actual_endpoint, actual_hash_key = nodemap_instance:random_except(test_nodes_ignore) + + assert.equal(actual_endpoint, nil) + assert.equal(actual_hash_key, nil) + end) + end) + + context("when an endpoint has been excluded", function() + it("random_except() does not return it", function() + local nodemap_instance = nodemap:new(test_nodes_multi, test_salt) + local expected_endpoint = test_endpoint2 + local actual_endpoint + local test_hash_key + + actual_endpoint, test_hash_key = nodemap_instance:random_except(test_nodes_ignore) + + assert.equal(actual_endpoint, expected_endpoint) + assert.not_equal(test_hash_key, nil) + end) + end) + end) +end) diff --git a/rootfs/etc/nginx/lua/util/nodemap.lua b/rootfs/etc/nginx/lua/util/nodemap.lua new file mode 100644 index 000000000..04d034023 --- /dev/null +++ b/rootfs/etc/nginx/lua/util/nodemap.lua @@ -0,0 +1,119 @@ +local math_random = require("math").random +local util_tablelength = require("util").tablelength + +local _M = {} + +--- create_map generates the node hash table +-- @tparam {[string]=number} nodes A table with the node as a key and its weight as a value. +-- @tparam string salt A salt that will be used to generate salted hash keys. +local function create_map(nodes, salt) + local hash_map = {} + + for endpoint, _ in pairs(nodes) do + -- obfuscate the endpoint with a shared key to prevent brute force + -- and rainbow table attacks which could reveal internal endpoints + local key = salt .. 
endpoint + local hash_key = ngx.md5(key) + hash_map[hash_key] = endpoint + end + + return hash_map +end + +--- get_random_node picks a random node from the given map. +-- @tparam {[string], ...} map A key to node hash table. +-- @treturn string,string The node and its key +local function get_random_node(map) + local size = util_tablelength(map) + + if size < 1 then + return nil, nil + end + + local index = math_random(1, size) + local count = 1 + + for key, endpoint in pairs(map) do + if count == index then + return endpoint, key + end + + count = count + 1 + end + + ngx.log(ngx.ERR, string.format("Failed to find node %d of %d! This is a bug, please report!", index, size)) + + return nil, nil +end + +--- new constructs a new instance of the node map +-- +-- The map uses MD5 to create hash keys for a given node. For security reasons it supports +-- salted hash keys, to prevent attackers from using rainbow tables or brute forcing +-- the node endpoints, which would reveal cluster internal network information. +-- +-- To make sure hash keys are reproducible on different ingress controller instances the salt +-- needs to be shared and therefore is not simply generated randomly. +-- +-- @tparam {[string]=number} endpoints A table with the node endpoint as a key and its weight as a value. +-- @tparam[opt] string hash_salt An optional hash salt that will be used to obfuscate the hash key. +function _M.new(self, endpoints, hash_salt) + if hash_salt == nil then + hash_salt = '' + end + + -- the endpoints have to be saved as 'nodes' to keep compatibility to balancer.resty + local o = { + salt = hash_salt, + nodes = endpoints, + map = create_map(endpoints, hash_salt) + } + + setmetatable(o, self) + self.__index = self + return o +end + +--- reinit reinitializes the node map reusing the original salt +-- @tparam {[string]=number} nodes A table with the node as a key and its weight as a value. 
+function _M.reinit(self, nodes) + self.nodes = nodes + self.map = create_map(nodes, self.salt) +end + +--- find looks up a node by hash key. +-- @tparam string key The hash key. +-- @treturn string The node. +function _M.find(self, key) + return self.map[key] +end + +--- random picks a random node from the hashmap. +-- @treturn string,string A random node and its key or both nil. +function _M.random(self) + return get_random_node(self.map) +end + +--- random_except picks a random node from the hashmap, ignoring the nodes in the given table +-- @tparam {string, } ignore_nodes A table of nodes to ignore, the node needs to be the key, +-- the value needs to be set to true +-- @treturn string,string A random node and its key or both nil. +function _M.random_except(self, ignore_nodes) + local valid_nodes = {} + + -- avoid generating the map if no ignores were provided + if ignore_nodes == nil or util_tablelength(ignore_nodes) == 0 then + return get_random_node(self.map) + end + + -- generate valid endpoints + for key, endpoint in pairs(self.map) do + if not ignore_nodes[endpoint] then + valid_nodes[key] = endpoint + end + end + + return get_random_node(valid_nodes) +end + +return _M diff --git a/test/manifests/configuration-a.json b/test/manifests/configuration-a.json index 673b12696..fcd000ed1 100644 --- a/test/manifests/configuration-a.json +++ b/test/manifests/configuration-a.json @@ -54,6 +54,7 @@ }], "sessionAffinityConfig": { "name": "", + "mode": "", "cookieSessionAffinity": { "name": "" } @@ -126,6 +127,7 @@ }], "sessionAffinityConfig": { "name": "", + "mode": "", "cookieSessionAffinity": { "name": "" } @@ -191,6 +193,7 @@ }], "sessionAffinityConfig": { "name": "", + "mode": "", "cookieSessionAffinity": { "name": "" } diff --git a/test/manifests/configuration-b.json b/test/manifests/configuration-b.json index 21a1cfc18..40a8d27a4 100644 --- a/test/manifests/configuration-b.json +++ b/test/manifests/configuration-b.json @@ -54,6 +54,7 @@ }], 
"sessionAffinityConfig": { "name": "", + "mode": "", "cookieSessionAffinity": { "name": "" } @@ -126,6 +127,7 @@ }], "sessionAffinityConfig": { "name": "", + "mode": "", "cookieSessionAffinity": { "name": "" } @@ -191,6 +193,7 @@ }], "sessionAffinityConfig": { "name": "", + "mode": "", "cookieSessionAffinity": { "name": "" }