Skip to content

Commit

Permalink
refactor balancer into more testable and extensible interface
Browse files Browse the repository at this point in the history
  • Loading branch information
ElvinEfendi committed May 28, 2018
1 parent 1b5db4b commit e9dc275
Show file tree
Hide file tree
Showing 20 changed files with 363 additions and 462 deletions.
4 changes: 3 additions & 1 deletion .luacheckrc
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
std = 'ngx_lua'
globals = {'_'}
globals = {
'_TEST'
}
exclude_files = {'./rootfs/etc/nginx/lua/test/**/*.lua'}
1 change: 0 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ env:
- CHANGE_MINIKUBE_NONE_USER=true
- KUBERNETES_VERSION=v1.10.0
- DOCKER=docker
- BUSTED_VERSION=2.0.rc12
- GH_REF=github.com/kubernetes/ingress-nginx
- secure: LIS2XpZufWTcJ53jiRsSZy2Gi1EUJ1XmLg7z3f2ZHeMnyG2Jhk3GW4vod1FNru+PY4PWgddLdCdIl+jqOYXndFlbdAWF3/Oy5fEkYLXdYV7tdlHcPWDkqNFrfiyZ4guChN+b2Nk6FqU7o5fsZAIR7VAbgqNRF5XMo9Mhn/vhDCQRcnbXy7uq7JTrYUkqDbQoyYvT6b480GCY5gags1zp/xZfPDNZEe936o8i5IPTyiykRyNOXN/AH6kd3pR5e1xYgcvJ9KpSVPghcwFE7kJ4fOVMRhRG5ML+IyML+xD0jX43EMNoqRKZ/HS42kIMCInFbJEcxVde7DPNBZ7Y3GAqh7HO6qrE70Dn3ha6DID6zCoH2ArW39BxG4zempjn2VxYoMRGREyZszWQb++dwGoHmo5FHt6zvIrYBG0dA0H8ja9VkZkjFwtYTGHU1ooPzUfJK4O4VBayV8LqZibyZQR+GrmyQc0aagUY7J/fe4A2PJyI4DbkeZ7GX1ELj0ciDz4urQSzUc8l/T3aU3X+FuJItjgYtMLPmqcjA5uifDCtutE8Z9L2gSpanqUdvLSOozuxPho/KNl+2YlF7fXqPW3LnRf5mHD+NbOff306pvKlHJOb2Vmth+HBQ1XDzt/Cy5+sfwS3E0Vmh6UTq/NtkUXxwH10BDMF7FMVlQ4zdHQvyZ0=
- secure: rKDoy9IYYYy0fYBs4+9mwuBVq/TcxfFwMfE0ywYWhUUdgzrUYSJAwpoe/96EQ4YmESUefwC2nDNq4G3XzJKYOWf83PaIveb9Z//zmMrCQXjDuDBDLpwV3sXSh7evXiVDohJz4ogBCeMRUCMKYsyKBM9yWfa/iu+yI92dbphpK9peOKW6yBc0uspJlln4swN3GS2WT9LVuPY2Azv9U2UqrXufOPDKG/qEb/Vrn4yZ2lR/50r2k45e9nSvDoByvr10V8ubM5Zc0iP0vBuAUVRdByv6N53Q4gaBGapY6SxhIjIPC/h0rNnuT9EXp7MWaPT5FmBxLt9wnyleT9QhZJnFyaBYqFgcz/DKifYQkryY4M5dLMo/Rt3yATyAy8Y0df1TOoV2dKdqwOOwQ8bXB1wDfyrGxmQj9HY4Ffnphx3wPE1a+Sjuh+S5Epm7XJbPx5pZJqNO2hd4sTbk0Xp3gpPbihny2r/jtNwHl0wpFCfOM68RNrsVRlIwG3UhzbZvblbQ/M/mmWCdgzINjt07I2SGCJxfKG0e98Q49SKUoDoOgQTTRDqTC9IgOEDxyfAkT0Vr6BtlP88Nsgnf6kmboyigBrRAiaDQGTxn3SP6LnQI3CeopaRDYvFZe/rTwPXE9XlKoTn9FTWnAqF3MuWaLslDcDKYEh7OaYJjF01piu6g4Nc=
Expand Down
132 changes: 54 additions & 78 deletions rootfs/etc/nginx/lua/balancer.lua
Original file line number Diff line number Diff line change
@@ -1,81 +1,62 @@
local ngx_balancer = require("ngx.balancer")
local json = require("cjson")
local configuration = require("configuration")
local util = require("util")
local lrucache = require("resty.lrucache")
local round_robin = require("balancer.round_robin")
local chash = require("balancer.chash")
local sticky = require("balancer.sticky")
local ewma = require("balancer.ewma")
local resty_balancer = require("balancer.resty")

-- measured in seconds
-- for an Nginx worker to pick up the new list of upstream peers
-- it will take <the delay until controller POSTed the backend object to the Nginx endpoint> + BACKENDS_SYNC_INTERVAL
local BACKENDS_SYNC_INTERVAL = 1

local DEFAULT_LB_ALG = "round_robin"
local IMPLEMENTATIONS = {
round_robin = round_robin,
chash = chash,
sticky = sticky,
ewma = ewma,
}

local _M = {}
local balancers = {}

-- TODO(elvinefendi) we can probably avoid storing all backends here. We already store them in their respective
-- load balancer implementations
local backends, backends_err = lrucache.new(1024)
if not backends then
return error("failed to create the cache for backends: " .. (backends_err or "unknown"))
end

local function get_current_backend()
local backend_name = ngx.var.proxy_upstream_name
local backend = backends:get(backend_name)

if not backend then
-- TODO(elvinefendi) maybe force backend sync here?
ngx.log(ngx.WARN, "no backend configuration found for " .. tostring(backend_name))
end
local function get_implementation(backend)
local name = backend["load-balance"] or DEFAULT_LB_ALG

return backend
end

local function get_balancer(backend)
if not backend then
return nil
if backend["sessionAffinityConfig"] and backend["sessionAffinityConfig"]["name"] == "cookie" then
name = "sticky"
elseif backend["upstream-hash-by"] then
name = "chash"
end

local lb_alg = backend["load-balance"] or DEFAULT_LB_ALG
if resty_balancer.is_applicable(backend) then
return resty_balancer
elseif lb_alg ~= "ewma" then
if lb_alg ~= DEFAULT_LB_ALG then
ngx.log(ngx.WARN,
string.format("%s is not supported, falling back to %s", backend["load-balance"], DEFAULT_LB_ALG))
end
return resty_balancer
local implementation = IMPLEMENTATIONS[name]
if not implementation then
ngx.log(ngx.WARN, string.format("%s is not supported, falling back to %s", backend["load-balance"], DEFAULT_LB_ALG))
implementation = IMPLEMENTATIONS[DEFAULT_LB_ALG]
end

return ewma
end

local function balance()
local backend = get_current_backend()
local balancer = get_balancer(backend)
if not balancer then
return nil, nil
end

local endpoint = balancer.balance(backend)
if not endpoint then
return nil, nil
end

return endpoint.address, endpoint.port
return implementation
end

local function sync_backend(backend)
backends:set(backend.name, backend)
local implementation = get_implementation(backend)
local balancer = balancers[backend.name]

local balancer = get_balancer(backend)
if not balancer then
balancers[backend.name] = implementation:new(backend)
return
end
balancer.sync(backend)

if getmetatable(balancer) ~= implementation then
ngx.log(ngx.INFO,
string.format("LB algorithm changed from %s to %s, resetting the instance", balancer.name, implementation.name))
balancers[backend.name] = implementation:new(backend)
return
end

balancer:sync(backend)
end

local function sync_backends()
Expand All @@ -91,27 +72,8 @@ local function sync_backends()
end

for _, new_backend in pairs(new_backends) do
local backend = backends:get(new_backend.name)
local backend_changed = true

if backend then
backend_changed = not util.deep_compare(backend, new_backend)
end

if backend_changed then
sync_backend(new_backend)
end
end
end

local function after_balance()
local backend = get_current_backend()
local balancer = get_balancer(backend)
if not balancer then
return
sync_backend(new_backend)
end

balancer.after_balance()
end

function _M.init_worker()
Expand All @@ -124,15 +86,24 @@ end

function _M.call()
local phase = ngx.get_phase()
if phase == "log" then
after_balance()
if phase ~= "log" and phase ~= "balancer" then
ngx.log(ngx.ERR, "must be called in balancer or log, but was called in: " .. phase)
return
end
if phase ~= "balancer" then
return error("must be called in balancer or log, but was called in: " .. phase)

local backend_name = ngx.var.proxy_upstream_name
local balancer = balancers[backend_name]
if not balancer then
ngx.status = ngx.HTTP_SERVICE_UNAVAILABLE
return ngx.exit(ngx.status)
end

local host, port = balance()
if phase == "log" then
balancer:after_balance()
return
end

local host, port = balancer:balance()
if not host then
ngx.status = ngx.HTTP_SERVICE_UNAVAILABLE
return ngx.exit(ngx.status)
Expand All @@ -142,8 +113,13 @@ function _M.call()

local ok, err = ngx_balancer.set_current_peer(host, port)
if not ok then
ngx.log(ngx.ERR, string.format("error while setting current upstream peer to %s", tostring(err)))
ngx.log(ngx.ERR, "error while setting current upstream peer to " .. tostring(err))
end
end

if _TEST then
_M.get_implementation = get_implementation
_M.sync_backend = sync_backend
end

return _M
21 changes: 21 additions & 0 deletions rootfs/etc/nginx/lua/balancer/chash.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
local balancer_resty = require("balancer.resty")
local resty_chash = require("resty.chash")
local util = require("util")

local _M = balancer_resty:new({ factory = resty_chash, name = "chash" })

--- Create a consistent-hash balancer instance for the given backend.
-- Builds a resty.chash ring from the backend's endpoints and remembers the
-- nginx variable expression (`upstream-hash-by`) used to derive the hash key.
-- @param backend table: backend configuration with `endpoints` and
--   the "upstream-hash-by" annotation value.
-- @return table: new instance with this module as its metatable.
function _M.new(self, backend)
  self.__index = self
  local endpoint_nodes = util.get_nodes(backend.endpoints)
  local instance = {
    instance = self.factory:new(endpoint_nodes),
    hash_by = backend["upstream-hash-by"],
  }
  return setmetatable(instance, self)
end

--- Pick an endpoint for the current request via consistent hashing.
-- Evaluates the configured nginx-variable expression to obtain the hash key,
-- looks up the matching node in the chash ring, and splits the
-- "address:port" string into its two parts.
-- @return address, port of the chosen endpoint.
function _M.balance(self)
  local hash_key = util.lua_ngx_var(self.hash_by)
  return util.split_pair(self.instance:find(hash_key), ":")
end

return _M
24 changes: 18 additions & 6 deletions rootfs/etc/nginx/lua/balancer/ewma.lua
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ local PICK_SET_SIZE = 2

local ewma_lock = resty_lock:new("locks", {timeout = 0, exptime = 0.1})

local _M = {}
local _M = { name = "ewma" }

local function lock(upstream)
local _, err = ewma_lock:lock(upstream .. LOCK_KEY)
Expand Down Expand Up @@ -117,17 +117,18 @@ local function pick_and_score(peers, k)
return peers[lowest_score_index]
end

function _M.balance(backend)
local peers = backend.endpoints
function _M.balance(self)
local peers = self.peers
if #peers == 1 then
return peers[1]
end
local k = (#peers < PICK_SET_SIZE) and #peers or PICK_SET_SIZE
local peer_copy = util.deepcopy(peers)
return pick_and_score(peer_copy, k)
local endpoint = pick_and_score(peer_copy, k)
return endpoint.address, endpoint.port
end

function _M.after_balance()
function _M.after_balance(_)
local response_time = tonumber(util.get_first_value(ngx.var.upstream_response_time)) or 0
local connect_time = tonumber(util.get_first_value(ngx.var.upstream_connect_time)) or 0
local rtt = connect_time + response_time
Expand All @@ -139,10 +140,21 @@ function _M.after_balance()
get_or_update_ewma(upstream, rtt, true)
end

function _M.sync(_)
--- Synchronize this balancer instance with a freshly received backend object.
-- When the endpoint list has changed, the per-backend EWMA statistics stored
-- in the shared dicts are reset so that stale scores for removed peers do not
-- influence future picks.
-- @param backend table: new backend configuration (with `endpoints`).
function _M.sync(self, backend)
  local changed = not util.deep_compare(self.peers, backend.endpoints)
  if not changed then
    return
  end

  -- BUGFIX: remember the new endpoint list; without this, balance() keeps
  -- picking from the stale peers and every subsequent sync re-detects a
  -- "change" and needlessly flushes the EWMA state again.
  self.peers = backend.endpoints

  -- TODO: Reset state of EWMA per backend
  ngx.shared.balancer_ewma:flush_all()
  ngx.shared.balancer_ewma_last_touched_at:flush_all()
end

--- Instantiate an EWMA balancer bound to the given backend's endpoints.
-- @param backend table: backend configuration whose `endpoints` list the
--   instance will pick peers from.
-- @return table: new instance with this module as its metatable.
function _M.new(self, backend)
  self.__index = self
  local instance = setmetatable({}, self)
  instance.peers = backend.endpoints
  return instance
end

return _M
Loading

0 comments on commit e9dc275

Please sign in to comment.