Used LuaJIT's table functions (table.new, table.clear, table.clone) for better performance.
This commit is contained in:
parent
42d3f68992
commit
a36961f9f9
1 changed file with 10 additions and 5 deletions
|
@ -1,12 +1,16 @@
|
||||||
local socket = ngx.socket.tcp
|
local socket = ngx.socket.tcp
|
||||||
local cjson = require("cjson.safe")
|
local cjson = require("cjson.safe")
|
||||||
local assert = assert
|
local assert = assert
|
||||||
|
local new_tab = require "table.new"
|
||||||
|
local clear_tab = require "table.clear"
|
||||||
|
local clone_tab = require "table.clone"
|
||||||
|
|
||||||
local metrics_batch = {}
|
|
||||||
-- if an Nginx worker processes more than (MAX_BATCH_SIZE/FLUSH_INTERVAL) RPS then it will start dropping metrics
|
-- if an Nginx worker processes more than (MAX_BATCH_SIZE/FLUSH_INTERVAL) RPS then it will start dropping metrics
|
||||||
local MAX_BATCH_SIZE = 10000
|
local MAX_BATCH_SIZE = 10000
|
||||||
local FLUSH_INTERVAL = 1 -- second
|
local FLUSH_INTERVAL = 1 -- second
|
||||||
|
|
||||||
|
local metrics_batch = new_tab(MAX_BATCH_SIZE, 0)
|
||||||
|
|
||||||
local _M = {}
|
local _M = {}
|
||||||
|
|
||||||
local function send(payload)
|
local function send(payload)
|
||||||
|
@ -46,8 +50,8 @@ local function flush(premature)
|
||||||
return
|
return
|
||||||
end
|
end
|
||||||
|
|
||||||
local current_metrics_batch = metrics_batch
|
local current_metrics_batch = clone_tab(metrics_batch)
|
||||||
metrics_batch = {}
|
clear_tab(metrics_batch)
|
||||||
|
|
||||||
local payload, err = cjson.encode(current_metrics_batch)
|
local payload, err = cjson.encode(current_metrics_batch)
|
||||||
if not payload then
|
if not payload then
|
||||||
|
@ -66,12 +70,13 @@ function _M.init_worker()
|
||||||
end
|
end
|
||||||
|
|
||||||
function _M.call()
|
function _M.call()
|
||||||
if #metrics_batch >= MAX_BATCH_SIZE then
|
local metrics_size = #metrics_batch
|
||||||
|
if metrics_size >= MAX_BATCH_SIZE then
|
||||||
ngx.log(ngx.WARN, "omitting metrics for the request, current batch is full")
|
ngx.log(ngx.WARN, "omitting metrics for the request, current batch is full")
|
||||||
return
|
return
|
||||||
end
|
end
|
||||||
|
|
||||||
table.insert(metrics_batch, metrics())
|
metrics_batch[metrics_size + 1] = metrics()
|
||||||
end
|
end
|
||||||
|
|
||||||
if _TEST then
|
if _TEST then
|
||||||
|
|
Loading…
Reference in a new issue