Merge pull request #5361 from aledbf/clean
Cleanup parsing of annotations with lists
Commit: 1475648dca

2 changed files with 76 additions and 35 deletions
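For context on why the cleanup matters: strings.Split keeps empty items and surrounding whitespace, so a list-style ConfigMap or annotation value with a stray space or a trailing comma leaked those artifacts into the parsed slices. A minimal sketch of the old behaviour (the sample value is made up for illustration, not taken from the PR):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical comma-separated value with a stray space and a trailing comma.
	val := "10.0.0.0/8, 192.168.0.0/16,"

	fmt.Printf("%q\n", strings.Split(val, ","))
	// Output: ["10.0.0.0/8" " 192.168.0.0/16" ""]
	// Note the leading space and the empty trailing element.
}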
@@ -108,7 +108,7 @@ func ReadConfig(src map[string]string) config.Configuration {
 	//parse lua shared dict values
 	if val, ok := conf[luaSharedDictsKey]; ok {
 		delete(conf, luaSharedDictsKey)
-		lsd := strings.Split(val, ",")
+		lsd := splitAndTrimSpace(val, ",")
 		for _, v := range lsd {
 			v = strings.Replace(v, " ", "", -1)
 			results := strings.SplitN(v, ":", 2)
@@ -137,9 +137,10 @@ func ReadConfig(src map[string]string) config.Configuration {
 			luaSharedDicts[k] = v
 		}
 	}
+
 	if val, ok := conf[customHTTPErrors]; ok {
 		delete(conf, customHTTPErrors)
-		for _, i := range strings.Split(val, ",") {
+		for _, i := range splitAndTrimSpace(val, ",") {
 			j, err := strconv.Atoi(i)
 			if err != nil {
 				klog.Warningf("%v is not a valid http code: %v", i, err)
@@ -148,27 +149,32 @@ func ReadConfig(src map[string]string) config.Configuration {
 			}
 		}
 	}
+
 	if val, ok := conf[hideHeaders]; ok {
 		delete(conf, hideHeaders)
-		hideHeadersList = strings.Split(val, ",")
+		hideHeadersList = splitAndTrimSpace(val, ",")
 	}
+
 	if val, ok := conf[skipAccessLogUrls]; ok {
 		delete(conf, skipAccessLogUrls)
-		skipUrls = strings.Split(val, ",")
+		skipUrls = splitAndTrimSpace(val, ",")
 	}
+
 	if val, ok := conf[whitelistSourceRange]; ok {
 		delete(conf, whitelistSourceRange)
-		whiteList = append(whiteList, strings.Split(val, ",")...)
+		whiteList = append(whiteList, splitAndTrimSpace(val, ",")...)
 	}
+
 	if val, ok := conf[proxyRealIPCIDR]; ok {
 		delete(conf, proxyRealIPCIDR)
-		proxyList = append(proxyList, strings.Split(val, ",")...)
+		proxyList = append(proxyList, splitAndTrimSpace(val, ",")...)
 	} else {
 		proxyList = append(proxyList, "0.0.0.0/0")
 	}
+
 	if val, ok := conf[bindAddress]; ok {
 		delete(conf, bindAddress)
-		for _, i := range strings.Split(val, ",") {
+		for _, i := range splitAndTrimSpace(val, ",") {
 			ns := net.ParseIP(i)
 			if ns != nil {
 				if ing_net.IsIPV6(ns) {
@@ -184,15 +190,17 @@ func ReadConfig(src map[string]string) config.Configuration {
 
 	if val, ok := conf[blockCIDRs]; ok {
 		delete(conf, blockCIDRs)
-		blockCIDRList = strings.Split(val, ",")
+		blockCIDRList = splitAndTrimSpace(val, ",")
 	}
+
 	if val, ok := conf[blockUserAgents]; ok {
 		delete(conf, blockUserAgents)
-		blockUserAgentList = strings.Split(val, ",")
+		blockUserAgentList = splitAndTrimSpace(val, ",")
 	}
+
 	if val, ok := conf[blockReferers]; ok {
 		delete(conf, blockReferers)
-		blockRefererList = strings.Split(val, ",")
+		blockRefererList = splitAndTrimSpace(val, ",")
 	}
 
 	if val, ok := conf[httpRedirectCode]; ok {
@@ -250,15 +258,12 @@ func ReadConfig(src map[string]string) config.Configuration {
 		delete(conf, globalAuthResponseHeaders)
 
 		if len(val) != 0 {
-			harr := strings.Split(val, ",")
+			harr := splitAndTrimSpace(val, ",")
 			for _, header := range harr {
-				header = strings.TrimSpace(header)
-				if len(header) > 0 {
-					if !authreq.ValidHeader(header) {
-						klog.Warningf("Global auth location denied - %v.", "invalid headers list")
-					} else {
-						responseHeaders = append(responseHeaders, header)
-					}
+				if !authreq.ValidHeader(header) {
+					klog.Warningf("Global auth location denied - %v.", "invalid headers list")
+				} else {
+					responseHeaders = append(responseHeaders, header)
 				}
 			}
 		}
@@ -267,19 +272,16 @@ func ReadConfig(src map[string]string) config.Configuration {
 
 	if val, ok := conf[globalAuthRequestRedirect]; ok {
 		delete(conf, globalAuthRequestRedirect)
-
 		to.GlobalExternalAuth.RequestRedirect = val
 	}
 
 	if val, ok := conf[globalAuthSnippet]; ok {
 		delete(conf, globalAuthSnippet)
-
 		to.GlobalExternalAuth.AuthSnippet = val
 	}
 
 	if val, ok := conf[globalAuthCacheKey]; ok {
 		delete(conf, globalAuthCacheKey)
-
 		to.GlobalExternalAuth.AuthCacheKey = val
 	}
 
@@ -318,23 +320,17 @@ func ReadConfig(src map[string]string) config.Configuration {
 
 	// Nginx Status whitelist
 	if val, ok := conf[nginxStatusIpv4Whitelist]; ok {
-		whitelist := make([]string, 0)
-		whitelist = append(whitelist, strings.Split(val, ",")...)
-		to.NginxStatusIpv4Whitelist = whitelist
-
+		to.NginxStatusIpv4Whitelist = splitAndTrimSpace(val, ",")
 		delete(conf, nginxStatusIpv4Whitelist)
 	}
 	if val, ok := conf[nginxStatusIpv6Whitelist]; ok {
-		whitelist := make([]string, 0)
-		whitelist = append(whitelist, strings.Split(val, ",")...)
-		to.NginxStatusIpv6Whitelist = whitelist
-
+		to.NginxStatusIpv6Whitelist = splitAndTrimSpace(val, ",")
 		delete(conf, nginxStatusIpv6Whitelist)
 	}
 
 	if val, ok := conf[workerProcesses]; ok {
 		to.WorkerProcesses = val
 
 		if val == "auto" {
 			to.WorkerProcesses = strconv.Itoa(runtime.NumCPU())
 		}
@@ -343,11 +339,7 @@ func ReadConfig(src map[string]string) config.Configuration {
 	}
 
 	if val, ok := conf[plugins]; ok {
-		to.Plugins = strings.Split(val, ",")
-		for i := range to.Plugins {
-			to.Plugins[i] = strings.TrimSpace(to.Plugins[i])
-		}
-
+		to.Plugins = splitAndTrimSpace(val, ",")
 		delete(conf, plugins)
 	}
 
@@ -405,3 +397,16 @@ func filterErrors(codes []int) []int {
 
 	return fa
 }
+
+func splitAndTrimSpace(s, sep string) []string {
+	f := func(c rune) bool {
+		return strings.EqualFold(string(c), sep)
+	}
+
+	values := strings.FieldsFunc(s, f)
+	for i := range values {
+		values[i] = strings.TrimSpace(values[i])
+	}
+
+	return values
+}
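The new helper is built on strings.FieldsFunc rather than strings.Split, so consecutive or trailing separators produce no empty fields, and every field is trimmed afterwards; the strings.EqualFold comparison amounts to a case-insensitive match of a single-rune separator, which makes no difference for ",". A self-contained sketch of the new behaviour, reusing the function exactly as added above (the sample value is the same made-up one as in the earlier sketch):

package main

import (
	"fmt"
	"strings"
)

// splitAndTrimSpace is copied verbatim from the hunk above: split on sep,
// drop empty fields, and trim whitespace from each remaining element.
func splitAndTrimSpace(s, sep string) []string {
	f := func(c rune) bool {
		return strings.EqualFold(string(c), sep)
	}

	values := strings.FieldsFunc(s, f)
	for i := range values {
		values[i] = strings.TrimSpace(values[i])
	}

	return values
}

func main() {
	val := "10.0.0.0/8, 192.168.0.0/16," // same illustrative value as before

	fmt.Printf("%q\n", splitAndTrimSpace(val, ","))
	// Output: ["10.0.0.0/8" "192.168.0.0/16"]
	// The whitespace is trimmed and the empty trailing element is dropped.
}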
@@ -358,3 +358,39 @@ func TestLuaSharedDictsParsing(t *testing.T) {
 		}
 	}
 }
+
+func TestSplitAndTrimSpace(t *testing.T) {
+	testsCases := []struct {
+		name   string
+		input  string
+		expect []string
+	}{
+		{
+			name:   "empty string",
+			input:  "",
+			expect: []string{},
+		},
+		{
+			name:   "two elements",
+			input:  "el1,el2",
+			expect: []string{"el1", "el2"},
+		},
+		{
+			name:   "two elements with spaces",
+			input:  " el1, el2",
+			expect: []string{"el1", "el2"},
+		},
+		{
+			name:   "empty elements with spaces",
+			input:  " el1, el2,el3,,",
+			expect: []string{"el1", "el2", "el3"},
+		},
+	}
+
+	for _, tc := range testsCases {
+		data := splitAndTrimSpace(tc.input, ",")
+		if !reflect.DeepEqual(data, tc.expect) {
+			t.Errorf("Testing %v. Expected \"%v\" but \"%v\" was returned", tc.name, tc.expect, data)
+		}
+	}
+}
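One subtlety in the "empty string" test case above: strings.FieldsFunc returns an empty but non-nil slice for empty input, which is why the reflect.DeepEqual comparison against []string{} succeeds; a nil slice would not compare equal. A small check of that assumption:

package main

import (
	"fmt"
	"reflect"
	"strings"
)

func main() {
	empty := strings.FieldsFunc("", func(r rune) bool { return r == ',' })

	fmt.Println(empty == nil)                                 // false: FieldsFunc allocates an empty slice
	fmt.Println(reflect.DeepEqual(empty, []string{}))         // true: both are non-nil and empty
	fmt.Println(reflect.DeepEqual([]string(nil), []string{})) // false: nil vs non-nil differ under DeepEqual
}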