diff --git a/COPYRIGHT b/COPYRIGHT index e519faf64b32..6c79ac40cbfa 100644 --- a/COPYRIGHT +++ b/COPYRIGHT @@ -137,3 +137,39 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. %%%%%%%%% + +lua-resty-healthcheck + +https://github.com/Kong/lua-resty-healthcheck + +Apache License 2 + +Copyright 2017-2018 Kong Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +%%%%%%%%% + +lua-resty-worker-events + +https://github.com/Kong/lua-resty-worker-events + +Apache License 2 + +This module is licensed under the Apache 2.0 license. + +Copyright (C) 2016, by Thijs Schreijer, Kong Inc. + +All rights reserved. 
+ +%%%%%%%%% diff --git a/Makefile b/Makefile index d1339ea61e94..6dc42789f95b 100644 --- a/Makefile +++ b/Makefile @@ -24,7 +24,6 @@ help: ### dev: Create a development ENV .PHONY: dev dev: - ./utils/update_nginx_conf_dev.sh ifeq ($(UNAME),Darwin) luarocks install --lua-dir=$(LUA_JIT_DIR) rockspec/apisix-dev-0.rockspec --tree=deps --only-deps --local else ifneq ($(LUAROCKS_VER),'luarocks 3.') diff --git a/bin/apisix b/bin/apisix index 6c7476d11d21..0ac0f8a73198 100755 --- a/bin/apisix +++ b/bin/apisix @@ -90,6 +90,7 @@ http { lua_shared_dict plugin-limit-count 10m; lua_shared_dict prometheus-metrics 10m; lua_shared_dict plugin-limit-conn 10m; + lua_shared_dict worker-events 10m; lua_ssl_verify_depth 5; ssl_session_timeout 86400; diff --git a/conf/nginx.conf b/conf/nginx.conf index d409320a26e7..555d93038a57 100644 --- a/conf/nginx.conf +++ b/conf/nginx.conf @@ -1,7 +1,6 @@ master_process on; worker_processes 1; -worker_cpu_affinity auto; error_log logs/error.log warn; pid logs/nginx.pid; @@ -22,10 +21,12 @@ http { lua_package_path "$prefix/deps/share/lua/5.1/?.lua;$prefix/lua/?.lua;/usr/share/lua/5.1/?.lua;;"; lua_package_cpath "$prefix/deps/lib64/lua/5.1/?.so;$prefix/deps/lib/lua/5.1/?.so;/usr/lib64/lua/5.1/?.so;;"; - lua_shared_dict plugin-limit-req 10m; - lua_shared_dict plugin-limit-count 10m; - lua_shared_dict prometheus-metrics 10m; - lua_shared_dict plugin-limit-conn 10m; + lua_shared_dict plugin-limit-req 10m; + lua_shared_dict plugin-limit-count 10m; + lua_shared_dict prometheus-metrics 10m; + lua_shared_dict plugin-limit-conn 10m; + lua_shared_dict upstream-healthcheck 32m; + lua_shared_dict worker-events 10m; lua_ssl_verify_depth 5; ssl_session_timeout 86400; @@ -37,6 +38,9 @@ http { lua_http10_buffering off; + lua_regex_match_limit 100000; + lua_regex_cache_max_entries 8192; + log_format main '$remote_addr - $remote_user [$time_local] $http_host "$request" $status $body_bytes_sent $request_time "$http_referer" "$http_user_agent" $upstream_addr 
$upstream_status $upstream_response_time'; access_log logs/access.log main buffer=32768 flush=3; diff --git a/lua/apisix.lua b/lua/apisix.lua index 284570d57d7e..ae480c1f0bf9 100644 --- a/lua/apisix.lua +++ b/lua/apisix.lua @@ -4,7 +4,6 @@ local require = require local core = require("apisix.core") local router = require("apisix.http.route").get local plugin = require("apisix.plugin") -local load_balancer = require("apisix.http.balancer").run local service_fetch = require("apisix.http.service").get local ssl_match = require("apisix.http.ssl").match local admin_init = require("apisix.admin.init") @@ -15,6 +14,8 @@ local ngx_exit = ngx.exit local ngx_ERROR = ngx.ERROR local math = math local match_opts = {} +local error = error +local load_balancer local _M = {version = 0.1} @@ -44,6 +45,14 @@ end function _M.http_init_worker() + local we = require("resty.worker.events") + local ok, err = we.configure({shm = "worker-events", interval = 0.1}) + if not ok then + error("failed to init worker event: " .. err) + end + + load_balancer = require("apisix.http.balancer").run + require("apisix.admin.init").init_worker() require("apisix.http.route").init_worker() diff --git a/lua/apisix/core/schema.lua b/lua/apisix/core/schema.lua index 640f0c7522da..5cf64a39afd0 100644 --- a/lua/apisix/core/schema.lua +++ b/lua/apisix/core/schema.lua @@ -45,66 +45,37 @@ local id_schema = { } } --- todo: chash and roundrobin have different properties, we may support --- this limitation later. 
- --- { --- "definitions": { --- "nodes": { --- "patternProperties": { --- ".*": { --- "minimum": 1, --- "type": "integer" --- } --- }, --- "minProperties": 1, --- "type": "object" --- } --- }, --- "type": "object", --- "anyOf": [ --- { --- "properties": { --- "type": { --- "type": "string", --- "enum": [ --- "roundrobin" --- ] --- }, --- "nodes": { --- "$ref": "#/definitions/nodes" --- } --- }, --- "required": [ --- "type", --- "nodes" --- ], --- "additionalProperties": false --- }, --- { --- "properties": { --- "type": { --- "type": "string", --- "enum": [ --- "chash" --- ] --- }, --- "nodes": { --- "$ref": "#/definitions/nodes" --- }, --- "key": { --- "type": "string" --- } --- }, --- "required": [ --- "key", --- "type", --- "nodes" --- ], --- "additionalProperties": false --- } --- ] --- } + +-- todo: support all option +-- default value: https://github.com/Kong/lua-resty-healthcheck/ +-- blob/master/lib/resty/healthcheck.lua#L1121 +local health_checker = { + type = "object", + properties = { + active = { + type = "object", + properties = { + http_path = {type = "string"}, + host = {type = "string"}, + healthy = { + type = "object", + properties = { + interval = {type = "integer", minimum = 1}, + successes = {type = "integer", minimum = 1} + } + }, + unhealthy = { + type = "object", + properties = { + interval = {type = "integer", minimum = 1}, + http_failures = {type = "integer", minimum = 1} + } + } + } + } + } +} + local upstream_schema = { type = "object", @@ -126,6 +97,7 @@ local upstream_schema = { type = "string", enum = {"chash", "roundrobin"} }, + checks = health_checker, key = { description = "the key of chash for dynamic load balancing", type = "string", diff --git a/lua/apisix/core/table.lua b/lua/apisix/core/table.lua index ee8bf22434e5..c82225781bb3 100644 --- a/lua/apisix/core/table.lua +++ b/lua/apisix/core/table.lua @@ -1,3 +1,6 @@ +local newproxy = newproxy +local getmetatable = getmetatable +local setmetatable = setmetatable local select = 
select local _M = { @@ -31,4 +34,13 @@ function _M.set(tab, ...) end +-- only works under lua51 or luajit +function _M.setmt__gc(t, mt) + local prox = newproxy(true) + getmetatable(prox).__gc = function() mt.__gc(t) end + t[prox] = true + return setmetatable(t, mt) +end + + return _M diff --git a/lua/apisix/http/balancer.lua b/lua/apisix/http/balancer.lua index f68693af768b..955ce51ef090 100644 --- a/lua/apisix/http/balancer.lua +++ b/lua/apisix/http/balancer.lua @@ -1,4 +1,5 @@ -local roundrobin = require("resty.roundrobin") +local healthcheck = require("resty.healthcheck") +local roundrobin = require("resty.roundrobin") local resty_chash = require("resty.chash") local balancer = require("ngx.balancer") local core = require("apisix.core") @@ -14,7 +15,7 @@ local tostring = tostring local module_name = "balancer" -local lrucache_get = core.lrucache.new({ttl = 300, count = 256}) +local lrucache_server_picker = core.lrucache.new({ttl = 300, count = 256}) local _M = { @@ -23,12 +24,52 @@ local _M = { } +local function parse_addr(addr) + local pos = find_str(addr, ":", 1, true) + if not pos then + return addr, 80 + end + + local host = sub_str(addr, 1, pos - 1) + local port = sub_str(addr, pos + 1) + return host, tonumber(port) +end + + +local function fetch_health_nodes(upstream) + if not upstream.checks then + return upstream.nodes + end + + local host = upstream.checks and upstream.checks.host + local checker = upstream.checker + local up_nodes = core.table.new(0, #upstream.nodes) + + for addr, weight in pairs(upstream.nodes) do + local ip, port = parse_addr(addr) + local ok = checker:get_target_status(ip, port, host) + if ok then + up_nodes[addr] = weight + end + end + + if core.table.nkeys(up_nodes) == 0 then + core.log.warn("all upstream nodes are unhealthy, use default") + up_nodes = upstream.nodes + end + return up_nodes +end + + local function create_server_picker(upstream) core.log.info("create create_obj, type: ", upstream.type, " nodes: ", 
core.json.delay_encode(upstream.nodes)) if upstream.type == "roundrobin" then - local picker = roundrobin:new(upstream.nodes) + local up_nodes = fetch_health_nodes(upstream) + core.log.info("upstream nodes: ", core.json.delay_encode(up_nodes)) + + local picker = roundrobin:new(up_nodes) return { get = function () return picker:find() @@ -37,10 +78,11 @@ local function create_server_picker(upstream) end if upstream.type == "chash" then + local up_nodes = fetch_health_nodes(upstream) local str_null = str_char(0) local servers, nodes = {}, {} - for serv, weight in pairs(upstream.nodes) do + for serv, weight in pairs(up_nodes) do local id = str_gsub(serv, ":", str_null) servers[id] = serv @@ -62,18 +104,6 @@ local function create_server_picker(upstream) end -local function parse_addr(addr) - local pos = find_str(addr, ":", 1, true) - if not pos then - return addr, 80 - end - - local host = sub_str(addr, 1, pos - 1) - local port = sub_str(addr, pos + 1) - return host, tonumber(port) -end - - local function pick_server(route, ctx) core.log.info("route: ", core.json.delay_encode(route, true)) core.log.info("ctx: ", core.json.delay_encode(ctx, true)) @@ -107,8 +137,36 @@ local function pick_server(route, ctx) key = upstream.type .. "#route_" .. 
route.value.id end - local server_picker = lrucache_get(key, version, - create_server_picker, upstream) + if upstream.checks and not upstream.checker then + local checker = healthcheck.new({ + name = "upstream", + shm_name = "upstream-healthcheck", + checks = upstream.checks, + }) + + upstream.checker = checker + + -- stop the checker by `gc` + core.table.setmt__gc(upstream, {__gc=function() checker:stop() end}) + + for addr, weight in pairs(upstream.nodes) do + local ip, port = parse_addr(addr) + local ok, err = checker:add_target(ip, port, upstream.checks.host) + if not ok then + core.log.error("failed to add new health check target: ", addr, + " err: ", err) + end + end + + core.log.warn("create checks obj for upstream, check") + end + + if upstream.checks then + version = version .. "#" .. upstream.checker.status_ver + end + + local server_picker = lrucache_server_picker(key, version, + create_server_picker, upstream) if not server_picker then return nil, nil, "failed to fetch server picker" end diff --git a/rockspec/apisix-dev-0.rockspec b/rockspec/apisix-dev-0.rockspec index 195b67dd6ac8..fea5b3415a1e 100644 --- a/rockspec/apisix-dev-0.rockspec +++ b/rockspec/apisix-dev-0.rockspec @@ -22,6 +22,7 @@ dependencies = { "lua-resty-ngxvar = 0.3", "lua-resty-jit-uuid = 0.0.7", "rapidjson = 0.6.0-1", + "lua-resty-healthcheck-iresty = 1.0.0", } build = { diff --git a/t/APISix.pm b/t/APISix.pm index 3e6ec8fdc386..215baa0d1583 100644 --- a/t/APISix.pm +++ b/t/APISix.pm @@ -46,10 +46,12 @@ _EOC_ lua_package_path "$pwd/deps/share/lua/5.1/?.lua;$pwd/lua/?.lua;$pwd/t/?.lua;/usr/share/lua/5.1/?.lua;;"; lua_package_cpath "$pwd/deps/lib/lua/5.1/?.so;$pwd/deps/lib64/lua/5.1/?.so;/usr/lib64/lua/5.1/?.so;;"; - lua_shared_dict plugin-limit-req 10m; - lua_shared_dict plugin-limit-count 10m; - lua_shared_dict plugin-limit-conn 10m; - lua_shared_dict prometheus-metrics 10m; + lua_shared_dict plugin-limit-req 10m; + lua_shared_dict plugin-limit-count 10m; + lua_shared_dict 
plugin-limit-conn 10m; + lua_shared_dict prometheus-metrics 10m; + lua_shared_dict upstream-healthcheck 32m; + lua_shared_dict worker-events 10m; resolver ipv6=off local=on; resolver_timeout 5; diff --git a/t/admin/health-check.t b/t/admin/health-check.t new file mode 100644 index 000000000000..5020cbc35ccd --- /dev/null +++ b/t/admin/health-check.t @@ -0,0 +1,85 @@ +use t::APISix 'no_plan'; + +repeat_each(1); +no_long_string(); +no_root_location(); +no_shuffle(); +log_level("info"); + +run_tests; + +__DATA__ + +=== TEST 1: sanity +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "methods": ["GET"], + "upstream": { + "nodes": { + "127.0.0.1:8080": 1 + }, + "type": "roundrobin", + "checks": { + "active": { + "http_path": "/status", + "host": "foo.com", + "healthy": { + "interval": 2, + "successes": 1 + }, + "unhealthy": { + "interval": 1, + "http_failures": 2 + } + } + } + }, + "uri": "/index.html" + }]], + [[{ + "node": { + "value": { + "methods": ["GET"], + "uri": "/index.html", + "upstream": { + "nodes": { + "127.0.0.1:8080": 1 + }, + "type": "roundrobin", + "checks": { + "active": { + "http_path": "/status", + "host": "foo.com", + "healthy": { + "interval": 2, + "successes": 1 + }, + "unhealthy": { + "interval": 1, + "http_failures": 2 + } + } + } + } + }, + "key": "/apisix/routes/1" + }, + "action": "set" + }]] + ) + + ngx.status = code + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed +--- no_error_log +[error] diff --git a/t/lib/server.lua b/t/lib/server.lua index 5f675e5a5541..14f9bca4f622 100644 --- a/t/lib/server.lua +++ b/t/lib/server.lua @@ -17,6 +17,11 @@ function _M.limit_conn() end +function _M.status() + ngx.say("ok") +end + + function _M.go() local action = string.sub(ngx.var.uri, 2) if not _M[action] then diff --git a/t/node/healthcheck.t b/t/node/healthcheck.t new file mode 100644 index 
000000000000..781a692f536b --- /dev/null +++ b/t/node/healthcheck.t @@ -0,0 +1,415 @@ +BEGIN { + if ($ENV{TEST_NGINX_CHECK_LEAK}) { + $SkipReason = "unavailable for the hup tests"; + + } else { + $ENV{TEST_NGINX_USE_HUP} = 1; + undef $ENV{TEST_NGINX_USE_STAP}; + } +} + +use t::APISix 'no_plan'; + +repeat_each(1); +log_level('info'); +no_root_location(); +no_shuffle(); + +run_tests(); + +__DATA__ + +=== TEST 1: set route(two healthy upstream nodes) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "uri": "/server_port", + "upstream": { + "type": "roundrobin", + "nodes": { + "127.0.0.1:1980": 1, + "127.0.0.1:1981": 1 + }, + "checks": { + "active": { + "http_path": "/status", + "host": "foo.com", + "healthy": { + "interval": 1, + "successes": 1 + }, + "unhealthy": { + "interval": 1, + "http_failures": 2 + } + } + } + } + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed +--- no_error_log +[error] + + + +=== TEST 2: hit routes (two healthy nodes) +--- config + location /t { + content_by_lua_block { + ngx.sleep(2) -- wait for sync + + local http = require "resty.http" + local uri = "http://127.0.0.1:" .. ngx.var.server_port + .. 
"/server_port" + + local ports_count = {} + for i = 1, 12 do + local httpc = http.new() + local res, err = httpc:request_uri(uri, {method = "GET"}) + if not res then + ngx.say(err) + return + end + ports_count[res.body] = (ports_count[res.body] or 0) + 1 + end + + local ports_arr = {} + for port, count in pairs(ports_count) do + table.insert(ports_arr, {port = port, count = count}) + end + + local function cmd(a, b) + return a.port > b.port + end + table.sort(ports_arr, cmd) + + ngx.say(require("cjson").encode(ports_arr)) + ngx.exit(200) + } + } +--- request +GET /t +--- response_body +[{"count":6,"port":"1981"},{"count":6,"port":"1980"}] +--- no_error_log +[error] +--- timeout: 6 + + + +=== TEST 3: set route(two upstream node: one healthy + one unhealthy) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "uri": "/server_port", + "upstream": { + "type": "roundrobin", + "nodes": { + "127.0.0.1:1980": 1, + "127.0.0.1:1970": 1 + }, + "checks": { + "active": { + "http_path": "/status", + "host": "foo.com", + "healthy": { + "interval": 1, + "successes": 1 + }, + "unhealthy": { + "interval": 1, + "http_failures": 2 + } + } + } + } + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed +--- no_error_log +[error] + + + +=== TEST 4: hit routes (two upstream node: one healthy + one unhealthy) +--- config + location /t { + content_by_lua_block { + local http = require "resty.http" + local uri = "http://127.0.0.1:" .. ngx.var.server_port + .. 
"/server_port" + + do + local httpc = http.new() + local res, err = httpc:request_uri(uri, {method = "GET"}) + end + + ngx.sleep(2.5) + + local ports_count = {} + for i = 1, 12 do + local httpc = http.new() + local res, err = httpc:request_uri(uri, {method = "GET"}) + if not res then + ngx.say(err) + return + end + + ports_count[res.body] = (ports_count[res.body] or 0) + 1 + end + + local ports_arr = {} + for port, count in pairs(ports_count) do + table.insert(ports_arr, {port = port, count = count}) + end + + local function cmd(a, b) + return a.port > b.port + end + table.sort(ports_arr, cmd) + + ngx.say(require("cjson").encode(ports_arr)) + ngx.exit(200) + } + } +--- request +GET /t +--- response_body +[{"count":12,"port":"1980"}] +--- grep_error_log eval +qr/\[error\].*/ +--- grep_error_log_out eval +qr/Connection refused\) while connecting to upstream/ +--- timeout: 5 + + + +=== TEST 5: chash route (two healthy nodes) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "uri": "/server_port", + "upstream": { + "type": "chash", + "nodes": { + "127.0.0.1:1981": 1, + "127.0.0.1:1980": 1 + }, + "key": "remote_addr", + "checks": { + "active": { + "http_path": "/status", + "host": "foo.com", + "healthy": { + "interval": 1, + "successes": 1 + }, + "unhealthy": { + "interval": 1, + "http_failures": 2 + } + } + } + } + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed +--- no_error_log +[error] + + + +=== TEST 6: hit routes (two healthy nodes) +--- config + location /t { + content_by_lua_block { + ngx.sleep(2) -- wait for sync + + local http = require "resty.http" + local uri = "http://127.0.0.1:" .. ngx.var.server_port + .. 
"/server_port" + + local ports_count = {} + for i = 1, 12 do + local httpc = http.new() + local res, err = httpc:request_uri(uri, {method = "GET"}) + if not res then + ngx.say(err) + return + end + ports_count[res.body] = (ports_count[res.body] or 0) + 1 + end + + local ports_arr = {} + for port, count in pairs(ports_count) do + table.insert(ports_arr, {port = port, count = count}) + end + + local function cmd(a, b) + return a.port > b.port + end + table.sort(ports_arr, cmd) + + ngx.say(require("cjson").encode(ports_arr)) + ngx.exit(200) + } + } +--- request +GET /t +--- response_body +[{"count":12,"port":"1980"}] +--- no_error_log +[error] +--- timeout: 6 + + + +=== TEST 7: chash route (upstream nodes: 1 healthy + 8 unhealthy) +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "uri": "/server_port", + "upstream": { + "type": "chash", + "nodes": { + "127.0.0.1:1980": 1, + "127.0.0.1:1970": 1, + "127.0.0.1:1971": 1, + "127.0.0.1:1972": 1, + "127.0.0.1:1973": 1, + "127.0.0.1:1974": 1, + "127.0.0.1:1975": 1, + "127.0.0.1:1976": 1, + "127.0.0.1:1977": 1 + }, + "key": "remote_addr", + "checks": { + "active": { + "http_path": "/status", + "host": "foo.com", + "healthy": { + "interval": 1, + "successes": 1 + }, + "unhealthy": { + "interval": 1, + "http_failures": 2 + } + } + } + } + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed +--- no_error_log +[error] + + + +=== TEST 8: hit routes (upstream nodes: 1 healthy + 8 unhealthy) +--- config + location /t { + content_by_lua_block { + local http = require "resty.http" + local uri = "http://127.0.0.1:" .. ngx.var.server_port + .. 
"/server_port" + + do + local httpc = http.new() + local res, err = httpc:request_uri(uri, {method = "GET"}) + end + + ngx.sleep(2.5) + + local ports_count = {} + for i = 1, 12 do + local httpc = http.new() + local res, err = httpc:request_uri(uri, {method = "GET"}) + if not res then + ngx.say(err) + return + end + + ports_count[res.body] = (ports_count[res.body] or 0) + 1 + end + + local ports_arr = {} + for port, count in pairs(ports_count) do + table.insert(ports_arr, {port = port, count = count}) + end + + local function cmd(a, b) + return a.port > b.port + end + table.sort(ports_arr, cmd) + + ngx.say(require("cjson").encode(ports_arr)) + ngx.exit(200) + } + } +--- request +GET /t +--- response_body +[{"count":12,"port":"1980"}] +--- grep_error_log eval +qr/\[error\].*/ +--- grep_error_log_out eval +qr/Connection refused\) while connecting to upstream/ +--- timeout: 5 diff --git a/utils/lj-releng b/utils/lj-releng index bf1264bdadaa..2a7b241fb8d9 100755 --- a/utils/lj-releng +++ b/utils/lj-releng @@ -178,7 +178,7 @@ sub process_file { |pairs|ipairs|assert|module|package |coroutine|[gs]etfenv|next|rawget|rawset |loadstring|dofile - |rawlen|select|arg|bit|debug|ngx|ndk)$/x) + |rawlen|select|arg|bit|debug|ngx|ndk|newproxy)$/x) { next; } diff --git a/utils/update_nginx_conf_dev.sh b/utils/update_nginx_conf_dev.sh deleted file mode 100755 index c13419800c2b..000000000000 --- a/utils/update_nginx_conf_dev.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh - -lua_version=`lua -e "print(_VERSION)" 2>/dev/null | grep -o -E "(5.[0-9])"` - -if [ -z "$lua_version" ]; then - echo "Lua 5.x environment (luarocks included) should be installed in advance." - exit 1 -elif [ $lua_version = "5.1" ];then - echo "Current Lua version is 5.1, skip to update conf/nginx.conf." - exit -fi - -sed s"?lua/5.1?lua/$lua_version?" conf/nginx.conf > conf/nginx.conf.tmp -mv conf/nginx.conf.tmp conf/nginx.conf -echo "updated nginx.conf"