speedup subscribe

Use the new Lua subscription script based on 大雕's source code. Because country-flag matching has been added it is not as fast as the original script, but it is still a huge improvement.
jerrykuku 2020-01-18 19:00:20 +08:00
parent 27f8138195
commit 1ddf07e847
6 changed files with 354 additions and 274 deletions
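The updater is now a Lua script rather than a shell script: both call sites in this diff (the LuCI controller and the cron entry) start it with /usr/bin/lua /usr/share/vssr/subscribe.lua. As a sketch of a manual run, using the exact command the controller issues below (the redirect target is presumably what the web UI polls for progress):

nohup /usr/bin/lua /usr/share/vssr/subscribe.lua >/www/check_update.htm 2>/dev/null &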

View File

@@ -1,8 +1,8 @@
include $(TOPDIR)/rules.mk
PKG_NAME:=luci-app-vssr
-PKG_VERSION:=1.02
-PKG_RELEASE:=20191214-3
+PKG_VERSION:=1.03
+PKG_RELEASE:=20200118-1
PKG_CONFIG_DEPENDS:= CONFIG_PACKAGE_$(PKG_NAME)_INCLUDE_V2ray \
CONFIG_PACKAGE_$(PKG_NAME)_INCLUDE_ShadowsocksR_Server \

View File

@@ -78,7 +78,7 @@ function get_subscribe()
end
luci.sys.call('uci commit vssr')
luci.sys.call(
"nohup /usr/share/vssr/subscribe.sh >/www/check_update.htm 2>/dev/null &")
"nohup /usr/bin/lua /usr/share/vssr/subscribe.lua >/www/check_update.htm 2>/dev/null &")
e.error = 0
else
e.error = 1

View File

@@ -9,7 +9,7 @@ math.randomseed(os.time())
<div class="pure-g">
<div class="pure-u-1-2">
<span class="flag"><img src="/luci-static/vssr/flags/4x3/un.svg" class="pure-img"></span> <span
class="status-info"></span>
class="status-info">获取中...</span>
</div>
<div class="pure-u-1-2">
<div class="icon-con">

View File

@@ -48,7 +48,7 @@ add_cron() {
sed -i '/vssr.log/d' $CRON_FILE
echo '0 1 * * 0 echo "" > /tmp/vssr.log' >>$CRON_FILE
[ -n "$(grep -w "/usr/share/vssr/subscribe.sh" $CRON_FILE)" ] && sed -i '/\/usr\/share\/vssr\/subscribe.sh/d' $CRON_FILE
-[ $(uci_get_by_type server_subscribe auto_update 0) -eq 1 ] && echo "0 $(uci_get_by_type server_subscribe auto_update_time) * * * /usr/share/vssr/subscribe.sh" >>$CRON_FILE
+[ $(uci_get_by_type server_subscribe auto_update 0) -eq 1 ] && echo "0 $(uci_get_by_type server_subscribe auto_update_time) * * * /usr/bin/lua /usr/share/vssr/subscribe.lua" >> $CRON_FILE
[ -z "$(grep -w "/usr/share/vssr/update.sh" $CRON_FILE)" ] && echo "0 5 * * 0 /usr/share/vssr/update.sh" >>$CRON_FILE
crontab $CRON_FILE
}

View File

@@ -0,0 +1,349 @@
#!/usr/bin/lua
------------------------------------------------
-- This file is part of the luci-app-ssr-plus subscribe.lua
-- @author William Chan <root@williamchan.me>
------------------------------------------------
require 'nixio'
require 'luci.util'
require 'luci.jsonc'
require 'luci.sys'
require 'luci.model.uci' -- luci.model.uci.cursor() is used below, so require it explicitly
-- these string and table functions are called in tight loops below,
-- so caching them in locals is worth the effort
local tinsert = table.insert
local ssub, slen, schar, srep, sbyte, sformat, sgsub =
string.sub, string.len, string.char, string.rep, string.byte, string.format, string.gsub
local cache = {}
local nodeResult = setmetatable({}, { __index = cache }) -- update result
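-- nodeResult is an array in subscription order; lookups by grouphashkey fall through to cache via the __index metamethod (used in the diff step below)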
local name = 'vssr'
local uciType = 'servers'
local ucic = luci.model.uci.cursor()
local proxy = ucic:get_first(name, 'server_subscribe', 'proxy', '0')
local subscribe_url = ucic:get_first(name, 'server_subscribe', 'subscribe_url', {})
local log = function(...)
print(os.date("%Y-%m-%d %H:%M:%S ") .. table.concat({ ... }, " "))
end
-- split a string (sep is treated as a Lua pattern)
local function split(full, sep)
full = full:gsub("%z", "") -- not entirely clear why, but the input sometimes ends with a trailing \0
local off, result = 1, {}
while true do
local nEnd = full:find(sep, off)
if not nEnd then
local res = ssub(full, off, slen(full))
if #res > 0 then -- skip empty segments (filters out the trailing \0)
tinsert(result, res)
end
break
else
tinsert(result, ssub(full, off, nEnd - 1))
off = nEnd + slen(sep)
end
end
return result
end
-- urlencode
local function get_urlencode(c)
return sformat("%%%02X", sbyte(c))
end
local function urlEncode(szText)
local str = szText:gsub("([^0-9a-zA-Z ])", get_urlencode)
str = str:gsub(" ", "+")
return str
end
local function get_urldecode(h)
return schar(tonumber(h, 16))
end
local function UrlDecode(szText)
return szText:gsub("+", " "):gsub("%%(%x%x)", get_urldecode)
end
-- trim
local function trim(text)
if not text or text == "" then
return ""
end
return (sgsub(text, "^%s*(.-)%s*$", "%1"))
end
-- md5
local function md5(content)
local stdout = luci.sys.exec('echo \"' .. urlEncode(content) .. '\" | md5sum | cut -d \" \" -f1')
-- assert(nixio.errno() == 0)
return trim(stdout)
end
-- base64
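-- URL-safe-aware base64 decode: optionally maps -_ back to +/ and re-pads, and returns the raw input unchanged if decoding fails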
local function base64Decode(text, safe)
local raw = text
if not text then return '' end
text = text:gsub("%z", "")
if safe then
text = text:gsub("_", "/")
text = text:gsub("-", "+")
local mod4 = #text % 4
text = text .. string.sub('====', mod4 + 1)
end
local result = nixio.bin.b64decode(text)
if result then
return result:gsub("%z", "")
else
return raw
end
end
-- parse one node entry (szType: 'ssr', 'vmess', 'ss' or 'ssd') into a server table
local function processData(szType, content)
local result = {
auth_enable = '0',
switch_enable = '1',
type = szType,
local_port = 1234,
timeout = 60, -- not entirely sure; this seems to be a fixed value
fast_open = 0,
kcp_enable = 0,
kcp_port = 0,
kcp_param = '--nocomp'
}
local hash
if type(content) == 'string' then
hash = md5(content)
else
hash = md5(luci.jsonc.stringify(content))
end
result.hashkey = hash
-- if the node content is empty, return an invalid node entry
if content == '' then
result.server = ''
return result, hash
end
if szType == 'ssr' then
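-- SSR payload: host:port:protocol:method:obfs:password_b64/?obfsparam=&protoparam=&remarks=&group= (values are URL-safe base64)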
local dat = split(content, "/\\?")
local hostInfo = split(dat[1], ':')
result.server = hostInfo[1]
result.server_port = hostInfo[2]
result.protocol = hostInfo[3]
result.encrypt_method = hostInfo[4]
result.obfs = hostInfo[5]
result.password = base64Decode(hostInfo[6], true)
local params = {}
for k, v in pairs(split(dat[2], '&')) do
local t = split(v, '=')
params[t[1]] = t[2]
end
result.obfs_param = base64Decode(params.obfsparam, true)
result.protocol_param = base64Decode(params.protoparam, true)
local group = base64Decode(params.group, true)
if group then
result.alias = "[" .. group .. "] "
end
result.alias = result.alias .. base64Decode(params.remarks, true)
elseif szType == 'vmess' then
local info = luci.jsonc.parse(content)
result.type = 'v2ray'
result.server = info.add
result.server_port = info.port
result.tcp_guise = "none"
result.transport = info.net
result.alter_id = info.aid
result.vmess_id = info.id
result.alias = info.ps
result.ws_host = info.host
result.ws_path = info.path
result.h2_host = info.host
result.h2_path = info.path
if not info.security then
result.security = "auto"
end
if info.tls == "tls" or info.tls == "1" then
result.tls = "1"
else
result.tls = "0"
end
elseif szType == "ss" then
local info = content:sub(1, content:find("#") - 1)
local alias = content:sub(content:find("#") + 1, #content)
local hostInfo = split(base64Decode(info, true), "@")
local host = split(hostInfo[2], ":")
local userinfo = base64Decode(hostInfo[1], true)
local method = userinfo:sub(1, userinfo:find(":") - 1)
local password = userinfo:sub(userinfo:find(":") + 1, #userinfo)
result.alias = UrlDecode(alias)
result.type = "ss"
result.server = host[1]
if host[2]:find("/\\?") then
local query = split(host[2], "/\\?")
result.server_port = query[1]
-- local params = {}
-- for k, v in pairs(split(query[2], '&')) do
-- local t = split(v, '=')
-- params[t[1]] = t[2]
-- end
-- this does not seem useful; the data model has no plugin support, so drop it for now
else
result.server_port = host[2]
end
result.encrypt_method_ss = method
result.password = password
elseif szType == "ssd" then
result.type = "ss"
result.server = content.server
result.server_port = content.port
result.password = content.password
result.encrypt_method_ss = content.encryption
result.alias = "[" .. content.airport .. "] " .. content.remarks
end
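-- look up the node's country flag (from alias and server) via getflag.sh; this is the flag matching that the commit message says makes the script slower than the original one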
local flag = luci.sys.exec('/usr/share/'..name..'/getflag.sh "'..result.alias..'" '..result.server)
result.flag = string.gsub(flag, '\n', '')
return result, hash
end
-- wget
local function wget(url)
local stdout = luci.sys.exec('wget-ssl --user-agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36" --no-check-certificate -t 3 -T 10 -O- "' .. url .. '"')
return trim(stdout)
end
local execute = function()
-- exec
do
-- subscribe_url = {'https://www.google.comc'}
if proxy == '0' then -- if updates should not go through the proxy, stop the service first
log('服务正在暂停')
luci.sys.init.stop(name)
end
for k, url in ipairs(subscribe_url) do
local raw = wget(url)
if #raw > 0 then
local node, szType
local groupHash = md5(url)
cache[groupHash] = {}
tinsert(nodeResult, {})
local index = #nodeResult
-- SSD subscriptions appear to use this format, starting with ssd://
if raw:find('ssd://') then
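-- the ssd:// payload is base64-encoded JSON: shared fields (airport name, port, encryption, password) plus a servers array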
szType = 'ssd'
local nEnd = select(2, raw:find('ssd://'))
node = base64Decode(raw:sub(nEnd + 1, #raw), true)
node = luci.jsonc.parse(node)
local extra = {
airport = node.airport,
port = node.port,
encryption = node.encryption,
password = node.password
}
local servers = {}
-- the SS entries are wrapped inside; just flatten them directly
for _, server in ipairs(node.servers) do
tinsert(servers, setmetatable(server, { __index = extra }))
end
node = servers
else
-- formats other than ssd
node = split(base64Decode(raw, true):gsub(" ", "\n"), "\n")
end
for _, v in ipairs(node) do
if v then
v = trim(v)
local result, hash
if szType == 'ssd' then
result, hash = processData(szType, v)
elseif not szType then
local dat = split(v, "://")
if dat and dat[1] and dat[2] then
if dat[1] == 'ss' then
result, hash = processData(dat[1], dat[2])
else
result, hash = processData(dat[1], base64Decode(dat[2], true))
end
end
else
log('跳过未知类型: ' .. szType)
end
-- log(hash, result)
if hash and result then
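-- drop provider banner entries (expiry date / remaining traffic / QQ group / official site) and nodes without a server address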
if result.alias:find("过期时间") or
result.alias:find("剩余流量") or
result.alias:find("QQ群") or
result.alias:find("官网") or
result.server == ''
then
log('丢弃无效节点: ' .. result.type ..' 节点, ' .. result.alias)
else
log('成功解析: ' .. result.type ..' 节点, ' .. result.alias)
result.grouphashkey = groupHash
tinsert(nodeResult[index], result)
cache[groupHash][hash] = nodeResult[index][#nodeResult[index]]
end
end
end
end
log('成功解析节点数量: ' ..#node)
end
end
end
-- diff
do
assert(next(nodeResult), "node result is empty")
local add, del = 0, 0
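-- walk existing uci server sections: delete subscribed nodes that disappeared, refresh those that remain, and leave manually added nodes (no hashkey) untouched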
ucic:foreach(name, uciType, function(old)
if old.grouphashkey or old.hashkey then -- entries without a hash are not considered for deletion
if not nodeResult[old.grouphashkey] or not nodeResult[old.grouphashkey][old.hashkey] then
ucic:delete(name, old['.name'])
del = del + 1
else
local dat = nodeResult[old.grouphashkey][old.hashkey]
ucic:tset(name, old['.name'], dat)
-- mark it as already handled
setmetatable(nodeResult[old.grouphashkey][old.hashkey], { __index = { _ignore = true } })
end
else
log('忽略手动添加的节点: ' .. old.alias)
end
end)
for k, v in ipairs(nodeResult) do
for kk, vv in ipairs(v) do
if not vv._ignore then
local section = ucic:add(name, uciType)
ucic:tset(name, section, vv)
add = add + 1
end
end
end
ucic:commit(name)
-- if the selected global server is gone, switch to another one
local globalServer = ucic:get_first(name, 'global', 'global_server', '')
local firstServer = ucic:get_first(name, uciType)
if not ucic:get(name, globalServer) then
if firstServer then
ucic:set(name, ucic:get_first(name, 'global'), 'global_server', firstServer)
ucic:commit(name)
log('当前主服务器已更新,正在自动更换。')
end
end
if firstServer then
luci.sys.call("/etc/init.d/" .. name .." restart > /dev/null 2>&1 &") -- 不加&的话日志会出现的更早
else
luci.sys.call("/etc/init.d/" .. name .." stop > /dev/null 2>&1 &") -- 不加&的话日志会出现的更早
end
log('新增节点数量: ' ..add, '删除节点数量: ' .. del)
log("END SUBSCRIBE")
log('更新成功服务正在启动')
end
end
if subscribe_url and #subscribe_url > 0 then
xpcall(execute, function(e)
log(e)
log(debug.traceback())
log('发生错误, 正在恢复服务')
local firstServer = ucic:get_first(name, uciType)
if firstServer then
luci.sys.call("/etc/init.d/" .. name .." restart > /dev/null 2>&1 &") -- 不加&的话日志会出现的更早
else
luci.sys.call("/etc/init.d/" .. name .." stop > /dev/null 2>&1 &") -- 不加&的话日志会出现的更早
end
end)
end

View File

@@ -1,269 +0,0 @@
#!/bin/bash
# Copyright (C) 2017 XiaoShan https://www.mivm.cn
. /usr/share/libubox/jshn.sh
urlsafe_b64decode() {
local d="====" data=$(echo $1 | sed 's/_/\//g; s/-/+/g')
local mod4=$((${#data} % 4))
[ $mod4 -gt 0 ] && data=${data}${d:mod4}
echo $data | base64 -d
}
urldecode() {
: "${*//+/ }"
echo -e "${_//%/\\x}"
}
echo_date() {
echo $(TZ=UTC-8 date -R +%Y-%m-%d\ %X):$1
}
Server_Update() {
local uci_set="uci -q set $name.$1."
local flag=$(/usr/share/$name/getflag.sh "$ssr_remarks" $ssr_host)
${uci_set}grouphashkey="$ssr_grouphashkey"
${uci_set}hashkey="$ssr_hashkey"
${uci_set}alias="[$ssr_group] $ssr_remarks"
${uci_set}auth_enable="0"
${uci_set}switch_enable="1"
${uci_set}type="$ssr_type"
${uci_set}flag="$flag"
${uci_set}server="$ssr_host"
${uci_set}server_port="$ssr_port"
${uci_set}local_port="1234"
uci -q get $name.@servers[$1].timeout >/dev/null || ${uci_set}timeout="60"
${uci_set}password="$ssr_passwd"
${uci_set}encrypt_method="$ssr_method"
${uci_set}protocol="$ssr_protocol"
${uci_set}protocol_param="$ssr_protoparam"
${uci_set}obfs="$ssr_obfs"
${uci_set}obfs_param="$ssr_obfsparam"
${uci_set}fast_open="0"
${uci_set}kcp_enable="0"
${uci_set}kcp_port="0"
${uci_set}kcp_param="--nocomp"
if [ "$ssr_type" = "v2ray" ]; then
#v2ray
${uci_set}alter_id="$ssr_alter_id"
${uci_set}vmess_id="$ssr_vmess_id"
${uci_set}transport="$ssr_transport"
if [ "$ssr_transport" = "tcp" ]; then
${uci_set}tcp_guise="$ssr_tcp_guise"
fi
if [ "$ssr_transport" = "ws" ]; then
${uci_set}ws_host="$ssr_ws_host"
${uci_set}ws_path="$ssr_ws_path"
fi
if [ "$ssr_transport" = "h2" ]; then
${uci_set}h2_host="$ssr_ws_host"
${uci_set}h2_path="$ssr_ws_path"
fi
${uci_set}tls="$ssr_tls"
${uci_set}insecure="$ssr_insecure"
${uci_set}security="auto"
${uci_set}alias="$ssr_remarks"
fi
if [ "$ssr_type" = "ss" ]; then
${uci_set}encrypt_method_ss="$ss_method"
${uci_set}alias="$ssr_remarks"
fi
}
name=vssr
subscribe_url=($(uci get $name.@server_subscribe[0].subscribe_url)) # subscription server URLs
[ ${#subscribe_url[@]} -eq 0 ] && exit 1
[ $(uci -q get $name.@server_subscribe[0].proxy || echo 0) -eq 0 ] && /etc/init.d/$name stop >/dev/null 2>&1
log_name=${name}_subscribe
for ((o = 0; o < ${#subscribe_url[@]}; o++)); do
echo_date "${subscribe_url[o]} 获取订阅"
echo_date "开始更新在线订阅列表..."
echo_date "开始下载订阅链接到本地临时文件,请稍等..."
subscribe_data=$(wget-ssl --user-agent="User-Agent:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36" --no-check-certificate -t 10 -T 10 -O- ${subscribe_url[o]})
curl_code=$?
# compute the group hashkey
ssr_grouphashkey=$(echo "${subscribe_url[o]}" | md5sum | cut -d ' ' -f1)
if [ ! $curl_code -eq 0 ]; then
subscribe_data=$(wget-ssl --no-check-certificate -t 10 -T 10 -O- ${subscribe_url[o]})
curl_code=$?
fi
if [ $curl_code -eq 0 ]; then
echo_date "下载订阅成功..."
echo_date "开始解析节点信息..."
ssr_url=($(echo $subscribe_data | base64 -d | sed 's/\r//g')) # decode the data and strip \r line endings
subscribe_max=$(echo ${ssr_url[0]} | grep -i MAX= | awk -F = '{print $2}')
subscribe_max_x=()
if [ -n "$subscribe_max" ]; then
while [ ${#subscribe_max_x[@]} -ne $subscribe_max ]; do
if [ ${#ssr_url[@]} -ge 10 ]; then
if [ $((${RANDOM:0:2} % 2)) -eq 0 ]; then
temp_x=${RANDOM:0:1}
else
temp_x=${RANDOM:0:2}
fi
else
temp_x=${RANDOM:0:1}
fi
[ $temp_x -lt ${#ssr_url[@]} -a -z "$(echo "${subscribe_max_x[*]}" | grep -w $temp_x)" ] && subscribe_max_x[${#subscribe_max_x[@]}]="$temp_x"
done
else
subscribe_max=${#ssr_url[@]}
fi
echo_date "共计$subscribe_max个节点"
ssr_group=$(urlsafe_b64decode $(urlsafe_b64decode ${ssr_url[$((${#ssr_url[@]} - 1))]//ssr:\/\//} | sed 's/&/\n/g' | grep group= | awk -F = '{print $2}'))
if [ -z "$ssr_group" ]; then
ssr_group="default"
fi
if [ -n "$ssr_group" ]; then
subscribe_i=0
subscribe_n=0
subscribe_o=0
subscribe_x=""
temp_host_o=()
curr_ssr=$(uci show $name | grep @servers | grep -c server=)
for ((x = 0; x < $curr_ssr; x++)); do # loop over existing servers and match them against the current subscription group
temp_alias=$(uci -q get $name.@servers[$x].grouphashkey | grep "$ssr_grouphashkey")
[ -n "$temp_alias" ] && temp_host_o[${#temp_host_o[@]}]=$(uci get $name.@servers[$x].hashkey)
done
for ((x = 0; x < $subscribe_max; x++)); do # loop over the subscription links
[ ${#subscribe_max_x[@]} -eq 0 ] && temp_x=$x || temp_x=${subscribe_max_x[x]}
result=$(echo ${ssr_url[temp_x]} | grep "ss")
subscribe_url_type=$(echo "$ssr_url" | awk -F ':' '{print $1}')
if [ "$subscribe_url_type" = "ss" ]; then
temp_info=${ssr_url[temp_x]//ss:\/\//} # decode the SS link
# compute the hashkey
ssr_hashkey=$(echo "$temp_info" | md5sum | cut -d ' ' -f1)
info=$(urlsafe_b64decode $(echo "$temp_info" | awk -F '@' '{print $1}'))
temp_info_array=(${info//:/ })
ssr_type="ss"
ss_method=${temp_info_array[0]}
ssr_passwd=${temp_info_array[1]}
info=$(echo "$temp_info" | awk -F '@' '{print $2}' | awk -F '#' '{print $1}')
temp_info_array=(${info//:/ })
ssr_host=${temp_info_array[0]}
ssr_port=${temp_info_array[1]}
ssr_remarks=$(urldecode $(echo "$temp_info" | awk -F '#' '{print $2}'))
fi
if [ "$subscribe_url_type" = "ssr" ]; then
temp_info=$(urlsafe_b64decode ${ssr_url[temp_x]//ssr:\/\//}) # decode the SSR link
# compute the hashkey
ssr_hashkey=$(echo "$temp_info" | md5sum | cut -d ' ' -f1)
info=${temp_info///?*/}
temp_info_array=(${info//:/ })
ssr_type="ssr"
ssr_host=${temp_info_array[0]}
ssr_port=${temp_info_array[1]}
ssr_protocol=${temp_info_array[2]}
ssr_method=${temp_info_array[3]}
ssr_obfs=${temp_info_array[4]}
ssr_passwd=$(urlsafe_b64decode ${temp_info_array[5]})
info=${temp_info:$((${#info} + 2))}
info=(${info//&/ })
ssr_protoparam=""
ssr_obfsparam=""
ssr_remarks="$temp_x"
for ((i = 0; i < ${#info[@]}; i++)); do # loop over the extended parameters
temp_info=($(echo ${info[i]} | sed 's/=/ /g'))
case "${temp_info[0]}" in
protoparam)
ssr_protoparam=$(urlsafe_b64decode ${temp_info[1]})
;;
obfsparam)
ssr_obfsparam=$(urlsafe_b64decode ${temp_info[1]})
;;
remarks)
ssr_remarks=$(urlsafe_b64decode ${temp_info[1]})
;;
esac
done
fi
if [ "$subscribe_url_type" = "vmess" ]; then
temp_info=$(urlsafe_b64decode ${ssr_url[temp_x]//vmess:\/\//}) # decode the Vmess link
# compute the hashkey
ssr_hashkey=$(echo "$temp_info" | md5sum | cut -d ' ' -f1)
ssr_type="v2ray"
json_load "$temp_info"
json_get_var ssr_host add
json_get_var ssr_port port
json_get_var ssr_alter_id aid
json_get_var ssr_vmess_id id
json_get_var ssr_transport net
json_get_var ssr_remarks ps
ssr_tcp_guise="none"
json_get_var ssr_ws_host host
json_get_var ssr_ws_path path
json_get_var ssr_tls tls
if [ "$ssr_tls" == "tls" -o "$ssr_tls" == "1" ]; then
ssr_tls="1"
ssr_insecure="1"
else
ssr_tls="0"
fi
fi
if [ -z "ssr_remarks" ]; then # 没有备注的话则生成一个
ssr_remarks="$ssr_host:$ssr_port"
fi
uci_name_tmp=$(uci show $name | grep -w "$ssr_hashkey" | awk -F . '{print $2}')
if [ -z "$uci_name_tmp" ]; then # 判断当前服务器信息是否存在
uci_name_tmp=$(uci add $name servers)
subscribe_n=$(($subscribe_n + 1))
fi
Server_Update $uci_name_tmp
subscribe_x=$subscribe_x$ssr_hashkey" "
ssrtype=$(echo $ssr_type | tr '[a-z]' '[A-Z]')
echo_date "$ssrtype节点:【$ssr_remarks"
# SSR
# echo "server address: $ssr_host"
# echo "server port: $ssr_port"
# echo "password: $ssr_passwd"
# echo "SS cipher: $ss_method"
# echo "cipher: $ssr_method"
# echo "protocol: $ssr_protocol"
# echo "protocol param: $ssr_protoparam"
# echo "obfs: $ssr_obfs"
# echo "obfs param: $ssr_obfsparam"
# echo "remarks: $ssr_remarks"
done
for ((x = 0; x < ${#temp_host_o[@]}; x++)); do # match old entries against the new list; delete old entries that are no longer present
if [ -z "$(echo "$subscribe_x" | grep -w ${temp_host_o[x]})" ]; then
uci_name_tmp=$(uci show $name | grep ${temp_host_o[x]} | awk -F . '{print $2}')
uci delete $name.$uci_name_tmp
subscribe_o=$(($subscribe_o + 1))
fi
done
echo_date "本次更新订阅来源 【$ssr_group】 服务器数量: ${#ssr_url[@]} 新增服务器: $subscribe_n 删除服务器: $subscribe_o"
echo_date "在线订阅列表更新完成!请等待网页自动刷新!"
subscribe_log="$ssr_group 服务器订阅更新成功 服务器数量: ${#ssr_url[@]} 新增服务器: $subscribe_n 删除服务器: $subscribe_o"
logger -st $log_name[$$] -p6 "$subscribe_log"
uci commit $name
else
echo_date "${subscribe_url[$o]} 订阅数据解析失败 无法获取 Group"
logger -st $log_name[$$] -p3 "${subscribe_url[$o]} 订阅数据解析失败 无法获取 Group"
fi
else
echo_date "${subscribe_url[$o]} 订阅数据获取失败 错误代码: $curl_code"
logger -st $log_name[$$] -p3 "${subscribe_url[$o]} 订阅数据获取失败 错误代码: $curl_code"
fi
done
echo "END SUBSCRIBE"
/etc/init.d/$name restart >/dev/null 2>&1