diff --git a/package/lean/luci-app-accesscontrol/Makefile b/package/lean/luci-app-accesscontrol/Makefile
deleted file mode 100644
index 1da4e9dfc..000000000
--- a/package/lean/luci-app-accesscontrol/Makefile
+++ /dev/null
@@ -1,18 +0,0 @@
-
-# Copyright (C) 2016 Openwrt.org
-#
-# This is free software, licensed under the Apache License, Version 2.0 .
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=LuCI Access Control Configuration
-LUCI_DEPENDS:=
-LUCI_PKGARCH:=all
-PKG_NAME:=luci-app-accesscontrol
-PKG_VERSION:=1
-PKG_RELEASE:=11
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-accesscontrol/luasrc/controller/mia.lua b/package/lean/luci-app-accesscontrol/luasrc/controller/mia.lua
deleted file mode 100644
index e3e541623..000000000
--- a/package/lean/luci-app-accesscontrol/luasrc/controller/mia.lua
+++ /dev/null
@@ -1,17 +0,0 @@
-module("luci.controller.mia",package.seeall)
-
-function index()
-	if not nixio.fs.access("/etc/config/mia") then
-		return
-	end
-
-	entry({"admin", "services", "mia"}, cbi("mia"), _("Internet Access Schedule Control"), 30).dependent = true
-	entry({"admin", "services", "mia", "status"}, call("act_status")).leaf = true
-end
-
-function act_status()
-	local e = {}
-	e.running = luci.sys.call("iptables -L INPUT |grep MIA >/dev/null") == 0
-	luci.http.prepare_content("application/json")
-	luci.http.write_json(e)
-end
diff --git a/package/lean/luci-app-accesscontrol/luasrc/model/cbi/mia.lua b/package/lean/luci-app-accesscontrol/luasrc/model/cbi/mia.lua
deleted file mode 100644
index 1f55ee944..000000000
--- a/package/lean/luci-app-accesscontrol/luasrc/model/cbi/mia.lua
+++ /dev/null
@@ -1,68 +0,0 @@
-a = Map("mia")
-a.title = translate("Internet Access Schedule Control")
-a.description = translate("Access Schedule Control Settins")
-
-a:section(SimpleSection).template = "mia/mia_status"
-
-t = a:section(TypedSection, "basic")
-t.anonymous = true
-
-e = t:option(Flag, "enable", translate("Enabled"))
-e.rmempty = false
-
-e = t:option(Flag, "strict", translate("Strict Mode"))
-e.description = translate("Strict Mode will degrade CPU performance, but it can achieve better results")
-e.rmempty = false
-
-t = a:section(TypedSection, "macbind", translate("Client Rules"))
-t.template = "cbi/tblsection"
-t.anonymous = true
-t.addremove = true
-
-e = t:option(Flag, "enable", translate("Enabled"))
-e.rmempty = false
-e.default = "1"
-
-e = t:option(Value, "macaddr", translate("MAC address (Computer Name)"))
-e.rmempty = true
-luci.sys.net.mac_hints(function(t,a)
-e:value(t,"%s (%s)"%{t,a})
-end)
-
-e = t:option(Value, "timeon", translate("Start time"))
-e.optional = false
-e.default = "00:00"
-
-e = t:option(Value, "timeoff", translate("End time"))
-e.optional=false
-e.default = "23:59"
-
-e = t:option(Flag, "z1", translate("Mon"))
-e.rmempty = true
-e.default = 1
-
-e = t:option(Flag, "z2", translate("Tue"))
-e.rmempty = true
-e.default=1
-
-e = t:option(Flag, "z3", translate("Wed"))
-e.rmempty = true
-e.default = 1
-
-e = t:option(Flag, "z4", translate("Thu"))
-e.rmempty = true
-e.default = 1
-
-e = t:option(Flag, "z5", translate("Fri"))
-e.rmempty = true
-e.default = 1
-
-e = t:option(Flag, "z6", translate("Sat"))
-e.rmempty = true
-e.default = 1
-
-e = t:option(Flag, "z7", translate("Sun"))
-e.rmempty = true
-e.default = 1
-
-return a
diff --git a/package/lean/luci-app-accesscontrol/luasrc/view/mia/mia_status.htm b/package/lean/luci-app-accesscontrol/luasrc/view/mia/mia_status.htm
deleted file mode 100644
index c971ee3c3..000000000
--- a/package/lean/luci-app-accesscontrol/luasrc/view/mia/mia_status.htm
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
diff --git a/package/lean/luci-app-accesscontrol/po/zh-cn/mia.po b/package/lean/luci-app-accesscontrol/po/zh-cn/mia.po
deleted file mode 100644
index 8f7c16aae..000000000
--- a/package/lean/luci-app-accesscontrol/po/zh-cn/mia.po
+++ /dev/null
@@ -1,50 +0,0 @@
-msgid "Internet Access Schedule Control"
-msgstr "上网时间控制"
-
-msgid "Access Schedule Control Settins"
-msgstr "设置客户端禁止访问互联网的时间"
-
-msgid "General switch"
-msgstr "开启/关闭"
-
-msgid "Strict Mode"
-msgstr "严格模式"
-
-msgid "Strict Mode will degrade CPU performance, but it can achieve better results"
-msgstr "严格模式会损耗部分CPU资源,但可以按照时间规则立即拦截数据包,效果更好"
-
-msgid "Client Rules"
-msgstr "客户端规则"
-
-msgid "Description"
-msgstr "描述"
-
-msgid "MAC address (Computer Name)"
-msgstr "MAC 地址 (主机名)"
-
-msgid "Start time"
-msgstr "开始时间"
-
-msgid "End time"
-msgstr "结束时间"
-
-msgid "Mon"
-msgstr "一"
-
-msgid "Tue"
-msgstr "二"
-
-msgid "Wed"
-msgstr "三"
-
-msgid "Thu"
-msgstr "四"
-
-msgid "Fri"
-msgstr "五"
-
-msgid "Sat"
-msgstr "六"
-
-msgid "Sun"
-msgstr "日"
diff --git a/package/lean/luci-app-accesscontrol/root/etc/config/mia b/package/lean/luci-app-accesscontrol/root/etc/config/mia
deleted file mode 100644
index 0c4c4f6bc..000000000
--- a/package/lean/luci-app-accesscontrol/root/etc/config/mia
+++ /dev/null
@@ -1,3 +0,0 @@
-
-config basic
-	option enable '0'
diff --git a/package/lean/luci-app-accesscontrol/root/etc/init.d/mia b/package/lean/luci-app-accesscontrol/root/etc/init.d/mia
deleted file mode 100755
index 7a84f51f9..000000000
--- a/package/lean/luci-app-accesscontrol/root/etc/init.d/mia
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/bin/sh /etc/rc.common
-#
-# Copyright (C) 2015 OpenWrt-dist
-#
-# This is free software, licensed under the GNU General Public License v3.
-# See /LICENSE for more information.
-#
-
-START=30
-
-CONFIG=mia
-
-uci_get_by_type() {
-	local index=0
-	if [ -n $4 ]; then
-		index=$4
-	fi
-	local ret=$(uci get $CONFIG.@$1[$index].$2 2>/dev/null)
-	echo ${ret:=$3}
-}
-
-add_rule(){
-for i in $(seq 0 100)
-do
-	local enable=$(uci_get_by_type macbind enable '' $i)
-	local macaddr=$(uci_get_by_type macbind macaddr '' $i)
-	local timeon=$(uci_get_by_type macbind timeon '' $i)
-	local timeoff=$(uci_get_by_type macbind timeoff '' $i)
-	local z1=$(uci_get_by_type macbind z1 '' $i)
-	local z2=$(uci_get_by_type macbind z2 '' $i)
-	local z3=$(uci_get_by_type macbind z3 '' $i)
-	local z4=$(uci_get_by_type macbind z4 '' $i)
-	local z5=$(uci_get_by_type macbind z5 '' $i)
-	local z6=$(uci_get_by_type macbind z6 '' $i)
-	local z7=$(uci_get_by_type macbind z7 '' $i)
-	[ "$z1" == "1" ] && Z1="Mon,"
-	[ "$z2" == "1" ] && Z2="Tue,"
-	[ "$z3" == "1" ] && Z3="Wed,"
-	[ "$z4" == "1" ] && Z4="Thu,"
-	[ "$z5" == "1" ] && Z5="Fri,"
-	[ "$z6" == "1" ] && Z6="Sat,"
-	[ "$z7" == "1" ] && Z7="Sun"
-	if [ -z $enable ] || [ -z $macaddr ] || [ -z $timeoff ] || [ -z $timeon ]; then
-		break
-	fi
-	if [ "$enable" == "1" ]; then
-		iptables -t filter -I MIA -m mac --mac-source $macaddr -m time --kerneltz --timestart $timeon --timestop $timeoff --weekdays $Z1$Z2$Z3$Z4$Z5$Z6$Z7 -j DROP
-	fi
-	for n in $(seq 1 7)
-	do
-		unset "Z$n"
-	done
-done
-}
-
-del_rule(){
-	type=$1
-	blackMacAdd=$(iptables -t nat -L $type | grep -w RETURN | grep -w "MAC" | awk '{print $7}')
-	[ -n "$blackMacAdd" ] && {
-		for macaddrb in $blackMacAdd
-		do
-			iptables -t nat -D $type -m mac --mac-source $macaddrb -j RETURN
-		done
-	}
-}
-
-start(){
-	stop
-	enable=$(uci get mia.@basic[0].enable)
-	[ $enable -eq 0 ] && exit 0
-	iptables -t filter -N MIA
-	iptables -I INPUT -p udp --dport 53 -m comment --comment "Rule For Control" -j MIA
-	iptables -I INPUT -p tcp --dport 53 -m comment --comment "Rule For Control" -j MIA
-	iptables -t nat -A PREROUTING -p udp --dport 53 -j REDIRECT --to-ports 53 -m comment --comment "Rule For Control"
-	iptables -t nat -A PREROUTING -p tcp --dport 53 -j REDIRECT --to-ports 53 -m comment --comment "Rule For Control"
-	strict=$(uci get mia.@basic[0].strict)
-	[ $strict -eq 1 ] && iptables -t filter -I FORWARD -m comment --comment "Rule For Control" -j MIA
-	add_rule
-}
-stop(){
-	iptables -t filter -D FORWARD -m comment --comment "Rule For Control" -j MIA 2>/dev/null
-	iptables -D INPUT -p udp --dport 53 -m comment --comment "Rule For Control" -j MIA 2>/dev/null
-	iptables -D INPUT -p tcp --dport 53 -m comment --comment "Rule For Control" -j MIA 2>/dev/null
-	iptables -t nat -D PREROUTING -p udp --dport 53 -j REDIRECT --to-ports 53 -m comment --comment "Rule For Control" 2>/dev/null
-	iptables -t nat -D PREROUTING -p tcp --dport 53 -j REDIRECT --to-ports 53 -m comment --comment "Rule For Control" 2>/dev/null
-	iptables -t filter -F MIA 2>/dev/null
-	iptables -t filter -X MIA 2>/dev/null
-}
diff --git a/package/lean/luci-app-accesscontrol/root/etc/mia.include b/package/lean/luci-app-accesscontrol/root/etc/mia.include
deleted file mode 100755
index 4d7859f31..000000000
--- a/package/lean/luci-app-accesscontrol/root/etc/mia.include
+++ /dev/null
@@ -1 +0,0 @@
-/etc/init.d/mia restart
\ No newline at end of file
diff --git a/package/lean/luci-app-accesscontrol/root/etc/uci-defaults/luci-mia b/package/lean/luci-app-accesscontrol/root/etc/uci-defaults/luci-mia
deleted file mode 100755
index e0d2c3133..000000000
--- a/package/lean/luci-app-accesscontrol/root/etc/uci-defaults/luci-mia
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
-	delete ucitrack.@mia[-1]
-	add ucitrack mia
-	set ucitrack.@mia[-1].init=mia
-	commit ucitrack
-	delete firewall.mia
-	set firewall.mia=include
-	set firewall.mia.type=script
-	set firewall.mia.path=/etc/mia.include
-	set firewall.mia.reload=1
-	commit firewall
-EOF
-
-rm -f /tmp/luci-indexcache
-exit 0
\ No newline at end of file
diff --git a/package/lean/luci-app-adbyby-plus/Makefile b/package/lean/luci-app-adbyby-plus/Makefile
deleted file mode 100644
index f7f3628b6..000000000
--- a/package/lean/luci-app-adbyby-plus/Makefile
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (C) 2020 Openwrt.org
-#
-# This is free software, licensed under the Apache License, Version 2.0 .
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=LuCI support for Adbyby
-LUCI_DEPENDS:=+adbyby +uclient-fetch +ipset +dnsmasq-full
-LUCI_PKGARCH:=all
-PKG_NAME:=luci-app-adbyby-plus
-PKG_VERSION:=2.0
-PKG_RELEASE:=75
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-adbyby-plus/luasrc/controller/adbyby.lua b/package/lean/luci-app-adbyby-plus/luasrc/controller/adbyby.lua
deleted file mode 100644
index 85adb1466..000000000
--- a/package/lean/luci-app-adbyby-plus/luasrc/controller/adbyby.lua
+++ /dev/null
@@ -1,86 +0,0 @@
-
-module("luci.controller.adbyby", package.seeall)
-
-function index()
-	if not nixio.fs.access("/etc/config/adbyby") then
-		return
-	end
-
-	entry({"admin", "services", "adbyby"}, alias("admin", "services", "adbyby", "base"), _("ADBYBY Plus +"), 9).dependent = true
-
-	entry({"admin", "services", "adbyby", "base"}, cbi("adbyby/base"), _("Base Setting"), 10).leaf = true
-	entry({"admin", "services", "adbyby", "advanced"}, cbi("adbyby/advanced"), _("Advance Setting"), 20).leaf = true
-	entry({"admin", "services", "adbyby", "help"}, form("adbyby/help"), _("Plus+ Domain List"), 30).leaf = true
-	entry({"admin", "services", "adbyby", "esc"}, form("adbyby/esc"), _("Bypass Domain List"), 40).leaf = true
-	entry({"admin", "services", "adbyby", "black"}, form("adbyby/black"), _("Block Domain List"), 50).leaf = true
-	entry({"admin", "services", "adbyby", "block"}, form("adbyby/block"), _("Block IP List"), 60).leaf = true
-	entry({"admin", "services", "adbyby", "user"}, form("adbyby/user"), _("User-defined Rule"), 70).leaf = true
-
-	entry({"admin", "services", "adbyby", "refresh"}, call("refresh_data"))
-	entry({"admin", "services", "adbyby", "run"}, call("act_status")).leaf = true
-end
-
-function act_status()
-	local e = {}
-	e.running = luci.sys.call("pgrep adbyby >/dev/null") == 0
-	luci.http.prepare_content("application/json")
-	luci.http.write_json(e)
-end
-
-
-function refresh_data()
-	local set = luci.http.formvalue("set")
-	local icount = 0
-
-if set == "rule_data" then
-luci.sys.exec("/usr/share/adbyby/rule-update")
-	icount = luci.sys.exec("/usr/share/adbyby/rule-count '/tmp/rules/'")
-
-	if tonumber(icount)>0 then
-		if nixio.fs.access("/usr/share/adbyby/rules/") then
-			oldcount = luci.sys.exec("/usr/share/adbyby/rule-count '/usr/share/adbyby/rules/'")
-		else
-			oldcount=0
-		end
-	else
-		retstring ="-1"
-	end
-
-	if tonumber(icount) ~= tonumber(oldcount) then
-		luci.sys.exec("rm -f /usr/share/adbyby/rules/data/* /usr/share/adbyby/rules/host/* && cp -a /tmp/rules /usr/share/adbyby/")
-		luci.sys.exec("/etc/init.d/adbyby restart &")
-		retstring=tostring(math.ceil(tonumber(icount)))
-	else
-		retstring ="0"
-	end
-else
-refresh_cmd = "uclient-fetch -q --no-check-certificate -O - 'https://easylist-downloads.adblockplus.org/easylistchina+easylist.txt' > /tmp/adnew.conf"
-sret = luci.sys.call(refresh_cmd .. " 2>/dev/null")
-if sret== 0 then
-	luci.sys.call("/usr/share/adbyby/ad-update")
-	icount = luci.sys.exec("cat /tmp/ad.conf | wc -l")
-	if tonumber(icount)>0 then
-		if nixio.fs.access("/usr/share/adbyby/dnsmasq.adblock") then
-			oldcount = luci.sys.exec("cat /usr/share/adbyby/dnsmasq.adblock | wc -l")
-		else
-			oldcount=0
-		end
-		if tonumber(icount) ~= tonumber(oldcount) then
-			luci.sys.exec("cp -f /tmp/ad.conf /usr/share/adbyby/dnsmasq.adblock")
-			luci.sys.exec("cp -f /tmp/ad.conf /tmp/etc/dnsmasq-adbyby.d/adblock")
-			luci.sys.exec("/etc/init.d/adbyby restart &")
-			retstring=tostring(math.ceil(tonumber(icount)))
-		else
-			retstring ="0"
-		end
-	else
-		retstring ="-1"
-	end
-	luci.sys.exec("rm -f /tmp/ad.conf")
-else
-	retstring ="-1"
-end
-end
-luci.http.prepare_content("application/json")
-luci.http.write_json({ ret=retstring ,retcount=icount})
-end
diff --git a/package/lean/luci-app-adbyby-plus/luasrc/model/cbi/adbyby/advanced.lua b/package/lean/luci-app-adbyby-plus/luasrc/model/cbi/adbyby/advanced.lua
deleted file mode 100644
index 1d9faa69a..000000000
--- a/package/lean/luci-app-adbyby-plus/luasrc/model/cbi/adbyby/advanced.lua
+++ /dev/null
@@ -1,57 +0,0 @@
-
-local SYS = require "luci.sys"
-local ND = SYS.exec("cat /usr/share/adbyby/dnsmasq.adblock | wc -l")
-
-local ad_count=0
-if nixio.fs.access("/usr/share/adbyby/dnsmasq.adblock") then
-ad_count=tonumber(SYS.exec("cat /usr/share/adbyby/dnsmasq.adblock | wc -l"))
-end
-
-local rule_count=0
-if nixio.fs.access("/usr/share/adbyby/rules/") then
-rule_count=tonumber(SYS.exec("/usr/share/adbyby/rule-count '/usr/share/adbyby/rules/'"))
-end
-
-m = Map("adbyby")
-
-s = m:section(TypedSection, "adbyby")
-s.anonymous = true
-
-o = s:option(Flag, "block_ios")
-o.title = translate("Block Apple iOS OTA update")
-o.default = 0
-o.rmempty = false
-
-o = s:option(Flag, "block_cnshort")
-o.title = translate("Block CNshort APP and Website")
-o.default = 0
-o.rmempty = false
-
-o = s:option(Flag, "cron_mode")
-o.title = translate("Update the rule at 6 a.m. every morning and restart adbyby")
-o.default = 0
-o.rmempty = false
-
-o=s:option(DummyValue,"ad_data",translate("Adblock Plus Data"))
-o.rawhtml = true
-o.template = "adbyby/refresh"
-o.value =ad_count .. " " .. translate("Records")
-
-o=s:option(DummyValue,"rule_data",translate("Subscribe 3rd Rules Data"))
-o.rawhtml = true
-o.template = "adbyby/refresh"
-o.value =rule_count .. " " .. translate("Records")
-o.description = translate("AdGuardHome / Host / DNSMASQ rules auto-convert")
-
-o = s:option(Button,"delete",translate("Delete All Subscribe Rules"))
-o.inputstyle = "reset"
-o.write = function()
-	SYS.exec("rm -f /usr/share/adbyby/rules/data/* /usr/share/adbyby/rules/host/*")
-	SYS.exec("/etc/init.d/adbyby restart 2>&1 &")
-	luci.http.redirect(luci.dispatcher.build_url("admin", "services", "adbyby", "advanced"))
-end
-
-o = s:option(DynamicList, "subscribe_url", translate("Anti-AD Rules Subscribe"))
-o.rmempty = true
-
-return m
diff --git a/package/lean/luci-app-adbyby-plus/luasrc/model/cbi/adbyby/base.lua b/package/lean/luci-app-adbyby-plus/luasrc/model/cbi/adbyby/base.lua
deleted file mode 100644
index b9872cb19..000000000
--- a/package/lean/luci-app-adbyby-plus/luasrc/model/cbi/adbyby/base.lua
+++ /dev/null
@@ -1,66 +0,0 @@
-local NXFS = require "nixio.fs"
-local SYS = require "luci.sys"
-local HTTP = require "luci.http"
-local DISP = require "luci.dispatcher"
-
-local DL = SYS.exec("head -1 /tmp/adbyby/data/lazy.txt | awk -F' ' '{print $3,$4}'") or ""
-local DV = SYS.exec("head -1 /tmp/adbyby/data/video.txt | awk -F' ' '{print $3,$4}'") or ""
-local NR = SYS.exec("grep -v '^!' /usr/share/adbyby/data/rules.txt | wc -l")
-local NU = SYS.exec("cat /usr/share/adbyby/data/user.txt | wc -l")
-local UD = NXFS.readfile("/tmp/adbyby.updated") or "1970-01-01 00:00:00"
-
-m = Map("adbyby")
-m.title = translate("Adbyby Plus + Settings")
-m.description = translate("Adbyby Plus + can filter all kinds of banners, popups, video ads, and prevent tracking, privacy theft and a variety of malicious websites" .. translate(Value) .. "
" end
-m = Map("amule", translate("aMule"), translate("aMule is a ED2K/KAD client for all platforms.") .. "
-local fs = require "nixio.fs"
-local CONFIG_FILE = "/tmp/dnsforwarder.conf";
-
-function sync_value_to_file(value, file)
- value = value:gsub("\r\n?", "\n")
- local old_value = nixio.fs.readfile(file)
- if value ~= old_value then
- nixio.fs.writefile(file, value)
- end
-
-end
-local state_msg = ""
-
-local dnsforwarder_on = (luci.sys.call("pidof dnsforwarder > /dev/null") == 0)
-local resolv_file = luci.sys.exec("uci get dhcp.@dnsmasq[0].resolvfile")
-local listen_port = luci.sys.exec("uci get dhcp.@dnsmasq[0].server")
-
-if dnsforwarder_on then
- state_msg = "" .. translate("Running") .. ""
-else
- state_msg = "" .. translate("Not running") .. ""
-end
-
-
-if dnsforwarder_on and string.sub(listen_port,1,14) == "127.0.0.1#5053" then
- state_msg = state_msg .. ",DNSmasq已经将流量定向至本软件"
-elseif dnsforwarder_on then
- state_msg = state_msg .. ""
-end
-
-if resolv_file=="" then
-
-else if dnsforwarder_on then
- state_msg = state_msg .. ""
- end
-end
-
-m = Map("dnsforwarder",translate("dnsforwarder"),translate("Dnsforwarder是企业级DNS服务器,可以通过TCP协议进行DNS解析,可以方便的使用iptables进行透明代理,配合ipset、GFWList使用效果更佳。默认上游服务器为114DNS,SSR和Redsocks的GFWList模式要依赖本软件包提供的功能").. "
状态 - " .. state_msg)
-s = m:section(TypedSection,"arguments", translate("通用配置"))
-s.addremove=false
-s.anonymous=true
- view_enable = s:option(Flag,"enabled",translate("Enable"))
- view_dnsmasq = s:option(Flag,"dnsmasq",translate("设置成DNSmasq的上游服务器"),translate("让DNSMasq从本软件获得解析结果,支持GFWList模式"))
- view_addr = s:option(Value,"addr",translate("转发地址"),translate("请填写dnsforwarder的监听地址,默认127.0.0.1:5053,如果填写208.67.222.222:5353那么可不通过该软件获得无污染结果"))
- view_addr.default = "127.0.0.1:5053"
-
--- ---------------------------------------------------
-s1 = m:section(TypedSection,"config",translate("标准配置"))
-s1.addremove = false
-s1.anonymous = true
- log_enable = s1:option(Flag,"log",translate("打开文件日志"))
- log_enable.rmempty = false
- log_enable.default = "false"
- log_enable.disabled = "false"
- log_enable.enabled = "true"
- log_size = s1:option(Value,"log_size",translate("单个日志文件大小的临界值(字节)"),translate("当日志文件大小超过这个临界值后,当前的日志文件将会被重命名,然后建立一个新的日志文件,继续记录日志"))
- log_size.datatype = "uinteger"
- log_size.default = "102400"
- log_size:depends ({log="true"})
- log_path = s1:option(Value,"log_path",translate("设定日志文件所在的文件夹"),translate("日志文件初始的文件名为 `dnsforwarder.log',当超过临界值之后,将会被重命名为 `dnsforwarder.log.1'、`dnsforwarder.log.2' 等等,然后重新建立一个 dnsforwarder.log' 文件"))
- log_path.datatype = "directory"
- log_path.default = "/var/log/"
- log_path.readonly = true
- log_path:depends ({log="true"})
- gfw_enable = s1:option(Flag,"gfw",translate("使用GFW列表"))
- gfw_enable.rmempty = false
- gfw_enable.default = "true"
- gfw_enable.disabled = "false"
- gfw_enable.enabled = "true"
- udp_local = s1:option(DynamicList,"udp_local",translate("本地监听"),translate("设置在本地开启的接口的IP地址和端口,可以是本地回环地址 (127.0.0.1) ,本地局域网,以及互联网
如果是 IPv6 地址,请在IP两端加上方括号(不包含端口部分),例如 [::1]:53 (本地回环)、[fe80::699c:f79a:9bb6:1]:5353
如果不指定端口,则默认为 53"))
- tcp_group = s1:option(DynamicList,"tcp_group",translate("解析策略(TCP)"),translate([[格式:<IP1[:PORT],IP2[:PORT],...> <DOMAIN1,DOMAIN2,...> <on|PROXY1[:PORT],PROXY2[:PORT],...>
- 设置一个 TCP 上游服务器组,并指定通过这些上游服务器查询的域名
- 第一部分参数“<IP1[:PORT],IP2[:PORT],...>”,用于指定 TCP 上游服务器,不写端口则默认使用 53 端口
- 第二部分参数“<DOMAIN1,DOMAIN2,...>”,用于指定通过这些 TCP 上游服务器查询的域名,可以包含通配符
- 第三部分参数“<no|PROXY1[:PORT],PROXY2[:PORT],...>”,用于指定查询时使用的 Socks5 代理,不写端口则默认使用 1080 端口。`no' 代表不使用代理]]))
- tcp_group.placeholder = "8.8.8.8 * on"
- tcp_group.default = tcp_group.placeholder
- udp_group = s1:option(DynamicList,"udp_group",translate("解析策略(UDP)"),translate([[格式:<IP1[:PORT],IP2[:PORT],...> <DOMAIN1,DOMAIN2,...> <on|off>
- 设置一个 UDP 上游服务器组,并指定通过这些上游服务器查询的域名
- 第一部分参数“<IP1[:PORT],IP2[:PORT],...>”,用于指定 UDP 上游服务器,不写端口则默认使用 53 端口
- 第二部分参数“<DOMAIN1,DOMAIN2,...>”,用于指定通过这些 UDP 上游服务器查询的域名,可以包含通配符
- 第三部分参数“<on|off>”,用于指定是否开启并发查询,`on' 为开启,`off' 为不开启
- 并发查询指的是,向所有指定的 UDP 服务器发送查询请求,取最先接受到的未屏蔽的回复作为查询结果,并丢弃其余的回复]]))
- udp_group.placeholder = "9.9.9.9,119.29.29.29,223.5.5.5,114.114.114.114 * on"
- udp_group.default = udp_group.placeholder
- group_file = s1:option(DynamicList,"group_file",translate("解析策略(文件)"),translate([[
- 从文件加载服务器组,
- 服务器组文件的写法点击这里查看。]]))
- group_file.datatype = "file"
- s1:option(DummyValue,"nothing"," ",translate([[
- 解析策略优先级:解析策略(TCP)>解析策略(UDP)>解析策略(文件)
- 1.对于没有指定服务器的域名,会随机选择一个服务器组进行查询。
- 2.如果某域名匹配多个服务器组,则选择的顺序如下:
- 2.1 优先选择不含通配符的匹配项,如果仍然有多条匹配,则选择匹配度最高的那个,
- 如果匹配度相同的情况下仍然有多条匹配,则选择最后一个。
- (例如 `ipv6.microsoft.com' 比 `microsoft.com' 对于域名 `teredo.ipv6.microsoft.com' 的匹配度更高)
- 2.2 然后选择包含通配符的匹配项,如果有多条匹配,则选择第一个.
- ]]))
- block_ip = s1:option(DynamicList,"block_ip",translate("DNS黑名单"),translate("阻挡含有以上 IP 地址的 DNS 数据包,IPv6 地址不用加方括号"))
- block_ip.datatype = "ipaddr"
- block_ip.delimiter = ","
- ip_substituting = s1:option(DynamicList,"ip_substituting",translate("替换 DNS 数据包"),translate("替换 DNS 数据包中的 IP 地址(只支持 IPv4 地址),例如:
127.0.0.1 1.2.0.127
的效果是把 DNS 数据包中所有的 127.0.0.1 地址替换为 1.2.0.127,仅替换通过服务器(TCP 和 UDP)查询而来的 DNS 数据包,对于缓存中和 Hosts 中的结果无效"))
- ip_substituting.placeholder = "127.0.0.1 1.2.0.127"
- block_negative_resp = s1:option(Flag,"block_negative_resp",translate("过滤失败请求"),translate("是否过滤来自上游服务器的查询未成功的响应。查询未成功指:格式错误、服务器错误、域名不存在和服务器拒绝请求等,参见 RFC 6895,`2.3. RCODE Assignment'"))
- block_negative_resp.rmempty = false
- block_negative_resp.default = "false"
- block_negative_resp.disabled = "false"
- block_negative_resp.enabled = "true"
- append_host = s1:option(DynamicList,"append_host",translate("附加hosts"),translate([[
- 写法与HOSTS文件中的一样,同样支持通配符,IPv6 地址不用加方括号
- 也可以定义 CName 的 Hosts 项,例如:www.google.cn *.google.com
,这样所有匹配 *.google.com 的域名都将指向 www.google.cn 的 IP 地址
- 如果使某些域名跳过在 hosts 中的查询,可以@@ *.012345.com
这样所有匹配 *.012345.com 的域名都不会在 hosts 中查询
- 各种 hosts 的优先级从高到低:1.带 `@@' 的禁止项 2.一般 hosts 项 3.CName hosts 项
- ]]))
- block_ipv6 = s1:option(Flag,"block_ipv6",translate("拒绝IPv6 地址"),translate("如果一个域名在 Hosts 被指定了一个 IPv4 地址(包括文件 Hosts 和 `AppendHosts' 指定的 Hosts),那么拒绝对这个域名的 IPv6 地址查询"))
- block_ipv6.rmempty = false
- block_ipv6.default = "false"
- block_ipv6.disabled = "false"
- block_ipv6.enabled = "true"
-
- cache = s1:option(Flag,"cache",translate("使用缓存"),translate("是否使用缓存"))
- cache.rmempty = false
- cache.default = "false"
- cache.disabled = "false"
- cache.enabled = "true"
- cache_size = s1:option(Value,"cache_size",translate("缓存大小 (字节)"),translate("缓存大小不能小于 102400 (100KB)"))
- cache_size.datatype = "min(102400)"
- cache_size.default = "1048576"
- cache_size:depends ({cache="true"})
- cache_file = s1:option(Value,"cache_file",translate("缓存文件"))
- cache_file.default="/tmp/dnsforwarder.cache"
- cache_file.readonly=true
- cache_file:depends ({cache="true"})
- cache_ignore = s1:option(Flag,"cache_ignore",translate("忽略TTL"),translate("为 `true' 时将忽略 TTL,此时所有的缓存条目将不会被移除,为 `false' 时不忽略 TTL,缓存条目将会按照 TTL 的时间来移除"))
- cache_ignore.rmempty=false
- cache_ignore.default = "false"
- cache_ignore.disabled = "false"
- cache_ignore.enabled = "true"
- cache_ignore:depends ({cache="true"})
- cache_control = s1:option(DynamicList,"cache_control",translate("控制域名的缓存行为"),translate([[
- 单条格式:<DOMAIN> [$ [$] ]<TYPE> [ARGUMENT]
- <DOMAIN> 是域名;[$ [$] ] 是传染设定;<TYPE> 是类型;[ARGUMENT] 是参数
- <DOMAIN> 支持通配符,支持尾部匹配
- <TYPE> 有以下可选项(均为小写):
- orig : 使用原先的 TTL,即不改变指定域名记录的 TTL,此类型无需 [ARGUMENT] 参数
- nocache : 不缓存指定域名记录,此类型无需 [ARGUMENT] 参数
- fixed : 将指定域名记录的 TTL 修改为一个预先指定的常数值,这个值通过 [ARGUMENT] 参数指定
- vari : 将指定域名记录的 TTL 修改为一个预先规定的变量,这个变量通过 [ARGUMENT] 参数指定,此时 [ARGUMENT] 参数的格式为 `ax+b',其中 a、b 为非负整数(需用户手工指定),x 为记录原先的 TTL 值(不需要用户指定)。此选项必须为 `ax+b' 的形式,不能是 `ax-b' 或者其它的变体形
-
传染机制:
- 主动传染 (此为默认情况):主动传染 : 所有的域名记录不论是否存在单独指定的控制条目,均使用上级域名控制条目
- 被动传染(<TYPE> 前加一个 `$'): 如果存在单独指定的控制条目,那么使用单独指定的控制条目,否则使用上级域名的控制条目
- 不传染( 前加两个 `$'): 如果存在单独指定的控制条目,那么使用单独指定的控制条目,如果没有则不修改记录的 TTL 值
- 只有被查询的域名控制条目才具有传染性质
-
常见例子:
- * fixed 300
强制使所有缓存的条目的 TTL 为 300
- * vari 2x+0
将所有缓存条目的 TTL 加倍。即原来为300,现在为600
- ]]))
- cache_control:depends ({cache="true"})
-
- domain_statistic = s1:option(Flag,"domain_statistic",translate("启用域名统计"),translate("域名信息统计会依照模板文件记录域名的查询情况目前只能生成到/root/.dnsforwarder/statistic.html
会影响闪存寿命不建议开启"))
- domain_statistic.rmempty = false
- domain_statistic.default = "false"
- domain_statistic.disabled = "false"
- domain_statistic.enabled = "true"
-
- view_cfg = s1:option(TextValue, "1", nil)
- view_cfg.rmempty = false
- view_cfg.rows = 24
- view_cfg.readonly = true
-
- function view_cfg.cfgvalue()
- return nixio.fs.readfile(CONFIG_FILE) or ""
- end
- function view_cfg.write(self, section, value)
- end
-
--- ---------------------------------------------------
-local apply = luci.http.formvalue("cbi.apply")
-if apply then
- os.execute("/etc/init.d/dnsforwarder makeconfig >/dev/null 2>&1 &")
- os.execute("/etc/init.d/dnsforwarder restart >/dev/null 2>&1 &")
-end
-
-return m
diff --git a/package/lean/luci-app-dnsforwarder/luasrc/model/cbi/dnsforwarder/log.lua b/package/lean/luci-app-dnsforwarder/luasrc/model/cbi/dnsforwarder/log.lua
deleted file mode 100644
index 499fd375d..000000000
--- a/package/lean/luci-app-dnsforwarder/luasrc/model/cbi/dnsforwarder/log.lua
+++ /dev/null
@@ -1,28 +0,0 @@
-local fs = require "nixio.fs"
-local log = "/var/log/dnsforwarder.log"
-
-function sync_value_to_file(value, file)
- value = value:gsub("\r\n?", "\n")
- local old_value = nixio.fs.readfile(file)
- if value ~= old_value then
- nixio.fs.writefile(file, value)
- end
-end
-
-m = Map("dnsforwarder")
-m.title = translate("运行日志")
-m.description = translate("如果想观察日志,需要在配置文件中写明LogOn true 和 LogFileFolder /var/log")
-
-s = m:section(TypedSection,"arguments","")
-s.addremove = false
-s.anonymous = true
-
-gfwlist = s:option(TextValue, "gfwlist",nil, nil)
-gfwlist.description = translate("日志文件默认位置:" .. log)
-gfwlist.rows = 26
-gfwlist.wrap = "off"
-gfwlist.cfgvalue = function(self, section)
- return fs.readfile(log) or ""
-end
-
-return m
diff --git a/package/lean/luci-app-dnsforwarder/po/zh-cn/dnsforwarder.po b/package/lean/luci-app-dnsforwarder/po/zh-cn/dnsforwarder.po
deleted file mode 100644
index 7fd8e0285..000000000
--- a/package/lean/luci-app-dnsforwarder/po/zh-cn/dnsforwarder.po
+++ /dev/null
@@ -1,8 +0,0 @@
-msgid "Enable"
-msgstr "启用"
-
-msgid "Running"
-msgstr "运行中"
-
-msgid "Not running"
-msgstr "未运行"
diff --git a/package/lean/luci-app-docker/Makefile b/package/lean/luci-app-docker/Makefile
deleted file mode 100644
index d468ad0dc..000000000
--- a/package/lean/luci-app-docker/Makefile
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2008-2014 The LuCI Team
-#
-# This is free software, licensed under the Apache License, Version 2.0 .
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=Luci for Docker-CE
-LUCI_DEPENDS:=+dockerd +docker +e2fsprogs +fdisk
-LUCI_PKGARCH:=all
-PKG_VERSION:=1
-PKG_RELEASE:=12
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-docker/luasrc/controller/docker.lua b/package/lean/luci-app-docker/luasrc/controller/docker.lua
deleted file mode 100644
index eb5dadbc2..000000000
--- a/package/lean/luci-app-docker/luasrc/controller/docker.lua
+++ /dev/null
@@ -1,17 +0,0 @@
-module("luci.controller.docker", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/dockerd") then
- return
- end
-
- entry({"admin", "services", "docker"}, cbi("docker"), _("Docker CE Container"), 199).dependent = true
- entry({"admin", "services", "docker", "status"}, call("act_status")).leaf = true
-end
-
-function act_status()
- local e = {}
- e.running = luci.sys.call("pgrep /usr/bin/dockerd >/dev/null") == 0
- luci.http.prepare_content("application/json")
- luci.http.write_json(e)
-end
diff --git a/package/lean/luci-app-docker/luasrc/model/cbi/docker.lua b/package/lean/luci-app-docker/luasrc/model/cbi/docker.lua
deleted file mode 100644
index 037f3ee87..000000000
--- a/package/lean/luci-app-docker/luasrc/model/cbi/docker.lua
+++ /dev/null
@@ -1,23 +0,0 @@
-local running = (luci.sys.call("pidof portainer >/dev/null") == 0)
-local button = ""
-
-if running then
- button = "
"
-end
-
-m = Map("dockerd", "Docker CE", translate("Docker is a set of platform-as-a-service (PaaS) products that use OS-level virtualization to deliver software in packages called containers.") .. button)
-
-
-m:section(SimpleSection).template = "docker/docker_status"
-
-s = m:section(TypedSection, "docker")
-s.anonymous = true
-
-wan_mode = s:option(Flag, "wan_mode", translate("Enable WAN access Docker"), translate("Enable WAN access docker mapped ports"))
-wan_mode.default = 0
-wan_mode.rmempty = false
-
-o = s:option(DummyValue,"readme",translate(" "))
-o.description = translate(""..translate("Download DockerReadme.pdf").."")
-
-return m
diff --git a/package/lean/luci-app-docker/luasrc/view/docker/docker_status.htm b/package/lean/luci-app-docker/luasrc/view/docker/docker_status.htm
deleted file mode 100644
index f3cf6b761..000000000
--- a/package/lean/luci-app-docker/luasrc/view/docker/docker_status.htm
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
-
diff --git a/package/lean/luci-app-docker/po/zh-cn/docker.po b/package/lean/luci-app-docker/po/zh-cn/docker.po
deleted file mode 100644
index 0e378bc29..000000000
--- a/package/lean/luci-app-docker/po/zh-cn/docker.po
+++ /dev/null
@@ -1,39 +0,0 @@
-msgid ""
-msgstr ""
-"Project-Id-Version: Luci ARP Bind\n"
-"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2015-06-23 20:16+0800\n"
-"PO-Revision-Date: 2015-06-23 20:17+0800\n"
-"Last-Translator: coolsnowwolf \n"
-"Language-Team: PandoraBox Team\n"
-"Language: zh_CN\n"
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=1; plural=0;\n"
-"X-Generator: Poedit 1.8.1\n"
-"X-Poedit-SourceCharset: UTF-8\n"
-
-msgid "Docker CE Container"
-msgstr "Docker CE 容器"
-
-msgid "Open Portainer Docker Admin"
-msgstr "打开 Portainer Docker 管理页面"
-
-msgid "Docker is a set of platform-as-a-service (PaaS) products that use OS-level virtualization to deliver software in packages called containers."
-msgstr "Docker是一组平台即服务(platform-as-a-service,PaaS)产品,它使用操作系统级容器虚拟化来交付软件包。"
-
-msgid "Enable WAN access Docker"
-msgstr "允许 WAN 访问 Docker"
-
-msgid "Enable WAN access docker mapped ports"
-msgstr "允许 WAN 访问 Docker 映射后的端口(易受攻击!)。
推荐禁用该选项后,用系统防火墙选择性映射 172.17.0.X:XX 端口到 WAN"
-
-msgid "Docker Readme First"
-msgstr "Docker 初始化无脑配置教程"
-
-msgid "Download DockerReadme.pdf"
-msgstr "下载 Docker 初始化无脑配置教程"
-
-msgid "Please download DockerReadme.pdf to read when first-running"
-msgstr "初次在OpenWrt中运行Docker必读(只需执行一次流程)"
diff --git a/package/lean/luci-app-docker/root/etc/config/dockerd b/package/lean/luci-app-docker/root/etc/config/dockerd
deleted file mode 100644
index 7f1e0880b..000000000
--- a/package/lean/luci-app-docker/root/etc/config/dockerd
+++ /dev/null
@@ -1,3 +0,0 @@
-
-config docker
- option wan_mode '0'
diff --git a/package/lean/luci-app-docker/root/etc/docker-init b/package/lean/luci-app-docker/root/etc/docker-init
deleted file mode 100755
index 9bade865f..000000000
--- a/package/lean/luci-app-docker/root/etc/docker-init
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/sh
-dtype=`fdisk -l /dev/sda | grep 'Disklabel type' | awk '{print $3}'`
-partid="0"
-
-if [ "$dtype" = "gpt" ]
-then
- partid=`echo "n
-
-
-
-w
-" | fdisk /dev/sda | grep 'Created a new partition' | awk '{print $5}'`
-
-elif [ "$dtype" = "dos" ]
-then
- partid=`echo "n
-p
-
-
-
-w
-" | fdisk /dev/sda | grep 'Created a new partition' | awk '{print $5}'`
-fi
-
-echo "y" | mkfs.ext4 /dev/sda$partid
-
diff --git a/package/lean/luci-app-docker/root/etc/docker-web b/package/lean/luci-app-docker/root/etc/docker-web
deleted file mode 100755
index 361589b39..000000000
--- a/package/lean/luci-app-docker/root/etc/docker-web
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-docker run -d --restart=always --name="portainer" -p 9999:9000 -v /var/run/docker.sock:/var/run/docker.sock -v portainer_data:/data portainer/portainer-ce
\ No newline at end of file
diff --git a/package/lean/luci-app-docker/root/etc/docker/daemon.json b/package/lean/luci-app-docker/root/etc/docker/daemon.json
deleted file mode 100644
index b8a99a4ce..000000000
--- a/package/lean/luci-app-docker/root/etc/docker/daemon.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-"data-root": "/opt/",
-"log-level": "warn"
-}
diff --git a/package/lean/luci-app-docker/root/etc/init.d/dockerd b/package/lean/luci-app-docker/root/etc/init.d/dockerd
deleted file mode 100755
index 5d6f36bf2..000000000
--- a/package/lean/luci-app-docker/root/etc/init.d/dockerd
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh /etc/rc.common
-
-USE_PROCD=1
-START=25
-
-start_service() {
- local nofile=$(cat /proc/sys/fs/nr_open)
- local wanmode=$(uci get dockerd.@docker[0].wan_mode)
-
- if [ $wanmode = "1" ] ;then
- dockerwan=" "
- else
- dockerwan="--iptables=false"
- fi
-
- procd_open_instance
- procd_set_param stderr 1
- procd_set_param command /usr/bin/dockerd $dockerwan
- procd_set_param limits nofile="${nofile} ${nofile}"
- procd_close_instance
-
-}
diff --git a/package/lean/luci-app-docker/root/etc/uci-defaults/luci-docker b/package/lean/luci-app-docker/root/etc/uci-defaults/luci-docker
deleted file mode 100755
index e03f47783..000000000
--- a/package/lean/luci-app-docker/root/etc/uci-defaults/luci-docker
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@dockerd[-1]
- add ucitrack dockerd
- set ucitrack.@dockerd[-1].init=dockerd
- commit ucitrack
-EOF
-
-rm -f /tmp/luci-indexcache
-exit 0
diff --git a/package/lean/luci-app-docker/root/www/DockerReadme.pdf b/package/lean/luci-app-docker/root/www/DockerReadme.pdf
deleted file mode 100644
index 8a9a94b83..000000000
Binary files a/package/lean/luci-app-docker/root/www/DockerReadme.pdf and /dev/null differ
diff --git a/package/lean/luci-app-dockerman/Makefile b/package/lean/luci-app-dockerman/Makefile
deleted file mode 100644
index 7c553c86c..000000000
--- a/package/lean/luci-app-dockerman/Makefile
+++ /dev/null
@@ -1,30 +0,0 @@
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=Docker Manager interface for LuCI
-LUCI_DEPENDS:=+luci-lib-docker +luci-lib-ip +docker +dockerd +ttyd
-PKG_NAME:=luci-app-dockerman
-PKG_VERSION:=v0.5.25
-PKG_RELEASE:=1
-PKG_MAINTAINER:=lisaac
-PKG_LICENSE:=AGPL-3.0
-
-define Package/luci-app-dockerman/postinst
-#!/bin/sh
-
-/init.sh env
-touch /etc/config/dockerd
-uci set dockerd.dockerman=dockerman
-uci set dockerd.dockerman.socket_path=`uci get dockerd.dockerman.socket_path 2&> /dev/null || echo '/var/run/docker.sock'`
-uci set dockerd.dockerman.status_path=`uci get dockerd.dockerman.status_path 2&> /dev/null || echo '/tmp/.docker_action_status'`
-uci set dockerd.dockerman.debug=`uci get dockerd.dockerman.debug 2&> /dev/null || echo 'false'`
-uci set dockerd.dockerman.debug_path=`uci get dockerd.dockerman.debug_path 2&> /dev/null || echo '/tmp/.docker_debug'`
-uci set dockerd.dockerman.remote_port=`uci get dockerd.dockerman.remote_port 2&> /dev/null || echo '2375'`
-uci set dockerd.dockerman.remote_endpoint=`uci get dockerd.dockerman.remote_endpoint 2&> /dev/null || echo '0'`
-uci del_list dockerd.dockerman.ac_allowed_interface='br-lan'
-uci add_list dockerd.dockerman.ac_allowed_interface='br-lan'
-uci commit dockerd
-endef
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/containers.svg b/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/containers.svg
deleted file mode 100644
index 4165f90bd..000000000
--- a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/containers.svg
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
diff --git a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/file-icon.png b/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/file-icon.png
deleted file mode 100644
index f156dc1c7..000000000
Binary files a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/file-icon.png and /dev/null differ
diff --git a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/file-manager.css b/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/file-manager.css
deleted file mode 100644
index 911693b62..000000000
--- a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/file-manager.css
+++ /dev/null
@@ -1,91 +0,0 @@
-.fb-container {
- margin-top: 1rem;
-}
-.fb-container .cbi-button {
- height: 1.8rem;
-}
-.fb-container .cbi-input-text {
- margin-bottom: 1rem;
- width: 100%;
-}
-.fb-container .panel-title {
- padding-bottom: 0;
- width: 50%;
- border-bottom: none;
-}
-.fb-container .panel-container {
- display: flex;
- align-items: center;
- justify-content: space-between;
- padding-bottom: 1rem;
- border-bottom: 1px solid #eee;
-}
-.fb-container .upload-container {
- display: none;
- margin: 1rem 0;
-}
-.fb-container .upload-file {
- margin-right: 2rem;
-}
-.fb-container .cbi-value-field {
- text-align: left;
-}
-.fb-container .parent-icon strong {
- margin-left: 1rem;
-}
-.fb-container td[class$="-icon"] {
- cursor: pointer;
-}
-.fb-container .file-icon, .fb-container .folder-icon, .fb-container .link-icon {
- position: relative;
-}
-.fb-container .file-icon:before, .fb-container .folder-icon:before, .fb-container .link-icon:before {
- display: inline-block;
- width: 1.5rem;
- height: 1.5rem;
- content: '';
- background-size: contain;
- margin: 0 0.5rem 0 1rem;
- vertical-align: middle;
-}
-.fb-container .file-icon:before {
- background-image: url(file-icon.png);
-}
-.fb-container .folder-icon:before {
- background-image: url(folder-icon.png);
-}
-.fb-container .link-icon:before {
- background-image: url(link-icon.png);
-}
-@media screen and (max-width: 480px) {
- .fb-container .upload-file {
- width: 14.6rem;
- }
- .fb-container .cbi-value-owner,
- .fb-container .cbi-value-perm {
- display: none;
- }
-}
-
-.cbi-section-table {
- width: 100%;
-}
-
-.cbi-section-table-cell {
- text-align: right;
-}
-
-.cbi-button-install {
-border-color: #c44;
- color: #c44;
- margin-left: 3px;
-}
-
-.cbi-value-field {
- padding: 10px 0;
-}
-
-.parent-icon {
- height: 1.8rem;
- padding: 10px 0;
-}
\ No newline at end of file
diff --git a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/folder-icon.png b/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/folder-icon.png
deleted file mode 100644
index 1370df3ad..000000000
Binary files a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/folder-icon.png and /dev/null differ
diff --git a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/images.svg b/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/images.svg
deleted file mode 100644
index 90ca5a1c7..000000000
--- a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/images.svg
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
diff --git a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/link-icon.png b/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/link-icon.png
deleted file mode 100644
index 03cc82cdf..000000000
Binary files a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/link-icon.png and /dev/null differ
diff --git a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/networks.svg b/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/networks.svg
deleted file mode 100644
index 3eb12a393..000000000
--- a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/networks.svg
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
diff --git a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/tar.min.js b/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/tar.min.js
deleted file mode 100644
index d9c06667f..000000000
--- a/package/lean/luci-app-dockerman/htdocs/luci-static/resources/dockerman/tar.min.js
+++ /dev/null
@@ -1,185 +0,0 @@
-// https://github.com/thiscouldbebetter/TarFileExplorer
-class TarFileTypeFlag
-{constructor(value,name)
-{this.value=value;this.id="_"+this.value;this.name=name;}
-static _instances;static Instances()
-{if(TarFileTypeFlag._instances==null)
-{TarFileTypeFlag._instances=new TarFileTypeFlag_Instances();}
-return TarFileTypeFlag._instances;}}
-class TarFileTypeFlag_Instances
-{constructor()
-{this.Normal=new TarFileTypeFlag("0","Normal");this.HardLink=new TarFileTypeFlag("1","Hard Link");this.SymbolicLink=new TarFileTypeFlag("2","Symbolic Link");this.CharacterSpecial=new TarFileTypeFlag("3","Character Special");this.BlockSpecial=new TarFileTypeFlag("4","Block Special");this.Directory=new TarFileTypeFlag("5","Directory");this.FIFO=new TarFileTypeFlag("6","FIFO");this.ContiguousFile=new TarFileTypeFlag("7","Contiguous File");this.LongFilePath=new TarFileTypeFlag("L","././@LongLink");this._All=[this.Normal,this.HardLink,this.SymbolicLink,this.CharacterSpecial,this.BlockSpecial,this.Directory,this.FIFO,this.ContiguousFile,this.LongFilePath,];for(var i=0;ia+=String.fromCharCode(b),"");entryNext.header.fileName=entryNext.header.fileName.replace(/\0/g,"");entries.splice(i,1);i--;}}}
-downloadAs(fileNameToSaveAs)
-{return FileHelper.saveBytesAsFile
-(this.toBytes(),fileNameToSaveAs)}
-entriesForDirectories()
-{return this.entries.filter(x=>x.header.typeFlag.name==TarFileTypeFlag.Instances().Directory);}
-toBytes()
-{this.toBytes_PrependLongPathEntriesAsNeeded();var fileAsBytes=[];var entriesAsByteArrays=this.entries.map(x=>x.toBytes());this.consolidateLongPathEntries();for(var i=0;imaxLength)
-{var entryFileNameAsBytes=entryFileName.split("").map(x=>x.charCodeAt(0));var entryContainingLongPathToPrepend=TarFileEntry.fileNew
-(typeFlagLongPath.name,entryFileNameAsBytes);entryContainingLongPathToPrepend.header.typeFlag=typeFlagLongPath;entryContainingLongPathToPrepend.header.timeModifiedInUnixFormat=entryHeader.timeModifiedInUnixFormat;entryContainingLongPathToPrepend.header.checksumCalculate();entryHeader.fileName=entryFileName.substr(0,maxLength)+String.fromCharCode(0);entries.splice(i,0,entryContainingLongPathToPrepend);i++;}}}
-toString()
-{var newline="\n";var returnValue="[TarFile]"+newline;for(var i=0;i{var fileLoadedAsBinaryString=fileLoadedEvent.target.result;var fileLoadedAsBytes=ByteHelper.stringUTF8ToBytes(fileLoadedAsBinaryString);callback(fileToLoad.name,fileLoadedAsBytes);}
-fileReader.readAsBinaryString(fileToLoad);}
-static loadFileAsText(fileToLoad,callback)
-{var fileReader=new FileReader();fileReader.onload=(fileLoadedEvent)=>{var textFromFileLoaded=fileLoadedEvent.target.result;callback(fileToLoad.name,textFromFileLoaded);};fileReader.readAsText(fileToLoad);}
-static saveBytesAsFile(bytesToWrite,fileNameToSaveAs)
-{var bytesToWriteAsArrayBuffer=new ArrayBuffer(bytesToWrite.length);var bytesToWriteAsUIntArray=new Uint8Array(bytesToWriteAsArrayBuffer);for(var i=0;i
-
-
-
diff --git a/package/lean/luci-app-dockerman/luasrc/controller/dockerman.lua b/package/lean/luci-app-dockerman/luasrc/controller/dockerman.lua
deleted file mode 100644
index ebc89c2df..000000000
--- a/package/lean/luci-app-dockerman/luasrc/controller/dockerman.lua
+++ /dev/null
@@ -1,620 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local docker = require "luci.model.docker"
--- local uci = (require "luci.model.uci").cursor()
-
-module("luci.controller.dockerman",package.seeall)
-
-function index()
- entry({"admin", "docker"},
- alias("admin", "docker", "config"),
- _("Docker"),
- 40).acl_depends = { "luci-app-dockerman" }
-
- entry({"admin", "docker", "config"},cbi("dockerman/configuration"),_("Configuration"), 8).leaf=true
-
- -- local uci = (require "luci.model.uci").cursor()
- -- if uci:get_bool("dockerd", "dockerman", "remote_endpoint") then
- -- local host = uci:get("dockerd", "dockerman", "remote_host")
- -- local port = uci:get("dockerd", "dockerman", "remote_port")
- -- if not host or not port then
- -- return
- -- end
- -- else
- -- local socket = uci:get("dockerd", "dockerman", "socket_path") or "/var/run/docker.sock"
- -- if socket and not nixio.fs.access(socket) then
- -- return
- -- end
- -- end
-
- -- if (require "luci.model.docker").new():_ping().code ~= 200 then
- -- return
- -- end
-
- entry({"admin", "docker", "overview"}, form("dockerman/overview"),_("Overview"), 2).leaf=true
- entry({"admin", "docker", "containers"}, form("dockerman/containers"), _("Containers"), 3).leaf=true
- entry({"admin", "docker", "images"}, form("dockerman/images"), _("Images"), 4).leaf=true
- entry({"admin", "docker", "networks"}, form("dockerman/networks"), _("Networks"), 5).leaf=true
- entry({"admin", "docker", "volumes"}, form("dockerman/volumes"), _("Volumes"), 6).leaf=true
- entry({"admin", "docker", "events"}, call("action_events"), _("Events"), 7)
-
- entry({"admin", "docker", "newcontainer"}, form("dockerman/newcontainer")).leaf=true
- entry({"admin", "docker", "newnetwork"}, form("dockerman/newnetwork")).leaf=true
- entry({"admin", "docker", "container"}, form("dockerman/container")).leaf=true
-
- entry({"admin", "docker", "call"}, call("action_call_docker")).leaf=true
-
- entry({"admin", "docker", "container_stats"}, call("action_get_container_stats")).leaf=true
- entry({"admin", "docker", "containers_stats"}, call("action_get_containers_stats")).leaf=true
- entry({"admin", "docker", "get_system_df"}, call("action_get_system_df")).leaf=true
- entry({"admin", "docker", "container_get_archive"}, call("download_archive")).leaf=true
- entry({"admin", "docker", "container_put_archive"}, call("upload_archive")).leaf=true
- entry({"admin", "docker", "container_list_file"}, call("list_file")).leaf=true
- entry({"admin", "docker", "container_remove_file"}, call("remove_file")).leaf=true
- entry({"admin", "docker", "container_rename_file"}, call("rename_file")).leaf=true
- entry({"admin", "docker", "container_export"}, call("export_container")).leaf=true
- entry({"admin", "docker", "images_save"}, call("save_images")).leaf=true
- entry({"admin", "docker", "images_load"}, call("load_images")).leaf=true
- entry({"admin", "docker", "images_import"}, call("import_images")).leaf=true
- entry({"admin", "docker", "images_get_tags"}, call("get_image_tags")).leaf=true
- entry({"admin", "docker", "images_tag"}, call("tag_image")).leaf=true
- entry({"admin", "docker", "images_untag"}, call("untag_image")).leaf=true
- entry({"admin", "docker", "confirm"}, call("action_confirm")).leaf=true
-end
-
-function action_call_docker()
-
-end
-
-function action_get_system_df()
- local res = docker.new():df()
- luci.http.status(res.code, res.message)
- luci.http.prepare_content("application/json")
- luci.http.write_json(res.body)
-end
-
-function scandir(id, directory)
- local cmd_docker = luci.util.exec("command -v docker"):match("^.+docker") or nil
- if not cmd_docker or cmd_docker:match("^%s+$") then
- return
- end
- local i, t, popen = 0, {}, io.popen
- local uci = (require "luci.model.uci").cursor()
- local remote = uci:get_bool("dockerd", "dockerman", "remote_endpoint")
- local socket_path = not remote and uci:get("dockerd", "dockerman", "socket_path") or nil
- local host = remote and uci:get("dockerd", "dockerman", "remote_host") or nil
- local port = remote and uci:get("dockerd", "dockerman", "remote_port") or nil
- if remote and host and port then
- hosts = "tcp://" .. host .. ':'.. port
- elseif socket_path then
- hosts = "unix://" .. socket_path
- else
- return
- end
- local pfile = popen(cmd_docker .. ' -H "'.. hosts ..'" exec ' ..id .." ls -lh \""..directory.."\" | egrep -v '^total'")
- for fileinfo in pfile:lines() do
- i = i + 1
- t[i] = fileinfo
- end
- pfile:close()
- return t
-end
-
-function list_response(id, path, success)
- luci.http.prepare_content("application/json")
- local result
- if success then
- local rv = scandir(id, path)
- result = {
- ec = 0,
- data = rv
- }
- else
- result = {
- ec = 1
- }
- end
- luci.http.write_json(result)
-end
-
-function list_file(id)
- local path = luci.http.formvalue("path")
- list_response(id, path, true)
-end
-
-function rename_file(id)
- local filepath = luci.http.formvalue("filepath")
- local newpath = luci.http.formvalue("newpath")
- local cmd_docker = luci.util.exec("command -v docker"):match("^.+docker") or nil
- if not cmd_docker or cmd_docker:match("^%s+$") then
- return
- end
- local uci = (require "luci.model.uci").cursor()
- local remote = uci:get_bool("dockerd", "dockerman", "remote_endpoint")
- local socket_path = not remote and uci:get("dockerd", "dockerman", "socket_path") or nil
- local host = remote and uci:get("dockerd", "dockerman", "remote_host") or nil
- local port = remote and uci:get("dockerd", "dockerman", "remote_port") or nil
- if remote and host and port then
- hosts = "tcp://" .. host .. ':'.. port
- elseif socket_path then
- hosts = "unix://" .. socket_path
- else
- return
- end
- local success = os.execute(cmd_docker .. ' -H "'.. hosts ..'" exec '.. id ..' mv "'..filepath..'" "'..newpath..'"')
- list_response(nixio.fs.dirname(filepath), success)
-end
-
-function remove_file(id)
- local path = luci.http.formvalue("path")
- local isdir = luci.http.formvalue("isdir")
- local cmd_docker = luci.util.exec("command -v docker"):match("^.+docker") or nil
- if not cmd_docker or cmd_docker:match("^%s+$") then
- return
- end
- local uci = (require "luci.model.uci").cursor()
- local remote = uci:get_bool("dockerd", "dockerman", "remote_endpoint")
- local socket_path = not remote and uci:get("dockerd", "dockerman", "socket_path") or nil
- local host = remote and uci:get("dockerd", "dockerman", "remote_host") or nil
- local port = remote and uci:get("dockerd", "dockerman", "remote_port") or nil
- if remote and host and port then
- hosts = "tcp://" .. host .. ':'.. port
- elseif socket_path then
- hosts = "unix://" .. socket_path
- else
- return
- end
- path = path:gsub("<>", "/")
- path = path:gsub(" ", "\ ")
- local success
- if isdir then
- success = os.execute(cmd_docker .. ' -H "'.. hosts ..'" exec '.. id ..' rm -r "'..path..'"')
- else
- success = os.remove(path)
- end
- list_response(nixio.fs.dirname(path), success)
-end
-
-function action_events()
- local logs = ""
- local query ={}
-
- local dk = docker.new()
- query["until"] = os.time()
- local events = dk:events({query = query})
-
- if events.code == 200 then
- for _, v in ipairs(events.body) do
- local date = "unknown"
- if v and v.time then
- date = os.date("%Y-%m-%d %H:%M:%S", v.time)
- end
-
- local name = v.Actor.Attributes.name or "unknown"
- local action = v.Action or "unknown"
-
- if v and v.Type == "container" then
- local id = v.Actor.ID or "unknown"
- logs = logs .. string.format("[%s] %s %s Container ID: %s Container Name: %s\n", date, v.Type, action, id, name)
- elseif v.Type == "network" then
- local container = v.Actor.Attributes.container or "unknown"
- local network = v.Actor.Attributes.type or "unknown"
- logs = logs .. string.format("[%s] %s %s Container ID: %s Network Name: %s Network type: %s\n", date, v.Type, action, container, name, network)
- elseif v.Type == "image" then
- local id = v.Actor.ID or "unknown"
- logs = logs .. string.format("[%s] %s %s Image: %s Image name: %s\n", date, v.Type, action, id, name)
- end
- end
- end
-
- luci.template.render("dockerman/logs", {self={syslog = logs, title="Events"}})
-end
-
-local calculate_cpu_percent = function(d)
- if type(d) ~= "table" then
- return
- end
-
- local cpu_count = tonumber(d["cpu_stats"]["online_cpus"])
- local cpu_percent = 0.0
- local cpu_delta = tonumber(d["cpu_stats"]["cpu_usage"]["total_usage"]) - tonumber(d["precpu_stats"]["cpu_usage"]["total_usage"])
- local system_delta = tonumber(d["cpu_stats"]["system_cpu_usage"]) -- tonumber(d["precpu_stats"]["system_cpu_usage"])
- if system_delta > 0.0 then
- cpu_percent = string.format("%.2f", cpu_delta / system_delta * 100.0 * cpu_count)
- end
-
- return cpu_percent
-end
-
-local get_memory = function(d)
- if type(d) ~= "table" then
- return
- end
-
- -- local limit = string.format("%.2f", tonumber(d["memory_stats"]["limit"]) / 1024 / 1024)
- -- local usage = string.format("%.2f", (tonumber(d["memory_stats"]["usage"]) - tonumber(d["memory_stats"]["stats"]["total_cache"])) / 1024 / 1024)
- -- return usage .. "MB / " .. limit.. "MB"
-
- local limit =tonumber(d["memory_stats"]["limit"])
- local usage = tonumber(d["memory_stats"]["usage"])
- -- - tonumber(d["memory_stats"]["stats"]["total_cache"])
-
- return usage, limit
-end
-
-local get_rx_tx = function(d)
- if type(d) ~="table" then
- return
- end
-
- local data = {}
- if type(d["networks"]) == "table" then
- for e, v in pairs(d["networks"]) do
- data[e] = {
- bw_tx = tonumber(v.tx_bytes),
- bw_rx = tonumber(v.rx_bytes)
- }
- end
- end
-
- return data
-end
-
-local function get_stat(container_id)
- if container_id then
- local dk = docker.new()
- local response = dk.containers:inspect({id = container_id})
- if response.code == 200 and response.body.State.Running then
- response = dk.containers:stats({id = container_id, query = {stream = false, ["one-shot"] = true}})
- if response.code == 200 then
- local container_stats = response.body
- local cpu_percent = calculate_cpu_percent(container_stats)
- local mem_useage, mem_limit = get_memory(container_stats)
- local bw_rxtx = get_rx_tx(container_stats)
- return response.code, response.body.message, {
- cpu_percent = cpu_percent,
- memory = {
- mem_useage = mem_useage,
- mem_limit = mem_limit
- },
- bw_rxtx = bw_rxtx
- }
- else
- return response.code, response.body.message
- end
- else
- if response.code == 200 then
- return 500, "container "..container_id.." not running"
- else
- return response.code, response.body.message
- end
- end
- else
- return 404, "No container name or id"
- end
-end
-function action_get_container_stats(container_id)
- local code, msg, res = get_stat(container_id)
- luci.http.status(code, msg)
- luci.http.prepare_content("application/json")
- luci.http.write_json(res)
-end
-
-function action_get_containers_stats()
- local res = luci.http.formvalue(containers) or ""
- local stats = {}
- res = luci.jsonc.parse(res.containers)
- if res and type(res) == "table" then
- for i, v in ipairs(res) do
- _,_,stats[v] = get_stat(v)
- end
- end
- luci.http.status(200, "OK")
- luci.http.prepare_content("application/json")
- luci.http.write_json(stats)
-end
-
-function action_confirm()
- local data = docker:read_status()
- if data then
- data = data:gsub("\n","
"):gsub(" "," ")
- code = 202
- msg = data
- else
- code = 200
- msg = "finish"
- data = "finish"
- end
-
- luci.http.status(code, msg)
- luci.http.prepare_content("application/json")
- luci.http.write_json({info = data})
-end
-
-function export_container(id)
- local dk = docker.new()
- local first
-
- local cb = function(res, chunk)
- if res.code == 200 then
- if not first then
- first = true
- luci.http.header('Content-Disposition', 'inline; filename="'.. id ..'.tar"')
- luci.http.header('Content-Type', 'application\/x-tar')
- end
- luci.ltn12.pump.all(chunk, luci.http.write)
- else
- if not first then
- first = true
- luci.http.prepare_content("text/plain")
- end
- luci.ltn12.pump.all(chunk, luci.http.write)
- end
- end
-
- local res = dk.containers:export({id = id}, cb)
-end
-
-function download_archive()
- local id = luci.http.formvalue("id")
- local path = luci.http.formvalue("path")
- local filename = luci.http.formvalue("filename") or "archive"
- local dk = docker.new()
- local first
-
- local cb = function(res, chunk)
- if res and res.code and res.code == 200 then
- if not first then
- first = true
- luci.http.header('Content-Disposition', 'inline; filename="'.. filename .. '.tar"')
- luci.http.header('Content-Type', 'application\/x-tar')
- end
- luci.ltn12.pump.all(chunk, luci.http.write)
- else
- if not first then
- first = true
- luci.http.status(res and res.code or 500, msg or "unknow")
- luci.http.prepare_content("text/plain")
- end
- luci.ltn12.pump.all(chunk, luci.http.write)
- end
- end
-
- local res = dk.containers:get_archive({
- id = id,
- query = {
- path = luci.http.urlencode(path)
- }
- }, cb)
-end
-
-function upload_archive(container_id)
- local path = luci.http.formvalue("upload-path")
- local dk = docker.new()
- local ltn12 = require "luci.ltn12"
-
- local rec_send = function(sinkout)
- luci.http.setfilehandler(function (meta, chunk, eof)
- if chunk then
- ltn12.pump.step(ltn12.source.string(chunk), sinkout)
- end
- end)
- end
-
- local res = dk.containers:put_archive({
- id = container_id,
- query = {
- path = luci.http.urlencode(path)
- },
- body = rec_send
- })
-
- local msg = res and res.body and res.body.message or nil
- luci.http.status(res and res.code or 500, msg or "unknown")
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = msg or "unknown"})
-end
-
--- function save_images()
--- local names = luci.http.formvalue("names")
--- local dk = docker.new()
--- local first
-
--- local cb = function(res, chunk)
--- if res.code == 200 then
--- if not first then
--- first = true
--- luci.http.status(res.code, res.message)
--- luci.http.header('Content-Disposition', 'inline; filename="'.. "images" ..'.tar"')
--- luci.http.header('Content-Type', 'application\/x-tar')
--- end
--- luci.ltn12.pump.all(chunk, luci.http.write)
--- else
--- if not first then
--- first = true
--- luci.http.prepare_content("text/plain")
--- end
--- luci.ltn12.pump.all(chunk, luci.http.write)
--- end
--- end
-
--- docker:write_status("Images: saving" .. " " .. names .. "...")
--- local res = dk.images:get({
--- query = {
--- names = luci.http.urlencode(names)
--- }
--- }, cb)
--- docker:clear_status()
-
--- local msg = res and res.body and res.body.message or nil
--- luci.http.status(res.code, msg)
--- luci.http.prepare_content("application/json")
--- luci.http.write_json({message = msg})
--- end
-
-function load_images()
- local archive = luci.http.formvalue("upload-archive")
- local dk = docker.new()
- local ltn12 = require "luci.ltn12"
-
- local rec_send = function(sinkout)
- luci.http.setfilehandler(function (meta, chunk, eof)
- if chunk then
- ltn12.pump.step(ltn12.source.string(chunk), sinkout)
- end
- end)
- end
-
- docker:write_status("Images: loading...")
- local res = dk.images:load({body = rec_send})
- local msg = res and res.body and ( res.body.message or res.body.stream or res.body.error ) or nil
- if res and res.code == 200 and msg and msg:match("Loaded image ID") then
- docker:clear_status()
- else
- docker:append_status("code:" .. (res and res.code or "500") .." ".. (msg or "unknow"))
- end
-
- luci.http.status(res and res.code or 500, msg or "unknown")
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = msg or "unknown"})
-end
-
-function import_images()
- local src = luci.http.formvalue("src")
- local itag = luci.http.formvalue("tag")
- local dk = docker.new()
- local ltn12 = require "luci.ltn12"
-
- local rec_send = function(sinkout)
- luci.http.setfilehandler(function (meta, chunk, eof)
- if chunk then
- ltn12.pump.step(ltn12.source.string(chunk), sinkout)
- end
- end)
- end
-
- docker:write_status("Images: importing".. " ".. itag .."...\n")
- local repo = itag and itag:match("^([^:]+)")
- local tag = itag and itag:match("^[^:]-:([^:]+)")
- local res = dk.images:create({
- query = {
- fromSrc = luci.http.urlencode(src or "-"),
- repo = repo or nil,
- tag = tag or nil
- },
- body = not src and rec_send or nil
- }, docker.import_image_show_status_cb)
-
- local msg = res and res.body and ( res.body.message )or nil
- if not msg and #res.body == 0 then
- msg = res.body.status or res.body.error
- elseif not msg and #res.body >= 1 then
- msg = res.body[#res.body].status or res.body[#res.body].error
- end
-
- if res.code == 200 and msg and msg:match("sha256:") then
- docker:clear_status()
- else
- docker:append_status("code:" .. (res and res.code or "500") .." ".. (msg or "unknow"))
- end
-
- luci.http.status(res and res.code or 500, msg or "unknown")
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = msg or "unknown"})
-end
-
-function get_image_tags(image_id)
- if not image_id then
- luci.http.status(400, "no image id")
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = "no image id"})
- return
- end
-
- local dk = docker.new()
- local res = dk.images:inspect({
- id = image_id
- })
- local msg = res and res.body and res.body.message or nil
- luci.http.status(res and res.code or 500, msg or "unknown")
- luci.http.prepare_content("application/json")
-
- if res.code == 200 then
- local tags = res.body.RepoTags
- luci.http.write_json({tags = tags})
- else
- local msg = res and res.body and res.body.message or nil
- luci.http.write_json({message = msg or "unknown"})
- end
-end
-
-function tag_image(image_id)
- local src = luci.http.formvalue("tag")
- local image_id = image_id or luci.http.formvalue("id")
-
- if type(src) ~= "string" or not image_id then
- luci.http.status(400, "no image id or tag")
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = "no image id or tag"})
- return
- end
-
- local repo = src:match("^([^:]+)")
- local tag = src:match("^[^:]-:([^:]+)")
- local dk = docker.new()
- local res = dk.images:tag({
- id = image_id,
- query={
- repo=repo,
- tag=tag
- }
- })
- local msg = res and res.body and res.body.message or nil
- luci.http.status(res and res.code or 500, msg or "unknown")
- luci.http.prepare_content("application/json")
-
- if res.code == 201 then
- local tags = res.body.RepoTags
- luci.http.write_json({tags = tags})
- else
- local msg = res and res.body and res.body.message or nil
- luci.http.write_json({message = msg or "unknown"})
- end
-end
-
-function untag_image(tag)
- local tag = tag or luci.http.formvalue("tag")
-
- if not tag then
- luci.http.status(400, "no tag name")
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = "no tag name"})
- return
- end
-
- local dk = docker.new()
- local res = dk.images:inspect({name = tag})
-
- if res.code == 200 then
- local tags = res.body.RepoTags
- if #tags > 1 then
- local r = dk.images:remove({name = tag})
- local msg = r and r.body and r.body.message or nil
- luci.http.status(r.code, msg)
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = msg})
- else
- luci.http.status(500, "Cannot remove the last tag")
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = "Cannot remove the last tag"})
- end
- else
- local msg = res and res.body and res.body.message or nil
- luci.http.status(res and res.code or 500, msg or "unknown")
- luci.http.prepare_content("application/json")
- luci.http.write_json({message = msg or "unknown"})
- end
-end
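
The import_images and tag_image handlers above split a repository/tag string with the same two Lua patterns; a small illustration of their behaviour (the example value is arbitrary):

    local itag = "lisaac/luci:latest"                -- example value only
    local repo = itag:match("^([^:]+)")              -- "lisaac/luci"
    local tag  = itag:match("^[^:]-:([^:]+)")        -- "latest"
    -- Caveat: a reference that embeds a registry port, e.g. "registry:5000/img:tag",
    -- is split at the first ":" by these patterns and would need extra handling.
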
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua
deleted file mode 100644
index f62650fe5..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua
+++ /dev/null
@@ -1,152 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2021 Florian Eckert
-Copyright 2021 lisaac
-]]--
-
-local uci = (require "luci.model.uci").cursor()
-
-local m, s, o
-
-m = Map("dockerd",
- translate("Docker - Configuration"),
- translate("DockerMan is a simple docker manager client for LuCI"))
-
-if nixio.fs.access("/usr/bin/dockerd") and not m.uci:get_bool("dockerd", "dockerman", "remote_endpoint") then
- s = m:section(NamedSection, "globals", "section", translate("Docker Daemon settings"))
-
- o = s:option(Flag, "auto_start", translate("Auto start"))
- o.rmempty = false
- o.write = function(self, section, value)
- if value == "1" then
- luci.util.exec("/etc/init.d/dockerd enable")
- else
- luci.util.exec("/etc/init.d/dockerd disable")
- end
- m.uci:set("dockerd", "globals", "auto_start", value)
- end
-
- o = s:option(Value, "data_root",
- translate("Docker Root Dir"))
- o.placeholder = "/opt/docker/"
- o:depends("remote_endpoint", 0)
-
- o = s:option(Value, "bip",
- translate("Default bridge"),
- translate("Configure the default bridge network"))
- o.placeholder = "172.17.0.1/16"
- o.datatype = "ipaddr"
- o:depends("remote_endpoint", 0)
-
- o = s:option(DynamicList, "registry_mirrors",
- translate("Registry Mirrors"),
- translate("It replaces the daemon registry mirrors with a new set of registry mirrors"))
- o:value("https://hub-mirror.c.163.com", "https://hub-mirror.c.163.com")
- o:depends("remote_endpoint", 0)
- o.forcewrite = true
-
- o = s:option(ListValue, "log_level",
- translate("Log Level"),
- translate('Set the logging level'))
- o:value("debug", translate("Debug"))
- o:value("", translate("Info")) -- This is the default debug level from the deamon is optin is not set
- o:value("warn", translate("Warning"))
- o:value("error", translate("Error"))
- o:value("fatal", translate("Fatal"))
- o.rmempty = true
- o:depends("remote_endpoint", 0)
-
- o = s:option(DynamicList, "hosts",
- translate("Client connection"),
- translate('Specifies where the Docker daemon will listen for client connections (default: unix:///var/run/docker.sock)'))
- o:value("unix:///var/run/docker.sock", "unix:///var/run/docker.sock")
- o:value("tcp://0.0.0.0:2375", "tcp://0.0.0.0:2375")
- o.rmempty = true
- o:depends("remote_endpoint", 0)
-end
-
-s = m:section(NamedSection, "dockerman", "section", translate("DockerMan settings"))
-s:tab("ac", translate("Access Control"))
-s:tab("dockerman", translate("DockerMan"))
-
-o = s:taboption("dockerman", Flag, "remote_endpoint",
- translate("Remote Endpoint"),
- translate("Connect to remote docker endpoint"))
-o.rmempty = false
-o.validate = function(self, value, sid)
- local res = luci.http.formvaluetable("cbid.dockerd")
- if res["dockerman.remote_endpoint"] == "1" then
- if res["dockerman.remote_port"] and res["dockerman.remote_port"] ~= "" and res["dockerman.remote_host"] and res["dockerman.remote_host"] ~= "" then
- return 1
- else
- return nil, translate("Please input the PORT or HOST IP of remote docker instance!")
- end
- else
- if not res["dockerman.socket_path"] then
- return nil, translate("Please input the SOCKET PATH of docker daemon!")
- end
- end
- return 0
-end
-
-o = s:taboption("dockerman", Value, "socket_path",
- translate("Docker Socket Path"))
-o.default = "/var/run/docker.sock"
-o.placeholder = "/var/run/docker.sock"
-o:depends("remote_endpoint", 0)
-
-o = s:taboption("dockerman", Value, "remote_host",
- translate("Remote Host"),
- translate("Host or IP Address for the connection to a remote docker instance"))
-o.datatype = "host"
-o.placeholder = "10.1.1.2"
-o:depends("remote_endpoint", 1)
-
-o = s:taboption("dockerman", Value, "remote_port",
- translate("Remote Port"))
-o.placeholder = "2375"
-o.datatype = "port"
-o:depends("remote_endpoint", 1)
-
--- o = s:taboption("dockerman", Value, "status_path", translate("Action Status Tempfile Path"), translate("Where you want to save the docker status file"))
--- o = s:taboption("dockerman", Flag, "debug", translate("Enable Debug"), translate("For debug, It shows all docker API actions of luci-app-dockerman in Debug Tempfile Path"))
--- o.enabled="true"
--- o.disabled="false"
--- o = s:taboption("dockerman", Value, "debug_path", translate("Debug Tempfile Path"), translate("Where you want to save the debug tempfile"))
-
-if nixio.fs.access("/usr/bin/dockerd") and not m.uci:get_bool("dockerd", "dockerman", "remote_endpoint") then
- o = s:taboption("ac", DynamicList, "ac_allowed_interface", translate("Allowed access interfaces"), translate("Which interface(s) can access containers under the bridge network, fill-in Interface Name"))
- local interfaces = luci.sys and luci.sys.net and luci.sys.net.devices() or {}
- for i, v in ipairs(interfaces) do
- o:value(v, v)
- end
- o = s:taboption("ac", DynamicList, "ac_allowed_ports", translate("Ports allowed to be accessed"), translate("Which Port(s) can be accessed, it's not restricted by the Allowed Access interfaces configuration. Use this configuration with caution!"))
- o.placeholder = "8080/tcp"
- local docker = require "luci.model.docker"
- local containers, res, lost_state
- local dk = docker.new()
- if dk:_ping().code ~= 200 then
- lost_state = true
- else
- lost_state = false
- res = dk.containers:list()
- if res and res.code and res.code < 300 then
- containers = res.body
- end
- end
-
- -- allowed_container.placeholder = "container name_or_id"
- if containers then
- for i, v in ipairs(containers) do
- if v.State == "running" and v.Ports then
- for _, port in ipairs(v.Ports) do
- if port.PublicPort and port.IP and not string.find(port.IP,":") then
- o:value(port.PublicPort.."/"..port.Type, v.Names[1]:sub(2) .. " | " .. port.PublicPort .. " | " .. port.Type)
- end
- end
- end
- end
- end
-end
-
-return m
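
The options written by this page are typically consumed by building a Docker endpoint string; a minimal sketch, assuming the UCI option names shown above (remote_endpoint, remote_host, remote_port, socket_path) and the helper name being illustrative:

    local uci = (require "luci.model.uci").cursor()

    -- Resolve the daemon endpoint from /etc/config/dockerd (sketch).
    local function docker_endpoint()
        if uci:get_bool("dockerd", "dockerman", "remote_endpoint") then
            local host = uci:get("dockerd", "dockerman", "remote_host")
            local port = uci:get("dockerd", "dockerman", "remote_port")
            return (host and port) and ("tcp://" .. host .. ":" .. port) or nil
        end
        return "unix://" .. (uci:get("dockerd", "dockerman", "socket_path") or "/var/run/docker.sock")
    end
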
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua
deleted file mode 100644
index 66cd2742d..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua
+++ /dev/null
@@ -1,802 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-require "luci.util"
-
-local docker = require "luci.model.docker"
-local dk = docker.new()
-
-container_id = arg[1]
-local action = arg[2] or "info"
-
-local m, s, o
-local images, networks, container_info, res
-
-if not container_id then
- return
-end
-
-res = dk.containers:inspect({id = container_id})
-if res.code < 300 then
- container_info = res.body
-else
- return
-end
-
-local get_ports = function(d)
- local data
-
- if d.HostConfig and d.HostConfig.PortBindings then
- for inter, out in pairs(d.HostConfig.PortBindings) do
- data = (data and (data .. "
") or "") .. out[1]["HostPort"] .. ":" .. inter
- end
- end
-
- return data
-end
-
-local get_env = function(d)
- local data
-
- if d.Config and d.Config.Env then
- for _,v in ipairs(d.Config.Env) do
- data = (data and (data .. "
") or "") .. v
- end
- end
-
- return data
-end
-
-local get_command = function(d)
- local data
-
- if d.Config and d.Config.Cmd then
- for _,v in ipairs(d.Config.Cmd) do
- data = (data and (data .. " ") or "") .. v
- end
- end
-
- return data
-end
-
-local get_mounts = function(d)
- local data
-
- if d.Mounts then
- for _,v in ipairs(d.Mounts) do
- local v_sorce_d, v_dest_d
- local v_sorce = ""
- local v_dest = ""
- for v_sorce_d in v["Source"]:gmatch('[^/]+') do
- if v_sorce_d and #v_sorce_d > 12 then
- v_sorce = v_sorce .. "/" .. v_sorce_d:sub(1,12) .. "..."
- else
- v_sorce = v_sorce .."/".. v_sorce_d
- end
- end
- for v_dest_d in v["Destination"]:gmatch('[^/]+') do
- if v_dest_d and #v_dest_d > 12 then
- v_dest = v_dest .. "/" .. v_dest_d:sub(1,12) .. "..."
- else
- v_dest = v_dest .."/".. v_dest_d
- end
- end
- data = (data and (data .. "
") or "") .. v_sorce .. ":" .. v["Destination"] .. (v["Mode"] ~= "" and (":" .. v["Mode"]) or "")
- end
- end
-
- return data
-end
-
-local get_device = function(d)
- local data
-
- if d.HostConfig and d.HostConfig.Devices then
- for _,v in ipairs(d.HostConfig.Devices) do
- data = (data and (data .. "
") or "") .. v["PathOnHost"] .. ":" .. v["PathInContainer"] .. (v["CgroupPermissions"] ~= "" and (":" .. v["CgroupPermissions"]) or "")
- end
- end
-
- return data
-end
-
-local get_links = function(d)
- local data
-
- if d.HostConfig and d.HostConfig.Links then
- for _,v in ipairs(d.HostConfig.Links) do
- data = (data and (data .. "
") or "") .. v
- end
- end
-
- return data
-end
-
-local get_tmpfs = function(d)
- local data
-
- if d.HostConfig and d.HostConfig.Tmpfs then
- for k, v in pairs(d.HostConfig.Tmpfs) do
- data = (data and (data .. "
") or "") .. k .. (v~="" and ":" or "")..v
- end
- end
-
- return data
-end
-
-local get_dns = function(d)
- local data
-
- if d.HostConfig and d.HostConfig.Dns then
- for _, v in ipairs(d.HostConfig.Dns) do
- data = (data and (data .. "
") or "") .. v
- end
- end
-
- return data
-end
-
-local get_sysctl = function(d)
- local data
-
- if d.HostConfig and d.HostConfig.Sysctls then
- for k, v in pairs(d.HostConfig.Sysctls) do
- data = (data and (data .. "
") or "") .. k..":"..v
- end
- end
-
- return data
-end
-
-local get_networks = function(d)
- local data={}
-
- if d.NetworkSettings and d.NetworkSettings.Networks and type(d.NetworkSettings.Networks) == "table" then
- for k,v in pairs(d.NetworkSettings.Networks) do
- data[k] = v.IPAddress or ""
- end
- end
-
- return data
-end
-
-
-local start_stop_remove = function(m, cmd)
- local res
-
- docker:clear_status()
- docker:append_status("Containers: " .. cmd .. " " .. container_id .. "...")
-
- if cmd ~= "upgrade" then
- res = dk.containers[cmd](dk, {id = container_id})
- else
- res = dk.containers_upgrade(dk, {id = container_id})
- end
-
- if res and res.code >= 300 then
- docker:append_status("code:" .. res.code.." ".. (res.body.message and res.body.message or res.message))
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/container/"..container_id))
- else
- docker:clear_status()
- if cmd ~= "remove" and cmd ~= "upgrade" then
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/container/"..container_id))
- else
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/containers"))
- end
- end
-end
-
-m=SimpleForm("docker",
- translatef("Docker - Container (%s)", container_info.Name:sub(2)),
- translate("On this page, the selected container can be managed."))
-m.redirect = luci.dispatcher.build_url("admin/docker/containers")
-
-s = m:section(SimpleSection)
-s.template = "dockerman/apply_widget"
-s.err=docker:read_status()
- s.err=s.err and s.err:gsub("\n","<br>"):gsub(" ","&#160;")
-if s.err then
- docker:clear_status()
-end
-
-s = m:section(Table,{{}})
-s.notitle=true
-s.rowcolors=false
-s.template = "cbi/nullsection"
-
-o = s:option(Button, "_start")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle=translate("Start")
-o.inputstyle = "apply"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"start")
-end
-
-o = s:option(Button, "_restart")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle=translate("Restart")
-o.inputstyle = "reload"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"restart")
-end
-
-o = s:option(Button, "_stop")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle=translate("Stop")
-o.inputstyle = "reset"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"stop")
-end
-
-o = s:option(Button, "_kill")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle=translate("Kill")
-o.inputstyle = "reset"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"kill")
-end
-
-o = s:option(Button, "_export")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle=translate("Export")
-o.inputstyle = "apply"
-o.forcewrite = true
-o.write = function(self, section)
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/container_export/"..container_id))
-end
-
-o = s:option(Button, "_upgrade")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle=translate("Upgrade")
-o.inputstyle = "reload"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"upgrade")
-end
-
-o = s:option(Button, "_duplicate")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle=translate("Duplicate/Edit")
-o.inputstyle = "add"
-o.forcewrite = true
-o.write = function(self, section)
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/newcontainer/duplicate/"..container_id))
-end
-
-o = s:option(Button, "_remove")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle=translate("Remove")
-o.inputstyle = "remove"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"remove")
-end
-
-s = m:section(SimpleSection)
-s.template = "dockerman/container"
-
-if action == "info" then
- res = dk.networks:list()
- if res.code < 300 then
- networks = res.body
- else
- return
- end
- m.submit = false
- m.reset = false
- table_info = {
- ["01name"] = {
- _key = translate("Name"),
- _value = container_info.Name:sub(2) or "-",
- _button=translate("Update")
- },
- ["02id"] = {
- _key = translate("ID"),
- _value = container_info.Id or "-"
- },
- ["03image"] = {
- _key = translate("Image"),
- _value = container_info.Config.Image .. "<br>" .. container_info.Image
- },
- ["04status"] = {
- _key = translate("Status"),
- _value = container_info.State and container_info.State.Status or "-"
- },
- ["05created"] = {
- _key = translate("Created"),
- _value = container_info.Created or "-"
- },
- }
-
- if container_info.State.Status == "running" then
- table_info["06start"] = {
- _key = translate("Start Time"),
- _value = container_info.State and container_info.State.StartedAt or "-"
- }
- else
- table_info["06start"] = {
- _key = translate("Finish Time"),
- _value = container_info.State and container_info.State.FinishedAt or "-"
- }
- end
-
- table_info["07healthy"] = {
- _key = translate("Healthy"),
- _value = container_info.State and container_info.State.Health and container_info.State.Health.Status or "-"
- }
- table_info["08restart"] = {
- _key = translate("Restart Policy"),
- _value = container_info.HostConfig and container_info.HostConfig.RestartPolicy and container_info.HostConfig.RestartPolicy.Name or "-",
- _button=translate("Update")
- }
- table_info["081user"] = {
- _key = translate("User"),
- _value = container_info.Config and (container_info.Config.User ~="" and container_info.Config.User or "-") or "-"
- }
- table_info["09mount"] = {
- _key = translate("Mount/Volume"),
- _value = get_mounts(container_info) or "-"
- }
- table_info["10cmd"] = {
- _key = translate("Command"),
- _value = get_command(container_info) or "-"
- }
- table_info["11env"] = {
- _key = translate("Env"),
- _value = get_env(container_info) or "-"
- }
- table_info["12ports"] = {
- _key = translate("Ports"),
- _value = get_ports(container_info) or "-"
- }
- table_info["13links"] = {
- _key = translate("Links"),
- _value = get_links(container_info) or "-"
- }
- table_info["14device"] = {
- _key = translate("Device"),
- _value = get_device(container_info) or "-"
- }
- table_info["15tmpfs"] = {
- _key = translate("Tmpfs"),
- _value = get_tmpfs(container_info) or "-"
- }
- table_info["16dns"] = {
- _key = translate("DNS"),
- _value = get_dns(container_info) or "-"
- }
- table_info["17sysctl"] = {
- _key = translate("Sysctl"),
- _value = get_sysctl(container_info) or "-"
- }
-
- info_networks = get_networks(container_info)
- list_networks = {}
- for _, v in ipairs (networks) do
- if v and v.Name then
- local parent = v.Options and v.Options.parent or nil
- local ip = v.IPAM and v.IPAM.Config and v.IPAM.Config[1] and v.IPAM.Config[1].Subnet or nil
- ipv6 = v.IPAM and v.IPAM.Config and v.IPAM.Config[2] and v.IPAM.Config[2].Subnet or nil
- local network_name = v.Name .. " | " .. v.Driver .. (parent and (" | " .. parent) or "") .. (ip and (" | " .. ip) or "").. (ipv6 and (" | " .. ipv6) or "")
- list_networks[v.Name] = network_name
- end
- end
-
- if type(info_networks)== "table" then
- for k,v in pairs(info_networks) do
- table_info["14network"..k] = {
- _key = translate("Network"),
- _value = k.. (v~="" and (" | ".. v) or ""),
- _button=translate("Disconnect")
- }
- list_networks[k]=nil
- end
- end
-
- table_info["15connect"] = {
- _key = translate("Connect Network"),
- _value = list_networks ,_opts = "",
- _button=translate("Connect")
- }
-
- s = m:section(Table,table_info)
- s.nodescr=true
- s.formvalue=function(self, section)
- return table_info
- end
-
- o = s:option(DummyValue, "_key", translate("Info"))
- o.width = "20%"
-
- o = s:option(ListValue, "_value")
- o.render = function(self, section, scope)
- if table_info[section]._key == translate("Name") then
- self:reset_values()
- self.template = "cbi/value"
- self.size = 30
- self.keylist = {}
- self.vallist = {}
- self.default=table_info[section]._value
- Value.render(self, section, scope)
- elseif table_info[section]._key == translate("Restart Policy") then
- self.template = "cbi/lvalue"
- self:reset_values()
- self.size = nil
- self:value("no", "No")
- self:value("unless-stopped", "Unless stopped")
- self:value("always", "Always")
- self:value("on-failure", "On failure")
- self.default=table_info[section]._value
- ListValue.render(self, section, scope)
- elseif table_info[section]._key == translate("Connect Network") then
- self.template = "cbi/lvalue"
- self:reset_values()
- self.size = nil
- for k,v in pairs(list_networks) do
- if k ~= "host" then
- self:value(k,v)
- end
- end
- self.default=table_info[section]._value
- ListValue.render(self, section, scope)
- else
- self:reset_values()
- self.rawhtml=true
- self.template = "cbi/dvalue"
- self.default=table_info[section]._value
- DummyValue.render(self, section, scope)
- end
- end
- o.forcewrite = true
- o.write = function(self, section, value)
- table_info[section]._value=value
- end
- o.validate = function(self, value)
- return value
- end
-
- o = s:option(Value, "_opts")
- o.forcewrite = true
- o.write = function(self, section, value)
- table_info[section]._opts=value
- end
- o.validate = function(self, value)
- return value
- end
- o.render = function(self, section, scope)
- if table_info[section]._key==translate("Connect Network") then
- self.template = "cbi/value"
- self.keylist = {}
- self.vallist = {}
- self.placeholder = "10.1.1.254"
- self.datatype = "ip4addr"
- self.default=table_info[section]._opts
- Value.render(self, section, scope)
- else
- self.rawhtml=true
- self.template = "cbi/dvalue"
- self.default=table_info[section]._opts
- DummyValue.render(self, section, scope)
- end
- end
-
- o = s:option(Button, "_button")
- o.forcewrite = true
- o.render = function(self, section, scope)
- if table_info[section]._button and table_info[section]._value ~= nil then
- self.inputtitle=table_info[section]._button
- self.template = "cbi/button"
- self.inputstyle = "edit"
- Button.render(self, section, scope)
- else
- self.template = "cbi/dvalue"
- self.default=""
- DummyValue.render(self, section, scope)
- end
- end
- o.write = function(self, section, value)
- local res
-
- docker:clear_status()
-
- if section == "01name" then
- docker:append_status("Containers: rename " .. container_id .. "...")
- local new_name = table_info[section]._value
- res = dk.containers:rename({
- id = container_id,
- query = {
- name=new_name
- }
- })
- elseif section == "08restart" then
- docker:append_status("Containers: update " .. container_id .. "...")
- local new_restart = table_info[section]._value
- res = dk.containers:update({
- id = container_id,
- body = {
- RestartPolicy = {
- Name = new_restart
- }
- }
- })
- elseif table_info[section]._key == translate("Network") then
- local _,_,leave_network
-
- _, _, leave_network = table_info[section]._value:find("(.-) | .+")
- leave_network = leave_network or table_info[section]._value
- docker:append_status("Network: disconnect " .. leave_network .. container_id .. "...")
- res = dk.networks:disconnect({
- name = leave_network,
- body = {
- Container = container_id
- }
- })
- elseif section == "15connect" then
- local connect_network = table_info[section]._value
- local network_opiton
- if connect_network ~= "none"
- and connect_network ~= "bridge"
- and connect_network ~= "host" then
-
- network_opiton = table_info[section]._opts ~= "" and {
- IPAMConfig={
- IPv4Address=table_info[section]._opts
- }
- } or nil
- end
- docker:append_status("Network: connect " .. connect_network .. container_id .. "...")
- res = dk.networks:connect({
- name = connect_network,
- body = {
- Container = container_id,
- EndpointConfig= network_opiton
- }
- })
- end
-
- if res and res.code > 300 then
- docker:append_status("code:" .. res.code.." ".. (res.body.message and res.body.message or res.message))
- else
- docker:clear_status()
- end
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/container/"..container_id.."/info"))
- end
-elseif action == "resources" then
- s = m:section(SimpleSection)
- o = s:option( Value, "cpus",
- translate("CPUs"),
- translate("Number of CPUs. Number is a fractional number. 0.000 means no limit."))
- o.placeholder = "1.5"
- o.rmempty = true
- o.datatype="ufloat"
- o.default = container_info.HostConfig.NanoCpus / (10^9)
-
- o = s:option(Value, "cpushares",
- translate("CPU Shares Weight"),
- translate("CPU shares relative weight, if 0 is set, the system will ignore the value and use the default of 1024."))
- o.placeholder = "1024"
- o.rmempty = true
- o.datatype="uinteger"
- o.default = container_info.HostConfig.CpuShares
-
- o = s:option(Value, "memory",
- translate("Memory"),
- translate("Memory limit (format: []). Number is a positive integer. Unit can be one of b, k, m, or g. Minimum is 4M."))
- o.placeholder = "128m"
- o.rmempty = true
- o.default = container_info.HostConfig.Memory ~=0 and ((container_info.HostConfig.Memory / 1024 /1024) .. "M") or 0
-
- o = s:option(Value, "blkioweight",
- translate("Block IO Weight"),
- translate("Block IO weight (relative weight) accepts a weight value between 10 and 1000."))
- o.placeholder = "500"
- o.rmempty = true
- o.datatype="uinteger"
- o.default = container_info.HostConfig.BlkioWeight
-
- m.handle = function(self, state, data)
- if state == FORM_VALID then
- local memory = data.memory
- if memory and memory ~= 0 then
- _,_,n,unit = memory:find("([%d%.]+)([%l%u]+)")
- if n then
- unit = unit and unit:sub(1,1):upper() or "B"
- if unit == "M" then
- memory = tonumber(n) * 1024 * 1024
- elseif unit == "G" then
- memory = tonumber(n) * 1024 * 1024 * 1024
- elseif unit == "K" then
- memory = tonumber(n) * 1024
- else
- memory = tonumber(n)
- end
- end
- end
-
- request_body = {
- BlkioWeight = tonumber(data.blkioweight),
- NanoCPUs = tonumber(data.cpus)*10^9,
- Memory = tonumber(memory),
- CpuShares = tonumber(data.cpushares)
- }
-
- docker:write_status("Containers: update " .. container_id .. "...")
- local res = dk.containers:update({id = container_id, body = request_body})
- if res and res.code >= 300 then
- docker:append_status("code:" .. res.code.." ".. (res.body.message and res.body.message or res.message))
- else
- docker:clear_status()
- end
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/container/"..container_id.."/resources"))
- end
- end
-
-elseif action == "file" then
- m.submit = false
- m.reset = false
- s= m:section(SimpleSection)
- s.template = "dockerman/container_file_manager"
- s.container = container_id
- m.redirect = nil
-elseif action == "inspect" then
- s = m:section(SimpleSection)
- s.syslog = luci.jsonc.stringify(container_info, true)
- s.title = translate("Container Inspect")
- s.template = "dockerman/logs"
- m.submit = false
- m.reset = false
-elseif action == "logs" then
- local logs = ""
- local query ={
- stdout = 1,
- stderr = 1,
- tail = 1000
- }
-
- s = m:section(SimpleSection)
-
- logs = dk.containers:logs({id = container_id, query = query})
- if logs.code == 200 then
- s.syslog=logs.body
- else
- s.syslog="Get Logs ERROR\n"..logs.code..": "..logs.body
- end
-
- s.title=translate("Container Logs")
- s.template = "dockerman/logs"
- m.submit = false
- m.reset = false
-elseif action == "console" then
- m.submit = false
- m.reset = false
- local cmd_docker = luci.util.exec("command -v docker"):match("^.+docker") or nil
- local cmd_ttyd = luci.util.exec("command -v ttyd"):match("^.+ttyd") or nil
-
- if cmd_docker and cmd_ttyd and container_info.State.Status == "running" then
- local cmd = "/bin/sh"
- local uid
-
- s = m:section(SimpleSection)
-
- o = s:option(Value, "command", translate("Command"))
- o:value("/bin/sh", "/bin/sh")
- o:value("/bin/ash", "/bin/ash")
- o:value("/bin/bash", "/bin/bash")
- o.default = "/bin/sh"
- o.forcewrite = true
- o.write = function(self, section, value)
- cmd = value
- end
-
- o = s:option(Value, "uid", translate("UID"))
- o.forcewrite = true
- o.write = function(self, section, value)
- uid = value
- end
-
- o = s:option(Button, "connect")
- o.render = function(self, section, scope)
- self.inputstyle = "add"
- self.title = " "
- self.inputtitle = translate("Connect")
- Button.render(self, section, scope)
- end
- o.write = function(self, section)
- local cmd_docker = luci.util.exec("command -v docker"):match("^.+docker") or nil
- local cmd_ttyd = luci.util.exec("command -v ttyd"):match("^.+ttyd") or nil
-
- if not cmd_docker or not cmd_ttyd or cmd_docker:match("^%s+$") or cmd_ttyd:match("^%s+$") then
- return
- end
-
- local pid = luci.util.trim(luci.util.exec("netstat -lnpt | grep :7682 | grep ttyd | tr -s ' ' | cut -d ' ' -f7 | cut -d'/' -f1"))
- if pid and pid ~= "" then
- luci.util.exec("kill -9 " .. pid)
- end
-
- local hosts
- local uci = (require "luci.model.uci").cursor()
- local remote = uci:get_bool("dockerd", "dockerman", "remote_endpoint") or false
- local host = nil
- local port = nil
- local socket = nil
-
- if remote then
- host = uci:get("dockerd", "dockerman", "remote_host") or nil
- port = uci:get("dockerd", "dockerman", "remote_port") or nil
- else
- socket = uci:get("dockerd", "dockerman", "socket_path") or "/var/run/docker.sock"
- end
-
- if remote and host and port then
- hosts = "tcp://" .. host .. ':'.. port
- elseif socket then
- hosts = "unix://" .. socket
- else
- return
- end
-
- if uid and uid ~= "" then
- uid = "-u " .. uid
- else
- uid = ""
- end
-
- local start_cmd = string.format('%s -d 2 --once -p 7682 %s -H "%s" exec -it %s %s %s&', cmd_ttyd, cmd_docker, hosts, uid, container_id, cmd)
-
- os.execute(start_cmd)
-
- o = s:option(DummyValue, "console")
- o.container_id = container_id
- o.template = "dockerman/container_console"
- end
- end
-elseif action == "stats" then
- local response = dk.containers:top({id = container_id, query = {ps_args="-aux"}})
- local container_top
-
- if response.code == 200 then
- container_top=response.body
- else
- response = dk.containers:top({id = container_id})
- if response.code == 200 then
- container_top=response.body
- end
- end
-
- if type(container_top) == "table" then
- s = m:section(SimpleSection)
- s.container_id = container_id
- s.template = "dockerman/container_stats"
- table_stats = {
- cpu={
- key=translate("CPU Useage"),
- value='-'
- },
- memory={
- key=translate("Memory Useage"),
- value='-'
- }
- }
-
- container_top = response.body
- s = m:section(Table, table_stats, translate("Stats"))
- s:option(DummyValue, "key", translate("Stats")).width="33%"
- s:option(DummyValue, "value")
- top_section = m:section(Table, container_top.Processes, translate("TOP"))
- for i, v in ipairs(container_top.Titles) do
- top_section:option(DummyValue, i, translate(v))
- end
- end
-
- m.submit = false
- m.reset = false
-end
-
-return m
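
The resources action above converts human-readable memory limits such as "128m" into bytes before calling the update endpoint; the same conversion as a standalone sketch (function name illustrative):

    -- Convert "128m", "1.5G", "4096k" or plain bytes to a byte count (sketch).
    local function to_bytes(value)
        local n, unit = tostring(value):match("([%d%.]+)([%l%u]*)")
        if not n then return nil end
        local mult = { K = 1024, M = 1024 * 1024, G = 1024 * 1024 * 1024 }
        unit = (unit ~= "" and unit:sub(1, 1):upper()) or "B"
        return math.floor(tonumber(n) * (mult[unit] or 1))
    end

    -- to_bytes("128m") --> 134217728
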
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua
deleted file mode 100644
index fa7765261..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua
+++ /dev/null
@@ -1,284 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local http = require "luci.http"
-local docker = require "luci.model.docker"
-
-local m, s, o
-local images, networks, containers, res, lost_state
-local urlencode = luci.http.protocol and luci.http.protocol.urlencode or luci.util.urlencode
-local dk = docker.new()
-
-if dk:_ping().code ~= 200 then
- lost_state = true
-else
- res = dk.images:list()
- if res and res.code and res.code < 300 then
- images = res.body
- end
-
- res = dk.networks:list()
- if res and res.code and res.code < 300 then
- networks = res.body
- end
-
- res = dk.containers:list({
- query = {
- all = true
- }
- })
- if res and res.code and res.code < 300 then
- containers = res.body
- end
-end
-
-function get_containers()
- local data = {}
- if type(containers) ~= "table" then
- return nil
- end
-
- for i, v in ipairs(containers) do
- local index = (10^12 - v.Created) .. "_id_" .. v.Id
-
- data[index]={}
- data[index]["_selected"] = 0
- data[index]["_id"] = v.Id:sub(1,12)
- -- data[index]["name"] = v.Names[1]:sub(2)
- data[index]["_status"] = v.Status
-
- if v.Status:find("^Up") then
- data[index]["_name"] = ""..v.Names[1]:sub(2)..""
- data[index]["_status"] = "".. data[index]["_status"] .. "" .. "
"
- else
- data[index]["_name"] = ""..v.Names[1]:sub(2)..""
- data[index]["_status"] = ''.. data[index]["_status"] .. ""
- end
-
- if (type(v.NetworkSettings) == "table" and type(v.NetworkSettings.Networks) == "table") then
- for networkname, netconfig in pairs(v.NetworkSettings.Networks) do
- data[index]["_network"] = (data[index]["_network"] ~= nil and (data[index]["_network"] .." | ") or "").. networkname .. (netconfig.IPAddress ~= "" and (": " .. netconfig.IPAddress) or "")
- end
- end
-
- -- networkmode = v.HostConfig.NetworkMode ~= "default" and v.HostConfig.NetworkMode or "bridge"
- -- data[index]["_network"] = v.NetworkSettings.Networks[networkmode].IPAddress or nil
- -- local _, _, image = v.Image:find("^sha256:(.+)")
- -- if image ~= nil then
- -- image=image:sub(1,12)
- -- end
-
- if v.Ports and next(v.Ports) ~= nil then
- data[index]["_ports"] = nil
- local ip = require "luci.ip"
- for _,v2 in ipairs(v.Ports) do
- -- display ipv4 only
- if ip.new(v2.IP or "0.0.0.0"):is4() then
- data[index]["_ports"] = (data[index]["_ports"] and (data[index]["_ports"] .. ", ") or "")
- .. ((v2.PublicPort and v2.Type and v2.Type == "tcp") and ('') or "")
- .. (v2.PublicPort and (v2.PublicPort .. ":") or "") .. (v2.PrivatePort and (v2.PrivatePort .."/") or "") .. (v2.Type and v2.Type or "")
- .. ((v2.PublicPort and v2.Type and v2.Type == "tcp")and "" or "")
- end
- end
- end
-
- for ii,iv in ipairs(images) do
- if iv.Id == v.ImageID then
- data[index]["_image"] = iv.RepoTags and iv.RepoTags[1] or (iv.RepoDigests[1]:gsub("(.-)@.+", "%1") .. ":<none>")
- end
- end
- data[index]["_id_name"] = ''.. data[index]["_name"] .. "
ID: " .. data[index]["_id"]
- .. "
Image: " .. (data[index]["_image"] or "<none>")
- .. "
"
-
- if type(v.Mounts) == "table" and next(v.Mounts) then
- for _, v2 in pairs(v.Mounts) do
- if v2.Type ~= "volume" then
- local v_sorce_d, v_dest_d
- local v_sorce = ""
- local v_dest = ""
- for v_sorce_d in v2["Source"]:gmatch('[^/]+') do
- if v_sorce_d and #v_sorce_d > 12 then
- v_sorce = v_sorce .. "/" .. v_sorce_d:sub(1,8) .. ".."
- else
- v_sorce = v_sorce .."/".. v_sorce_d
- end
- end
- for v_dest_d in v2["Destination"]:gmatch('[^/]+') do
- if v_dest_d and #v_dest_d > 12 then
- v_dest = v_dest .. "/" .. v_dest_d:sub(1,8) .. ".."
- else
- v_dest = v_dest .."/".. v_dest_d
- end
- end
- data[index]["_mounts"] = (data[index]["_mounts"] and (data[index]["_mounts"] .. "
") or "") .. '' .. v_sorce .. "→" .. v_dest..''
- end
- end
- end
-
- data[index]["_image_id"] = v.ImageID:sub(8,20)
- data[index]["_command"] = v.Command
- end
- return data
-end
-
-local container_list = not lost_state and get_containers() or {}
-
-m = SimpleForm("docker",
- translate("Docker - Containers"),
- translate("This page displays all containers that have been created on the connected docker host."))
-m.submit=false
-m.reset=false
-m:append(Template("dockerman/containers_running_stats"))
-
-s = m:section(SimpleSection)
-s.template = "dockerman/apply_widget"
-s.err=docker:read_status()
- s.err=s.err and s.err:gsub("\n","<br>"):gsub(" ","&#160;")
-if s.err then
- docker:clear_status()
-end
-
-s = m:section(Table, container_list, translate("Containers"))
-s.nodescr=true
-s.config="containers"
-
-o = s:option(Flag, "_selected","")
-o.disabled = 0
-o.enabled = 1
-o.default = 0
-o.width = "1%"
-o.write=function(self, section, value)
- container_list[section]._selected = value
-end
-
--- o = s:option(DummyValue, "_id", translate("ID"))
--- o.width="10%"
-
--- o = s:option(DummyValue, "_name", translate("Container Name"))
--- o.rawhtml = true
-
-o = s:option(DummyValue, "_id_name", translate("Container Info"))
-o.rawhtml = true
-o.width="15%"
-
-o = s:option(DummyValue, "_status", translate("Status"))
-o.width="15%"
-o.rawhtml=true
-
-o = s:option(DummyValue, "_network", translate("Network"))
-o.width="10%"
-
-o = s:option(DummyValue, "_ports", translate("Ports"))
-o.width="5%"
-o.rawhtml = true
-o = s:option(DummyValue, "_mounts", translate("Mounts"))
-o.width="25%"
-o.rawhtml = true
-
--- o = s:option(DummyValue, "_image", translate("Image"))
--- o.width="8%"
-
-o = s:option(DummyValue, "_command", translate("Command"))
-o.width="15%"
-
-local start_stop_remove = function(m, cmd)
- local container_selected = {}
- -- iterate over the section ids in the container table
- for k in pairs(container_list) do
- -- collect the ids of the selected entries
- if container_list[k]._selected == 1 then
- container_selected[#container_selected + 1] = container_list[k]["_id"]
- end
- end
- if #container_selected > 0 then
- local success = true
-
- docker:clear_status()
- for _, cont in ipairs(container_selected) do
- docker:append_status("Containers: " .. cmd .. " " .. cont .. "...")
- local res = dk.containers[cmd](dk, {id = cont})
- if res and res.code and res.code >= 300 then
- success = false
- docker:append_status("code:" .. res.code.." ".. (res.body.message and res.body.message or res.message).. "\n")
- else
- docker:append_status("done\n")
- end
- end
-
- if success then
- docker:clear_status()
- end
-
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/containers"))
- end
-end
-
-s = m:section(Table,{{}})
-s.notitle=true
-s.rowcolors=false
-s.template="cbi/nullsection"
-
-o = s:option(Button, "_new")
-o.inputtitle = translate("Add")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputstyle = "add"
-o.forcewrite = true
-o.write = function(self, section)
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/newcontainer"))
-end
-o.disable = lost_state
-
-o = s:option(Button, "_start")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle = translate("Start")
-o.inputstyle = "apply"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"start")
-end
-o.disable = lost_state
-
-o = s:option(Button, "_restart")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle = translate("Restart")
-o.inputstyle = "reload"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"restart")
-end
-o.disable = lost_state
-
-o = s:option(Button, "_stop")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle = translate("Stop")
-o.inputstyle = "reset"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"stop")
-end
-o.disable = lost_state
-
-o = s:option(Button, "_kill")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle = translate("Kill")
-o.inputstyle = "reset"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m,"kill")
-end
-o.disable = lost_state
-
-o = s:option(Button, "_remove")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputtitle = translate("Remove")
-o.inputstyle = "remove"
-o.forcewrite = true
-o.write = function(self, section)
- start_stop_remove(m, "remove")
-end
-o.disable = lost_state
-
-return m
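
The list page above keys each row as (10^12 - v.Created) .. "_id_" .. v.Id; the subtraction inverts the creation timestamp so that, when the keys are ordered, newer containers appear first. A short illustration with made-up values:

    -- Made-up timestamps, for illustration only.
    local older = (10^12 - 1600000000) .. "_id_aaaa"   -- "998400000000_id_aaaa"
    local newer = (10^12 - 1700000000) .. "_id_bbbb"   -- "998300000000_id_bbbb"
    assert(newer < older)   -- the newer container sorts first
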
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua
deleted file mode 100644
index c3d3eab0d..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua
+++ /dev/null
@@ -1,284 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local docker = require "luci.model.docker"
-local dk = docker.new()
-
-local containers, images, res, lost_state
-local m, s, o
-
-if dk:_ping().code ~= 200 then
- lost_state = true
-else
- res = dk.images:list()
- if res and res.code and res.code < 300 then
- images = res.body
- end
-
- res = dk.containers:list({ query = { all = true } })
- if res and res.code and res.code < 300 then
- containers = res.body
- end
-end
-
-function get_images()
- local data = {}
-
- for i, v in ipairs(images) do
- local index = v.Created .. v.Id
-
- data[index]={}
- data[index]["_selected"] = 0
- data[index]["id"] = v.Id:sub(8)
- data[index]["_id"] = '' .. v.Id:sub(8,20) .. ''
-
- if v.RepoTags and next(v.RepoTags)~=nil then
- for i, v1 in ipairs(v.RepoTags) do
- data[index]["_tags"] =(data[index]["_tags"] and ( data[index]["_tags"] .. "
" )or "") .. ((v1:match("") or (#v.RepoTags == 1)) and v1 or ('' .. v1 .. ''))
-
- if not data[index]["tag"] then
- data[index]["tag"] = v1
- end
- end
- else
- data[index]["_tags"] = v.RepoDigests[1] and v.RepoDigests[1]:match("^(.-)@.+")
- data[index]["_tags"] = (data[index]["_tags"] and data[index]["_tags"] or "" ).. ":"
- end
-
- data[index]["_tags"] = data[index]["_tags"]:gsub("","<none>")
- for ci,cv in ipairs(containers) do
- if v.Id == cv.ImageID then
- data[index]["_containers"] = (data[index]["_containers"] and (data[index]["_containers"] .. " | ") or "")..
- ''.. cv.Names[1]:sub(2)..""
- end
- end
-
- data[index]["_size"] = string.format("%.2f", tostring(v.Size/1024/1024)).."MB"
- data[index]["_created"] = os.date("%Y/%m/%d %H:%M:%S",v.Created)
- end
-
- return data
-end
-
-local image_list = not lost_state and get_images() or {}
-
-m = SimpleForm("docker",
- translate("Docker - Images"),
- translate("On this page all images are displayed that are available on the system and with which a container can be created."))
-m.submit=false
-m.reset=false
-
-local pull_value={
- _image_tag_name="",
- _registry="index.docker.io"
-}
-
-s = m:section(SimpleSection,
- translate("Pull Image"),
- translate("By entering a valid image name with the corresponding version, the docker image can be downloaded from the configured registry."))
-s.template="cbi/nullsection"
-
-o = s:option(Value, "_image_tag_name")
-o.template = "dockerman/cbi/inlinevalue"
-o.placeholder="lisaac/luci:latest"
-o.write = function(self, section, value)
- local hastag = value:find(":")
-
- if not hastag then
- value = value .. ":latest"
- end
- pull_value["_image_tag_name"] = value
-end
-
-o = s:option(Button, "_pull")
-o.inputtitle= translate("Pull")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputstyle = "add"
-o.disable = lost_state
-o.write = function(self, section)
- local tag = pull_value["_image_tag_name"]
- local json_stringify = luci.jsonc and luci.jsonc.stringify
-
- if tag and tag ~= "" then
- docker:write_status("Images: " .. "pulling" .. " " .. tag .. "...\n")
- local res = dk.images:create({query = {fromImage=tag}}, docker.pull_image_show_status_cb)
-
- if res and res.code and res.code == 200 and (res.body[#res.body] and not res.body[#res.body].error and res.body[#res.body].status and (res.body[#res.body].status == "Status: Downloaded newer image for ".. tag)) then
- docker:clear_status()
- else
- docker:append_status("code:" .. res.code.." ".. (res.body[#res.body] and res.body[#res.body].error or (res.body.message or res.message)).. "\n")
- end
- else
- docker:append_status("code: 400 please input the name of image name!")
- end
-
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/images"))
-end
-
-s = m:section(SimpleSection,
- translate("Import Image"),
- translate("When pressing the Import button, both a local image can be loaded onto the system and a valid image tar can be downloaded from remote."))
-
-o = s:option(DummyValue, "_image_import")
-o.template = "dockerman/images_import"
-o.disable = lost_state
-
-s = m:section(Table, image_list, translate("Images overview"))
-
-o = s:option(Flag, "_selected","")
-o.disabled = 0
-o.enabled = 1
-o.default = 0
-o.write = function(self, section, value)
- image_list[section]._selected = value
-end
-
-o = s:option(DummyValue, "_id", translate("ID"))
-o.rawhtml = true
-
-o = s:option(DummyValue, "_tags", translate("RepoTags"))
-o.rawhtml = true
-
-o = s:option(DummyValue, "_containers", translate("Containers"))
-o.rawhtml = true
-
-o = s:option(DummyValue, "_size", translate("Size"))
-
-o = s:option(DummyValue, "_created", translate("Created"))
-
-local remove_action = function(force)
- local image_selected = {}
-
- for k in pairs(image_list) do
- if image_list[k]._selected == 1 then
- image_selected[#image_selected+1] = (image_list[k]["_tags"]:match("<br>") or image_list[k]["_tags"]:match("<none>")) and image_list[k].id or image_list[k].tag
- end
- end
-
- if next(image_selected) ~= nil then
- local success = true
-
- docker:clear_status()
- for _, img in ipairs(image_selected) do
- local query
- docker:append_status("Images: " .. "remove" .. " " .. img .. "...")
-
- if force then
- query = {force = true}
- end
-
- local msg = dk.images:remove({
- id = img,
- query = query
- })
- if msg and msg.code ~= 200 then
- docker:append_status("code:" .. msg.code.." ".. (msg.body.message and msg.body.message or msg.message).. "\n")
- success = false
- else
- docker:append_status("done\n")
- end
- end
-
- if success then
- docker:clear_status()
- end
-
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/images"))
- end
-end
-
-s = m:section(SimpleSection)
-s.template = "dockerman/apply_widget"
-s.err = docker:read_status()
- s.err = s.err and s.err:gsub("\n","<br>"):gsub(" ","&#160;")
-if s.err then
- docker:clear_status()
-end
-
-s = m:section(Table,{{}})
-s.notitle=true
-s.rowcolors=false
-s.template="cbi/nullsection"
-
-o = s:option(Button, "remove")
-o.inputtitle= translate("Remove")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputstyle = "remove"
-o.forcewrite = true
-o.write = function(self, section)
- remove_action()
-end
-o.disable = lost_state
-
-o = s:option(Button, "forceremove")
-o.inputtitle= translate("Force Remove")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputstyle = "remove"
-o.forcewrite = true
-o.write = function(self, section)
- remove_action(true)
-end
-o.disable = lost_state
-
-o = s:option(Button, "save")
-o.inputtitle= translate("Save")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputstyle = "edit"
-o.disable = lost_state
-o.forcewrite = true
-o.write = function (self, section)
- local image_selected = {}
-
- for k in pairs(image_list) do
- if image_list[k]._selected == 1 then
- image_selected[#image_selected + 1] = image_list[k].id
- end
- end
-
- if next(image_selected) ~= nil then
- local names, first, show_name
-
- for _, img in ipairs(image_selected) do
- names = names and (names .. "&names=".. img) or img
- end
- if #image_selected > 1 then
- show_name = "images"
- else
- show_name = image_selected[1]
- end
- local cb = function(res, chunk)
- if res and res.code and res.code == 200 then
- if not first then
- first = true
- luci.http.header('Content-Disposition', 'inline; filename="'.. show_name .. '.tar"')
- luci.http.header('Content-Type', 'application\/x-tar')
- end
- luci.ltn12.pump.all(chunk, luci.http.write)
- else
- if not first then
- first = true
- luci.http.prepare_content("text/plain")
- end
- luci.ltn12.pump.all(chunk, luci.http.write)
- end
- end
-
- docker:write_status("Images: " .. "save" .. " " .. table.concat(image_selected, "\n") .. "...")
- local msg = dk.images:get({query = {names = names}}, cb)
- if msg and msg.code and msg.code ~= 200 then
- docker:append_status("code:" .. msg.code.." ".. (msg.body.message and msg.body.message or msg.message).. "\n")
- else
- docker:clear_status()
- end
- end
-end
-
-o = s:option(Button, "load")
-o.inputtitle= translate("Load")
-o.template = "dockerman/images_load"
-o.inputstyle = "add"
-o.disable = lost_state
-
-return m
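
Pulling an image from code uses the same images:create call as the Pull button above; a minimal sketch (the tag is only an example):

    local docker = require "luci.model.docker"
    local dk = docker.new()

    docker:write_status("Images: pulling alpine:latest...\n")
    -- Streams pull progress through the package's status callback.
    local res = dk.images:create({ query = { fromImage = "alpine:latest" } }, docker.pull_image_show_status_cb)
    if res and res.code == 200 then
        docker:clear_status()
    end
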
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua
deleted file mode 100644
index 37702c783..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua
+++ /dev/null
@@ -1,159 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local docker = require "luci.model.docker"
-
-local m, s, o
-local networks, dk, res, lost_state
-
-dk = docker.new()
-
-if dk:_ping().code ~= 200 then
- lost_state = true
-else
- res = dk.networks:list()
- if res and res.code and res.code < 300 then
- networks = res.body
- end
-end
-
-local get_networks = function ()
- local data = {}
-
- if type(networks) ~= "table" then
- return nil
- end
-
- for i, v in ipairs(networks) do
- local index = v.Created .. v.Id
-
- data[index]={}
- data[index]["_selected"] = 0
- data[index]["_id"] = v.Id:sub(1,12)
- data[index]["_name"] = v.Name
- data[index]["_driver"] = v.Driver
-
- if v.Driver == "bridge" then
- data[index]["_interface"] = v.Options["com.docker.network.bridge.name"]
- elseif v.Driver == "macvlan" then
- data[index]["_interface"] = v.Options.parent
- end
-
- data[index]["_subnet"] = v.IPAM and v.IPAM.Config[1] and v.IPAM.Config[1].Subnet or nil
- data[index]["_gateway"] = v.IPAM and v.IPAM.Config[1] and v.IPAM.Config[1].Gateway or nil
- end
-
- return data
-end
-
-local network_list = not lost_state and get_networks() or {}
-
-m = SimpleForm("docker",
- translate("Docker - Networks"),
- translate("This page displays all docker networks that have been created on the connected docker host."))
-m.submit=false
-m.reset=false
-
-s = m:section(Table, network_list, translate("Networks overview"))
-s.nodescr=true
-
-o = s:option(Flag, "_selected","")
-o.template = "dockerman/cbi/xfvalue"
-o.disabled = 0
-o.enabled = 1
-o.default = 0
-o.render = function(self, section, scope)
- self.disable = 0
- if network_list[section]["_name"] == "bridge" or network_list[section]["_name"] == "none" or network_list[section]["_name"] == "host" then
- self.disable = 1
- end
- Flag.render(self, section, scope)
-end
-o.write = function(self, section, value)
- network_list[section]._selected = value
-end
-
-o = s:option(DummyValue, "_id", translate("ID"))
-
-o = s:option(DummyValue, "_name", translate("Network Name"))
-
-o = s:option(DummyValue, "_driver", translate("Driver"))
-
-o = s:option(DummyValue, "_interface", translate("Parent Interface"))
-
-o = s:option(DummyValue, "_subnet", translate("Subnet"))
-
-o = s:option(DummyValue, "_gateway", translate("Gateway"))
-
-s = m:section(SimpleSection)
-s.template = "dockerman/apply_widget"
-s.err = docker:read_status()
- s.err = s.err and s.err:gsub("\n","<br>"):gsub(" ","&#160;")
-if s.err then
- docker:clear_status()
-end
-
-s = m:section(Table,{{}})
-s.notitle=true
-s.rowcolors=false
-s.template="cbi/nullsection"
-
-o = s:option(Button, "_new")
-o.inputtitle= translate("New")
-o.template = "dockerman/cbi/inlinebutton"
-o.notitle=true
-o.inputstyle = "add"
-o.forcewrite = true
-o.disable = lost_state
-o.write = function(self, section)
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/newnetwork"))
-end
-
-o = s:option(Button, "_remove")
-o.inputtitle= translate("Remove")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputstyle = "remove"
-o.forcewrite = true
-o.disable = lost_state
-o.write = function(self, section)
- local network_selected = {}
- local network_name_selected = {}
- local network_driver_selected = {}
-
- for k in pairs(network_list) do
- if network_list[k]._selected == 1 then
- network_selected[#network_selected + 1] = network_list[k]._id
- network_name_selected[#network_name_selected + 1] = network_list[k]._name
- network_driver_selected[#network_driver_selected + 1] = network_list[k]._driver
- end
- end
-
- if next(network_selected) ~= nil then
- local success = true
- docker:clear_status()
-
- for ii, net in ipairs(network_selected) do
- docker:append_status("Networks: " .. "remove" .. " " .. net .. "...")
- local res = dk.networks["remove"](dk, {id = net})
-
- if res and res.code and res.code >= 300 then
- docker:append_status("code:" .. res.code.." ".. (res.body.message and res.body.message or res.message).. "\n")
- success = false
- else
- docker:append_status("done\n")
- if network_driver_selected[ii] == "macvlan" then
- docker.remove_macvlan_interface(network_name_selected[ii])
- end
- end
- end
-
- if success then
- docker:clear_status()
- end
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/networks"))
- end
-end
-
-return m
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua
deleted file mode 100644
index bafe73196..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua
+++ /dev/null
@@ -1,911 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local docker = require "luci.model.docker"
-
-local m, s, o
-
-local dk = docker.new()
-
-local cmd_line = table.concat(arg, '/')
-local images, networks
-local create_body = {}
-
-if dk:_ping().code ~= 200 then
- lost_state = true
- images = {}
- networks = {}
-else
- images = dk.images:list().body
- networks = dk.networks:list().body
-end
-
-local is_quot_complete = function(str)
- local num = 0, w
- require "math"
-
- if not str then
- return true
- end
-
- local num = 0, w
- for w in str:gmatch("\"") do
- num = num + 1
- end
-
- if math.fmod(num, 2) ~= 0 then
- return false
- end
-
- num = 0
- for w in str:gmatch("\'") do
- num = num + 1
- end
-
- if math.fmod(num, 2) ~= 0 then
- return false
- end
-
- return true
-end
-
-function contains(list, x)
- for _, v in pairs(list) do
- if v == x then
- return true
- end
- end
- return false
-end
-
-local resolve_cli = function(cmd_line)
- local config = {
- advance = 1
- }
-
- local key_no_val = {
- 't',
- 'd',
- 'i',
- 'tty',
- 'rm',
- 'read_only',
- 'interactive',
- 'init',
- 'help',
- 'detach',
- 'privileged',
- 'P',
- 'publish_all',
- }
-
- local key_with_val = {
- 'sysctl',
- 'add_host',
- 'a',
- 'attach',
- 'blkio_weight_device',
- 'cap_add',
- 'cap_drop',
- 'device',
- 'device_cgroup_rule',
- 'device_read_bps',
- 'device_read_iops',
- 'device_write_bps',
- 'device_write_iops',
- 'dns',
- 'dns_option',
- 'dns_search',
- 'e',
- 'env',
- 'env_file',
- 'expose',
- 'group_add',
- 'l',
- 'label',
- 'label_file',
- 'link',
- 'link_local_ip',
- 'log_driver',
- 'log_opt',
- 'network_alias',
- 'p',
- 'publish',
- 'security_opt',
- 'storage_opt',
- 'tmpfs',
- 'v',
- 'volume',
- 'volumes_from',
- 'blkio_weight',
- 'cgroup_parent',
- 'cidfile',
- 'cpu_period',
- 'cpu_quota',
- 'cpu_rt_period',
- 'cpu_rt_runtime',
- 'c',
- 'cpu_shares',
- 'cpus',
- 'cpuset_cpus',
- 'cpuset_mems',
- 'detach_keys',
- 'disable_content_trust',
- 'domainname',
- 'entrypoint',
- 'gpus',
- 'health_cmd',
- 'health_interval',
- 'health_retries',
- 'health_start_period',
- 'health_timeout',
- 'h',
- 'hostname',
- 'ip',
- 'ip6',
- 'ipc',
- 'isolation',
- 'kernel_memory',
- 'log_driver',
- 'mac_address',
- 'm',
- 'memory',
- 'memory_reservation',
- 'memory_swap',
- 'memory_swappiness',
- 'mount',
- 'name',
- 'network',
- 'no_healthcheck',
- 'oom_kill_disable',
- 'oom_score_adj',
- 'pid',
- 'pids_limit',
- 'restart',
- 'runtime',
- 'shm_size',
- 'sig_proxy',
- 'stop_signal',
- 'stop_timeout',
- 'ulimit',
- 'u',
- 'user',
- 'userns',
- 'uts',
- 'volume_driver',
- 'w',
- 'workdir'
- }
-
- local key_abb = {
- net='network',
- a='attach',
- c='cpu-shares',
- d='detach',
- e='env',
- h='hostname',
- i='interactive',
- l='label',
- m='memory',
- p='publish',
- P='publish_all',
- t='tty',
- u='user',
- v='volume',
- w='workdir'
- }
-
- local key_with_list = {
- 'sysctl',
- 'add_host',
- 'a',
- 'attach',
- 'blkio_weight_device',
- 'cap_add',
- 'cap_drop',
- 'device',
- 'device_cgroup_rule',
- 'device_read_bps',
- 'device_read_iops',
- 'device_write_bps',
- 'device_write_iops',
- 'dns',
- 'dns_option',
- 'dns_search',
- 'e',
- 'env',
- 'env_file',
- 'expose',
- 'group_add',
- 'l',
- 'label',
- 'label_file',
- 'link',
- 'link_local_ip',
- 'log_driver',
- 'log_opt',
- 'network_alias',
- 'p',
- 'publish',
- 'security_opt',
- 'storage_opt',
- 'tmpfs',
- 'v',
- 'volume',
- 'volumes_from',
- }
-
- local key = nil
- local _key = nil
- local val = nil
- local is_cmd = false
-
- cmd_line = cmd_line:match("^DOCKERCLI%s+(.+)")
- for w in cmd_line:gmatch("[^%s]+") do
- if w =='\\' then
- elseif not key and not _key and not is_cmd then
- --key=val
- key, val = w:match("^%-%-([%lP%-]-)=(.+)")
- if not key then
- --key val
- key = w:match("^%-%-([%lP%-]+)")
- if not key then
- -- -v val
- key = w:match("^%-([%lP%-]+)")
- if key then
- -- for -dit
- if key:match("i") or key:match("t") or key:match("d") then
- if key:match("i") then
- config[key_abb["i"]] = true
- key:gsub("i", "")
- end
- if key:match("t") then
- config[key_abb["t"]] = true
- key:gsub("t", "")
- end
- if key:match("d") then
- config[key_abb["d"]] = true
- key:gsub("d", "")
- end
- if key:match("P") then
- config[key_abb["P"]] = true
- key:gsub("P", "")
- end
- if key == "" then
- key = nil
- end
- end
- end
- end
- end
- if key then
- key = key:gsub("-","_")
- key = key_abb[key] or key
- if contains(key_no_val, key) then
- config[key] = true
- val = nil
- key = nil
- elseif contains(key_with_val, key) then
- -- if key == "cap_add" then config.privileged = true end
- else
- key = nil
- val = nil
- end
- else
- config.image = w
- key = nil
- val = nil
- is_cmd = true
- end
- elseif (key or _key) and not is_cmd then
- if key == "mount" then
- -- we need resolve mount options here
- -- type=bind,source=/source,target=/app
- local _type = w:match("^type=([^,]+),") or "bind"
- local source = (_type ~= "tmpfs") and (w:match("source=([^,]+),") or w:match("src=([^,]+),")) or ""
- local target = w:match(",target=([^,]+)") or w:match(",dst=([^,]+)") or w:match(",destination=([^,]+)") or ""
- local ro = w:match(",readonly") and "ro" or nil
-
- if source and target then
- if _type ~= "tmpfs" then
- local bind_propagation = (_type == "bind") and w:match(",bind%-propagation=([^,]+)") or nil
- val = source..":"..target .. ((ro or bind_propagation) and (":" .. (ro and ro or "") .. (((ro and bind_propagation) and "," or "") .. (bind_propagation and bind_propagation or ""))or ""))
- else
- local tmpfs_mode = w:match(",tmpfs%-mode=([^,]+)") or nil
- local tmpfs_size = w:match(",tmpfs%-size=([^,]+)") or nil
- key = "tmpfs"
- val = target .. ((tmpfs_mode or tmpfs_size) and (":" .. (tmpfs_mode and ("mode=" .. tmpfs_mode) or "") .. ((tmpfs_mode and tmpfs_size) and "," or "") .. (tmpfs_size and ("size=".. tmpfs_size) or "")) or "")
- if not config[key] then
- config[key] = {}
- end
- table.insert( config[key], val )
- key = nil
- val = nil
- end
- end
- else
- val = w
- end
- elseif is_cmd then
- config["command"] = (config["command"] and (config["command"] .. " " )or "") .. w
- end
- if (key or _key) and val then
- key = _key or key
- if contains(key_with_list, key) then
- if not config[key] then
- config[key] = {}
- end
- if _key then
- config[key][#config[key]] = config[key][#config[key]] .. " " .. w
- else
- table.insert( config[key], val )
- end
- if is_quot_complete(config[key][#config[key]]) then
- config[key][#config[key]] = config[key][#config[key]]:gsub("[\"\']", "")
- _key = nil
- else
- _key = key
- end
- else
- config[key] = (config[key] and (config[key] .. " ") or "") .. val
- if is_quot_complete(config[key]) then
- config[key] = config[key]:gsub("[\"\']", "")
- _key = nil
- else
- _key = key
- end
- end
- key = nil
- val = nil
- end
- end
-
- return config
-end
-
-local default_config = {}
-
-if cmd_line and cmd_line:match("^DOCKERCLI.+") then
- default_config = resolve_cli(cmd_line)
-elseif cmd_line and cmd_line:match("^duplicate/[^/]+$") then
- local container_id = cmd_line:match("^duplicate/(.+)")
- create_body = dk:containers_duplicate_config({id = container_id}) or {}
- if not create_body.HostConfig then
- create_body.HostConfig = {}
- end
-
- if next(create_body) ~= nil then
- default_config.name = nil
- default_config.image = create_body.Image
- default_config.hostname = create_body.Hostname
- default_config.tty = create_body.Tty and true or false
- default_config.interactive = create_body.OpenStdin and true or false
- default_config.privileged = create_body.HostConfig.Privileged and true or false
- default_config.restart = create_body.HostConfig.RestartPolicy and create_body.HostConfig.RestartPolicy.name or nil
- -- default_config.network = create_body.HostConfig.NetworkMode == "default" and "bridge" or create_body.HostConfig.NetworkMode
- -- if container has leave original network, and add new network, .HostConfig.NetworkMode is INcorrect, so using first child of .NetworkingConfig.EndpointsConfig
- default_config.network = create_body.NetworkingConfig and create_body.NetworkingConfig.EndpointsConfig and next(create_body.NetworkingConfig.EndpointsConfig) or nil
- default_config.ip = default_config.network and default_config.network ~= "bridge" and default_config.network ~= "host" and default_config.network ~= "null" and create_body.NetworkingConfig.EndpointsConfig[default_config.network].IPAMConfig and create_body.NetworkingConfig.EndpointsConfig[default_config.network].IPAMConfig.IPv4Address or nil
- default_config.link = create_body.HostConfig.Links
- default_config.env = create_body.Env
- default_config.dns = create_body.HostConfig.Dns
- default_config.volume = create_body.HostConfig.Binds
- default_config.cap_add = create_body.HostConfig.CapAdd
- default_config.publish_all = create_body.HostConfig.PublishAllPorts
-
- if create_body.HostConfig.Sysctls and type(create_body.HostConfig.Sysctls) == "table" then
- default_config.sysctl = {}
- for k, v in pairs(create_body.HostConfig.Sysctls) do
- table.insert( default_config.sysctl, k.."="..v )
- end
- end
-
- if create_body.HostConfig.LogConfig and create_body.HostConfig.LogConfig.Config and type(create_body.HostConfig.LogConfig.Config) == "table" then
- default_config.log_opt = {}
- for k, v in pairs(create_body.HostConfig.LogConfig.Config) do
- table.insert( default_config.log_opt, k.."="..v )
- end
- end
-
- if create_body.HostConfig.PortBindings and type(create_body.HostConfig.PortBindings) == "table" then
- default_config.publish = {}
- for k, v in pairs(create_body.HostConfig.PortBindings) do
- for x, y in ipairs(v) do
- table.insert( default_config.publish, y.HostPort..":"..k:match("^(%d+)/.+").."/"..k:match("^%d+/(.+)") )
- end
- end
- end
-
- default_config.user = create_body.User or nil
- default_config.command = create_body.Cmd and type(create_body.Cmd) == "table" and table.concat(create_body.Cmd, " ") or nil
- default_config.advance = 1
- default_config.cpus = create_body.HostConfig.NanoCPUs
- default_config.cpu_shares = create_body.HostConfig.CpuShares
- default_config.memory = create_body.HostConfig.Memory
- default_config.blkio_weight = create_body.HostConfig.BlkioWeight
-
- if create_body.HostConfig.Devices and type(create_body.HostConfig.Devices) == "table" then
- default_config.device = {}
- for _, v in ipairs(create_body.HostConfig.Devices) do
- table.insert( default_config.device, v.PathOnHost..":"..v.PathInContainer..(v.CgroupPermissions ~= "" and (":" .. v.CgroupPermissions) or "") )
- end
- end
-
- if create_body.HostConfig.Tmpfs and type(create_body.HostConfig.Tmpfs) == "table" then
- default_config.tmpfs = {}
- for k, v in pairs(create_body.HostConfig.Tmpfs) do
- table.insert( default_config.tmpfs, k .. (v~="" and ":" or "")..v )
- end
- end
- end
-end
-
-m = SimpleForm("docker", translate("Docker - Containers"))
-m.redirect = luci.dispatcher.build_url("admin", "docker", "containers")
-if lost_state then
- m.submit=false
- m.reset=false
-end
-
-s = m:section(SimpleSection)
-s.template = "dockerman/apply_widget"
-s.err=docker:read_status()
-s.err=s.err and s.err:gsub("\n","<br />"):gsub(" ","&#160;")
-if s.err then
- docker:clear_status()
-end
-
-s = m:section(SimpleSection, translate("Create new docker container"))
-s.addremove = true
-s.anonymous = true
-
-o = s:option(DummyValue,"cmd_line", translate("Resolve CLI"))
-o.rawhtml = true
-o.template = "dockerman/newcontainer_resolve"
-
-o = s:option(Value, "name", translate("Container Name"))
-o.rmempty = true
-o.default = default_config.name or nil
-
-o = s:option(Flag, "interactive", translate("Interactive (-i)"))
-o.rmempty = true
-o.disabled = 0
-o.enabled = 1
-o.default = default_config.interactive and 1 or 0
-
-o = s:option(Flag, "tty", translate("TTY (-t)"))
-o.rmempty = true
-o.disabled = 0
-o.enabled = 1
-o.default = default_config.tty and 1 or 0
-
-o = s:option(Value, "image", translate("Docker Image"))
-o.rmempty = true
-o.default = default_config.image or nil
-for _, v in ipairs (images) do
- if v.RepoTags then
- o:value(v.RepoTags[1], v.RepoTags[1])
- end
-end
-
-o = s:option(Flag, "_force_pull", translate("Always pull image first"))
-o.rmempty = true
-o.disabled = 0
-o.enabled = 1
-o.default = 0
-
-o = s:option(Flag, "privileged", translate("Privileged"))
-o.rmempty = true
-o.disabled = 0
-o.enabled = 1
-o.default = default_config.privileged and 1 or 0
-
-o = s:option(ListValue, "restart", translate("Restart Policy"))
-o.rmempty = true
-o:value("no", "No")
-o:value("unless-stopped", "Unless stopped")
-o:value("always", "Always")
-o:value("on-failure", "On failure")
-o.default = default_config.restart or "unless-stopped"
-
-local d_network = s:option(ListValue, "network", translate("Networks"))
-d_network.rmempty = true
-d_network.default = default_config.network or "bridge"
-
-local d_ip = s:option(Value, "ip", translate("IPv4 Address"))
-d_ip.datatype="ip4addr"
-d_ip:depends("network", "nil")
-d_ip.default = default_config.ip or nil
-
-o = s:option(DynamicList, "link", translate("Links with other containers"))
-o.placeholder = "container_name:alias"
-o.rmempty = true
-o:depends("network", "bridge")
-o.default = default_config.link or nil
-
-o = s:option(DynamicList, "dns", translate("Set custom DNS servers"))
-o.placeholder = "8.8.8.8"
-o.rmempty = true
-o.default = default_config.dns or nil
-
-o = s:option(Value, "user",
- translate("User(-u)"),
- translate("The user that commands are run as inside the container.(format: name|uid[:group|gid])"))
-o.placeholder = "1000:1000"
-o.rmempty = true
-o.default = default_config.user or nil
-
-o = s:option(DynamicList, "env",
- translate("Environmental Variable(-e)"),
- translate("Set environment variables to inside the container"))
-o.placeholder = "TZ=Asia/Shanghai"
-o.rmempty = true
-o.default = default_config.env or nil
-
-o = s:option(DynamicList, "volume",
- translate("Bind Mount(-v)"),
- translate("Bind mount a volume"))
-o.placeholder = "/media:/media:slave"
-o.rmempty = true
-o.default = default_config.volume or nil
-
-local d_publish = s:option(DynamicList, "publish",
- translate("Exposed Ports(-p)"),
- translate("Publish container's port(s) to the host"))
-d_publish.placeholder = "2200:22/tcp"
-d_publish.rmempty = true
-d_publish.default = default_config.publish or nil
-
-o = s:option(Value, "command", translate("Run command"))
-o.placeholder = "/bin/sh init.sh"
-o.rmempty = true
-o.default = default_config.command or nil
-
-o = s:option(Flag, "advance", translate("Advance"))
-o.rmempty = true
-o.disabled = 0
-o.enabled = 1
-o.default = default_config.advance or 0
-
-o = s:option(Value, "hostname",
- translate("Host Name"),
- translate("The hostname to use for the container"))
-o.rmempty = true
-o.default = default_config.hostname or nil
-o:depends("advance", 1)
-
-o = s:option(Flag, "publish_all",
- translate("Exposed All Ports(-P)"),
- translate("Allocates an ephemeral host port for all of a container's exposed ports"))
-o.rmempty = true
-o.disabled = 0
-o.enabled = 1
-o.default = default_config.publish_all and 1 or 0
-o:depends("advance", 1)
-
-o = s:option(DynamicList, "device",
- translate("Device(--device)"),
- translate("Add host device to the container"))
-o.placeholder = "/dev/sda:/dev/xvdc:rwm"
-o.rmempty = true
-o:depends("advance", 1)
-o.default = default_config.device or nil
-
-o = s:option(DynamicList, "tmpfs",
- translate("Tmpfs(--tmpfs)"),
- translate("Mount tmpfs directory"))
-o.placeholder = "/run:rw,noexec,nosuid,size=65536k"
-o.rmempty = true
-o:depends("advance", 1)
-o.default = default_config.tmpfs or nil
-
-o = s:option(DynamicList, "sysctl",
- translate("Sysctl(--sysctl)"),
- translate("Sysctls (kernel parameters) options"))
-o.placeholder = "net.ipv4.ip_forward=1"
-o.rmempty = true
-o:depends("advance", 1)
-o.default = default_config.sysctl or nil
-
-o = s:option(DynamicList, "cap_add",
- translate("CAP-ADD(--cap-add)"),
- translate("A list of kernel capabilities to add to the container"))
-o.placeholder = "NET_ADMIN"
-o.rmempty = true
-o:depends("advance", 1)
-o.default = default_config.cap_add or nil
-
-o = s:option(Value, "cpus",
- translate("CPUs"),
- translate("Number of CPUs. Number is a fractional number. 0.000 means no limit"))
-o.placeholder = "1.5"
-o.rmempty = true
-o:depends("advance", 1)
-o.datatype="ufloat"
-o.default = default_config.cpus or nil
-
-o = s:option(Value, "cpu_shares",
- translate("CPU Shares Weight"),
- translate("CPU shares relative weight, if 0 is set, the system will ignore the value and use the default of 1024"))
-o.placeholder = "1024"
-o.rmempty = true
-o:depends("advance", 1)
-o.datatype="uinteger"
-o.default = default_config.cpu_shares or nil
-
-o = s:option(Value, "memory",
- translate("Memory"),
- translate("Memory limit (format: []). Number is a positive integer. Unit can be one of b, k, m, or g. Minimum is 4M"))
-o.placeholder = "128m"
-o.rmempty = true
-o:depends("advance", 1)
-o.default = default_config.memory or nil
-
-o = s:option(Value, "blkio_weight",
- translate("Block IO Weight"),
- translate("Block IO weight (relative weight) accepts a weight value between 10 and 1000"))
-o.placeholder = "500"
-o.rmempty = true
-o:depends("advance", 1)
-o.datatype="uinteger"
-o.default = default_config.blkio_weight or nil
-
-o = s:option(DynamicList, "log_opt",
- translate("Log driver options"),
- translate("The logging configuration for this container"))
-o.placeholder = "max-size=1m"
-o.rmempty = true
-o:depends("advance", 1)
-o.default = default_config.log_opt or nil
-
-for _, v in ipairs (networks) do
- if v.Name then
- local parent = v.Options and v.Options.parent or nil
- local ip = v.IPAM and v.IPAM.Config and v.IPAM.Config[1] and v.IPAM.Config[1].Subnet or nil
- ipv6 = v.IPAM and v.IPAM.Config and v.IPAM.Config[2] and v.IPAM.Config[2].Subnet or nil
- local network_name = v.Name .. " | " .. v.Driver .. (parent and (" | " .. parent) or "") .. (ip and (" | " .. ip) or "").. (ipv6 and (" | " .. ipv6) or "")
- d_network:value(v.Name, network_name)
-
- if v.Name ~= "none" and v.Name ~= "bridge" and v.Name ~= "host" then
- d_ip:depends("network", v.Name)
- end
-
- if v.Driver == "bridge" then
- d_publish:depends("network", v.Name)
- end
- end
-end
-
-m.handle = function(self, state, data)
- if state ~= FORM_VALID then
- return
- end
-
- local tmp
- local name = data.name or ("luci_" .. os.date("%Y%m%d%H%M%S"))
- local hostname = data.hostname
- local tty = type(data.tty) == "number" and (data.tty == 1 and true or false) or default_config.tty or false
- local publish_all = type(data.publish_all) == "number" and (data.publish_all == 1 and true or false) or default_config.publish_all or false
- local interactive = type(data.interactive) == "number" and (data.interactive == 1 and true or false) or default_config.interactive or false
- local image = data.image
- local user = data.user
-
- if image and not image:match(".-:.+") then
- image = image .. ":latest"
- end
-
- local privileged = type(data.privileged) == "number" and (data.privileged == 1 and true or false) or default_config.privileged or false
- local restart = data.restart
- local env = data.env
- local dns = data.dns
- local cap_add = data.cap_add
- local sysctl = {}
-
- tmp = data.sysctl
- if type(tmp) == "table" then
- for i, v in ipairs(tmp) do
- local k,v1 = v:match("(.-)=(.+)")
- if k and v1 then
- sysctl[k]=v1
- end
- end
- end
-
- local log_opt = {}
- tmp = data.log_opt
- if type(tmp) == "table" then
- for i, v in ipairs(tmp) do
- local k,v1 = v:match("(.-)=(.+)")
- if k and v1 then
- log_opt[k]=v1
- end
- end
- end
-
- local network = data.network
- local ip = (network ~= "bridge" and network ~= "host" and network ~= "none") and data.ip or nil
- local volume = data.volume
- local memory = data.memory or nil
- local cpu_shares = data.cpu_shares or nil
- local cpus = data.cpus or nil
- local blkio_weight = data.blkio_weight or nil
-
- local portbindings = {}
- local exposedports = {}
-
- local tmpfs = {}
- tmp = data.tmpfs
- if type(tmp) == "table" then
- for i, v in ipairs(tmp)do
- local k= v:match("([^:]+)")
- local v1 = v:match(".-:([^:]+)") or ""
- if k then
- tmpfs[k]=v1
- end
- end
- end
-
- local device = {}
- tmp = data.device
- if type(tmp) == "table" then
- for i, v in ipairs(tmp) do
- local t = {}
- local _,_, h, c, p = v:find("(.-):(.-):(.+)")
- if h and c then
- t['PathOnHost'] = h
- t['PathInContainer'] = c
- t['CgroupPermissions'] = p or "rwm"
- else
- local _,_, h, c = v:find("(.-):(.+)")
- if h and c then
- t['PathOnHost'] = h
- t['PathInContainer'] = c
- t['CgroupPermissions'] = "rwm"
- else
- t['PathOnHost'] = v
- t['PathInContainer'] = v
- t['CgroupPermissions'] = "rwm"
- end
- end
-
- if next(t) ~= nil then
- table.insert( device, t )
- end
- end
- end
-
- tmp = data.publish or {}
- for i, v in ipairs(tmp) do
- for v1 ,v2 in string.gmatch(v, "(%d+):([^%s]+)") do
- local _,_,p= v2:find("^%d+/(%w+)")
- if p == nil then
- v2=v2..'/tcp'
- end
- portbindings[v2] = {{HostPort=v1}}
- exposedports[v2] = {HostPort=v1}
- end
- end
-
- local link = data.link
- tmp = data.command
- local command = {}
- if tmp ~= nil then
- for v in string.gmatch(tmp, "[^%s]+") do
- command[#command+1] = v
- end
- end
-
- if memory and memory ~= 0 then
- _,_,n,unit = memory:find("([%d%.]+)([%l%u]+)")
- if n then
- unit = unit and unit:sub(1,1):upper() or "B"
- if unit == "M" then
- memory = tonumber(n) * 1024 * 1024
- elseif unit == "G" then
- memory = tonumber(n) * 1024 * 1024 * 1024
- elseif unit == "K" then
- memory = tonumber(n) * 1024
- else
- memory = tonumber(n)
- end
- end
- end
-
- create_body.Hostname = network ~= "host" and (hostname or name) or nil
- create_body.Tty = tty and true or false
- create_body.OpenStdin = interactive and true or false
- create_body.User = user
- create_body.Cmd = command
- create_body.Env = env
- create_body.Image = image
- create_body.ExposedPorts = exposedports
- create_body.HostConfig = create_body.HostConfig or {}
- create_body.HostConfig.Dns = dns
- create_body.HostConfig.Binds = volume
- create_body.HostConfig.RestartPolicy = { Name = restart, MaximumRetryCount = 0 }
- create_body.HostConfig.Privileged = privileged and true or false
- create_body.HostConfig.PortBindings = portbindings
- create_body.HostConfig.Memory = memory and tonumber(memory)
- create_body.HostConfig.CpuShares = cpu_shares and tonumber(cpu_shares)
- create_body.HostConfig.NanoCPUs = cpus and tonumber(cpus) * 10 ^ 9
- create_body.HostConfig.BlkioWeight = blkio_weight and tonumber(blkio_weight)
- create_body.HostConfig.PublishAllPorts = publish_all
-
- if create_body.HostConfig.NetworkMode ~= network then
- create_body.NetworkingConfig = nil
- end
-
- create_body.HostConfig.NetworkMode = network
-
- if ip then
- if create_body.NetworkingConfig and create_body.NetworkingConfig.EndpointsConfig and type(create_body.NetworkingConfig.EndpointsConfig) == "table" then
- for k, v in pairs (create_body.NetworkingConfig.EndpointsConfig) do
- if k == network and v.IPAMConfig and v.IPAMConfig.IPv4Address then
- v.IPAMConfig.IPv4Address = ip
- else
- create_body.NetworkingConfig.EndpointsConfig = { [network] = { IPAMConfig = { IPv4Address = ip } } }
- end
- break
- end
- else
- create_body.NetworkingConfig = { EndpointsConfig = { [network] = { IPAMConfig = { IPv4Address = ip } } } }
- end
- elseif not create_body.NetworkingConfig then
- create_body.NetworkingConfig = nil
- end
-
- create_body["HostConfig"]["Tmpfs"] = tmpfs
- create_body["HostConfig"]["Devices"] = device
- create_body["HostConfig"]["Sysctls"] = sysctl
- create_body["HostConfig"]["CapAdd"] = cap_add
- create_body["HostConfig"]["LogConfig"] = next(log_opt) ~= nil and { Config = log_opt } or nil
-
- if network == "bridge" then
- create_body["HostConfig"]["Links"] = link
- end
-
- local pull_image = function(image)
- local json_stringify = luci.jsonc and luci.jsonc.stringify
- docker:append_status("Images: " .. "pulling" .. " " .. image .. "...\n")
- local res = dk.images:create({query = {fromImage=image}}, docker.pull_image_show_status_cb)
- if res and res.code and res.code == 200 and (res.body[#res.body] and not res.body[#res.body].error and res.body[#res.body].status and (res.body[#res.body].status == "Status: Downloaded newer image for ".. image or res.body[#res.body].status == "Status: Image is up to date for ".. image)) then
- docker:append_status("done\n")
- else
- res.code = (res.code == 200) and 500 or res.code
- docker:append_status("code:" .. res.code.." ".. (res.body[#res.body] and res.body[#res.body].error or (res.body.message or res.message)).. "\n")
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/newcontainer"))
- end
- end
-
- docker:clear_status()
- local exist_image = false
-
- if image then
- for _, v in ipairs (images) do
- if v.RepoTags and v.RepoTags[1] == image then
- exist_image = true
- break
- end
- end
- if not exist_image then
- pull_image(image)
- elseif data._force_pull == 1 then
- pull_image(image)
- end
- end
-
- create_body = docker.clear_empty_tables(create_body)
-
- docker:append_status("Container: " .. "create" .. " " .. name .. "...")
- local res = dk.containers:create({name = name, body = create_body})
- if res and res.code and res.code == 201 then
- docker:clear_status()
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/containers"))
- else
- docker:append_status("code:" .. res.code.." ".. (res.body.message and res.body.message or res.message))
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/newcontainer"))
- end
-end
-
-return m
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua
deleted file mode 100644
index c87678b85..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua
+++ /dev/null
@@ -1,258 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local docker = require "luci.model.docker"
-
-local m, s, o
-
-local dk = docker.new()
-if dk:_ping().code ~= 200 then
- lost_state = true
-end
-
-m = SimpleForm("docker", translate("Docker - Network"))
-m.redirect = luci.dispatcher.build_url("admin", "docker", "networks")
-if lost_state then
- m.submit=false
- m.reset=false
-end
-
-
-s = m:section(SimpleSection)
-s.template = "dockerman/apply_widget"
-s.err=docker:read_status()
-s.err=s.err and s.err:gsub("\n","<br />"):gsub(" ","&#160;")
-if s.err then
- docker:clear_status()
-end
-
-s = m:section(SimpleSection, translate("Create new docker network"))
-s.addremove = true
-s.anonymous = true
-
-o = s:option(Value, "name",
- translate("Network Name"),
- translate("Name of the network that can be selected during container creation"))
-o.rmempty = true
-
-o = s:option(ListValue, "driver", translate("Driver"))
-o.rmempty = true
-o:value("bridge", translate("Bridge device"))
-o:value("macvlan", translate("MAC VLAN"))
-o:value("ipvlan", translate("IP VLAN"))
-o:value("overlay", translate("Overlay network"))
-
-o = s:option(Value, "parent", translate("Base device"))
-o.rmempty = true
-o:depends("driver", "macvlan")
-local interfaces = luci.sys and luci.sys.net and luci.sys.net.devices() or {}
-for _, v in ipairs(interfaces) do
- o:value(v, v)
-end
-o.default="br-lan"
-o.placeholder="br-lan"
-
-o = s:option(ListValue, "macvlan_mode", translate("Mode"))
-o.rmempty = true
-o:depends("driver", "macvlan")
-o.default="bridge"
-o:value("bridge", translate("Bridge (Support direct communication between MAC VLANs)"))
-o:value("private", translate("Private (Prevent communication between MAC VLANs)"))
-o:value("vepa", translate("VEPA (Virtual Ethernet Port Aggregator)"))
-o:value("passthru", translate("Pass-through (Mirror physical device to single MAC VLAN)"))
-
-o = s:option(ListValue, "ipvlan_mode", translate("Ipvlan Mode"))
-o.rmempty = true
-o:depends("driver", "ipvlan")
-o.default="l3"
-o:value("l2", translate("L2 bridge"))
-o:value("l3", translate("L3 bridge"))
-
-o = s:option(Flag, "ingress",
- translate("Ingress"),
- translate("Ingress network is the network which provides the routing-mesh in swarm mode"))
-o.rmempty = true
-o.disabled = 0
-o.enabled = 1
-o.default = 0
-o:depends("driver", "overlay")
-
-o = s:option(DynamicList, "options", translate("Options"))
-o.rmempty = true
-o.placeholder="com.docker.network.driver.mtu=1500"
-
-o = s:option(Flag, "internal", translate("Internal"), translate("Restrict external access to the network"))
-o.rmempty = true
-o:depends("driver", "overlay")
-o.disabled = 0
-o.enabled = 1
-o.default = 0
-
-if nixio.fs.access("/etc/config/network") and nixio.fs.access("/etc/config/firewall")then
- o = s:option(Flag, "op_macvlan", translate("Create macvlan interface"), translate("Auto create macvlan interface in Openwrt"))
- o:depends("driver", "macvlan")
- o.disabled = 0
- o.enabled = 1
- o.default = 1
-end
-
-o = s:option(Value, "subnet", translate("Subnet"))
-o.rmempty = true
-o.placeholder="10.1.0.0/16"
-o.datatype="ip4addr"
-
-o = s:option(Value, "gateway", translate("Gateway"))
-o.rmempty = true
-o.placeholder="10.1.1.1"
-o.datatype="ip4addr"
-
-o = s:option(Value, "ip_range", translate("IP range"))
-o.rmempty = true
-o.placeholder="10.1.1.0/24"
-o.datatype="ip4addr"
-
-o = s:option(DynamicList, "aux_address", translate("Exclude IPs"))
-o.rmempty = true
-o.placeholder="my-route=10.1.1.1"
-
-o = s:option(Flag, "ipv6", translate("Enable IPv6"))
-o.rmempty = true
-o.disabled = 0
-o.enabled = 1
-o.default = 0
-
-o = s:option(Value, "subnet6", translate("IPv6 Subnet"))
-o.rmempty = true
-o.placeholder="fe80::/10"
-o.datatype="ip6addr"
-o:depends("ipv6", 1)
-
-o = s:option(Value, "gateway6", translate("IPv6 Gateway"))
-o.rmempty = true
-o.placeholder="fe80::1"
-o.datatype="ip6addr"
-o:depends("ipv6", 1)
-
-m.handle = function(self, state, data)
- if state == FORM_VALID then
- local name = data.name
- local driver = data.driver
-
- local internal = data.internal == 1 and true or false
-
- local subnet = data.subnet
- local gateway = data.gateway
- local ip_range = data.ip_range
-
- local aux_address = {}
- local tmp = data.aux_address or {}
- for i,v in ipairs(tmp) do
- _,_,k1,v1 = v:find("(.-)=(.+)")
- aux_address[k1] = v1
- end
-
- local options = {}
- tmp = data.options or {}
- for i,v in ipairs(tmp) do
- _,_,k1,v1 = v:find("(.-)=(.+)")
- options[k1] = v1
- end
-
- local ipv6 = data.ipv6 == 1 and true or false
-
- local create_body = {
- Name = name,
- Driver = driver,
- EnableIPv6 = ipv6,
- IPAM = {
- Driver= "default"
- },
- Internal = internal
- }
-
- if subnet or gateway or ip_range then
- create_body["IPAM"]["Config"] = {
- {
- Subnet = subnet,
- Gateway = gateway,
- IPRange = ip_range,
- AuxAddress = aux_address,
- AuxiliaryAddresses = aux_address
- }
- }
- end
-
- if driver == "macvlan" then
- create_body["Options"] = {
- macvlan_mode = data.macvlan_mode,
- parent = data.parent
- }
- elseif driver == "ipvlan" then
- create_body["Options"] = {
- ipvlan_mode = data.ipvlan_mode
- }
- elseif driver == "overlay" then
- create_body["Ingress"] = data.ingerss == 1 and true or false
- end
-
- if ipv6 and data.subnet6 then
- if type(create_body["IPAM"]["Config"]) ~= "table" then
- create_body["IPAM"]["Config"] = {}
- end
- local index = #create_body["IPAM"]["Config"]
- create_body["IPAM"]["Config"][index+1] = {
- Subnet = data.subnet6,
- Gateway = data.gateway6
- }
- end
-
- if next(options) ~= nil then
- create_body["Options"] = create_body["Options"] or {}
- for k, v in pairs(options) do
- create_body["Options"][k] = v
- end
- end
-
- create_body = docker.clear_empty_tables(create_body)
- docker:write_status("Network: " .. "create" .. " " .. create_body.Name .. "...")
-
- local res = dk.networks:create({
- body = create_body
- })
-
- if res and res.code == 201 then
- docker:write_status("Network: " .. "create macvlan interface...")
- res = dk.networks:inspect({
- name = create_body.Name
- })
-
- if driver == "macvlan" and
- data.op_macvlan ~= 0 and
- res and
- res.code and
- res.code == 200 and
- res.body and
- res.body.IPAM and
- res.body.IPAM.Config and
- res.body.IPAM.Config[1] and
- res.body.IPAM.Config[1].Gateway and
- res.body.IPAM.Config[1].Subnet then
-
- docker.create_macvlan_interface(data.name,
- data.parent,
- res.body.IPAM.Config[1].Gateway,
- res.body.IPAM.Config[1].Subnet)
- end
-
- docker:clear_status()
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/networks"))
- else
- docker:append_status("code:" .. res.code.." ".. (res.body.message and res.body.message or res.message).. "\n")
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/newnetwork"))
- end
- end
-end
-
-return m
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua
deleted file mode 100644
index c91f349ce..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua
+++ /dev/null
@@ -1,151 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local docker = require "luci.model.docker"
-local uci = (require "luci.model.uci").cursor()
-
-local m, s, o, lost_state
-local dk = docker.new()
-
-if dk:_ping().code ~= 200 then
- lost_state = true
-end
-
-m = SimpleForm("dockerd",
- translate("Docker - Overview"),
- translate("An overview with the relevant data is displayed here with which the LuCI docker client is connected.")
-..
- " " ..
- [[<a href="https://github.com/lisaac/luci-app-dockerman" target="_blank">]] ..
- translate("Github") ..
- [[</a>]])
-m.submit=false
-m.reset=false
-
-local docker_info_table = {}
--- docker_info_table['0OperatingSystem'] = {_key=translate("Operating System"),_value='-'}
--- docker_info_table['1Architecture'] = {_key=translate("Architecture"),_value='-'}
--- docker_info_table['2KernelVersion'] = {_key=translate("Kernel Version"),_value='-'}
-docker_info_table['3ServerVersion'] = {_key=translate("Docker Version"),_value='-'}
-docker_info_table['4ApiVersion'] = {_key=translate("Api Version"),_value='-'}
-docker_info_table['5NCPU'] = {_key=translate("CPUs"),_value='-'}
-docker_info_table['6MemTotal'] = {_key=translate("Total Memory"),_value='-'}
-docker_info_table['7DockerRootDir'] = {_key=translate("Docker Root Dir"),_value='-'}
-docker_info_table['8IndexServerAddress'] = {_key=translate("Index Server Address"),_value='-'}
-docker_info_table['9RegistryMirrors'] = {_key=translate("Registry Mirrors"),_value='-'}
-
-if nixio.fs.access("/usr/bin/dockerd") and not uci:get_bool("dockerd", "dockerman", "remote_endpoint") then
- s = m:section(SimpleSection)
- s.template = "dockerman/apply_widget"
- s.err=docker:read_status()
- s.err=s.err and s.err:gsub("\n","<br />"):gsub(" ","&#160;")
- if s.err then
- docker:clear_status()
- end
- s = m:section(Table,{{}})
- s.notitle=true
- s.rowcolors=false
- s.template = "cbi/nullsection"
-
- o = s:option(Button, "_start")
- o.template = "dockerman/cbi/inlinebutton"
- o.inputtitle = lost_state and translate("Start") or translate("Stop")
- o.inputstyle = lost_state and "add" or "remove"
- o.forcewrite = true
- o.write = function(self, section)
- docker:clear_status()
-
- if lost_state then
- docker:append_status("Docker daemon: starting")
- luci.util.exec("/etc/init.d/dockerd start")
- luci.util.exec("sleep 5")
- luci.util.exec("/etc/init.d/dockerman start")
-
- else
- docker:append_status("Docker daemon: stopping")
- luci.util.exec("/etc/init.d/dockerd stop")
- end
- docker:clear_status()
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/overview"))
- end
-
- o = s:option(Button, "_restart")
- o.template = "dockerman/cbi/inlinebutton"
- o.inputtitle = translate("Restart")
- o.inputstyle = "reload"
- o.forcewrite = true
- o.write = function(self, section)
- docker:clear_status()
- docker:append_status("Docker daemon: restarting")
- luci.util.exec("/etc/init.d/dockerd restart")
- luci.util.exec("sleep 5")
- luci.util.exec("/etc/init.d/dockerman start")
- docker:clear_status()
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/overview"))
- end
-end
-
-s = m:section(Table, docker_info_table)
-s:option(DummyValue, "_key", translate("Info"))
-s:option(DummyValue, "_value")
-
-s = m:section(SimpleSection)
-s.template = "dockerman/overview"
-
-s.containers_running = '-'
-s.images_used = '-'
-s.containers_total = '-'
-s.images_total = '-'
-s.networks_total = '-'
-s.volumes_total = '-'
-
--- local socket = luci.model.uci.cursor():get("dockerd", "dockerman", "socket_path")
-if not lost_state then
- local containers_list = dk.containers:list({query = {all=true}}).body
- local images_list = dk.images:list().body
- local vol = dk.volumes:list()
- local volumes_list = vol and vol.body and vol.body.Volumes or {}
- local networks_list = dk.networks:list().body or {}
- local docker_info = dk:info()
-
- -- docker_info_table['0OperatingSystem']._value = docker_info.body.OperatingSystem
- -- docker_info_table['1Architecture']._value = docker_info.body.Architecture
- -- docker_info_table['2KernelVersion']._value = docker_info.body.KernelVersion
- docker_info_table['3ServerVersion']._value = docker_info.body.ServerVersion
- docker_info_table['4ApiVersion']._value = docker_info.headers["Api-Version"]
- docker_info_table['5NCPU']._value = tostring(docker_info.body.NCPU)
- docker_info_table['6MemTotal']._value = docker.byte_format(docker_info.body.MemTotal)
- if docker_info.body.DockerRootDir then
- local statvfs = nixio.fs.statvfs(docker_info.body.DockerRootDir)
- local size = statvfs and (statvfs.bavail * statvfs.bsize) or 0
- docker_info_table['7DockerRootDir']._value = docker_info.body.DockerRootDir .. " (" .. tostring(docker.byte_format(size)) .. " " .. translate("Available") .. ")"
- end
-
- docker_info_table['8IndexServerAddress']._value = docker_info.body.IndexServerAddress
- for i, v in ipairs(docker_info.body.RegistryConfig.Mirrors) do
- docker_info_table['9RegistryMirrors']._value = docker_info_table['9RegistryMirrors']._value == "-" and v or (docker_info_table['9RegistryMirrors']._value .. ", " .. v)
- end
-
- s.images_used = 0
- for i, v in ipairs(images_list) do
- for ci,cv in ipairs(containers_list) do
- if v.Id == cv.ImageID then
- s.images_used = s.images_used + 1
- break
- end
- end
- end
-
- s.containers_running = tostring(docker_info.body.ContainersRunning)
- s.images_used = tostring(s.images_used)
- s.containers_total = tostring(docker_info.body.Containers)
- s.images_total = tostring(#images_list)
- s.networks_total = tostring(#networks_list)
- s.volumes_total = tostring(#volumes_list)
-else
- docker_info_table['3ServerVersion']._value = translate("Can NOT connect to docker daemon, please check!!")
-end
-
-return m
diff --git a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua b/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua
deleted file mode 100644
index 6c7064c41..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua
+++ /dev/null
@@ -1,143 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local docker = require "luci.model.docker"
-local dk = docker.new()
-
-local m, s, o
-
-local res, containers, volumes, lost_state
-
-function get_volumes()
- local data = {}
- for i, v in ipairs(volumes) do
- local index = v.Name
- data[index]={}
- data[index]["_selected"] = 0
- data[index]["_nameraw"] = v.Name
- data[index]["_name"] = v.Name:sub(1,12)
-
- for ci,cv in ipairs(containers) do
- if cv.Mounts and type(cv.Mounts) ~= "table" then
- break
- end
- for vi, vv in ipairs(cv.Mounts) do
- if v.Name == vv.Name then
- data[index]["_containers"] = (data[index]["_containers"] and (data[index]["_containers"] .. " | ") or "")..
- '<a href="'..luci.dispatcher.build_url("admin/docker/container/"..cv.Id)..'">'.. cv.Names[1]:sub(2)..'</a>'
- end
- end
- end
- data[index]["_driver"] = v.Driver
- data[index]["_mountpoint"] = nil
-
- for v1 in v.Mountpoint:gmatch('[^/]+') do
- if v1 == index then
- data[index]["_mountpoint"] = data[index]["_mountpoint"] .."/" .. v1:sub(1,12) .. "..."
- else
- data[index]["_mountpoint"] = (data[index]["_mountpoint"] and data[index]["_mountpoint"] or "").."/".. v1
- end
- end
- data[index]["_created"] = v.CreatedAt
- data[index]["_size"] = "-"
- end
-
- return data
-end
-
-if dk:_ping().code ~= 200 then
- lost_state = true
-else
- res = dk.volumes:list()
- if res and res.code and res.code <300 then
- volumes = res.body.Volumes
- end
-
- res = dk.containers:list({
- query = {
- all=true
- }
- })
- if res and res.code and res.code <300 then
- containers = res.body
- end
-end
-
-local volume_list = not lost_state and get_volumes() or {}
-
-m = SimpleForm("docker", translate("Docker - Volumes"))
-m.submit=false
-m.reset=false
-m:append(Template("dockerman/volume_size"))
-
-s = m:section(Table, volume_list, translate("Volumes overview"))
-
-o = s:option(Flag, "_selected","")
-o.disabled = 0
-o.enabled = 1
-o.default = 0
-o.write = function(self, section, value)
- volume_list[section]._selected = value
-end
-
-o = s:option(DummyValue, "_name", translate("Name"))
-o = s:option(DummyValue, "_driver", translate("Driver"))
-o = s:option(DummyValue, "_containers", translate("Containers"))
-o.rawhtml = true
-o = s:option(DummyValue, "_mountpoint", translate("Mount Point"))
-o = s:option(DummyValue, "_size", translate("Size"))
-o.rawhtml = true
-o = s:option(DummyValue, "_created", translate("Created"))
-
-s = m:section(SimpleSection)
-s.template = "dockerman/apply_widget"
-s.err=docker:read_status()
-s.err=s.err and s.err:gsub("\n","<br />"):gsub(" ","&#160;")
-if s.err then
- docker:clear_status()
-end
-
-s = m:section(Table,{{}})
-s.notitle=true
-s.rowcolors=false
-s.template="cbi/nullsection"
-
-o = s:option(Button, "remove")
-o.inputtitle= translate("Remove")
-o.template = "dockerman/cbi/inlinebutton"
-o.inputstyle = "remove"
-o.forcewrite = true
-o.disable = lost_state
-o.write = function(self, section)
- local volume_selected = {}
-
- for k in pairs(volume_list) do
- if volume_list[k]._selected == 1 then
- volume_selected[#volume_selected+1] = k
- end
- end
-
- if next(volume_selected) ~= nil then
- local success = true
- docker:clear_status()
- for _,vol in ipairs(volume_selected) do
- docker:append_status("Volumes: " .. "remove" .. " " .. vol .. "...")
- local msg = dk.volumes["remove"](dk, {id = vol})
- if msg and msg.code and msg.code ~= 204 then
- docker:append_status("code:" .. msg.code.." ".. (msg.body.message and msg.body.message or msg.message).. "\n")
- success = false
- else
- docker:append_status("done\n")
- end
- end
-
- if success then
- docker:clear_status()
- end
- luci.http.redirect(luci.dispatcher.build_url("admin/docker/volumes"))
- end
-end
-
-return m
diff --git a/package/lean/luci-app-dockerman/luasrc/model/docker.lua b/package/lean/luci-app-dockerman/luasrc/model/docker.lua
deleted file mode 100644
index bf8fc6254..000000000
--- a/package/lean/luci-app-dockerman/luasrc/model/docker.lua
+++ /dev/null
@@ -1,504 +0,0 @@
---[[
-LuCI - Lua Configuration Interface
-Copyright 2019 lisaac
-]]--
-
-local docker = require "luci.docker"
-local fs = require "nixio.fs"
-local uci = (require "luci.model.uci").cursor()
-
-local _docker = {}
-_docker.options = {}
-
---pull image and return iamge id
-local update_image = function(self, image_name)
- local json_stringify = luci.jsonc and luci.jsonc.stringify
- _docker:append_status("Images: " .. "pulling" .. " " .. image_name .. "...\n")
- local res = self.images:create({query = {fromImage=image_name}}, _docker.pull_image_show_status_cb)
-
- if res and res.code and res.code == 200 and (#res.body > 0 and not res.body[#res.body].error and res.body[#res.body].status and (res.body[#res.body].status == "Status: Downloaded newer image for ".. image_name)) then
- _docker:append_status("done\n")
- else
- res.body.message = res.body[#res.body] and res.body[#res.body].error or (res.body.message or res.message)
- end
-
- new_image_id = self.images:inspect({name = image_name}).body.Id
- return new_image_id, res
-end
-
-local table_equal = function(t1, t2)
- if not t1 then
- return true
- end
-
- if not t2 then
- return false
- end
-
- if #t1 ~= #t2 then
- return false
- end
-
- for i, v in ipairs(t1) do
- if t1[i] ~= t2[i] then
- return false
- end
- end
-
- return true
-end
-
-local table_subtract = function(t1, t2)
- if not t1 or next(t1) == nil then
- return nil
- end
-
- if not t2 or next(t2) == nil then
- return t1
- end
-
- local res = {}
- for _, v1 in ipairs(t1) do
- local found = false
- for _, v2 in ipairs(t2) do
- if v1 == v2 then
- found= true
- break
- end
- end
- if not found then
- table.insert(res, v1)
- end
- end
-
- return next(res) == nil and nil or res
-end
-
-local map_subtract = function(t1, t2)
- if not t1 or next(t1) == nil then
- return nil
- end
-
- if not t2 or next(t2) == nil then
- return t1
- end
-
- local res = {}
- for k1, v1 in pairs(t1) do
- local found = false
- for k2, v2 in ipairs(t2) do
- if k1 == k2 and luci.util.serialize_data(v1) == luci.util.serialize_data(v2) then
- found= true
- break
- end
- end
-
- if not found then
- res[k1] = v1
- end
- end
-
- return next(res) ~= nil and res or nil
-end
-
-_docker.clear_empty_tables = function ( t )
- local k, v
-
- if next(t) == nil then
- t = nil
- else
- for k, v in pairs(t) do
- if type(v) == 'table' then
- t[k] = _docker.clear_empty_tables(v)
- end
- end
- end
-
- return t
-end
-
-local get_config = function(container_config, image_config)
- local config = container_config.Config
- local old_host_config = container_config.HostConfig
- local old_network_setting = container_config.NetworkSettings.Networks or {}
-
- if config.WorkingDir == image_config.WorkingDir then
- config.WorkingDir = ""
- end
-
- if config.User == image_config.User then
- config.User = ""
- end
-
- if table_equal(config.Cmd, image_config.Cmd) then
- config.Cmd = nil
- end
-
- if table_equal(config.Entrypoint, image_config.Entrypoint) then
- config.Entrypoint = nil
- end
-
- if table_equal(config.ExposedPorts, image_config.ExposedPorts) then
- config.ExposedPorts = nil
- end
-
- config.Env = table_subtract(config.Env, image_config.Env)
- config.Labels = table_subtract(config.Labels, image_config.Labels)
- config.Volumes = map_subtract(config.Volumes, image_config.Volumes)
-
- if old_host_config.PortBindings and next(old_host_config.PortBindings) ~= nil then
- config.ExposedPorts = {}
- for p, v in pairs(old_host_config.PortBindings) do
- config.ExposedPorts[p] = { HostPort=v[1] and v[1].HostPort }
- end
- end
-
- local network_setting = {}
- local multi_network = false
- local extra_network = {}
-
- for k, v in pairs(old_network_setting) do
- if multi_network then
- extra_network[k] = v
- else
- network_setting[k] = v
- end
- multi_network = true
- end
-
- local host_config = old_host_config
- host_config.Mounts = {}
- for i, v in ipairs(container_config.Mounts) do
- if v.Type == "volume" then
- table.insert(host_config.Mounts, {
- Type = v.Type,
- Target = v.Destination,
- Source = v.Source:match("([^/]+)\/_data"),
- BindOptions = (v.Type == "bind") and {Propagation = v.Propagation} or nil,
- ReadOnly = not v.RW
- })
- end
- end
-
- local create_body = config
- create_body["HostConfig"] = host_config
- create_body["NetworkingConfig"] = {EndpointsConfig = network_setting}
- create_body = _docker.clear_empty_tables(create_body) or {}
- extra_network = _docker.clear_empty_tables(extra_network) or {}
-
- return create_body, extra_network
-end
-
-local upgrade = function(self, request)
- _docker:clear_status()
-
- local container_info = self.containers:inspect({id = request.id})
-
- if container_info.code > 300 and type(container_info.body) == "table" then
- return container_info
- end
-
- local image_name = container_info.body.Config.Image
- if not image_name:match(".-:.+") then
- image_name = image_name .. ":latest"
- end
-
- local old_image_id = container_info.body.Image
- local container_name = container_info.body.Name:sub(2)
-
- local image_id, res = update_image(self, image_name)
- if res and res.code and res.code ~= 200 then
- return res
- end
-
- if image_id == old_image_id then
- return {code = 305, body = {message = "Already up to date"}}
- end
-
- local t = os.date("%Y%m%d%H%M%S")
- _docker:append_status("Container: rename" .. " " .. container_name .. " to ".. container_name .. "_old_".. t .. "...")
- res = self.containers:rename({name = container_name, query = { name = container_name .. "_old_" ..t }})
- if res and res.code and res.code < 300 then
- _docker:append_status("done\n")
- else
- return res
- end
-
- local image_config = self.images:inspect({id = old_image_id}).body.Config
- local create_body, extra_network = get_config(container_info.body, image_config)
-
- -- create new container
- _docker:append_status("Container: Create" .. " " .. container_name .. "...")
- create_body = _docker.clear_empty_tables(create_body)
- res = self.containers:create({name = container_name, body = create_body})
- if res and res.code and res.code > 300 then
- return res
- end
- _docker:append_status("done\n")
-
- -- extra networks need to network connect action
- for k, v in pairs(extra_network) do
- _docker:append_status("Networks: Connect" .. " " .. container_name .. "...")
- res = self.networks:connect({id = k, body = {Container = container_name, EndpointConfig = v}})
- if res and res.code and res.code > 300 then
- return res
- end
- _docker:append_status("done\n")
- end
-
- _docker:append_status("Container: " .. "Stop" .. " " .. container_name .. "_old_".. t .. "...")
- res = self.containers:stop({name = container_name .. "_old_" ..t })
- if res and res.code and res.code < 305 then
- _docker:append_status("done\n")
- else
- return res
- end
-
- _docker:append_status("Container: " .. "Start" .. " " .. container_name .. "...")
- res = self.containers:start({name = container_name})
- if res and res.code and res.code < 305 then
- _docker:append_status("done\n")
- else
- return res
- end
-
- _docker:clear_status()
- return res
-end
-
-local duplicate_config = function (self, request)
- local container_info = self.containers:inspect({id = request.id})
- if container_info.code > 300 and type(container_info.body) == "table" then
- return nil
- end
-
- local old_image_id = container_info.body.Image
- local image_config = self.images:inspect({id = old_image_id}).body.Config
-
- return get_config(container_info.body, image_config)
-end
-
-_docker.new = function()
- local host = nil
- local port = nil
- local socket_path = nil
- local debug_path = nil
-
- if uci:get_bool("dockerd", "dockerman", "remote_endpoint") then
- host = uci:get("dockerd", "dockerman", "remote_host") or nil
- port = uci:get("dockerd", "dockerman", "remote_port") or nil
- else
- socket_path = uci:get("dockerd", "dockerman", "socket_path") or "/var/run/docker.sock"
- end
-
- local debug = uci:get_bool("dockerd", "dockerman", "debug")
- if debug then
- debug_path = uci:get("dockerd", "dockerman", "debug_path") or "/tmp/.docker_debug"
- end
-
- local status_path = uci:get("dockerd", "dockerman", "status_path") or "/tmp/.docker_action_status"
-
- _docker.options = {
- host = host,
- port = port,
- socket_path = socket_path,
- debug = debug,
- debug_path = debug_path,
- status_path = status_path
- }
-
- local _new = docker.new(_docker.options)
- _new.containers_upgrade = upgrade
- _new.containers_duplicate_config = duplicate_config
-
- return _new
-end
-
-_docker.options.status_path = uci:get("dockerd", "dockerman", "status_path") or "/tmp/.docker_action_status"
-
-_docker.append_status=function(self,val)
- if not val then
- return
- end
- local file_docker_action_status=io.open(self.options.status_path, "a+")
- file_docker_action_status:write(val)
- file_docker_action_status:close()
-end
-
-_docker.write_status=function(self,val)
- if not val then
- return
- end
- local file_docker_action_status=io.open(self.options.status_path, "w+")
- file_docker_action_status:write(val)
- file_docker_action_status:close()
-end
-
-_docker.read_status=function(self)
- return fs.readfile(self.options.status_path)
-end
-
-_docker.clear_status=function(self)
- fs.remove(self.options.status_path)
-end
-
-local status_cb = function(res, source, handler)
- res.body = res.body or {}
- while true do
- local chunk = source()
- if chunk then
- --standard output to res.body
- table.insert(res.body, chunk)
- handler(chunk)
- else
- return
- end
- end
-end
-
---{"status":"Pulling from library\/debian","id":"latest"}
---{"status":"Pulling fs layer","progressDetail":[],"id":"50e431f79093"}
---{"status":"Downloading","progressDetail":{"total":50381971,"current":2029978},"id":"50e431f79093","progress":"[==> ] 2.03MB\/50.38MB"}
---{"status":"Download complete","progressDetail":[],"id":"50e431f79093"}
---{"status":"Extracting","progressDetail":{"total":50381971,"current":17301504},"id":"50e431f79093","progress":"[=================> ] 17.3MB\/50.38MB"}
---{"status":"Pull complete","progressDetail":[],"id":"50e431f79093"}
---{"status":"Digest: sha256:a63d0b2ecbd723da612abf0a8bdb594ee78f18f691d7dc652ac305a490c9b71a"}
---{"status":"Status: Downloaded newer image for debian:latest"}
-_docker.pull_image_show_status_cb = function(res, source)
- return status_cb(res, source, function(chunk)
- local json_parse = luci.jsonc.parse
- local step = json_parse(chunk)
- if type(step) == "table" then
- local buf = _docker:read_status()
- local num = 0
- local str = '\t' .. (step.id and (step.id .. ": ") or "") .. (step.status and step.status or "") .. (step.progress and (" " .. step.progress) or "").."\n"
- if step.id then
- buf, num = buf:gsub("\t"..step.id .. ": .-\n", str)
- end
- if num == 0 then
- buf = buf .. str
- end
- _docker:write_status(buf)
- end
- end)
-end
-
---{"status":"Downloading from https://downloads.openwrt.org/releases/19.07.0/targets/x86/64/openwrt-19.07.0-x86-64-generic-rootfs.tar.gz"}
---{"status":"Importing","progressDetail":{"current":1572391,"total":3821714},"progress":"[====================\u003e ] 1.572MB/3.822MB"}
---{"status":"sha256:d5304b58e2d8cc0a2fd640c05cec1bd4d1229a604ac0dd2909f13b2b47a29285"}
-_docker.import_image_show_status_cb = function(res, source)
- return status_cb(res, source, function(chunk)
- local json_parse = luci.jsonc.parse
- local step = json_parse(chunk)
- if type(step) == "table" then
- local buf = _docker:read_status()
- local num = 0
- local str = '\t' .. (step.status and step.status or "") .. (step.progress and (" " .. step.progress) or "").."\n"
- if step.status then
- buf, num = buf:gsub("\t"..step.status .. " .-\n", str)
- end
- if num == 0 then
- buf = buf .. str
- end
- _docker:write_status(buf)
- end
- end)
-end
-
-_docker.create_macvlan_interface = function(name, device, gateway, subnet)
- if not fs.access("/etc/config/network") or not fs.access("/etc/config/firewall") then
- return
- end
-
- if uci:get_bool("dockerd", "dockerman", "remote_endpoint") then
- return
- end
-
- local ip = require "luci.ip"
- local if_name = "docker_"..name
- local dev_name = "macvlan_"..name
- local net_mask = tostring(ip.new(subnet):mask())
- local lan_interfaces
-
- -- add macvlan device
- uci:delete("network", dev_name)
- uci:set("network", dev_name, "device")
- uci:set("network", dev_name, "name", dev_name)
- uci:set("network", dev_name, "ifname", device)
- uci:set("network", dev_name, "type", "macvlan")
- uci:set("network", dev_name, "mode", "bridge")
-
- -- add macvlan interface
- uci:delete("network", if_name)
- uci:set("network", if_name, "interface")
- uci:set("network", if_name, "proto", "static")
- uci:set("network", if_name, "ifname", dev_name)
- uci:set("network", if_name, "ipaddr", gateway)
- uci:set("network", if_name, "netmask", net_mask)
- uci:foreach("firewall", "zone", function(s)
- if s.name == "lan" then
- local interfaces
- if type(s.network) == "table" then
- interfaces = table.concat(s.network, " ")
- uci:delete("firewall", s[".name"], "network")
- else
- interfaces = s.network and s.network or ""
- end
- interfaces = interfaces .. " " .. if_name
- interfaces = interfaces:gsub("%s+", " ")
- uci:set("firewall", s[".name"], "network", interfaces)
- end
- end)
-
- uci:commit("firewall")
- uci:commit("network")
-
- os.execute("ifup " .. if_name)
-end
-
-_docker.remove_macvlan_interface = function(name)
- if not fs.access("/etc/config/network") or not fs.access("/etc/config/firewall") then
- return
- end
-
- if uci:get_bool("dockerd", "dockerman", "remote_endpoint") then
- return
- end
-
- local if_name = "docker_"..name
- local dev_name = "macvlan_"..name
- uci:foreach("firewall", "zone", function(s)
- if s.name == "lan" then
- local interfaces
- if type(s.network) == "table" then
- interfaces = table.concat(s.network, " ")
- else
- interfaces = s.network and s.network or ""
- end
- interfaces = interfaces and interfaces:gsub(if_name, "")
- interfaces = interfaces and interfaces:gsub("%s+", " ")
- uci:set("firewall", s[".name"], "network", interfaces)
- end
- end)
-
- uci:delete("network", dev_name)
- uci:delete("network", if_name)
- uci:commit("network")
- uci:commit("firewall")
-
- os.execute("ip link del " .. if_name)
-end
-
-_docker.byte_format = function (byte)
- if not byte then return 'NaN' end
- local suff = {"B", "KB", "MB", "GB", "TB"}
- for i=1, 5 do
- if byte > 1024 and i < 5 then
- byte = byte / 1024
- else
- return string.format("%.2f %s", byte, suff[i])
- end
- end
-end
-
-return _docker
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/apply_widget.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/apply_widget.htm
deleted file mode 100644
index f96b2d72a..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/apply_widget.htm
+++ /dev/null
@@ -1,147 +0,0 @@
-
-
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/inlinebutton.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/inlinebutton.htm
deleted file mode 100644
index a061a6dba..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/inlinebutton.htm
+++ /dev/null
@@ -1,7 +0,0 @@
-
- <% if self:cfgvalue(section) ~= false then %>
- " type="submit"" <% if self.disable then %>disabled <% end %><%= attr("name", cbid) .. attr("id", cbid) .. attr("value", self.inputtitle or self.title)%> />
- <% else %>
- -
- <% end %>
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/inlinevalue.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/inlinevalue.htm
deleted file mode 100644
index e4b0cf7a0..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/inlinevalue.htm
+++ /dev/null
@@ -1,33 +0,0 @@
-
-
- <%- if self.password then -%>
- />
- <%- end -%>
- 0, "data-choices", { self.keylist, self.vallist })
- %> />
- <%- if self.password then -%>
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/namedsection.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/namedsection.htm
deleted file mode 100644
index 244d2c10a..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/namedsection.htm
+++ /dev/null
@@ -1,9 +0,0 @@
-<% if self:cfgvalue(self.section) then section = self.section %>
-
- <% end %>
-
- <%+cbi/tabmenu%>
-
- <%+cbi/ucisection%>
-
-
-<% end %>
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/xfvalue.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/xfvalue.htm
deleted file mode 100644
index 04f7bc2ee..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/cbi/xfvalue.htm
+++ /dev/null
@@ -1,10 +0,0 @@
-<%+cbi/valueheader%>
- />
- disabled <% end %><%=
- attr("id", cbid) .. attr("name", cbid) .. attr("value", self.enabled or 1) ..
- ifattr((self:cfgvalue(section) or self.default) == self.enabled, "checked", "checked")
- %> />
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/container.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/container.htm
deleted file mode 100644
index 9f05d9d58..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/container.htm
+++ /dev/null
@@ -1,28 +0,0 @@
-
-
-
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_console.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_console.htm
deleted file mode 100644
index 7f626b3dc..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_console.htm
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm
deleted file mode 100644
index af2f6f43f..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm
+++ /dev/null
@@ -1,331 +0,0 @@
-
-
-
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_stats.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_stats.htm
deleted file mode 100644
index bbcd633e7..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/container_stats.htm
+++ /dev/null
@@ -1,81 +0,0 @@
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/containers_running_stats.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/containers_running_stats.htm
deleted file mode 100644
index d88e28be9..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/containers_running_stats.htm
+++ /dev/null
@@ -1,91 +0,0 @@
-
\ No newline at end of file
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/images_import.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/images_import.htm
deleted file mode 100644
index 0ad6e0fce..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/images_import.htm
+++ /dev/null
@@ -1,104 +0,0 @@
-
-
-
- disabled <% end %>/>
-
-
-
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/images_load.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/images_load.htm
deleted file mode 100644
index b201510ac..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/images_load.htm
+++ /dev/null
@@ -1,40 +0,0 @@
-
- disabled <% end %>/>
-
-
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/logs.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/logs.htm
deleted file mode 100644
index 6cd2cb095..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/logs.htm
+++ /dev/null
@@ -1,13 +0,0 @@
-<% if self.title == "Events" then %>
-<%+header%>
-<%:Docker - Events%>
-
-<%:Events%>
-<% end %>
-
-
-
-<% if self.title == "Events" then %>
-
-<%+footer%>
-<% end %>
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/newcontainer_resolve.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/newcontainer_resolve.htm
deleted file mode 100644
index 338fd59d5..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/newcontainer_resolve.htm
+++ /dev/null
@@ -1,102 +0,0 @@
-
-
-
-<%+cbi/valueheader%>
-
-
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/overview.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/overview.htm
deleted file mode 100644
index e491fc512..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/overview.htm
+++ /dev/null
@@ -1,197 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
- <%:Containers%>
-
- <%- if self.containers_total ~= "-" then -%><%- end -%>
- <%=self.containers_running%>
- /<%=self.containers_total%>
- <%- if self.containers_total ~= "-" then -%><%- end -%>
-
-
-
-
-
-
-
-
-
-
-
-
- <%:Images%>
-
- <%- if self.images_total ~= "-" then -%><%- end -%>
- <%=self.images_used%>
- /<%=self.images_total%>
- <%- if self.images_total ~= "-" then -%><%- end -%>
-
-
-
-
-
-
-
-
-
-
-
-
- <%:Networks%>
-
- <%- if self.networks_total ~= "-" then -%><%- end -%>
- <%=self.networks_total%>
-
- <%- if self.networks_total ~= "-" then -%><%- end -%>
-
-
-
-
-
-
-
-
-
-
-
-
- <%:Volumes%>
-
- <%- if self.volumes_total ~= "-" then -%><%- end -%>
- <%=self.volumes_total%>
-
- <%- if self.volumes_total ~= "-" then -%><%- end -%>
-
-
-
-
-
diff --git a/package/lean/luci-app-dockerman/luasrc/view/dockerman/volume_size.htm b/package/lean/luci-app-dockerman/luasrc/view/dockerman/volume_size.htm
deleted file mode 100644
index dc024734b..000000000
--- a/package/lean/luci-app-dockerman/luasrc/view/dockerman/volume_size.htm
+++ /dev/null
@@ -1,21 +0,0 @@
-
\ No newline at end of file
diff --git a/package/lean/luci-app-dockerman/po/zh-cn/dockerman.po b/package/lean/luci-app-dockerman/po/zh-cn/dockerman.po
deleted file mode 100644
index 2bdc11b8d..000000000
--- a/package/lean/luci-app-dockerman/po/zh-cn/dockerman.po
+++ /dev/null
@@ -1,1094 +0,0 @@
-msgid ""
-msgstr ""
-"PO-Revision-Date: 2021-03-19 04:16+0000\n"
-"Last-Translator: Eric \n"
-"Language-Team: Chinese (Simplified) \n"
-"Language: zh_Hans\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=1; plural=0;\n"
-"X-Generator: Weblate 4.5.2-dev\n"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:619
-msgid "A list of kernel capabilities to add to the container"
-msgstr "要添加到容器的内核功能列表"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:69
-msgid "Access Control"
-msgstr "访问控制"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:223
-msgid "Add"
-msgstr "新增"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:595
-msgid "Add host device to the container"
-msgstr "将主机设备添加到容器"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:571
-msgid "Advance"
-msgstr "高级选项"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:586
-msgid "Allocates an ephemeral host port for all of a container's exposed ports"
-msgstr "为容器的所有暴露端口分配临时主机端口"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:118
-msgid "Allowed access interfaces"
-msgstr "允许的访问接口"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:498
-msgid "Always pull image first"
-msgstr "总是先拉取镜像"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:29
-msgid ""
-"An overview with the relevant data is displayed here with which the LuCI "
-"docker client is connected."
-msgstr "在此展示与LuCI docker客户端相连接的相关数据的概览。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:43
-msgid "Api Version"
-msgstr "Api 版本"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:94
-msgid "Auto create macvlan interface in Openwrt"
-msgstr "在 Openwrt 中自动创建 macvlan 界面"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:18
-msgid "Auto start"
-msgstr "自动启动"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:134
-msgid "Available"
-msgstr "可用"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:47
-msgid "Base device"
-msgstr "基设备"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:553
-msgid "Bind Mount(-v)"
-msgstr "绑定挂载(-v)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:554
-msgid "Bind mount a volume"
-msgstr "绑定挂载卷"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:596
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:652
-msgid "Block IO Weight"
-msgstr "块 IO 权重"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:653
-msgid ""
-"Block IO weight (relative weight) accepts a weight value between 10 and 1000"
-msgstr "块 IO 权重(相对权重)接受10到1000之间的数值"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:597
-msgid ""
-"Block IO weight (relative weight) accepts a weight value between 10 and 1000."
-msgstr "块 IO 权重(相对权重)接受10到1000之间的数值。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:61
-msgid "Bridge (Support direct communication between MAC VLANs)"
-msgstr "桥接(支持 MAC VLAN 之间的直接通信)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:42
-msgid "Bridge device"
-msgstr "Bridge device"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:84
-msgid ""
-"By entering a valid image name with the corresponding version, the docker "
-"image can be downloaded from the configured registry."
-msgstr ""
-"通过输入具有相应版本的有效映像名称,可以从镜像存储中心(Registry)中下载"
-"docker映像。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:618
-msgid "CAP-ADD(--cap-add)"
-msgstr "权限控制(--cap-add)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:581
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:635
-msgid "CPU Shares Weight"
-msgstr "CPU 共享权重"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:779
-msgid "CPU Useage"
-msgstr "CPU 使用率"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:636
-msgid ""
-"CPU shares relative weight, if 0 is set, the system will ignore the value "
-"and use the default of 1024"
-msgstr "CPU 共享相对权重,如果设置为 0,则系统将忽略该值并使用默认值 1024"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:582
-msgid ""
-"CPU shares relative weight, if 0 is set, the system will ignore the value "
-"and use the default of 1024."
-msgstr "CPU 共享相对权重,如果设置为 0,则系统将忽略该值并使用默认值 1024。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:573
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:626
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:44
-msgid "CPUs"
-msgstr "线程数量"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:159
-msgid "Can NOT connect to docker daemon, please check!!"
-msgstr "无法连接到docker守护进程(docker daemon),请检查!!"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/newcontainer_resolve.htm:91
-msgid "Cancel"
-msgstr "取消"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:60
-msgid "Client connection"
-msgstr "客户端连接"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:347
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:687
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:182
-msgid "Command"
-msgstr "命令"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/newcontainer_resolve.htm:100
-msgid "Command line"
-msgstr "命令行"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/newcontainer_resolve.htm:72
-msgid "Command line Error"
-msgstr "命令行错误"
-
-#: applications/luci-app-dockerman/luasrc/controller/dockerman.lua:17
-msgid "Configuration"
-msgstr "配置"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:36
-msgid "Configure the default bridge network"
-msgstr "配置默认桥接网络"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:405
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:707
-msgid "Connect"
-msgstr "连接"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:403
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:437
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:473
-msgid "Connect Network"
-msgstr "连接网络"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:74
-msgid "Connect to remote docker endpoint"
-msgstr "连接到远程docker"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container.htm:7
-msgid "Console"
-msgstr "控制台"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:161
-msgid "Container Info"
-msgstr "容器信息"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:650
-msgid "Container Inspect"
-msgstr "检查容器"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:671
-msgid "Container Logs"
-msgstr "容器日志"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:473
-msgid "Container Name"
-msgstr "容器名称"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:92
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:58
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:29
-msgid "Container detail"
-msgstr "容器详情"
-
-#: applications/luci-app-dockerman/luasrc/controller/dockerman.lua:38
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:142
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:148
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:87
-#: applications/luci-app-dockerman/luasrc/view/dockerman/overview.htm:133
-msgid "Containers"
-msgstr "容器"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:94
-msgid "Create macvlan interface"
-msgstr "创建 macvlan 接口"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:465
-msgid "Create new docker container"
-msgstr "创建 docker 容器"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:31
-msgid "Create new docker network"
-msgstr "创建 docker 网络"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:312
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:153
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:92
-msgid "Created"
-msgstr "创建时间"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:33
-msgid "DELETING"
-msgstr "删除中"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:371
-msgid "DNS"
-msgstr "DNS"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:51
-msgid "Debug"
-msgstr "调试"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:35
-msgid "Default bridge"
-msgstr "默认桥接"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:363
-msgid "Device"
-msgstr "设备"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:594
-msgid "Device(--device)"
-msgstr "设备(--device)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:396
-msgid "Disconnect"
-msgstr "断开"
-
-#: applications/luci-app-dockerman/luasrc/controller/dockerman.lua:14
-msgid "Docker"
-msgstr "Docker"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:12
-msgid "Docker - Configuration"
-msgstr "Docker - 配置"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:192
-msgid "Docker - Container (%s)"
-msgstr "Docker - 容器 (%s)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:128
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:450
-msgid "Docker - Containers"
-msgstr "Docker - 容器"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/logs.htm:3
-msgid "Docker - Events"
-msgstr "Docker - 事件"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:72
-msgid "Docker - Images"
-msgstr "Docker - 镜像"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:15
-msgid "Docker - Network"
-msgstr "Docker - 网络"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:54
-msgid "Docker - Networks"
-msgstr "Docker - 网络"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:28
-msgid "Docker - Overview"
-msgstr "Docker - 概览"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:69
-msgid "Docker - Volumes"
-msgstr "Docker - 存储卷"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:16
-msgid "Docker Daemon settings"
-msgstr "Docker 服务端(Docker Daemon)设置"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:489
-msgid "Docker Image"
-msgstr "Docker 镜像"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:30
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:46
-msgid "Docker Root Dir"
-msgstr "Docker 根目录"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:93
-msgid "Docker Socket Path"
-msgstr "Docker 套接字路径"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:42
-msgid "Docker Version"
-msgstr "Docker 版本"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/apply_widget.htm:91
-msgid "Docker actions done."
-msgstr "Docker 执行完成。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:70
-msgid "DockerMan"
-msgstr "DockerMan"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:13
-msgid "DockerMan is a simple docker manager client for LuCI"
-msgstr "DockerMan是用于LuCI的简单docker管理器客户端"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:68
-msgid "DockerMan settings"
-msgstr "DockerMan设置"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:172
-msgid "Download"
-msgstr "下载"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:82
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:40
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:85
-msgid "Driver"
-msgstr "驱动"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:265
-msgid "Duplicate/Edit"
-msgstr "复制/编辑"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:120
-msgid "Enable IPv6"
-msgstr "启用 IPv6"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:351
-msgid "Env"
-msgstr "环境变量"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:546
-msgid "Environmental Variable(-e)"
-msgstr "环境变量(-e)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:54
-msgid "Error"
-msgstr "错误"
-
-#: applications/luci-app-dockerman/luasrc/controller/dockerman.lua:42
-#: applications/luci-app-dockerman/luasrc/view/dockerman/logs.htm:5
-msgid "Events"
-msgstr "事件"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:116
-msgid "Exclude IPs"
-msgstr "排除 IP"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:247
-msgid "Export"
-msgstr "导出"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:585
-msgid "Exposed All Ports(-P)"
-msgstr "暴露所有端口(-P)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:560
-msgid "Exposed Ports(-p)"
-msgstr "暴露端口(-p)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:55
-msgid "Fatal"
-msgstr "致命的"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container.htm:6
-msgid "File"
-msgstr "文件"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:324
-msgid "Finish Time"
-msgstr "完成时间"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:220
-msgid "Force Remove"
-msgstr "强制移除"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:88
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:106
-msgid "Gateway"
-msgstr "网关"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:33
-msgid "Github"
-msgstr "Github"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/cbi/inlinevalue.htm:4
-msgid "Go to relevant configuration page"
-msgstr "进入相关配置页面"
-
-#: applications/luci-app-dockerman/root/usr/share/rpcd/acl.d/luci-app-dockerman.json:3
-msgid "Grant UCI access for luci-app-dockerman"
-msgstr "授予 UCI 访问 luci-app-dockerman 的权限"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:330
-msgid "Healthy"
-msgstr "健康"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:578
-msgid "Host Name"
-msgstr "主机名"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:100
-msgid "Host or IP Address for the connection to a remote docker instance"
-msgstr "连接到远程Docker实例的主机名或IP地址"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:300
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:142
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:78
-msgid "ID"
-msgstr "ID"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:44
-msgid "IP VLAN"
-msgstr "IP VLAN"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:111
-msgid "IP range"
-msgstr "IP 范围"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:522
-msgid "IPv4 Address"
-msgstr "IPv4 地址"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:132
-msgid "IPv6 Gateway"
-msgstr "IPv6 网关"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:126
-msgid "IPv6 Subnet"
-msgstr "IPv6 子网"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:304
-#: applications/luci-app-dockerman/luasrc/view/dockerman/images_import.htm:54
-msgid "Image"
-msgstr "镜像"
-
-#: applications/luci-app-dockerman/luasrc/controller/dockerman.lua:39
-#: applications/luci-app-dockerman/luasrc/view/dockerman/overview.htm:151
-msgid "Images"
-msgstr "镜像"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:132
-msgid "Images overview"
-msgstr "镜像概览"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/images_import.htm:4
-msgid "Import"
-msgstr "导入"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:125
-msgid "Import Image"
-msgstr "导入镜像"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:47
-msgid "Index Server Address"
-msgstr "索引服务器地址"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:52
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:414
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:102
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container.htm:3
-msgid "Info"
-msgstr "信息"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:74
-msgid "Ingress"
-msgstr "入口"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:75
-msgid ""
-"Ingress network is the network which provides the routing-mesh in swarm mode"
-msgstr "入口网络是以群模式提供路由网格的网络"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container.htm:8
-msgid "Inspect"
-msgstr "检查"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:477
-msgid "Interactive (-i)"
-msgstr "交互(-i)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:86
-msgid "Internal"
-msgstr "内部"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:66
-msgid "Ipvlan Mode"
-msgstr "Ipvlan 模式"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:43
-msgid ""
-"It replaces the daemon registry mirrors with a new set of registry mirrors"
-msgstr ""
-"设置新的镜像存储中心(Registry)镜像源,这将取代服务端(daemon)配置的镜像存"
-"储中心(Registry)的镜像源"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:238
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:264
-msgid "Kill"
-msgstr "强制关闭"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:70
-msgid "L2 bridge"
-msgstr "L2 桥接"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:71
-msgid "L3 bridge"
-msgstr "L3 桥接"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:359
-msgid "Links"
-msgstr "链接"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:527
-msgid "Links with other containers"
-msgstr "与其他容器的链接"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:283
-#: applications/luci-app-dockerman/luasrc/view/dockerman/images_load.htm:2
-msgid "Load"
-msgstr "负载"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:49
-msgid "Log Level"
-msgstr "日志等级"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:661
-msgid "Log driver options"
-msgstr "日志驱动选项"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container.htm:9
-msgid "Logs"
-msgstr "日志"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:43
-msgid "MAC VLAN"
-msgstr "MAC VLAN"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:589
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:644
-msgid "Memory"
-msgstr "内存"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:783
-msgid "Memory Useage"
-msgstr "内存使用率"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:645
-msgid ""
-"Memory limit (format: []). Number is a positive integer. Unit "
-"can be one of b, k, m, or g. Minimum is 4M"
-msgstr ""
-"内存限制(格式:<数字>[<单位>])。数字是正整数。单位可以是 b、k、m 或 g 之一。"
-"最小值为 4M"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:590
-msgid ""
-"Memory limit (format: []). Number is a positive integer. Unit "
-"can be one of b, k, m, or g. Minimum is 4M."
-msgstr ""
-"内存限制(格式:<数字>[<单位>])。数字是正整数。单位可以是 b、k、m 或 g 之一。"
-"最小值为 4M。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:57
-msgid "Mode"
-msgstr "模式"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:90
-msgid "Mount Point"
-msgstr "挂载点"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:603
-msgid "Mount tmpfs directory"
-msgstr "挂载 tmpfs 目录"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:343
-msgid "Mount/Volume"
-msgstr "挂载/卷"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:175
-msgid "Mounts"
-msgstr "挂载点"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:295
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:419
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:83
-msgid "Name"
-msgstr "名称"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:37
-msgid "Name of the network that can be selected during container creation"
-msgstr "在容器创建时可以选择网络的名称"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:394
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:528
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:169
-msgid "Network"
-msgstr "网络"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:80
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:36
-msgid "Network Name"
-msgstr "网络名称"
-
-#: applications/luci-app-dockerman/luasrc/controller/dockerman.lua:40
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:518
-#: applications/luci-app-dockerman/luasrc/view/dockerman/overview.htm:169
-msgid "Networks"
-msgstr "网络"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:59
-msgid "Networks overview"
-msgstr "网络概览"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:104
-msgid "New"
-msgstr "新建"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:39
-#: applications/luci-app-dockerman/luasrc/view/dockerman/images_import.htm:54
-msgid "New tag"
-msgstr "新建标签"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:627
-msgid "Number of CPUs. Number is a fractional number. 0.000 means no limit"
-msgstr "CPU 数量。数字是小数。0.000 表示没有限制"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:574
-msgid "Number of CPUs. Number is a fractional number. 0.000 means no limit."
-msgstr "CPU 数量。数字是小数。0.000 表示没有限制。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:73
-msgid ""
-"On this page all images are displayed that are available on the system and "
-"with which a container can be created."
-msgstr "在此页面上,显示系统上可用的所有镜像文件,并可以用它们来创建容器"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:193
-msgid "On this page, the selected container can be managed."
-msgstr "在此页面可以管理所选的容器。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:82
-msgid "Options"
-msgstr "选项"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:45
-msgid "Overlay network"
-msgstr "Overlay network"
-
-#: applications/luci-app-dockerman/luasrc/controller/dockerman.lua:37
-msgid "Overview"
-msgstr "概览"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:33
-msgid "PLEASE CONFIRM"
-msgstr "请确认"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:84
-msgid "Parent Interface"
-msgstr "父接口"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:64
-msgid "Pass-through (Mirror physical device to single MAC VLAN)"
-msgstr "直通(将物理设备镜像到单独的 MAC VLAN)"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/images_import.htm:54
-msgid "Please input new tag"
-msgstr "请输入新的标签"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:270
-msgid "Please input the PATH and select the file !"
-msgstr "请输入路径并选择文件!"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:82
-msgid "Please input the PORT or HOST IP of remote docker instance!"
-msgstr "请输入合法的远程docker实例端口和主机IP"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:86
-msgid "Please input the SOCKET PATH of docker daemon!"
-msgstr "请输入合法docker服务端(docker daemon)的SOCKET地址"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/newcontainer_resolve.htm:91
-msgid "Plese input command line:"
-msgstr "请输入 的命令行:"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:355
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:172
-msgid "Ports"
-msgstr "端口"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:124
-msgid "Ports allowed to be accessed"
-msgstr "允许访问的端口"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:62
-msgid "Private (Prevent communication between MAC VLANs)"
-msgstr "专用(阻止 MAC VLAN 之间的通信)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:504
-msgid "Privileged"
-msgstr "特权模式"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:561
-msgid "Publish container's port(s) to the host"
-msgstr "将容器的端口发布到主机"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:100
-msgid "Pull"
-msgstr "拉取"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:83
-msgid "Pull Image"
-msgstr "拉取镜像"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:42
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:48
-msgid "Registry Mirrors"
-msgstr "镜像加速器"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:73
-msgid "Remote Endpoint"
-msgstr "远程实例"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:99
-msgid "Remote Host"
-msgstr "远程主机"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:106
-msgid "Remote Port"
-msgstr "远程端口"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:274
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:274
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:210
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:115
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:108
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:173
-msgid "Remove"
-msgstr "移除"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:43
-#: applications/luci-app-dockerman/luasrc/view/dockerman/images_import.htm:82
-msgid "Remove tag"
-msgstr "移除标签"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:171
-msgid "Rename"
-msgstr "重命名"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:145
-msgid "RepoTags"
-msgstr "仓库标签"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:469
-msgid "Resolve CLI"
-msgstr "解析 CLI"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container.htm:4
-msgid "Resources"
-msgstr "资源"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:220
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:244
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:87
-msgid "Restart"
-msgstr "重新启动"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:334
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:427
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:510
-msgid "Restart Policy"
-msgstr "重启策略"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:86
-msgid "Restrict external access to the network"
-msgstr "限制外部网络访问"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/cbi/inlinevalue.htm:31
-msgid "Reveal/hide password"
-msgstr "显示/隐藏 密码"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:566
-msgid "Run command"
-msgstr "运行命令"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:230
-msgid "Save"
-msgstr "保存"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:533
-msgid "Set custom DNS servers"
-msgstr "设置自定义 DNS 服务器"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:547
-msgid "Set environment variables to inside the container"
-msgstr "在容器内部设置环境变量"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:50
-msgid "Set the logging level"
-msgstr "设置日志记录级别"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:151
-msgid "Size"
-msgstr "大小"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:61
-msgid ""
-"Specifies where the Docker daemon will listen for client connections "
-"(default: unix:///var/run/docker.sock)"
-msgstr ""
-"指定Docker服务端(Docker daemon)将在何处侦听客户端连接(默认: unix:///var/"
-"run/docker.sock)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:211
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:234
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:65
-msgid "Start"
-msgstr "启动"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:319
-msgid "Start Time"
-msgstr "开始时间"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:789
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:790
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container.htm:5
-msgid "Stats"
-msgstr "状态"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:308
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:165
-msgid "Status"
-msgstr "状态"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:229
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:254
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:65
-msgid "Stop"
-msgstr "停止"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/newcontainer_resolve.htm:91
-msgid "Submit"
-msgstr "提交"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:86
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:101
-msgid "Subnet"
-msgstr "子网"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:375
-msgid "Sysctl"
-msgstr "系统控制"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:610
-msgid "Sysctl(--sysctl)"
-msgstr "系统控制(--sysctl)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:611
-msgid "Sysctls (kernel parameters) options"
-msgstr "系统控制(内核参数)选项"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:792
-msgid "TOP"
-msgstr "TOP"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:483
-msgid "TTY (-t)"
-msgstr "TTY(-t)"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_stats.htm:56
-msgid "TX/RX"
-msgstr "发射/接收"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:579
-msgid "The hostname to use for the container"
-msgstr "容器使用的主机名"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:662
-msgid "The logging configuration for this container"
-msgstr "该容器的日志记录配置"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:540
-msgid ""
-"The user that commands are run as inside the container.(format: name|uid[:"
-"group|gid])"
-msgstr "在容器中以用户运行命令。(格式:name|uid[:group|gid])"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/containers.lua:129
-msgid ""
-"This page displays all containers that have been created on the connected "
-"docker host."
-msgstr "此页面显示在连接的Docker主机上已创建的所有容器。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/networks.lua:55
-msgid ""
-"This page displays all docker networks that have been created on the "
-"connected docker host."
-msgstr "此页面显示在已连接的Docker主机上创建的所有Docker网络。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:367
-msgid "Tmpfs"
-msgstr "Tmpfs"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:602
-msgid "Tmpfs(--tmpfs)"
-msgstr "Tmpfs(--tmpfs)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/overview.lua:45
-msgid "Total Memory"
-msgstr "总内存"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:697
-msgid "UID"
-msgstr "UID"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:297
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:336
-msgid "Update"
-msgstr "更新"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:256
-msgid "Upgrade"
-msgstr "升级"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:7
-msgid "Upload"
-msgstr "上传"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:303
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:304
-msgid "Upload Error"
-msgstr "上传错误"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_file_manager.htm:294
-msgid "Upload Success"
-msgstr "上传成功"
-
-#: applications/luci-app-dockerman/luasrc/view/dockerman/container_stats.htm:48
-msgid "Upload/Download"
-msgstr "上传/下载"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/container.lua:339
-msgid "User"
-msgstr "用户"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newcontainer.lua:539
-msgid "User(-u)"
-msgstr "用户(-u)"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/newnetwork.lua:63
-msgid "VEPA (Virtual Ethernet Port Aggregator)"
-msgstr "VEPA(虚拟以太网端口聚合器)"
-
-#: applications/luci-app-dockerman/luasrc/controller/dockerman.lua:41
-#: applications/luci-app-dockerman/luasrc/view/dockerman/overview.htm:187
-msgid "Volumes"
-msgstr "存储卷"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/volumes.lua:73
-msgid "Volumes overview"
-msgstr "卷概览"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:53
-msgid "Warning"
-msgstr "警告"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/images.lua:126
-msgid ""
-"When pressing the Import button, both a local image can be loaded onto the "
-"system and a valid image tar can be downloaded from remote."
-msgstr ""
-"按下导入按钮时,既可以将本地镜像文件加载到系统上,也可以从远程下载有效的Tar格"
-"式的镜像文件。"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:124
-msgid ""
-"Which Port(s) can be accessed, it's not restricted by the Allowed Access "
-"interfaces configuration. Use this configuration with caution!"
-msgstr "设置可以被访问的端口,该配置不受“允许的访问接口”配置的限制。请谨慎使用该配置选项!"
-
-#: applications/luci-app-dockerman/luasrc/model/cbi/dockerman/configuration.lua:118
-msgid ""
-"Which interface(s) can access containers under the bridge network, fill-in "
-"Interface Name"
-msgstr "哪些接口可以访问桥接网络下的容器,请填写接口名称"
-
-#~ msgid "Containers allowed to be accessed"
-#~ msgstr "允许访问的容器"
-
-#~ msgid ""
-#~ "Which container(s) under bridge network can be accessed, even from "
-#~ "interfaces that are not allowed, fill-in Container Id or Name"
-#~ msgstr ""
-#~ "桥接网络下哪些容器可以访问,即使是不允许从接口访问,也要填写容器 ID 或名称"
-
-#~ msgid "Connect to remote endpoint"
-#~ msgstr "连接到远程终端"
-
-#~ msgid "Global settings"
-#~ msgstr "全局设定"
-
-#~ msgid "Path"
-#~ msgstr "路径"
-
-#~ msgid "Please input the PATH !"
-#~ msgstr "请输入合法路径!"
-
-#~ msgid "Setting"
-#~ msgstr "设置"
-
-#~ msgid "Specifies where the Docker daemon will listen for client connections"
-#~ msgstr "指定Docker服务端(Docker daemon)侦听客户端连接的位置"
-
-#~ msgid "Docker Container"
-#~ msgstr "Docker 容器"
-
-#~ msgid ""
-#~ "DockerMan is a Simple Docker manager client for LuCI, If you have any "
-#~ "issue please visit:"
-#~ msgstr ""
-#~ "DockerMan 是一个简单的 LuCI 客户端 Docker 管理器,如果您有任何问题,请访"
-#~ "问:"
-
-#~ msgid "Import Images"
-#~ msgstr "导入镜像"
-
-#~ msgid "New Container"
-#~ msgstr "新建容器"
-
-#~ msgid "New Network"
-#~ msgstr "新建网络"
-
-#~ msgid "Macvlan Mode"
-#~ msgstr "Macvlan 模式"
-
-#~ msgid ""
-#~ "Daemon unix socket (unix:///var/run/docker.sock) or TCP Remote Hosts "
-#~ "(tcp://0.0.0.0:2375), default: unix:///var/run/docker.sock"
-#~ msgstr ""
-#~ "守护进程 unix 套接字 (unix:///var/run/docker.sock) 或 TCP 远程主机 "
-#~ "(tcp://0.0.0.0:2375),默认值:unix:///var/run/docker.sock"
-
-#~ msgid "Docker Daemon"
-#~ msgstr "Docker 服务端"
-
-#~ msgid "Dockerman connect to remote endpoint"
-#~ msgstr "Dockerman 连接到远程端点"
-
-#~ msgid "Enable"
-#~ msgstr "启用"
-
-#~ msgid "Server Host"
-#~ msgstr "服务器主机"
-
-#~ msgid "Contaienr Info"
-#~ msgstr "容器信息"
diff --git a/package/lean/luci-app-dockerman/root/etc/init.d/dockerman b/package/lean/luci-app-dockerman/root/etc/init.d/dockerman
deleted file mode 100755
index 304cf91d0..000000000
--- a/package/lean/luci-app-dockerman/root/etc/init.d/dockerman
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/bin/sh /etc/rc.common
-
-START=99
-USE_PROCD=1
-# PROCD_DEBUG=1
-config_load 'dockerd'
-# config_get daemon_ea "dockerman" daemon_ea
-_DOCKERD=/etc/init.d/dockerd
-
-docker_running(){
- docker version > /dev/null 2>&1
- return $?
-}
-
-add_ports() {
- [ $# -eq 0 ] && return
- $($_DOCKERD running) && docker_running || return 1
- ids=$@
- for id in $ids; do
- id=$(docker ps --filter "ID=$id" --quiet)
- [ -z "$id" ] && {
-			echo "Docker container not running";
- return 1;
- }
- ports=$(docker ps --filter "ID=$id" --format "{{.Ports}}")
- # echo "$ports"
- for port in $ports; do
- echo "$port" | grep -qE "^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}:.*$" || continue;
- [ "${port: -1}" == "," ] && port="${port:0:-1}"
- local protocol=""
- [ "${port%tcp}" != "$port" ] && protocol="/tcp"
- [ "${port%udp}" != "$port" ] && protocol="/udp"
- [ "$protocol" == "" ] && continue
- port="${port%%->*}"
- port="${port##*:}"
- uci_add_list dockerd dockerman ac_allowed_ports "${port}${protocol}"
- done
- done
- uci_commit dockerd
-}
-
-
-convert() {
- _convert() {
- _id=$1
- _id=$(docker ps --all --filter "ID=$_id" --quiet)
- if [ -z "$_id" ]; then
- uci_remove_list dockerd dockerman ac_allowed_container "$1"
- return
- fi
- if /etc/init.d/dockerman add_ports "$_id"; then
- uci_remove_list dockerd dockerman ac_allowed_container "$_id"
- fi
- }
- config_list_foreach dockerman ac_allowed_container _convert
- uci_commit dockerd
-}
-
-iptables_append(){
- # Wait for a maximum of 10 second per command, retrying every millisecond
- local iptables_wait_args="--wait 10 --wait-interval 1000"
- if ! iptables ${iptables_wait_args} --check $@ 2>/dev/null; then
- iptables ${iptables_wait_args} -A $@ 2>/dev/null
- fi
-}
-
-init_dockerman_chain(){
- iptables -N DOCKER-MAN >/dev/null 2>&1
- iptables -F DOCKER-MAN >/dev/null 2>&1
- iptables -D DOCKER-USER -j DOCKER-MAN >/dev/null 2>&1
- iptables -I DOCKER-USER -j DOCKER-MAN >/dev/null 2>&1
-}
-
-delete_dockerman_chain(){
- iptables -D DOCKER-USER -j DOCKER-MAN >/dev/null 2>&1
- iptables -F DOCKER-MAN >/dev/null 2>&1
- iptables -X DOCKER-MAN >/dev/null 2>&1
-}
-
-add_allowed_interface(){
- iptables_append DOCKER-MAN -i $1 -o docker0 -j RETURN
-}
-
-add_allowed_ports(){
- port=$1
- if [ "${port%/tcp}" != "$port" ]; then
- iptables_append DOCKER-MAN -p tcp -m conntrack --ctorigdstport ${port%/tcp} --ctdir ORIGINAL -j RETURN
- elif [ "${port%/udp}" != "$port" ]; then
- iptables_append DOCKER-MAN -p udp -m conntrack --ctorigdstport ${port%/udp} --ctdir ORIGINAL -j RETURN
- fi
-}
-
-handle_allowed_ports(){
- config_list_foreach "dockerman" "ac_allowed_ports" add_allowed_ports
-}
-
-handle_allowed_interface(){
- config_list_foreach "dockerman" "ac_allowed_interface" add_allowed_interface
- iptables_append DOCKER-MAN -m conntrack --ctstate ESTABLISHED,RELATED -o docker0 -j RETURN >/dev/null 2>&1
- iptables_append DOCKER-MAN -m conntrack --ctstate NEW,INVALID -o docker0 -j DROP >/dev/null 2>&1
- iptables_append DOCKER-MAN -j RETURN >/dev/null 2>&1
-}
-
-start_service(){
- [ -x "$_DOCKERD" ] && $($_DOCKERD enabled) || return 0
- delete_dockerman_chain
- $($_DOCKERD running) && docker_running || return 0
- init_dockerman_chain
- handle_allowed_ports
- handle_allowed_interface
-}
-
-stop_service(){
- delete_dockerman_chain
-}
-
-service_triggers() {
- procd_add_reload_trigger 'dockerd'
-}
-
-reload_service() {
- start
-}
-
-boot() {
- sleep 5s
- start
-}
-
-extra_command "add_ports" "Add allowed ports based on the container ID(s)"
-extra_command "convert" "Convert Ac allowed container to AC allowed ports"
diff --git a/package/lean/luci-app-dockerman/root/etc/uci-defaults/luci-app-dockerman b/package/lean/luci-app-dockerman/root/etc/uci-defaults/luci-app-dockerman
deleted file mode 100755
index 4358728a1..000000000
--- a/package/lean/luci-app-dockerman/root/etc/uci-defaults/luci-app-dockerman
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/sh
-
-. $IPKG_INSTROOT/lib/functions.sh
-
-[ -x "$(command -v dockerd)" ] && chmod +x /etc/init.d/dockerman && /etc/init.d/dockerman enable >/dev/null 2>&1
-sed -i 's/self:cfgvalue(section) or {}/self:cfgvalue(section) or self.default or {}/' /usr/lib/lua/luci/view/cbi/dynlist.htm
-/etc/init.d/uhttpd restart >/dev/null 2>&1
-rm -fr /tmp/luci-indexcache /tmp/luci-modulecache >/dev/null 2>&1
-touch /etc/config/dockerd
-ls /etc/rc.d/*dockerd >/dev/null 2>&1 && uci -q set dockerd.globals.auto_start="1" || uci -q set dockerd.globals.auto_start="0"
-uci -q batch <<-EOF >/dev/null
- set uhttpd.main.script_timeout="3600"
- commit uhttpd
- set dockerd.dockerman=dockerman
- set dockerd.dockerman.socket_path='/var/run/docker.sock'
- set dockerd.dockerman.status_path='/tmp/.docker_action_status'
- set dockerd.dockerman.debug='false'
- set dockerd.dockerman.debug_path='/tmp/.docker_debug'
- set dockerd.dockerman.remote_endpoint='0'
-
- del_list dockerd.dockerman.ac_allowed_interface='br-lan'
- add_list dockerd.dockerman.ac_allowed_interface='br-lan'
-
- commit dockerd
-EOF
-# remove dockerd firewall
-config_load dockerd
-remove_firewall(){
- cfg=${1}
- uci_remove dockerd ${1}
-}
-config_foreach remove_firewall firewall
-# Convert ac_allowed_container to ac_allowed_ports
-(sleep 30s && /etc/init.d/dockerman convert;/etc/init.d/dockerman restart) &
-
-exit 0
diff --git a/package/lean/luci-app-easymesh/Makefile b/package/lean/luci-app-easymesh/Makefile
deleted file mode 100644
index 99f2cc180..000000000
--- a/package/lean/luci-app-easymesh/Makefile
+++ /dev/null
@@ -1,13 +0,0 @@
-#
-#-- Copyright (C) 2021 dz
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=LuCI Support for easymesh
-LUCI_DEPENDS:= +kmod-cfg80211 +batctl-default +kmod-batman-adv +wpad-openssl +dawn
-PKG_VERSION:=1.9
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-easymesh/luasrc/controller/easymesh.lua b/package/lean/luci-app-easymesh/luasrc/controller/easymesh.lua
deleted file mode 100644
index a004058d5..000000000
--- a/package/lean/luci-app-easymesh/luasrc/controller/easymesh.lua
+++ /dev/null
@@ -1,14 +0,0 @@
--- Copyright (C) 2021 dz
-
-module("luci.controller.easymesh", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/easymesh") then
- return
- end
-
- local page
- page = entry({"admin", "network", "easymesh"}, cbi("easymesh"), _("EASY MESH"), 60)
- page.dependent = true
- page.acl_depends = { "luci-app-easymesh" }
-end
diff --git a/package/lean/luci-app-easymesh/luasrc/model/cbi/easymesh.lua b/package/lean/luci-app-easymesh/luasrc/model/cbi/easymesh.lua
deleted file mode 100644
index 534947771..000000000
--- a/package/lean/luci-app-easymesh/luasrc/model/cbi/easymesh.lua
+++ /dev/null
@@ -1,122 +0,0 @@
--- Copyright (C) 2021 dz
-
-local m,s,o
-local sys = require "luci.sys"
-local uci = require "luci.model.uci".cursor()
-
-m = Map("easymesh")
-
-function detect_Node()
- local data = {}
- local lps = luci.util.execi(" batctl n 2>/dev/null | tail +2 | sed 's/^[ ][ ]*//g' | sed 's/[ ][ ]*/ /g' | sed 's/$/ /g' ")
- for value in lps do
- local row = {}
- local pos = string.find(value, " ")
- local IFA = string.sub(value, 1, pos - 1)
- local value = string.sub(value, pos + 1, string.len(value))
- pos = string.find(value, " ")
- local pos = string.find(value, " ")
- local Neighbora = string.sub(value, 1, pos - 1)
- local value = string.sub(value, pos + 1, string.len(value))
- pos = string.find(value, " ")
- local pos = string.find(value, " ")
- local lastseena = string.sub(value, 1, pos - 1)
- local value = string.sub(value, pos + 1, string.len(value))
- pos = string.find(value, " ")
- row["IF"] = IFA
- row["Neighbor"] = Neighbora
- row["lastseen"] = lastseena
- table.insert(data, row)
- end
- return data
-end
-local Nodes = luci.sys.exec("batctl n 2>/dev/null| tail +3 | wc -l")
-local Node = detect_Node()
-v = m:section(Table, Node, "", translate("Active node:") .. Nodes)
-v:option(DummyValue, "IF")
-v:option(DummyValue, "Neighbor")
-v:option(DummyValue, "lastseen")
-
--- Basic
-s = m:section(TypedSection, "easymesh", translate("Settings"), translate("General Settings"))
-s.anonymous = true
-
----- Eanble
-o = s:option(Flag, "enabled", translate("Enable"), translate("Enable or disable EASY MESH"))
-o.default = 0
-o.rmempty = false
-
-o = s:option(ListValue, "role", translate("role"))
-o:value("off", translate("off"))
-o:value("server", translate("host MESH"))
-o:value("client", translate("son MESH"))
-o.rmempty = false
-
-apRadio = s:option(ListValue, "apRadio", translate("MESH Radio device"), translate("The radio device which MESH uses"))
-uci:foreach("wireless", "wifi-device",
- function(s)
- apRadio:value(s['.name'])
- end)
-apRadio:value("all", translate("ALL"))
-o.default = "radio0"
-o.rmempty = false
-
----- mesh
-o = s:option(Value, "mesh_id", translate("MESH ID"))
-o.default = "easymesh"
-o.description = translate("MESH ID")
-
-enable = s:option(Flag, "encryption", translate("Encryption"), translate(""))
-enable.default = 0
-enable.rmempty = false
-
-o = s:option(Value, "key", translate("Key"))
-o.default = "easymesh"
-o:depends("encryption", 1)
-
----- kvr
-enable = s:option(Flag, "kvr", translate("K/V/R"), translate(""))
-enable.default = 1
-enable.rmempty = false
-
-o = s:option(Value, "mobility_domain", translate("Mobility Domain"), translate("4-character hexadecimal ID"))
-o.default = "4f57"
-o.datatype = "and(hexstring,rangelength(4,4))"
-o:depends("kvr", 1)
-
-o = s:option(Value, "rssi_val", translate("Threshold for an good RSSI"))
-o.default = "-60"
-o.atatype = "range(-1,-120)"
-o:depends("kvr", 1)
-
-o = s:option(Value, "low_rssi_val", translate("Threshold for an bad RSSI"))
-o.default = "-88"
-o.atatype = "range(-1,-120)"
-o:depends("kvr", 1)
-
----- ap_mode
-enable = s:option(Flag, "ap_mode", translate("AP MODE Enable"), translate("Enable or disable AP MODE"))
-enable.default = 0
-enable.rmempty = false
-
-o = s:option(Value, "ipaddr", translate("IPv4-Address"))
-o.default = "192.168.1.10"
-o.datatype = "ip4addr"
-o:depends("ap_mode", 1)
-
-o = s:option(Value, "netmask", translate("IPv4 netmask"))
-o.default = "255.255.255.0"
-o.datatype = "ip4addr"
-o:depends("ap_mode", 1)
-
-o = s:option(Value, "gateway", translate("IPv4 gateway"))
-o.default = "192.168.1.1"
-o.datatype = "ip4addr"
-o:depends("ap_mode", 1)
-
-o = s:option(Value, "dns", translate("Use custom DNS servers"))
-o.default = "192.168.1.1"
-o.datatype = "ip4addr"
-o:depends("ap_mode", 1)
-
-return m
diff --git a/package/lean/luci-app-easymesh/po/zh-cn/easymesh.po b/package/lean/luci-app-easymesh/po/zh-cn/easymesh.po
deleted file mode 100755
index d2759869c..000000000
--- a/package/lean/luci-app-easymesh/po/zh-cn/easymesh.po
+++ /dev/null
@@ -1,83 +0,0 @@
-msgid ""
-msgstr "Content-Type: text/plain; charset=UTF-8\n"
-
-msgid "easymesh"
-msgstr "easymesh"
-
-msgid "Active node:" .. Nodes .. ""
-msgstr "活动节点:" .. Nodes .. ""
-
-msgid "IF"
-msgstr "IF"
-
-msgid "Neighbor"
-msgstr "节点邻居设备"
-
-msgid "lastseen"
-msgstr "上次连接延时"
-
-msgid "EASY MESH"
-msgstr "简单MESH"
-
-msgid "Settings"
-msgstr "设置"
-
-msgid "General Settings"
-msgstr "基本设置"
-
-msgid "Enable"
-msgstr "启用"
-
-msgid "role"
-msgstr "角色"
-
-msgid "off"
-msgstr "关闭"
-
-msgid "host MESH"
-msgstr "主MESH"
-
-msgid "son MESH"
-msgstr "子MESH"
-
-msgid "MESH Radio device"
-msgstr "MESH无线设备"
-
-msgid "The radio device which MESH use"
-msgstr "使用MESH组网的无线设备"
-
-msgid "AUTO"
-msgstr "自动"
-
-msgid "Enable or disable EASY MESH"
-msgstr "启用或禁用简单MESH"
-
-msgid "Encryption"
-msgstr "加密"
-
-msgid "Key"
-msgstr "密码"
-
-msgid "Threshold for an good RSSI"
-msgstr "快速漫游接入值"
-
-msgid "Threshold for an bad RSSI"
-msgstr "快速漫游踢出值"
-
-msgid "AP MODE Enable"
-msgstr "启用AP模式"
-
-msgid "Enable or disable AP MODE"
-msgstr "启用或禁用AP模式"
-
-msgid "IPv4-Address"
-msgstr "IPv4 地址"
-
-msgid "IPv4 netmask"
-msgstr "IPv4 子网掩码"
-
-msgid "IPv4 gateway"
-msgstr "IPv4 网关"
-
-msgid "Use custom DNS servers"
-msgstr "使用自定义的 DNS 服务器"
diff --git a/package/lean/luci-app-easymesh/po/zh_Hans b/package/lean/luci-app-easymesh/po/zh_Hans
deleted file mode 120000
index 41451e4a1..000000000
--- a/package/lean/luci-app-easymesh/po/zh_Hans
+++ /dev/null
@@ -1 +0,0 @@
-zh-cn
\ No newline at end of file
diff --git a/package/lean/luci-app-easymesh/root/etc/config/easymesh b/package/lean/luci-app-easymesh/root/etc/config/easymesh
deleted file mode 100644
index 3446d21c1..000000000
--- a/package/lean/luci-app-easymesh/root/etc/config/easymesh
+++ /dev/null
@@ -1,3 +0,0 @@
-
-config easymesh 'config'
- option enabled '0'
diff --git a/package/lean/luci-app-easymesh/root/etc/init.d/easymesh b/package/lean/luci-app-easymesh/root/etc/init.d/easymesh
deleted file mode 100755
index 1f2e66a2d..000000000
--- a/package/lean/luci-app-easymesh/root/etc/init.d/easymesh
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/bin/sh /etc/rc.common
-START=99
-STOP=10
-
-enable=$(uci get easymesh.config.enabled 2>/dev/null)
-mesh_bat0=$(uci get network.bat0 2>/dev/null)
-ap_mode=$(uci get easymesh.config.ap_mode 2>/dev/null)
-lan=$(uci get network.lan.ifname 2>/dev/null)
-ipaddr=$(uci get easymesh.config.ipaddr 2>/dev/null)
-netmask=$(uci get easymesh.config.netmask 2>/dev/null)
-gateway=$(uci get easymesh.config.gateway 2>/dev/null)
-dns=$(uci get easymesh.config.dns 2>/dev/null)
-ap_ipaddr=$(uci get network.lan.ipaddr 2>/dev/null)
-ap_ipaddr1=$(cat /etc/easymesh | sed -n '1p' 2>/dev/null)
-apRadio=$(uci get easymesh.config.apRadio 2>/dev/null)
-kvr=$(uci get easymesh.config.kvr 2>/dev/null)
-brlan=$(uci get network.@device[0].name 2>/dev/null)
-role=$(uci get easymesh.config.role 2>/dev/null)
-
-ap_mode_stop(){
- ap_ipaddr=$(uci get network.lan.ipaddr 2>/dev/null)
- ap_ipaddr1=$(cat /etc/easymesh | sed -n '1p' 2>/dev/null)
- dns1=$(cat /etc/easymesh | sed -n '2p' 2>/dev/null)
- if [ "$ap_ipaddr" = "$ap_ipaddr1" ]; then
- uci delete network.lan.gateway
- uci del_list network.lan.dns=$dns1
- uci commit network
-
- echo "" > /etc/easymesh
-
- uci delete dhcp.lan.dynamicdhcp
- uci delete dhcp.lan.ignore
- uci commit dhcp
-
- /etc/init.d/odhcpd enable && /etc/init.d/odhcpd start
- /etc/init.d/firewall enable && /etc/init.d/firewall start >/dev/null 2>&1
- fi
-}
-
-add_wifi_mesh(){
- mesh_nwi_mesh=$(uci get network.nwi_mesh_${apall} 2>/dev/null)
- mesh_apRadio=$(uci get wireless.mesh_${apall}.device 2>/dev/null)
- mesh_mesh=$(uci get wireless.mesh_${apall} 2>/dev/null)
- mesh_id=$(uci get easymesh.config.mesh_id 2>/dev/null)
- mobility_domain=$(uci get easymesh.config.mobility_domain 2>/dev/null)
- key=$(uci get easymesh.config.key 2>/dev/null)
- encryption=$(uci get easymesh.config.encryption 2>/dev/null)
-
- if [ "$mesh_nwi_mesh" != "interface" ]; then
- uci set network.nwi_mesh_$apall=interface
- uci set network.nwi_mesh_$apall.proto='batadv_hardif'
- uci set network.nwi_mesh_$apall.master='bat0'
- uci set network.nwi_mesh_$apall.mtu='1536'
- uci commit network
- fi
-
- if [ "$mesh_mesh" != "wifi-iface" ]; then
- uci set wireless.mesh_$apall=wifi-iface
- uci set wireless.mesh_$apall.device=$apall
- uci set wireless.mesh_$apall.ifname=mesh_${apall}
- uci set wireless.mesh_$apall.network=nwi_mesh_${apall}
- uci set wireless.mesh_$apall.mode='mesh'
- uci set wireless.mesh_$apall.mesh_id=$mesh_id
- uci set wireless.mesh_$apall.mesh_fwding='0'
- uci set wireless.mesh_$apall.mesh_ttl='1'
- uci set wireless.mesh_$apall.mcast_rate='24000'
- uci set wireless.mesh_$apall.disabled='0'
- uci commit wireless
- fi
-
- if [ "$mesh_mesh" = "wifi-iface" ]; then
- if [ "$mesh_apRadio" != "$apall" ]; then
- uci set wireless.mesh_$apall.device=$apall
- uci commit wireless
- fi
- fi
-
- if [ "$encryption" != 1 ]; then
- uci set wireless.mesh_$apall.encryption='none'
- uci commit wireless
- else
- uci set wireless.mesh_$apall.encryption='sae'
- uci set wireless.mesh_$apall.key=$key
- uci commit wireless
- fi
-}
-
-add_kvr(){
- kvr=$(uci get easymesh.config.kvr 2>/dev/null)
- mobility_domain=$(uci get easymesh.config.mobility_domain 2>/dev/null)
- for apall in $(uci -X show wireless | grep wifi-device | awk -F'[.=]' '{print $2}'); do
- if [ "$kvr" = 1 ]; then
- uci set wireless.default_$apall.ieee80211k='1'
- uci set wireless.default_$apall.rrm_neighbor_report='1'
- uci set wireless.default_$apall.rrm_beacon_report='1'
- uci set wireless.default_$apall.ieee80211v='1'
- uci set wireless.default_$apall.bss_transition='1'
- uci set wireless.default_$apall.ieee80211r='1'
- uci set wireless.default_$apall.encryption='psk2+ccmp'
- uci set wireless.default_$apall.mobility_domain=$mobility_domain
- uci set wireless.default_$apall.ft_over_ds='1'
- uci set wireless.default_$apall.ft_psk_generate_local='1'
- uci commit wireless
- else
- uci delete wireless.default_$apall.ieee80211k
- uci delete wireless.default_$apall.ieee80211v
- uci delete wireless.default_$apall.ieee80211r
- uci commit wireless
- fi
- done
-}
-
-add_dawn(){
- kvr=$(uci get easymesh.config.kvr 2>/dev/null)
- rssi_val=$(uci get easymesh.config.rssi_val 2>/dev/null)
- low_rssi_val=$(uci get easymesh.config.low_rssi_val 2>/dev/null)
-
- if [ "$kvr" = 1 ]; then
- uci set dawn.@metric[0].rssi_val=$rssi_val
- uci set dawn.@metric[0].low_rssi_val=$low_rssi_val
- uci commit dawn
- /etc/init.d/dawn enable && /etc/init.d/dawn start
- else
- /etc/init.d/dawn stop && /etc/init.d/dawn disable
- fi
-}
-
-start(){
- if [ "$enable" == 1 ]; then
- if [ "$mesh_bat0" != "interface" ]; then
- uci set network.bat0=interface
- uci set network.bat0.proto='batadv'
- uci set network.bat0.routing_algo='BATMAN_IV'
- uci set network.bat0.aggregated_ogms='1'
- uci set network.bat0.ap_isolation='0'
- uci set network.bat0.bonding='0'
- uci set network.bat0.bridge_loop_avoidance='1'
- uci set network.bat0.distributed_arp_table='1'
- uci set network.bat0.fragmentation='1'
- #uci set network.bat0.gw_bandwidth='10000/2000'
- #uci set network.bat0.gw_sel_class='20'
- uci set network.bat0.hop_penalty='30'
- uci set network.bat0.isolation_mark='0x00000000/0x00000000'
- uci set network.bat0.log_level='0'
- uci set network.bat0.multicast_fanout='16'
- uci set network.bat0.multicast_mode='1'
- uci set network.bat0.network_coding='0'
- uci set network.bat0.orig_interval='1000'
-
- if [ "$role" = "server" ]; then
- uci set network.bat0.gw_mode='server'
- elif [ "$role" = "client" ]; then
- uci set network.bat0.gw_mode='client'
- else
- uci set network.bat0.gw_mode='off'
- fi
-
- if [ "$brlan" = "br-lan" ]; then
- uci add_list network.@device[0].ports='bat0'
- else
- uci set network.lan.ifname="${lan} bat0"
- fi
- uci commit network
- fi
-
- if [ "$apRadio" = "all" ]; then
- for apall in $(uci -X show wireless | grep wifi-device | awk -F'[.=]' '{print $2}'); do
- add_wifi_mesh
- done
- else
- apall=$apRadio
- add_wifi_mesh
- fi
-
- add_kvr
- add_dawn
-
- if [ "$ap_mode" == 1 ]; then
- if [ "$ap_ipaddr" != "$ipaddr" ]; then
- uci set network.lan.ipaddr=$ipaddr
- uci set network.lan.netmask=$netmask
- uci set network.lan.gateway=$gateway
- uci add_list network.lan.dns=$dns
- uci commit network
-
- echo "" > /etc/easymesh
- echo "$ipaddr" > /etc/easymesh
- echo "$dns" >> /etc/easymesh
-
- uci set dhcp.lan.dynamicdhcp='0'
- uci set dhcp.lan.ignore='1'
- uci delete dhcp.lan.ra
- uci delete dhcp.lan.dhcpv6
- uci delete dhcp.lan.ra_management
- uci commit dhcp
-
- /etc/init.d/odhcpd stop && /etc/init.d/odhcpd disable
- /etc/init.d/firewall stop && /etc/init.d/firewall disable >/dev/null 2>&1
- fi
- else
- ap_mode_stop
- fi
-
- /etc/init.d/network restart
- else
- stop
- fi
-}
-
-stop(){
- if [ "$enable" != 1 ]; then
- if [ "$mesh_bat0" = "interface" ]; then
- uci delete network.bat0
- if [ "$brlan" = "br-lan" ]; then
- uci del_list network.@device[0].ports='bat0'
- else
- sed -i 's/ bat0//' /etc/config/network
- fi
- uci commit network
- fi
-
- for apall in $(uci -X show wireless | grep wifi-device | awk -F'[.=]' '{print $2}'); do
- mesh_nwi_mesh=$(uci get network.nwi_mesh_${apall} 2>/dev/null)
- mesh_mesh=$(uci get wireless.mesh_${apall} 2>/dev/null)
-
- if [ "$mesh_nwi_mesh" = "interface" ]; then
- uci delete network.nwi_mesh_$apall
- uci commit network
- fi
-
- if [ "$mesh_mesh" = "wifi-iface" ]; then
- uci delete wireless.mesh_$apall
- uci commit wireless
- fi
- done
-
- add_kvr
- add_dawn
-
- if [ "$ap_mode" == 1 ]; then
- ap_mode_stop
- fi
-
- /etc/init.d/network restart
- fi
-}
-
-restart() {
- start
-}
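The start() routine above only rewrites UCI and restarts networking; it never verifies that the 802.11s links or the bat0 bridge actually came up. A hedged sketch of manual checks, assuming batctl and iw are installed (neither is pulled in by this package itself):

```sh
# Verify the mesh after the init script has run.
batctl if                                   # bat0 slaves: one mesh_<radio> per meshed radio
batctl n                                    # batman-adv neighbour table (what the status view reports)
iw dev mesh_radio0 station dump | head      # 802.11s peers on radio0 (interface name set in add_wifi_mesh)
uci show network.bat0                       # the batadv interface created by start()
```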
diff --git a/package/lean/luci-app-easymesh/root/etc/uci-defaults/luci-easymesh b/package/lean/luci-app-easymesh/root/etc/uci-defaults/luci-easymesh
deleted file mode 100755
index 92b07747e..000000000
--- a/package/lean/luci-app-easymesh/root/etc/uci-defaults/luci-easymesh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@easymesh[-1]
- add ucitrack easymesh
- set ucitrack.@easymesh[-1].init=easymesh
- commit ucitrack
-EOF
-
-rm -f /tmp/luci-indexcache
-exit 0
diff --git a/package/lean/luci-app-easymesh/root/usr/share/rpcd/acl.d/luci-app-easymesh.json b/package/lean/luci-app-easymesh/root/usr/share/rpcd/acl.d/luci-app-easymesh.json
deleted file mode 100644
index 4d435948a..000000000
--- a/package/lean/luci-app-easymesh/root/usr/share/rpcd/acl.d/luci-app-easymesh.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "luci-app-easymesh": {
- "description": "Grant UCI access for luci-app-easymesh",
- "read": {
- "uci": [ "easymesh" ]
- },
- "write": {
- "uci": [ "easymesh" ]
- }
- }
-}
\ No newline at end of file
diff --git a/package/lean/luci-app-familycloud/Makefile b/package/lean/luci-app-familycloud/Makefile
deleted file mode 100644
index 926ec8324..000000000
--- a/package/lean/luci-app-familycloud/Makefile
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (C) 2016 Openwrt.org
-#
-# This is free software, licensed under the Apache License, Version 2.0 .
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=LuCI support for FamilyCloudSpeeder
-LUCI_DEPENDS:=+coreutils +coreutils-nohup +libreadline +libcurl +libopenssl +bash +curl +wget +openssl-util +ca-certificates +ca-bundle
-LUCI_PKGARCH:=all
-PKG_NAME:=luci-app-familycloud
-PKG_VERSION:=1
-PKG_RELEASE:=6
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-familycloud/luasrc/controller/familycloud.lua b/package/lean/luci-app-familycloud/luasrc/controller/familycloud.lua
deleted file mode 100644
index b0f3b6d48..000000000
--- a/package/lean/luci-app-familycloud/luasrc/controller/familycloud.lua
+++ /dev/null
@@ -1,21 +0,0 @@
-module("luci.controller.familycloud", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/familycloud") then
- return
- end
-
- entry({"admin", "services", "familycloud"}, firstchild(), _("天翼家庭云/云盘提速"), 80).dependent = false
-
- entry({"admin", "services", "familycloud", "general"}, cbi("familycloud/familycloud"), _("Base Setting"), 1)
- entry({"admin", "services", "familycloud", "log"}, form("familycloud/familycloud_log"), _("Log"), 2)
-
- entry({"admin", "services", "familycloud", "status"}, call("act_status")).leaf = true
-end
-
-function act_status()
- local e = {}
- e.running = luci.sys.call("ps | grep speedup.sh | grep -v grep >/dev/null") == 0
- luci.http.prepare_content("application/json")
- luci.http.write_json(e)
-end
diff --git a/package/lean/luci-app-familycloud/luasrc/model/cbi/familycloud/familycloud.lua b/package/lean/luci-app-familycloud/luasrc/model/cbi/familycloud/familycloud.lua
deleted file mode 100644
index e57ff0243..000000000
--- a/package/lean/luci-app-familycloud/luasrc/model/cbi/familycloud/familycloud.lua
+++ /dev/null
@@ -1,22 +0,0 @@
-mp = Map("familycloud")
-mp.title = translate("天翼家庭云/天翼云盘提速")
-mp.description = translate("天翼家庭云/天翼云盘提速 (最高可达500Mbps)")
-
-mp:section(SimpleSection).template = "familycloud/familycloud_status"
-
-s = mp:section(TypedSection, "familycloud")
-s.anonymous = true
-s.addremove = false
-
-enabled = s:option(Flag, "enabled", translate("启用提速"))
-enabled.default = 0
-enabled.rmempty = false
-
-speedtype = s:option(ListValue, "speedertype", translate("天翼提速包类型"))
-speedtype:value("CloudDisk", translate("天翼云盘提速"))
-speedtype:value("FamilyCloud", translate("天翼家庭云提速"))
-
-account = s:option(Value, "token", translate("AccessToken"))
-account.datatype = "string"
-
-return mp
diff --git a/package/lean/luci-app-familycloud/luasrc/model/cbi/familycloud/familycloud_log.lua b/package/lean/luci-app-familycloud/luasrc/model/cbi/familycloud/familycloud_log.lua
deleted file mode 100644
index c1b7ad0b0..000000000
--- a/package/lean/luci-app-familycloud/luasrc/model/cbi/familycloud/familycloud_log.lua
+++ /dev/null
@@ -1,14 +0,0 @@
-local fs = require "nixio.fs"
-local conffile = "/tmp/familycloud.log"
-
-f = SimpleForm("logview")
-
-t = f:field(TextValue, "conf")
-t.rmempty = true
-t.rows = 15
-function t.cfgvalue()
- return fs.readfile(conffile) or ""
-end
-t.readonly = "readonly"
-
-return f
diff --git a/package/lean/luci-app-familycloud/luasrc/view/familycloud/familycloud_status.htm b/package/lean/luci-app-familycloud/luasrc/view/familycloud/familycloud_status.htm
deleted file mode 100644
index b2ae91c91..000000000
--- a/package/lean/luci-app-familycloud/luasrc/view/familycloud/familycloud_status.htm
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
-
diff --git a/package/lean/luci-app-familycloud/root/etc/config/familycloud b/package/lean/luci-app-familycloud/root/etc/config/familycloud
deleted file mode 100644
index 55b9172ee..000000000
--- a/package/lean/luci-app-familycloud/root/etc/config/familycloud
+++ /dev/null
@@ -1,6 +0,0 @@
-
-config familycloud
- option token 'f18a1952289c4659b3722ad54d3d92fb'
- option speedertype 'CloudDisk'
- option enabled '0'
-
diff --git a/package/lean/luci-app-familycloud/root/etc/init.d/familycloud b/package/lean/luci-app-familycloud/root/etc/init.d/familycloud
deleted file mode 100755
index 974a81a24..000000000
--- a/package/lean/luci-app-familycloud/root/etc/init.d/familycloud
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh /etc/rc.common
-
-START=99
-STOP=10
-
-TYPE=$(uci get familycloud.@familycloud[0].speedertype)
-TOKEN=$(uci get familycloud.@familycloud[0].token)
-
-
-start()
-{
- stop
- enable=$(uci get familycloud.@familycloud[0].enabled)
- [ $enable -eq 0 ] && exit 0
- sed "s/XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/$TOKEN/" /usr/share/familycloud/$TYPE/config.ini > /tmp/config.json
- bash /usr/share/familycloud/$TYPE/speedup.sh > /tmp/familycloud.log &
-}
-
-stop()
-{
- kill -9 $(ps | grep speedup.sh | grep -v grep | awk '{print $1}') >/dev/null 2>&1
-}
-
-
-
diff --git a/package/lean/luci-app-familycloud/root/etc/ppp/ip-up.d/familycloud.sh b/package/lean/luci-app-familycloud/root/etc/ppp/ip-up.d/familycloud.sh
deleted file mode 100755
index 60074df64..000000000
--- a/package/lean/luci-app-familycloud/root/etc/ppp/ip-up.d/familycloud.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-sleep 60 && /etc/init.d/familycloud restart
diff --git a/package/lean/luci-app-familycloud/root/etc/uci-defaults/luci-familycloud b/package/lean/luci-app-familycloud/root/etc/uci-defaults/luci-familycloud
deleted file mode 100755
index 0251b0eee..000000000
--- a/package/lean/luci-app-familycloud/root/etc/uci-defaults/luci-familycloud
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@familycloud[-1]
- add ucitrack familycloud
- set ucitrack.@familycloud[-1].init=familycloud
- commit ucitrack
-EOF
-
-touch /tmp/config.json
-
-ln -s /tmp/config.json /usr/share/familycloud/CloudDisk/config.json
-
-rm -f /tmp/luci-indexcache
-
-exit 0
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/config.ini b/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/config.ini
deleted file mode 100644
index 31f0d788a..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/config.ini
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "accessToken": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
- "setting": {
- "method": "GET",
- "rate": 600
- },
- "extra_header": {
- "User-Agent": "Apache-HttpClient/UNAVAILABLE (java 1.4)"
- }
-}
\ No newline at end of file
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/speedup.sh b/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/speedup.sh
deleted file mode 100755
index 30006574c..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/speedup.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env bash
-
-base_dir=`dirname $0`
-source "$base_dir/utils.sh"
-config="$base_dir/config.json"
-
-
-accessToken=`getSingleJsonValue "$config" "accessToken"`
-method=`getSingleJsonValue "$config" "method"`
-rate=`getSingleJsonValue "$config" "rate"`
-UA=`getSingleJsonValue "$config" "User-Agent"`
-extra_header="User-Agent:$UA"
-
-
-HOST="http://api.cloud.189.cn"
-LOGIN_URL="/loginByOpen189AccessToken.action"
-ACCESS_URL="/speed/startSpeedV2.action"
-count=0
-echo "*******************************************"
-while :
-do
- count=$((count+1))
- echo "Sending heart_beat package <$count>"
- split="~"
- headers_string="$extra_header"
- headers=`formatHeaderString "$split" "$headers_string"`
- result=`get "$HOST$LOGIN_URL?accessToken=$accessToken" "$headers"`
-	session_key=`echo "$result" | grep -Eo "sessionKey>.*</sessionKey" | sed 's/sessionKey>//' | sed 's/<\/sessionKey//'`
-	session_secret=`echo "$result" | grep -Eo "sessionSecret>.*</sessionSecret" | sed 's/sessionSecret>//' | sed 's/<\/sessionSecret//'`
- date=`env LANG=C.UTF-8 date -u '+%a, %d %b %Y %T GMT'`
- data="SessionKey=$session_key&Operate=$method&RequestURI=$ACCESS_URL&Date=$date"
- key="$session_secret"
- signature=`hashHmac "sha1" "$data" "$key"`
- headers_string="SessionKey:$session_key"${split}"Signature:$signature"${split}"Date:$date"${split}"$extra_header"
- headers=`formatHeaderString "$split" "$headers_string"`
- qosClientSn="$session_key"
- result=`get "$HOST$ACCESS_URL?qosClientSn=$qosClientSn" "$headers"`
- echo "heart_beat:"
- echo "date:<$date>"
- echo -e "response:\n$result"
- [[ "`echo ${result} | grep dialAcc`" != "" ]] && hint="succeeded" || hint="failed"
- echo "Sending heart_beat package <$count> $hint"
- echo "*******************************************"
- sleep ${rate}
-done
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/speedup_router.sh b/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/speedup_router.sh
deleted file mode 100644
index 769b5a63b..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/speedup_router.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env bash
-
-base_dir=`dirname $0`
-source "$base_dir/utils.sh"
-config="$base_dir/config.json"
-
-echoToLogreader() {
- logger -t CloudDisk "${1}"
-}
-
-accessToken=`getSingleJsonValue "$config" "accessToken"`
-method=`getSingleJsonValue "$config" "method"`
-UA=`getSingleJsonValue "$config" "User-Agent"`
-extra_header="User-Agent:$UA"
-
-
-HOST="http://api.cloud.189.cn"
-LOGIN_URL="/loginByOpen189AccessToken.action"
-ACCESS_URL="/speed/startSpeedV2.action"
-echoToLogreader "*******************************************"
-echoToLogreader "Sending Heartbeat Package ..."
-split="~"
-headers_string="$extra_header"
-headers=`formatHeaderString "$split" "$headers_string"`
-result=`get "$HOST$LOGIN_URL?accessToken=$accessToken" "$headers"`
-session_key=`echo "$result" | grep -Eo "sessionKey>.*</sessionKey" | sed 's/sessionKey>//' | sed 's/<\/sessionKey//'`
-session_secret=`echo "$result" | grep -Eo "sessionSecret>.*</sessionSecret" | sed 's/sessionSecret>//' | sed 's/<\/sessionSecret//'`
-date=`env LANG=C.UTF-8 date -u '+%a, %d %b %Y %T GMT'`
-data="SessionKey=$session_key&Operate=$method&RequestURI=$ACCESS_URL&Date=$date"
-key="$session_secret"
-signature=`hashHmac "sha1" "$data" "$key"`
-headers_string="SessionKey:$session_key"${split}"Signature:$signature"${split}"Date:$date"${split}"$extra_header"
-headers=`formatHeaderString "$split" "$headers_string"`
-qosClientSn=`cat /proc/sys/kernel/random/uuid`
-result=`get "$HOST$ACCESS_URL?qosClientSn=$qosClientSn" "$headers"`
-echoToLogreader "Heartbeat Signature: $signature"
-echoToLogreader "Date: $date"
-echoToLogreader "Response: $result"
-[[ "`echo ${result} | grep dialAcc`" != "" ]] && hint="succeeded" || hint="failed"
-echoToLogreader "Heartbeating $hint."
-echoToLogreader "*******************************************"
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/utils.sh b/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/utils.sh
deleted file mode 100755
index d54e1e3a9..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/CloudDisk/utils.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env bash
-
-CONNECTION_TIME="15"
-TRANSMISSION_TIME="15"
-
-formatHeaderString() {
- OLD_IFS=$IFS
- IFS="$1"
- STR="$2"
- ARRAY=(${STR})
- for i in "${!ARRAY[@]}"
- do
- HEADERS="$HEADERS -H '${ARRAY[$i]}'"
- done
- echo ${HEADERS} | sed 's/^ //'
- IFS=${OLD_IFS}
-}
-
-get() {
- HEADER="$1"
- URL="$2"
- eval curl -s --connect-timeout "${CONNECTION_TIME}" -m "${TRANSMISSION_TIME}" "${HEADER}" "${URL}"
-}
-
-post() {
- HEADER="$1"
- URL="$2"
- PAYLOAD="$3"
- eval curl -s --connect-timeout "${CONNECTION_TIME}" -m "${TRANSMISSION_TIME}" -X POST "${URL}" "${HEADER}" -w %{http_code} -d "'$PAYLOAD'"
-}
-
-getSingleJsonValue() {
- FILE="$1"
- KEY="$2"
- cat ${FILE} | grep "$KEY" | sed 's/,$//' | awk -F "[:]" '{ print $2 }' | sed 's/ //g' | sed 's/"//g'
-}
-
-hashHmac() {
- digest="$1"
- data="$2"
- key="$3"
- echo -n "$data" | openssl dgst "-$digest" -hmac "$key" | sed -e 's/^.* //' | tr 'a-z' 'A-Z'
-}
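The speedup scripts sign each request with HMAC-SHA1 over "SessionKey=...&Operate=...&RequestURI=...&Date=..." using the helpers above. A minimal usage sketch of those helpers; the session key and secret are placeholders, not real credentials:

```sh
# Usage sketch for the helpers defined above; values are placeholders.
source ./utils.sh
date=$(env LANG=C.UTF-8 date -u '+%a, %d %b %Y %T GMT')
data="SessionKey=abc123&Operate=GET&RequestURI=/speed/startSpeedV2.action&Date=$date"
sig=$(hashHmac "sha1" "$data" "secret-from-login")                  # upper-case hex HMAC-SHA1
headers=$(formatHeaderString "~" "SessionKey:abc123~Signature:$sig~Date:$date")
get "$headers" "http://api.cloud.189.cn/speed/startSpeedV2.action"  # curl with the signed headers
```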
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/config.ini b/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/config.ini
deleted file mode 100644
index f83cab7d1..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/config.ini
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "accessToken": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
- "AppKey": "600100885",
- "setting": {
- "method": "POST",
- "rate": 600
-},
- "send_data": {
- "prodCode": "76"
- },
- "extra_header": {
- "User-Agent": "Apache-HttpClient/UNAVAILABLE (java 1.4)"
- }
-}
\ No newline at end of file
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/config.json b/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/config.json
deleted file mode 120000
index 35e21f5b3..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/config.json
+++ /dev/null
@@ -1 +0,0 @@
-/tmp/config.json
\ No newline at end of file
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/speedup.sh b/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/speedup.sh
deleted file mode 100755
index d7ade60cf..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/speedup.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env bash
-
-base_dir=`dirname $0`
-source "$base_dir/utils.sh"
-config="$base_dir/config.json"
-
-
-accessToken=`getSingleJsonValue "$config" "accessToken"`
-AppKey=`getSingleJsonValue "$config" "AppKey"`
-method=`getSingleJsonValue "$config" "method"`
-rate=`getSingleJsonValue "$config" "rate"`
-prodCode=`getSingleJsonValue "$config" "prodCode"`
-UA=`getSingleJsonValue "$config" "User-Agent"`
-extra_header="User-Agent:$UA"
-
-
-HOST="http://api.cloud.189.cn"
-LOGIN_URL="/login4MergedClient.action"
-ACCESS_URL="/family/qos/startQos.action"
-count=0
-echo "*******************************************"
-while :
-do
- count=$((count+1))
- echo "Sending heart_beat package <$count>"
- split="~"
- headers_string="AppKey:$AppKey"${split}"$extra_header"
- headers=`formatHeaderString "$split" "$headers_string"`
- result=`post "$headers" "$HOST$LOGIN_URL?accessToken=$accessToken"`
-	session_key=`echo "$result" | grep -Eo "familySessionKey>.+</familySessionKey" | sed 's/familySessionKey>//' | sed 's/<\/familySessionKey//'`
-	session_secret=`echo "$result" | grep -Eo "familySessionSecret>.+</familySessionSecret" | sed 's/familySessionSecret>//' | sed 's/<\/familySessionSecret//'`
- date=`env LANG=C.UTF-8 date -u '+%a, %d %b %Y %T GMT'`
- data="SessionKey=$session_key&Operate=$method&RequestURI=$ACCESS_URL&Date=$date"
- key="$session_secret"
- signature=`hashHmac "sha1" "$data" "$key"`
- headers_string="SessionKey:$session_key"${split}"Signature:$signature"${split}"Date:$date"${split}"$extra_header"
- headers=`formatHeaderString "$split" "$headers_string"`
- send_data="prodCode=$prodCode"
- result=`post "$headers" "$HOST$ACCESS_URL" "$send_data"`
- echo "heart_beat:"
- echo "date:<$date>"
- echo "status_code:${result: -3}"
- echo -e "response:\n`echo ${result} | sed "s^[0-9]\{3\}$^^"`"
- [[ "`echo ${result} | grep dialAcc`" != "" ]] && hint="succeeded" || hint="failed"
- echo "Sending heart_beat package <$count> $hint"
- echo "*******************************************"
- sleep ${rate}
-done
\ No newline at end of file
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/speedup_router.sh b/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/speedup_router.sh
deleted file mode 100644
index 721023b0c..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/speedup_router.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env bash
-
-base_dir=`dirname $0`
-source "$base_dir/utils.sh"
-config="$base_dir/config.json"
-
-echoToLogreader() {
- logger -t FamilyCloud "${1}"
-}
-
-accessToken=`getSingleJsonValue "$config" "accessToken"`
-AppKey=`getSingleJsonValue "$config" "AppKey"`
-method=`getSingleJsonValue "$config" "method"`
-prodCode=`getSingleJsonValue "$config" "prodCode"`
-UA=`getSingleJsonValue "$config" "User-Agent"`
-extra_header="User-Agent:$UA"
-
-
-HOST="http://api.cloud.189.cn"
-LOGIN_URL="/login4MergedClient.action"
-ACCESS_URL="/family/qos/startQos.action"
-echoToLogreader "*******************************************"
-echoToLogreader "Sending Heartbeat Package ..."
-split="~"
-headers_string="AppKey:$AppKey"${split}"$extra_header"
-headers=`formatHeaderString "$split" "$headers_string"`
-result=`post "$headers" "$HOST$LOGIN_URL?accessToken=$accessToken"`
-session_key=`echo "$result" | grep -Eo "familySessionKey>.+</familySessionKey" | sed 's/familySessionKey>//' | sed 's/<\/familySessionKey//'`
-session_secret=`echo "$result" | grep -Eo "familySessionSecret>.+</familySessionSecret" | sed 's/familySessionSecret>//' | sed 's/<\/familySessionSecret//'`
-date=`env LANG=C.UTF-8 date -u '+%a, %d %b %Y %T GMT'`
-data="SessionKey=$session_key&Operate=$method&RequestURI=$ACCESS_URL&Date=$date"
-key="$session_secret"
-signature=`hashHmac "sha1" "$data" "$key"`
-headers_string="SessionKey:$session_key"${split}"Signature:$signature"${split}"Date:$date"${split}"$extra_header"
-headers=`formatHeaderString "$split" "$headers_string"`
-send_data="prodCode=$prodCode"
-result=`post "$headers" "$HOST$ACCESS_URL" "$send_data"`
-echoToLogreader "Heartbeat Signature: $signature"
-echoToLogreader "Date: $date"
-echoToLogreader "Response: ${result}"
-[[ "`echo ${result} | grep dialAcc`" != "" ]] && hint="succeeded" || hint="failed"
-echoToLogreader "Heartbeating $hint."
-echoToLogreader "*******************************************"
diff --git a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/utils.sh b/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/utils.sh
deleted file mode 100755
index d54e1e3a9..000000000
--- a/package/lean/luci-app-familycloud/root/usr/share/familycloud/FamilyCloud/utils.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env bash
-
-CONNECTION_TIME="15"
-TRANSMISSION_TIME="15"
-
-formatHeaderString() {
- OLD_IFS=$IFS
- IFS="$1"
- STR="$2"
- ARRAY=(${STR})
- for i in "${!ARRAY[@]}"
- do
- HEADERS="$HEADERS -H '${ARRAY[$i]}'"
- done
- echo ${HEADERS} | sed 's/^ //'
- IFS=${OLD_IFS}
-}
-
-get() {
- HEADER="$1"
- URL="$2"
- eval curl -s --connect-timeout "${CONNECTION_TIME}" -m "${TRANSMISSION_TIME}" "${HEADER}" "${URL}"
-}
-
-post() {
- HEADER="$1"
- URL="$2"
- PAYLOAD="$3"
- eval curl -s --connect-timeout "${CONNECTION_TIME}" -m "${TRANSMISSION_TIME}" -X POST "${URL}" "${HEADER}" -w %{http_code} -d "'$PAYLOAD'"
-}
-
-getSingleJsonValue() {
- FILE="$1"
- KEY="$2"
- cat ${FILE} | grep "$KEY" | sed 's/,$//' | awk -F "[:]" '{ print $2 }' | sed 's/ //g' | sed 's/"//g'
-}
-
-hashHmac() {
- digest="$1"
- data="$2"
- key="$3"
- echo -n "$data" | openssl dgst "-$digest" -hmac "$key" | sed -e 's/^.* //' | tr 'a-z' 'A-Z'
-}
diff --git a/package/lean/luci-app-filetransfer/Makefile b/package/lean/luci-app-filetransfer/Makefile
deleted file mode 100644
index 6c8aa5a49..000000000
--- a/package/lean/luci-app-filetransfer/Makefile
+++ /dev/null
@@ -1,16 +0,0 @@
-#
-# Copyright (C) 2008-2014 The LuCI Team
-#
-# This is free software, licensed under the Apache License, Version 2.0 .
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=LuCI page for IPK upload
-LUCI_DEPENDS:=+luci-lib-fs
-PKG_VERSION:=1
-PKG_RELEASE:=2
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-filetransfer/luasrc/controller/filetransfer.lua b/package/lean/luci-app-filetransfer/luasrc/controller/filetransfer.lua
deleted file mode 100644
index d79806928..000000000
--- a/package/lean/luci-app-filetransfer/luasrc/controller/filetransfer.lua
+++ /dev/null
@@ -1,13 +0,0 @@
---[[
-luci-app-filetransfer
-Description: File upload / download
-Author: yuleniwo xzm2@qq.com QQ:529698939
-Modify: ayongwifi@126.com www.openwrtdl.com
-]]--
-
-module("luci.controller.filetransfer", package.seeall)
-
-function index()
-
- entry({"admin", "system", "filetransfer"}, form("filetransfer"), _("FileTransfer"), 89)
-end
diff --git a/package/lean/luci-app-filetransfer/luasrc/model/cbi/filetransfer.lua b/package/lean/luci-app-filetransfer/luasrc/model/cbi/filetransfer.lua
deleted file mode 100644
index 0317b48c2..000000000
--- a/package/lean/luci-app-filetransfer/luasrc/model/cbi/filetransfer.lua
+++ /dev/null
@@ -1,158 +0,0 @@
-local fs = require "luci.fs"
-local http = luci.http
-
-ful = SimpleForm("upload", translate("Upload"), nil)
-ful.reset = false
-ful.submit = false
-
-sul = ful:section(SimpleSection, "", translate("Upload file to '/tmp/upload/'"))
-fu = sul:option(FileUpload, "")
-fu.template = "filetransfer/other_upload"
-um = sul:option(DummyValue, "", nil)
-um.template = "filetransfer/other_dvalue"
-
-fdl = SimpleForm("download", translate("Download"), nil)
-fdl.reset = false
-fdl.submit = false
-sdl = fdl:section(SimpleSection, "", translate("Download file"))
-fd = sdl:option(FileUpload, "")
-fd.template = "filetransfer/other_download"
-dm = sdl:option(DummyValue, "", nil)
-dm.template = "filetransfer/other_dvalue"
-
-function Download()
- local sPath, sFile, fd, block
- sPath = http.formvalue("dlfile")
- sFile = nixio.fs.basename(sPath)
- if luci.fs.isdirectory(sPath) then
- fd = io.popen('tar -C "%s" -cz .' % {sPath}, "r")
- sFile = sFile .. ".tar.gz"
- else
- fd = nixio.open(sPath, "r")
- end
- if not fd then
- dm.value = translate("Couldn't open file: ") .. sPath
- return
- end
- dm.value = nil
- http.header('Content-Disposition', 'attachment; filename="%s"' % {sFile})
- http.prepare_content("application/octet-stream")
- while true do
- block = fd:read(nixio.const.buffersize)
- if (not block) or (#block ==0) then
- break
- else
- http.write(block)
- end
- end
- fd:close()
- http.close()
-end
-
-local dir, fd
-dir = "/tmp/upload/"
-nixio.fs.mkdir(dir)
-http.setfilehandler(
- function(meta, chunk, eof)
- if not fd then
- if not meta then return end
-
- if meta and chunk then fd = nixio.open(dir .. meta.file, "w") end
-
- if not fd then
- um.value = translate("Create upload file error.")
- return
- end
- end
- if chunk and fd then
- fd:write(chunk)
- end
- if eof and fd then
- fd:close()
- fd = nil
- um.value = translate("File saved to") .. ' "/tmp/upload/' .. meta.file .. '"'
- end
- end
-)
-
-if luci.http.formvalue("upload") then
-
-
- local f = luci.http.formvalue("ulfile")
- if #f <= 0 then
- um.value = translate("No specify upload file.")
- end
-elseif luci.http.formvalue("download") then
- Download()
-end
-
-local function getSizeStr(size)
- local i = 0
- local byteUnits = {' kB', ' MB', ' GB', ' TB'}
- repeat
- size = size / 1024
- i = i + 1
- until(size <= 1024)
- return string.format("%.1f", size) .. byteUnits[i]
-end
-
-local inits, attr = {}
-for i, f in ipairs(fs.glob("/tmp/upload/*")) do
- attr = fs.stat(f)
- if attr then
- inits[i] = {}
- inits[i].name = fs.basename(f)
- inits[i].mtime = os.date("%Y-%m-%d %H:%M:%S", attr.mtime)
- inits[i].modestr = attr.modestr
- inits[i].size = getSizeStr(attr.size)
- inits[i].remove = 0
- inits[i].install = false
- end
-end
-
-form = SimpleForm("filelist", translate("Upload file list"), nil)
-form.reset = false
-form.submit = false
-
-tb = form:section(Table, inits)
-nm = tb:option(DummyValue, "name", translate("File name"))
-mt = tb:option(DummyValue, "mtime", translate("Modify time"))
-ms = tb:option(DummyValue, "modestr", translate("Attributes"))
-sz = tb:option(DummyValue, "size", translate("Size"))
-btnrm = tb:option(Button, "remove", translate("Remove"))
-btnrm.render = function(self, section, scope)
- self.inputstyle = "remove"
- Button.render(self, section, scope)
-end
-
-btnrm.write = function(self, section)
- local v = luci.fs.unlink("/tmp/upload/" .. luci.fs.basename(inits[section].name))
- if v then table.remove(inits, section) end
- return v
-end
-
-function IsIpkFile(name)
- name = name or ""
- local ext = string.lower(string.sub(name, -4, -1))
- return ext == ".ipk"
-end
-
-btnis = tb:option(Button, "install", translate("Install"))
-btnis.template = "filetransfer/other_button"
-btnis.render = function(self, section, scope)
- if not inits[section] then return false end
- if IsIpkFile(inits[section].name) then
- scope.display = ""
- else
- scope.display = "none"
- end
- self.inputstyle = "apply"
- Button.render(self, section, scope)
-end
-
-btnis.write = function(self, section)
- local r = luci.sys.exec(string.format('opkg --force-depends install "/tmp/upload/%s"', inits[section].name))
- form.description = string.format('%s', r)
-end
-
-return ful, fdl, form
diff --git a/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_button.htm b/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_button.htm
deleted file mode 100644
index 1c391ad98..000000000
--- a/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_button.htm
+++ /dev/null
@@ -1,7 +0,0 @@
-<%+cbi/valueheader%>
- <% if self:cfgvalue(section) ~= false then %>
-		<input class="cbi-button cbi-input-<%=self.inputstyle%>" style="display: <%= display %>" type="submit"<%= attr("name", cbid) .. attr("id", cbid) .. attr("value", self.inputtitle or self.title)%> />
- <% else %>
- -
- <% end %>
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_download.htm b/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_download.htm
deleted file mode 100644
index c14728632..000000000
--- a/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_download.htm
+++ /dev/null
@@ -1,5 +0,0 @@
-<%+cbi/valueheader%>
-
-
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_dvalue.htm b/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_dvalue.htm
deleted file mode 100644
index 296c61e4d..000000000
--- a/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_dvalue.htm
+++ /dev/null
@@ -1,8 +0,0 @@
-<%+cbi/valueheader%>
-
-<%
- local val = self:cfgvalue(section) or self.default or ""
- write(pcdata(val))
-%>
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_upload.htm b/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_upload.htm
deleted file mode 100644
index bb56f444f..000000000
--- a/package/lean/luci-app-filetransfer/luasrc/view/filetransfer/other_upload.htm
+++ /dev/null
@@ -1,5 +0,0 @@
-<%+cbi/valueheader%>
-
-
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-filetransfer/po/zh-cn/filetransfer.po b/package/lean/luci-app-filetransfer/po/zh-cn/filetransfer.po
deleted file mode 100644
index 925a47aab..000000000
--- a/package/lean/luci-app-filetransfer/po/zh-cn/filetransfer.po
+++ /dev/null
@@ -1,66 +0,0 @@
-msgid ""
-msgstr ""
-"Content-Type: text/plain; charset=UTF-8\n"
-"Project-Id-Version: \n"
-"POT-Creation-Date: \n"
-"PO-Revision-Date: \n"
-"Last-Translator: dingpengyu \n"
-"Language-Team: \n"
-"MIME-Version: 1.0\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Language: zh_CN\n"
-"X-Generator: Poedit 2.3.1\n"
-
-msgid "Choose local file:"
-msgstr "选择本地文件:"
-
-msgid "Couldn't open file:"
-msgstr "无法打开文件:"
-
-msgid "Create upload file error."
-msgstr "创建上传文件失败。"
-
-msgid "Download"
-msgstr "下载"
-
-msgid "Download file"
-msgstr "下载文件"
-
-msgid "File name"
-msgstr "文件名"
-
-msgid "File saved to"
-msgstr "文件保存到"
-
-msgid "FileTransfer"
-msgstr "文件传输"
-
-msgid "Install"
-msgstr "安装"
-
-msgid "Attributes"
-msgstr "属性"
-
-msgid "Modify time"
-msgstr "修改时间"
-
-msgid "No specify upload file."
-msgstr "未指定上传文件。"
-
-msgid "Path on Route:"
-msgstr "路由根目录:"
-
-msgid "Remove"
-msgstr "移除"
-
-msgid "Size"
-msgstr "大小"
-
-msgid "Upload"
-msgstr "上传"
-
-msgid "Upload file list"
-msgstr "上传文件列表"
-
-msgid "Upload file to '/tmp/upload/'"
-msgstr "将文件上传到'/tmp/upload/'"
diff --git a/package/lean/luci-app-filetransfer/root/etc/uci-defaults/luci-filetransfer b/package/lean/luci-app-filetransfer/root/etc/uci-defaults/luci-filetransfer
deleted file mode 100755
index 935d7c8be..000000000
--- a/package/lean/luci-app-filetransfer/root/etc/uci-defaults/luci-filetransfer
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-
-sed -i 's/cbi.submit\"] = true/cbi.submit\"] = \"1\"/g' /usr/lib/lua/luci/dispatcher.lua
-
-rm -f /tmp/luci-indexcache
-exit 0
diff --git a/package/lean/luci-app-frpc/Makefile b/package/lean/luci-app-frpc/Makefile
deleted file mode 100644
index 470303a6e..000000000
--- a/package/lean/luci-app-frpc/Makefile
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2008-2014 The LuCI Team
-#
-# This is free software, licensed under the Apache License, Version 2.0 .
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=LuCI for FRPC
-LUCI_DEPENDS:=+wget +frpc
-LUCI_PKGARCH:=all
-PKG_VERSION:=1.4
-PKG_RELEASE:=2
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-frpc/luasrc/controller/frp.lua b/package/lean/luci-app-frpc/luasrc/controller/frp.lua
deleted file mode 100644
index 59ee2ddfc..000000000
--- a/package/lean/luci-app-frpc/luasrc/controller/frp.lua
+++ /dev/null
@@ -1,18 +0,0 @@
-module("luci.controller.frp", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/frp") then
- return
- end
-
- entry({"admin", "services", "frp"}, cbi("frp/basic"), _("Frp Setting"), 100).dependent = true
- entry({"admin", "services", "frp", "config"}, cbi("frp/config")).leaf = true
- entry({"admin", "services", "frp", "status"}, call("act_status")).leaf = true
-end
-
-function act_status()
- local e = {}
- e.running = luci.sys.call("pidof frpc > /dev/null") == 0
- luci.http.prepare_content("application/json")
- luci.http.write_json(e)
-end
diff --git a/package/lean/luci-app-frpc/luasrc/model/cbi/frp/basic.lua b/package/lean/luci-app-frpc/luasrc/model/cbi/frp/basic.lua
deleted file mode 100644
index b5d2de20e..000000000
--- a/package/lean/luci-app-frpc/luasrc/model/cbi/frp/basic.lua
+++ /dev/null
@@ -1,237 +0,0 @@
-local o = require"luci.dispatcher"
-local e = require("luci.model.ipkg")
-local s = require"nixio.fs"
-local e = luci.model.uci.cursor()
-local i = "frp"
-local a,t,e
-local n = {}
-
-a = Map("frp")
-a.title = translate("Frp Setting")
-a.description = translate("Frp is a fast reverse proxy to help you expose a local server behind a NAT or firewall to the internet.")
-
-a:section(SimpleSection).template="frp/frp_status"
-
-t = a:section(NamedSection, "common","frp")
-t.anonymous = true
-t.addremove = false
-
-t:tab("base", translate("Basic Settings"))
-t:tab("other", translate("Other Settings"))
-t:tab("log", translate("Client Log"))
-
-e = t:taboption("base", Flag, "enabled", translate("Enabled"))
-e.rmempty = false
-
-e = t:taboption("base", Value, "server_addr", translate("Server"))
-e.optional = false
-e.rmempty = false
-
-e = t:taboption("base", Value, "server_port", translate("Port"))
-e.datatype = "port"
-e.optional = false
-e.rmempty = false
-
-e = t:taboption("base", Value, "token", translate("Token"))
-e.description = translate("Time duration between server of frpc and frps mustn't exceed 15 minutes.")
-e.optional = false
-e.password = true
-e.rmempty = false
-
-e = t:taboption("base", Value, "user", translate("User"))
-e.description = translate("Commonly used to distinguish you with other clients.")
-e.optional = true
-e.default = ""
-e.rmempty = false
-
-e = t:taboption("base", Value, "vhost_http_port", translate("Vhost HTTP Port"))
-e.datatype = "port"
-e.rmempty = false
-
-e = t:taboption("base", Value, "vhost_https_port", translate("Vhost HTTPS Port"))
-e.datatype = "port"
-e.rmempty = false
-
-e = t:taboption("base", Value, "time", translate("Service registration interval"))
-e.description = translate("0 means disable this feature, unit: min")
-e.datatype = "range(0,59)"
-e.default = 30
-e.rmempty = false
-
-e = t:taboption("other", Flag, "login_fail_exit", translate("Exit program when first login failed"))
-e.description = translate("decide if exit program when first login failed, otherwise continuous relogin to frps.")
-e.default = "1"
-e.rmempty = false
-
-e = t:taboption("other", Flag, "tcp_mux", translate("TCP Stream Multiplexing"))
-e.description = translate("Default is Ture. This feature in frps.ini and frpc.ini must be same.")
-e.default = "1"
-e.rmempty = false
-
-e = t:taboption("other", Flag, "tls_enable", translate("Use TLS Connection"))
-e.description = translate("if tls_enable is true, frpc will connect frps by tls.")
-e.default = "0"
-e.rmempty = false
-
-e = t:taboption("other", ListValue, "protocol", translate("Protocol Type"))
-e.description = translate("Frp support kcp protocol since v0.12.0")
-e.default = "tcp"
-e:value("tcp", translate("TCP Protocol"))
-e:value("kcp", translate("KCP Protocol"))
-
-e = t:taboption("other", Flag, "enable_http_proxy", translate("Connect frps by HTTP PROXY"))
-e.description = translate("frpc can connect frps using HTTP PROXY")
-e.default = "0"
-e.rmempty = false
-e:depends("protocol","tcp")
-
-e = t:taboption("other", Value, "http_proxy", translate("HTTP PROXY"))
-e.placeholder = "http://user:pwd@192.168.1.128:8080"
-e:depends("enable_http_proxy",1)
-e.optional = false
-
-e = t:taboption("other", Flag, "enable_cpool", translate("Enable Connection Pool"))
-e.description = translate("This feature is fit for a large number of short connections.")
-e.rmempty = false
-
-e = t:taboption("other", Value, "pool_count", translate("Connection Pool"))
-e.description = translate("Connections will be established in advance.")
-e.datatype = "uinteger"
-e.default = "1"
-e:depends("enable_cpool",1)
-e.optional=false
-
-e = t:taboption("other", ListValue, "log_level", translate("Log Level"))
-e.default = "warn"
-e:value("trace", translate("Trace"))
-e:value("debug", translate("Debug"))
-e:value("info", translate("Info"))
-e:value("warn", translate("Warning"))
-e:value("error", translate("Error"))
-
-e = t:taboption("other", Value, "log_max_days", translate("Log Keepd Max Days"))
-e.datatype = "uinteger"
-e.default = "3"
-e.rmempty = false
-e.optional = false
-
-e = t:taboption("other", Flag, "admin_enable", translate("Enable Web API"))
-e.description = translate("set admin address for control frpc's action by http api such as reload.")
-e.default = "0"
-e.rmempty=false
-
-e = t:taboption("other", Value, "admin_port", translate("Admin Web Port"))
-e.datatype = "port"
-e.default = 7400
-e:depends("admin_enable",1)
-
-e = t:taboption("other", Value, "admin_user", translate("Admin Web UserName"))
-e.optional = false
-e.default = "admin"
-e:depends("admin_enable",1)
-
-e = t:taboption("other", Value, "admin_pwd", translate("Admin Web PassWord"))
-e.optional = false
-e.default = "admin"
-e.password = true
-e:depends("admin_enable",1)
-
-e = t:taboption("log", TextValue,"log")
-e.rows = 26
-e.wrap = "off"
-e.readonly = true
-e.cfgvalue = function(t,t)
-return s.readfile("/var/etc/frp/frpc.log")or""
-end
-e.write = function(e,e,e)
-end
-
-t = a:section(TypedSection, "proxy", translate("Services List"))
-t.anonymous = true
-t.addremove = true
-t.template = "cbi/tblsection"
-t.extedit = o.build_url("admin","services","frp","config","%s")
-
-function t.create(e,t)
-new = TypedSection.create(e,t)
-luci.http.redirect(e.extedit:format(new))
-end
-
-function t.remove(e,t)
-e.map.proceed = true
-e.map:del(t)
-luci.http.redirect(o.build_url("admin","services","frp"))
-end
-
-local o = ""
-e = t:option(DummyValue, "remark", translate("Service Remark Name"))
-e.width="10%"
-
-e = t:option(DummyValue, "type", translate("Frp Protocol Type"))
-e.width="10%"
-
-e = t:option(DummyValue, "custom_domains", translate("Domain/Subdomain"))
-e.width="20%"
-
-e.cfgvalue = function(t,n)
-local t = a.uci:get(i,n,"domain_type")or""
-local m = a.uci:get(i,n,"type")or""
-if t=="custom_domains" then
-local b = a.uci:get(i,n,"custom_domains")or"" return b end
-if t=="subdomain" then
-local b = a.uci:get(i,n,"subdomain")or"" return b end
-if t=="both_dtype" then
-local b = a.uci:get(i,n,"custom_domains")or""
-local c = a.uci:get(i,n,"subdomain")or""
-b="%s/%s"%{b,c} return b end
-if m=="tcp" or m=="udp" then
-local b=a.uci:get(i,"common","server_addr")or"" return b end
-end
-
-e = t:option(DummyValue,"remote_port",translate("Remote Port"))
-e.width = "10%"
-e.cfgvalue = function(t,b)
-local t = a.uci:get(i,b,"type")or""
-if t==""or b==""then return""end
-if t=="http" then
-local b = a.uci:get(i,"common","vhost_http_port")or"" return b end
-if t=="https" then
-local b = a.uci:get(i,"common","vhost_https_port")or"" return b end
-if t=="tcp" or t=="udp" then
-local b = a.uci:get(i,b,"remote_port")or"" return b end
-end
-
-e = t:option(DummyValue, "local_ip", translate("Local Host Address"))
-e.width = "15%"
-
-e = t:option(DummyValue, "local_port", translate("Local Host Port"))
-e.width = "10%"
-
-e = t:option(DummyValue, "use_encryption", translate("Use Encryption"))
-e.width = "15%"
-
-e.cfgvalue = function(t,n)
-local t=a.uci:get(i,n,"use_encryption")or""
-local b
-if t==""or b==""then return""end
-if t=="1" then b="ON"
-else b="OFF" end
-return b
-end
-
-e = t:option(DummyValue, "use_compression", translate("Use Compression"))
-e.width = "15%"
-e.cfgvalue = function(t,n)
-local t = a.uci:get(i,n,"use_compression")or""
-local b
-if t==""or b==""then return""end
-if t=="1" then b="ON"
-else b="OFF" end
-return b
-end
-
-e = t:option(Flag, "enable", translate("Enable State"))
-e.width = "10%"
-e.rmempty = false
-
-return a
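The 'common' NamedSection above is plain UCI under /etc/config/frp, so the same basic client settings can be applied from the shell. A hedged sketch, using the option names from the CBI model; the server values are placeholders, and the init script name is assumed since it is not part of this hunk:

```sh
# Sketch: minimal frpc client setup via uci; values are examples only.
uci set frp.common.enabled='1'
uci set frp.common.server_addr='frps.example.com'
uci set frp.common.server_port='7000'
uci set frp.common.token='your-token'
uci set frp.common.tcp_mux='1'
uci set frp.common.log_level='warn'
uci commit frp
/etc/init.d/frpc restart    # assumed init script name; not shown in this diff
```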
diff --git a/package/lean/luci-app-frpc/luasrc/model/cbi/frp/config.lua b/package/lean/luci-app-frpc/luasrc/model/cbi/frp/config.lua
deleted file mode 100644
index 2a0a7b350..000000000
--- a/package/lean/luci-app-frpc/luasrc/model/cbi/frp/config.lua
+++ /dev/null
@@ -1,203 +0,0 @@
-local n = "frp"
-local i = require"luci.dispatcher"
-local o = require"luci.model.network".init()
-local m = require"nixio.fs"
-local a,t,e
-
-arg[1] = arg[1]or""
-
-a = Map("frp")
-a.title = translate("Frp Domain Config")
-a.redirect = i.build_url("admin","services","frp")
-
-t = a:section(NamedSection, arg[1], "frp")
-t.title = translate("Config Frp Protocol")
-t.addremove = false
-t.dynamic = false
-
-t:tab("base", translate("Basic Settings"))
-t:tab("other", translate("Other Settings"))
-
-e = t:taboption("base", ListValue,"enable", translate("Enable State"))
-e.default = "1"
-e.rmempty = false
-e:value("1", translate("Enable"))
-e:value("0", translate("Disable"))
-
-e = t:taboption("base", ListValue, "type", translate("Frp Protocol Type"))
-e:value("http", translate("HTTP"))
-e:value("https", translate("HTTPS"))
-e:value("tcp", translate("TCP"))
-e:value("udp", translate("UDP"))
-e:value("stcp", translate("STCP"))
-
-e = t:taboption("base", ListValue, "domain_type", translate("Domain Type"))
-e.default = "custom_domains"
-e:value("custom_domains", translate("Custom Domains"))
-e:value("subdomain", translate("SubDomain"))
-e:value("both_dtype", translate("Both the above two Domain types"))
-e:depends("type","http")
-e:depends("type","https")
-
-e = t:taboption("base", Value, "custom_domains", translate("Custom Domains"))
-e.description = translate("If SubDomain is used, Custom Domains couldn't be subdomain or wildcard domain of the maindomain(subdomain_host).")
-e:depends("domain_type","custom_domains")
-e:depends("domain_type","both_dtype")
-
-e = t:taboption("base", Value, "subdomain", translate("SubDomain"))
-e.description = translate("subdomain_host must be configured in server: frps in advance.")
-e:depends("domain_type","subdomain")
-e:depends("domain_type","both_dtype")
-
-e = t:taboption("base", ListValue, "stcp_role", translate("STCP Role"))
-e.default = "server"
-e:value("server", translate("STCP Server"))
-e:value("visitor", translate("STCP Vistor"))
-e:depends("type","stcp")
-
-e = t:taboption("base", Value, "remote_port", translate("Remote Port"))
-e.datatype = "port"
-e:depends("type","tcp")
-e:depends("type","udp")
-
-e = t:taboption("other", Flag, "enable_plugin", translate("Use Plugin"))
-e.description = translate("If plugin is defined, local_ip and local_port is useless, plugin will handle connections got from frps.")
-e.default = "0"
-e:depends("type","tcp")
-
-e = t:taboption("base", Value, "local_ip", translate("Local Host Address"))
-luci.sys.net.ipv4_hints(function(x,d)
-e:value(x,"%s (%s)"%{x,d})
-end)
-e.datatype = "ip4addr"
-e:depends("type","udp")
-e:depends("type","http")
-e:depends("type","https")
-e:depends("enable_plugin",0)
-
-e = t:taboption("base", Value, "local_port", translate("Local Host Port"))
-e.datatype = "port"
-e:depends("type","udp")
-e:depends("type","http")
-e:depends("type","https")
-e:depends("enable_plugin",0)
-
-e = t:taboption("base", Value, "stcp_secretkey", translate("STCP Screct Key"))
-e.default = "abcdefg"
-e:depends("type","stcp")
-
-e = t:taboption("base", Value, "stcp_servername", translate("STCP Server Name"))
-e.description = translate("STCP Server Name is Service Remark Name of STCP Server")
-e.default = "secret_tcp"
-e:depends("stcp_role","visitor")
-
-e = t:taboption("other", Flag, "enable_locations", translate("Enable URL routing"))
-e.description = translate("Frp support forward http requests to different backward web services by url routing.")
-e:depends("type","http")
-
-e = t:taboption("other", Value, "locations ", translate("URL routing"))
-e.description = translate("Http requests with url prefix /news will be forwarded to this service.")
-e.default = "locations=/"
-e:depends("enable_locations",1)
-
-e = t:taboption("other", ListValue, "plugin", translate("Choose Plugin"))
-e:value("http_proxy", translate("http_proxy"))
-e:value("socks5", translate("socks5"))
-e:value("unix_domain_socket", translate("unix_domain_socket"))
-e:depends("enable_plugin",1)
-
-e = t:taboption("other", Flag, "enable_plugin_httpuserpw", translate("Proxy Authentication"))
-e.description = translate("Other PCs could access the Internet through frpc's network by using http_proxy plugin.")
-e.default = "0"
-e:depends("plugin","http_proxy")
-
-e = t:taboption("other", Value, "plugin_http_user", translate("HTTP Proxy UserName"))
-e.default = "abc"
-e:depends("enable_plugin_httpuserpw",1)
-
-e = t:taboption("other", Value, "plugin_http_passwd", translate("HTTP Proxy Password"))
-e.default = "abc"
-e:depends("enable_plugin_httpuserpw",1)
-
-e = t:taboption("other", Value, "plugin_unix_path", translate("Plugin Unix Sock Path"))
-e.default = "/var/run/docker.sock"
-e:depends("plugin","unix_domain_socket")
-
-e = t:taboption("other", Flag, "enable_http_auth", translate("Password protecting your web service"))
-e.description = translate("Http username and password are safety certification for http protocol.")
-e.default = "0"
-e:depends("type","http")
-
-e = t:taboption("other", Value, "http_user", translate("HTTP UserName"))
-e.default = "frp"
-e:depends("enable_http_auth",1)
-
-e = t:taboption("other", Value, "http_pwd", translate("HTTP PassWord"))
-e.default = "frp"
-e:depends("enable_http_auth",1)
-
-e = t:taboption("other", Flag, "enable_host_header_rewrite", translate("Rewriting the Host Header"))
-e.description = translate("Frp can rewrite http requests with a modified Host header.")
-e.default = "0"
-e:depends("type","http")
-
-e = t:taboption("other", Value, "host_header_rewrite", translate("Host Header"))
-e.description = translate("The Host header will be rewritten to match the hostname portion of the forwarding address.")
-e.default = "dev.yourdomain.com"
-e:depends("enable_host_header_rewrite",1)
-
-e = t:taboption("other", Flag, "enable_https_plugin", translate("Use Plugin"))
-e.default = "0"
-e:depends("type","https")
-
-e = t:taboption("other", ListValue, "https_plugin", translate("Choose Plugin"))
-e.description = translate("If plugin is defined, local_ip and local_port is useless, plugin will handle connections got from frps.")
-e:value("https2http", translate("https2http"))
-e:depends("enable_https_plugin",1)
-
-e = t:taboption("other", Value, "plugin_local_addr", translate("Plugin_Local_Addr"))
-e.default="127.0.0.1:80"
-e:depends("https_plugin","https2http")
-
-e = t:taboption("other", Value, "plugin_crt_path", translate("plugin_crt_path"))
-e.default = "./server.crt"
-e:depends("https_plugin","https2http")
-
-e = t:taboption("other", Value, "plugin_key_path", translate("plugin_key_path"))
-e.default = "./server.key"
-e:depends("https_plugin","https2http")
-
-e = t:taboption("other", Value, "plugin_host_header_rewrite", translate("plugin_host_header_rewrite"))
-e.default = "127.0.0.1"
-e:depends("https_plugin","https2http")
-
-e = t:taboption("other", Value, "plugin_header_X_From_Where", translate("plugin_header_X-From-Where"))
-e.default = "frp"
-e:depends("https_plugin","https2http")
-
-e = t:taboption("base", ListValue, "proxy_protocol_version", translate("Proxy-Protocol Version"))
-e.description = translate("Proxy Protocol to send user's real IP to local services.")
-e.default = "disable"
-e:value("disable", translate("Disable"))
-e:value("v1", translate("V1"))
-e:value("v2", translate("V2"))
-e:depends("type","tcp")
-e:depends("type","stcp")
-e:depends("type","http")
-e:depends("type","https")
-
-e = t:taboption("base",Flag, "use_encryption", translate("Use Encryption"))
-e.description = translate("Encrypted the communication between frpc and frps, will effectively prevent the traffic intercepted.")
-e.default = "1"
-e.rmempty = false
-
-e = t:taboption("base",Flag, "use_compression", translate("Use Compression"))
-e.description = translate("The contents will be compressed to speed up the traffic forwarding speed, but this will consume some additional cpu resources.")
-e.default = "1"
-e.rmempty = false
-
-e = t:taboption("base",Value, "remark", translate("Service Remark Name"))
-e.description = translate("Please ensure the remark name is unique.")
-e.rmempty = false
-
-return a
diff --git a/package/lean/luci-app-frpc/luasrc/view/frp/frp_status.htm b/package/lean/luci-app-frpc/luasrc/view/frp/frp_status.htm
deleted file mode 100644
index 49b7a4957..000000000
--- a/package/lean/luci-app-frpc/luasrc/view/frp/frp_status.htm
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
-
diff --git a/package/lean/luci-app-frpc/po/zh-cn/frp.po b/package/lean/luci-app-frpc/po/zh-cn/frp.po
deleted file mode 100644
index 41a5586a5..000000000
--- a/package/lean/luci-app-frpc/po/zh-cn/frp.po
+++ /dev/null
@@ -1,290 +0,0 @@
-msgid ""
-msgstr "Content-Type: text/plain; charset=UTF-8"
-
-msgid "Frp Setting"
-msgstr "Frp 内网穿透"
-
-msgid "NOT RUNNING"
-msgstr "未运行"
-
-msgid "RUNNING"
-msgstr "运行中"
-
-msgid "Frp is a fast reverse proxy to help you expose a local server behind a NAT or firewall to the internet."
-msgstr "Frp 是一个可用于内网穿透的高性能的反向代理应用。"
-
-msgid "Basic Settings"
-msgstr "基本设置"
-
-msgid "Other Settings"
-msgstr "其他设置"
-
-msgid "Client Log"
-msgstr "日志"
-
-msgid "Enabled"
-msgstr "启用"
-
-msgid "Server"
-msgstr "服务器"
-
-msgid "Port"
-msgstr "端口"
-
-msgid "Token"
-msgstr "令牌"
-
-msgid "User"
-msgstr "用户名"
-
-msgid "Commonly used to distinguish you with other clients."
-msgstr "通常用于区分你与其他客户端"
-
-msgid "Time duration between server of frpc and frps mustn't exceed 15 minutes."
-msgstr "frpc服务器与frps之间的时间间隔不得超过15分钟"
-
-msgid "Vhost HTTP Port"
-msgstr "HTTP 穿透服务端口"
-
-msgid "Vhost HTTPS Port"
-msgstr "HTTPS 穿透服务端口"
-
-msgid "Exit program when first login failed"
-msgstr "初始登录失败即退出程序"
-
-msgid "TCP Stream Multiplexing"
-msgstr "TCP 端口复用"
-
-msgid "decide if exit program when first login failed, otherwise continuous relogin to frps."
-msgstr "第一次登录失败就退出程序,否则将持续尝试登陆 Frp 服务器。"
-
-msgid "Default is Ture. This feature in frps.ini and frpc.ini must be same."
-msgstr "该功能默认启用,该配置项在服务端和客户端必须保持一致。"
-
-msgid "Use TLS Connection"
-msgstr "TLS 连接"
-
-msgid "if tls_enable is true, frpc will connect frps by tls."
-msgstr "使用 TLS 协议与服务器连接(若连接服务器异常可以尝试开启)"
-
-msgid "Protocol Type"
-msgstr "协议类型"
-
-msgid "Frp support kcp protocol since v0.12.0"
-msgstr "从 v0.12.0 版本开始,底层通信协议支持选择 kcp 协议加速。"
-
-msgid "TCP Protocol"
-msgstr "TCP 协议"
-
-msgid "KCP Protocol"
-msgstr "KCP 协议"
-
-msgid "Connect frps by HTTP PROXY"
-msgstr "通过代理连接 frps"
-
-msgid "frpc can connect frps using HTTP PROXY"
-msgstr "frpc 支持通过 HTTP PROXY 和 frps 进行通信"
-
-msgid "HTTP PROXY"
-msgstr "HTTP 代理"
-
-msgid "Enable Connection Pool"
-msgstr "启用连接池功能"
-
-msgid "This feature is fit for a large number of short connections."
-msgstr "适合有大量短连接请求时开启"
-
-msgid "Connection Pool"
-msgstr "指定预创建连接的数量"
-
-msgid "Connections will be established in advance."
-msgstr "frpc 会预先和服务端建立起指定数量的连接。"
-
-msgid "Service registration interval"
-msgstr "服务注册间隔"
-
-msgid "0 means disable this feature, unit: min"
-msgstr "0表示禁用定时注册功能,单位:分钟"
-
-msgid "Log Level"
-msgstr "日志记录等级"
-
-msgid "Trace"
-msgstr "追踪"
-
-msgid "Debug"
-msgstr "调试"
-
-msgid "Info"
-msgstr "信息"
-
-msgid "Warning"
-msgstr "警告"
-
-msgid "Error"
-msgstr "错误"
-
-msgid "Log Keepd Max Days"
-msgstr "日志记录天数"
-
-msgid "Enable Web API"
-msgstr "开启网页管理"
-
-msgid "set admin address for control frpc's action by http api such as reload."
-msgstr "可通过http查看客户端状态以及通过API控制"
-
-msgid "Admin Web Port"
-msgstr "管理员端口号"
-
-msgid "Admin Web UserName"
-msgstr "管理员用户名"
-
-msgid "Admin Web PassWord"
-msgstr "管理员密码"
-
-msgid "Services List"
-msgstr "服务列表"
-
-msgid "Service Remark Name"
-msgstr "服务备注名"
-
-msgid "Domain/Subdomain"
-msgstr "域名/子域名"
-
-msgid "Remote Port"
-msgstr "远程主机端口"
-
-msgid "Local Host Address"
-msgstr "内网主机地址"
-
-msgid "Local Host Port"
-msgstr "内网主机端口"
-
-msgid "Use Encryption"
-msgstr "开启数据加密"
-
-msgid "Use Compression"
-msgstr "使用压缩"
-
-msgid "Enable State"
-msgstr "开启状态"
-
-msgid "Frp Domain Config"
-msgstr "Frp 域名配置"
-
-msgid "Config Frp Protocol"
-msgstr "配置 Frp 协议参数"
-
-msgid "Disable"
-msgstr "关闭"
-
-msgid "Frp Protocol Type"
-msgstr "Frp 协议类型"
-
-msgid "Domain Type"
-msgstr "域名类型"
-
-msgid "Custom Domains"
-msgstr "自定义域名"
-
-msgid "SubDomain"
-msgstr "子域名"
-
-msgid "Both the above two Domain types"
-msgstr "同时使用2种域名"
-
-msgid "If SubDomain is used, Custom Domains couldn't be subdomain or wildcard domain of the maindomain(subdomain_host)."
-msgstr "如果服务端配置了主域名(subdomain_host),则自定义域名不能是属于主域名(subdomain_host) 的子域名或者泛域名。"
-
-msgid "subdomain_host must be configured in server: frps in advance."
-msgstr "使用子域名时,必须预先在服务端配置主域名(subdomain_host)参数。"
-
-msgid "STCP Role"
-msgstr "SFTP 服务类型"
-
-msgid "Use Plugin"
-msgstr "使用插件"
-
-msgid "If plugin is defined, local_ip and local_port is useless, plugin will handle connections got from frps."
-msgstr "使用插件使用插件模式时,本地 IP 地址和端口无需配置,插件将会处理来自服务端的链接请求。"
-
-msgid "STCP Screct Key"
-msgstr "SFTP 密钥"
-
-msgid "STCP Server Name"
-msgstr "SFTP 服务名称"
-
-msgid "Enable URL routing"
-msgstr "启用 URL 路由"
-
-msgid "Frp support forward http requests to different backward web services by url routing."
-msgstr "Frp 支持通过url路由将http请求转发到不同的反向web服务。"
-
-msgid "Choose Plugin"
-msgstr "选择插件"
-
-msgid "Proxy Authentication"
-msgstr "代理认证"
-
-msgid "Other PCs could access the Internet through frpc's network by using http_proxy plugin."
-msgstr "http proxy 插件,可以使其他机器通过 frpc 的网络访问互联网;开启身份验证之后需要用户名、密码才能连接到 HTTP 代理。"
-
-msgid "HTTP Proxy UserName"
-msgstr "HTTP 代理用户名"
-
-msgid "HTTP Proxy Password"
-msgstr "HTTP 代理密码"
-
-msgid "Plugin Unix Sock Path"
-msgstr "Unix Sock 插件路径"
-
-msgid "Password protecting your web service"
-msgstr "密码保护您的web服务"
-
-msgid "HTTP UserName"
-msgstr "HTTP 用户名"
-
-msgid "HTTP PassWord"
-msgstr "HTTP 密码"
-
-msgid "Rewriting the Host Header"
-msgstr "修改 Host Header"
-
-msgid "Frp can rewrite http requests with a modified Host header."
-msgstr "Frp可以用修改后的主机头重写http请求。"
-
-msgid "Proxy-Protocol Version"
-msgstr "Proxy-Protocol 版本"
-
-msgid "Encrypted the communication between frpc and frps, will effectively prevent the traffic intercepted."
-msgstr "将 frpc 与 frps 之间的通信内容加密传输,将会有效防止流量被拦截。"
-
-msgid "The contents will be compressed to speed up the traffic forwarding speed, but this will consume some additional cpu resources."
-msgstr "对传输内容进行压缩,加快流量转发速度,但是会额外消耗一些 cpu 资源。"
-
-msgid "Http username and password are safety certification for http protocol."
-msgstr "Http用户名和密码是Http协议的安全认证。"
-
-msgid "Proxy Protocol to send user's real IP to local services."
-msgstr "将用户的真实IP发送到本地服务的代理协议。"
-
-msgid "STCP Server Name is Service Remark Name of STCP Server"
-msgstr "STCP服务器别名"
-
-msgid "Please ensure the remark name is unique."
-msgstr "确保备注名唯一"
-
-msgid "Plugin_Local_Addr"
-msgstr "插件本地地址(格式 IP:Port)"
-
-msgid "plugin_crt_path"
-msgstr "插件证书路径"
-
-msgid "plugin_key_path"
-msgstr "插件私钥路径"
-
-msgid "plugin_host_header_rewrite"
-msgstr "插件 Host Header 重写"
-
-msgid "plugin_header_X-From-Where"
-msgstr "插件X-From-Where请求头"
diff --git a/package/lean/luci-app-frpc/root/etc/config/frp b/package/lean/luci-app-frpc/root/etc/config/frp
deleted file mode 100644
index 8b1f67bbb..000000000
--- a/package/lean/luci-app-frpc/root/etc/config/frp
+++ /dev/null
@@ -1,17 +0,0 @@
-
-config frp 'common'
- option log_max_days '3'
- option login_fail_exit '0'
- option enable_cpool '0'
- option time '40'
- option tcp_mux '1'
- option enabled '0'
- option vhost_http_port '80'
- option vhost_https_port '443'
- option server_addr 'yourdomain.com'
- option server_port '7000'
- option token '1234567'
- option log_level 'info'
- option enable_http_proxy '0'
- option protocol 'tcp'
- option user ''
diff --git a/package/lean/luci-app-frpc/root/etc/init.d/frp b/package/lean/luci-app-frpc/root/etc/init.d/frp
deleted file mode 100755
index 8790dabb6..000000000
--- a/package/lean/luci-app-frpc/root/etc/init.d/frp
+++ /dev/null
@@ -1,246 +0,0 @@
-#!/bin/sh /etc/rc.common
-#Author: monokoo
-#Thanks to FW867's help
-
-START=99
-SERVICE_WRITE_PID=1
-SERVICE_DAEMONIZE=1
-
-LOGFILE="/var/etc/frp/frpc.log"
-
-echo_date(){
- local log=$1
- echo $(date +%Y/%m/%d\ %X): "$log" >> $LOGFILE
-}
-
-Reduce_Log(){
- local log=$1
- [ ! -f "$log" ] && return
- local sc=200
- [ -n "$2" ] && sc=$2
- local count=$(grep -c "" $log)
- if [ $count -gt $sc ];then
- let count=count-$sc
- sed -i "1,$count d" $log
- fi
-}
-
-conf_proxy_add() {
-
- local cfg="$1"
- local tmpconf="$2"
- local enable type domain_type custom_domains remote_port local_ip local_port enable_http_auth enable_host_header_rewrite host_header_rewrite
- local subdomain proxy_protocol_version use_encryption use_compression http_user http_pwd remark locations
- local enable_plugin plugin plugin_http_user plugin_http_passwd plugin_unix_path stcp_role stcp_secretkey stcp_servername
- local enable_https_plugin https_plugin plugin_local_addr plugin_crt_path plugin_key_path plugin_host_header_rewrite plugin_header_X_From_Where
-
- config_get_bool enable "$cfg" enable 1
- [ "$enable" -gt 0 ] || return 1
-
- config_get type "$cfg" type
- config_get custom_domains "$cfg" custom_domains
- config_get subdomain "$cfg" subdomain
- config_get remote_port "$cfg" remote_port
- config_get local_ip "$cfg" local_ip
- config_get local_port "$cfg" local_port
- config_get locations "$cfg" locations
- config_get host_header_rewrite "$cfg" host_header_rewrite
- config_get http_user "$cfg" http_user
- config_get http_pwd "$cfg" http_pwd
- config_get remark "$cfg" remark
- config_get plugin "$cfg" plugin
- config_get plugin_http_user "$cfg" plugin_http_user
- config_get plugin_http_passwd "$cfg" plugin_http_passwd
- config_get plugin_unix_path "$cfg" plugin_unix_path
- config_get stcp_role "$cfg" stcp_role
- config_get stcp_secretkey "$cfg" stcp_secretkey
- config_get stcp_servername "$cfg" stcp_servername
- config_get proxy_protocol_version "$cfg" proxy_protocol_version
- config_get https_plugin "$cfg" https_plugin
- config_get plugin_local_addr "$cfg" plugin_local_addr
- config_get plugin_crt_path "$cfg" plugin_crt_path
- config_get plugin_key_path "$cfg" plugin_key_path
- config_get plugin_host_header_rewrite "$cfg" plugin_host_header_rewrite
- config_get plugin_header_X_From_Where "$cfg" plugin_header_X_From_Where
-
-
- [ -n "$remark" ] && [ -n "$type" ] || return 1
-
- echo "" >>$tmpconf
- echo "[$remark]" >>$tmpconf
- echo "type=$type" >>$tmpconf
- [ -n "$custom_domains" ] && echo "custom_domains=$custom_domains" >>$tmpconf
- [ -n "$subdomain" ] && echo "subdomain=$subdomain" >>$tmpconf
- [ -n "$remote_port" ] && echo "remote_port=$remote_port" >>$tmpconf
- [ -z "$stcp_role" ] && [ -n "$local_ip" ] && echo "local_ip=$local_ip" >>$tmpconf
- [ -z "$stcp_role" ] && [ -n "$local_port" ] && echo "local_port=$local_port" >>$tmpconf
- [ -n "$locations" ] && echo "locations=$locations" >>$tmpconf
- [ -n "$http_user" -a -n "$http_pwd" ] && {
- echo "http_user=$http_user" >>$tmpconf
- echo "http_pwd=$http_pwd" >>$tmpconf
- }
- [ -n "$host_header_rewrite" ] && echo "host_header_rewrite=$host_header_rewrite" >>$tmpconf
- [ -n "$plugin" ] && echo "plugin=$plugin" >>$tmpconf
- [ -n "$plugin_http_user" -a -n "$plugin_http_passwd" ] && {
- echo "plugin_http_user=$plugin_http_user" >>$tmpconf
- echo "plugin_http_passwd=$plugin_http_passwd" >>$tmpconf
- }
- [ -n "$plugin_unix_path" ] && echo "plugin_unix_path=$plugin_unix_path" >>$tmpconf
-
- [ -n "$stcp_role" ] && {
- if [ "$stcp_role" == "visitor" ]; then
- echo "role=$stcp_role" >>$tmpconf
- [ -n "$local_ip" ] && echo "bind_addr=$local_ip" >>$tmpconf
- [ -n "$local_port" ] && echo "bind_port=$local_port" >>$tmpconf
- [ -n "$stcp_servername" ] && echo "server_name=$stcp_servername" >>$tmpconf || return 1
- else
- [ -n "$local_ip" ] && echo "local_ip=$local_ip" >>$tmpconf
- [ -n "$local_port" ] && echo "local_port=$local_port" >>$tmpconf
- fi
- [ -n "$stcp_secretkey" ] && echo "sk=$stcp_secretkey" >>$tmpconf || return 1
- }
-
- [ -n "$proxy_protocol_version" ] && {
- if [ "$proxy_protocol_version" != "disable" ]; then
- echo "proxy_protocol_version=$proxy_protocol_version" >>$tmpconf
- fi
- }
-
- [ -n "$https_plugin" ] && echo "plugin=$https_plugin" >>$tmpconf
- [ -n "$plugin_local_addr" ] && echo "plugin_local_addr=$plugin_local_addr" >>$tmpconf
- [ -n "$plugin_crt_path" -a -n "$plugin_key_path" ] && {
- echo "plugin_crt_path=$plugin_crt_path" >>$tmpconf
- echo "plugin_key_path=$plugin_key_path" >>$tmpconf
- }
- [ -n "$plugin_host_header_rewrite" ] && echo "plugin_host_header_rewrite=$plugin_host_header_rewrite" >>$tmpconf
- [ -n "$plugin_header_X_From_Where" ] && echo "plugin_header_X_From_Where=$plugin_header_X_From_Where" >>$tmpconf
-
- frp_write_bool use_encryption $cfg 1
- frp_write_bool use_compression $cfg 1
-
-}
-
-frp_write_bool() {
- local opt="$1"
- local config="$2"
- local def="$3"
- local val
-
- config_get_bool val $config "$opt" "$def"
- if [ "$val" -eq 0 ]; then
- echo "${opt}=false" >> $tmpconf
- else
- echo "${opt}=true" >> $tmpconf
- fi
-}
-
-frp_add_cru(){
-time=$1
-if [ ! -f "/etc/crontabs/root" ] || [ -z "$(cat /etc/crontabs/root | grep frp)" ]; then
- sed -i '/frp/d' /etc/crontabs/root >/dev/null 2>&1
- echo "*/$time * * * * /etc/init.d/frp restart" >> /etc/crontabs/root
-fi
-
-}
-
-frp_del_cru(){
-if [ ! -f "/etc/crontabs/root" ] || [ -n "$(cat /etc/crontabs/root | grep frp)" ]; then
- sed -i '/frp/d' /etc/crontabs/root >/dev/null 2>&1
-fi
-}
-
-download_binary(){
- echo_date "开始下载frpc二进制文件..."
- /usr/bin/wget --no-check-certificate --timeout=10 --tries=1 -o $LOGFILE https://github.com/fatedier/frp/releases/download/v0.13.0/frp_0.13.0_linux_arm.tar.gz -O /tmp/frp_0.13.0_linux_arm.tar.gz
- [ ! -s "/tmp/frp_0.13.0_linux_arm.tar.gz" ] && /usr/bin/wget -q --no-check-certificate --timeout=10 --tries=1 https://any.mokoo.xyz/app/frp_0.13.0_linux_arm.tar.gz -O /tmp/frp_0.13.0_linux_arm.tar.gz
- [ -f "/tmp/frp_0.13.0_linux_arm.tar.gz" ] && tar -xf /tmp/frp_0.13.0_linux_arm.tar.gz -C /tmp && \
- mv /tmp/frp_0.13.0_linux_arm/frpc /usr/bin/frpc
- rm -rf /tmp/frp_0.13.0_linux_arm*
- if [ -f "/usr/bin/frpc" ]; then
- chmod +x /usr/bin/frpc && echo_date "成功下载frpc二进制文件"
- else
- echo_date "下载frpc二进制文件失败,请重试!"
- fi
-}
-
-boot() {
- sleep 10s
- start
-}
-
-start() {
-
- config_load "frp"
- local enabled server_addr server_port time privilege_token user tcp_mux enable_cpool tls_enable
- local pool_count log_level log_max_days login_fail_exit http_proxy protocol admin_port admin_user admin_pwd
-
- config_get_bool enabled common enabled 1
-
- [ "$enabled" -gt 0 ] || return 1
-
- config_get server_addr common server_addr
- config_get server_port common server_port
- config_get token common token
- config_get user common user
- config_get enable_cpool common enable_cpool
- config_get pool_count common pool_count
- config_get log_level common log_level
- config_get log_max_days common log_max_days
- config_get http_proxy common http_proxy
- config_get protocol common protocol
- config_get time common time
- config_get admin_port common admin_port
- config_get admin_user common admin_user
- config_get admin_pwd common admin_pwd
-
-
- mkdir -p /var/etc/frp
- [ ! -f "$LOGFILE" ] && touch $LOGFILE
-
- [ ! -f "/usr/bin/frpc" ] && download_binary
- [ ! -f "/usr/bin/frpc" ] && logger -t Frp 'Download frpc failed, please retry.' && exit 0
-
- local tmpconf="/var/etc/frp/frpc.conf"
-
- echo "[common]" >$tmpconf
- echo "server_addr=${server_addr}" >>$tmpconf
- echo "server_port=${server_port}" >>$tmpconf
- echo "token=${token}" >>$tmpconf
- [ -n "$user" ] && echo "user=$user" >>$tmpconf
- echo "log_level=${log_level}" >>$tmpconf
- echo "log_max_days=${log_max_days}" >>$tmpconf
- echo "protocol=${protocol}" >>$tmpconf
- echo "log_file=$LOGFILE" >>$tmpconf
- [ -n "$http_proxy" ] && echo "http_proxy=$http_proxy" >>$tmpconf
- [ -n "$pool_count" ] && echo "pool_count=$pool_count" >>$tmpconf
- [ -n "$admin_port" ] && echo "admin_addr=0.0.0.0" >>$tmpconf && echo "admin_port=$admin_port" >>$tmpconf
- [ -n "$admin_user" ] && echo "admin_user=$admin_user" >>$tmpconf
- [ -n "$admin_pwd" ] && echo "admin_pwd=$admin_pwd" >>$tmpconf
-
-
- config_load "frp"
- frp_write_bool tcp_mux common 1
- frp_write_bool tls_enable common 0
- frp_write_bool login_fail_exit common 1
- config_foreach conf_proxy_add proxy "$tmpconf"
-
- [ "$(cat "$tmpconf" | grep -c "type=")" -gt 0 ] || (echo_date "frp服务启动失败,请首先添加服务列表!" && exit 0)
- logger -t FRPC 'Starting frp service'
- SERVICE_DAEMONIZE=1 \
- service_start /usr/bin/frpc -c $tmpconf
-
- [ "$time" -gt 0 ] && frp_add_cru $time
- [ -z "$(pgrep /usr/bin/frpc)" ] && echo_date "frp服务启动失败,请检查服务端 “TCP多路复用(tcp_mux)”设置,确保与客户端完全一致!"
-
- return 0
-}
-
-stop() {
- frp_del_cru
- if [ -n "`pidof frpc`" ]; then
- logger -t FRPC 'Shutting down frp service'
- service_stop /usr/bin/frpc
- Reduce_Log $LOGFILE
- fi
- return 0
-}
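A rough usage sketch for the init script above (illustrative only; the server address and token are placeholders): enable the client through UCI and let start() render /var/etc/frp/frpc.conf and launch the binary.

    uci set frp.common.enabled='1'
    uci set frp.common.server_addr='frps.example.com'
    uci set frp.common.server_port='7000'
    uci set frp.common.token='1234567'
    uci commit frp
    /etc/init.d/frp restart
    # start() writes the [common] block plus one stanza per enabled "proxy" section,
    # then runs: service_start /usr/bin/frpc -c /var/etc/frp/frpc.conf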
diff --git a/package/lean/luci-app-frpc/root/etc/uci-defaults/luci-frp b/package/lean/luci-app-frpc/root/etc/uci-defaults/luci-frp
deleted file mode 100755
index 46016e1c5..000000000
--- a/package/lean/luci-app-frpc/root/etc/uci-defaults/luci-frp
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@frp[-1]
- add ucitrack frp
- set ucitrack.@frp[-1].init=frp
- commit ucitrack
-EOF
-
-rm -f /tmp/luci-indexcache
-exit 0
diff --git a/package/lean/luci-app-frpc/root/usr/share/rpcd/acl.d/luci-app-frpc.json b/package/lean/luci-app-frpc/root/usr/share/rpcd/acl.d/luci-app-frpc.json
deleted file mode 100644
index 8ff0cfdf6..000000000
--- a/package/lean/luci-app-frpc/root/usr/share/rpcd/acl.d/luci-app-frpc.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "luci-app-frpc": {
- "description": "Grant UCI access for luci-app-frpc",
- "read": {
- "uci": [ "frp" ]
- },
- "write": {
- "uci": [ "frp" ]
- }
- }
-}
diff --git a/package/lean/luci-app-frps/Makefile b/package/lean/luci-app-frps/Makefile
deleted file mode 100644
index 89ddcba79..000000000
--- a/package/lean/luci-app-frps/Makefile
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Copyright 2020 lwz322
-# Licensed to the public under the MIT License.
-#
-
-include $(TOPDIR)/rules.mk
-
-PKG_NAME:=luci-app-frps
-PKG_VERSION:=0.0.2
-PKG_RELEASE:=4
-
-PKG_LICENSE:=MIT
-PKG_LICENSE_FILES:=LICENSE
-
-PKG_MAINTAINER:=lwz322
-
-LUCI_TITLE:=LuCI support for Frps
-LUCI_DEPENDS:=+wget +frps
-LUCI_PKGARCH:=all
-
-define Package/$(PKG_NAME)/conffiles
-/etc/config/frps
-endef
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-frps/luasrc/controller/frps.lua b/package/lean/luci-app-frps/luasrc/controller/frps.lua
deleted file mode 100644
index d4c4ab058..000000000
--- a/package/lean/luci-app-frps/luasrc/controller/frps.lua
+++ /dev/null
@@ -1,26 +0,0 @@
--- Copyright 2020 lwz322
--- Licensed to the public under the MIT License.
-
-module("luci.controller.frps", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/frps") then
- return
- end
-
- entry({"admin", "services", "frps"}, firstchild(), _("Frps")).dependent = false
-
- entry({"admin", "services", "frps", "common"}, cbi("frps/common"), _("Settings"), 1)
- entry({"admin", "services", "frps", "server"}, cbi("frps/server"), _("Server"), 2).leaf = true
-
- entry({"admin", "services", "frps", "status"}, call("action_status"))
-end
-
-
-function action_status()
- local e = {}
- e.running = luci.sys.call("pidof frps >/dev/null") == 0
- e.bin_version = luci.sys.exec("frps -v")
- luci.http.prepare_content("application/json")
- luci.http.write_json(e)
-end
diff --git a/package/lean/luci-app-frps/luasrc/model/cbi/frps/common.lua b/package/lean/luci-app-frps/luasrc/model/cbi/frps/common.lua
deleted file mode 100644
index f2b553064..000000000
--- a/package/lean/luci-app-frps/luasrc/model/cbi/frps/common.lua
+++ /dev/null
@@ -1,93 +0,0 @@
--- Copyright 2020 lwz322
--- Licensed to the public under the MIT License.
-
-local m, s, o
-
-m = Map("frps")
-m.title = translate("Frps - Common Settings")
-m.description = translate("Frp is a fast reverse proxy to help you expose a local server behind a NAT or firewall to the internet.
Project GitHub URL")
-
-m:section(SimpleSection).template = "frps/frps_status"
-
-s = m:section(NamedSection, "main", "frps")
-s.addremove = false
-s.anonymous = true
-
-s:tab("general", translate("General Options"))
-s:tab("advanced", translate("Advanced Options"))
-s:tab("dashboard", translate("Dashboard Options"))
-
-o = s:taboption("general", Flag, "enabled", translate("Enabled"))
-
-o = s:taboption("general", Value, "client_file", translate("Client file"))
-o.default = "/usr/bin/frps"
-o.rmempty = false
-
-o = s:taboption("general", ListValue, "run_user", translate("Run daemon as user"))
-o:value("", translate("root"))
-local user
-for user in luci.util.execi("cat /etc/passwd | cut -d':' -f1") do
- o:value(user)
-end
-
-o = s:taboption("general", Flag, "enable_logging", translate("Enable logging"))
-
-o = s:taboption("general", Value, "log_file", translate("Log file"))
-o:depends("enable_logging", "1")
-o.default = "/var/log/frps.log"
-
-o = s:taboption("general", ListValue, "log_level", translate("Log level"))
-o:depends("enable_logging", "1")
-o:value("trace",translate("Trace"))
-o:value("debug",translate("Debug"))
-o:value("info",translate("Info"))
-o:value("warn",translate("Warning"))
-o:value("error",translate("Error"))
-o.default = "Warn"
-
-o = s:taboption("general", ListValue, "log_max_days", translate("Log max days"))
-o.description = translate("Maximum number of days to keep log files is 3 day.")
-o:depends("enable_logging", "1")
-o.datatype = "uinteger"
-o:value("1", translate("1"))
-o:value("2", translate("2"))
-o:value("3", translate("3"))
-o.default = "3"
-o.optional = false
-
-o = s:taboption("general", Flag, "disable_log_color", translate("Disable log color"))
-o:depends("enable_logging", "1")
-o.enabled = "true"
-o.disabled = "false"
-
-o = s:taboption("advanced", Value, "max_pool_count", translate("Max pool count"))
-o.description = translate("pool_count in each proxy will change to max_pool_count if they exceed the maximum value")
-o.datatype = "uinteger"
-
-o = s:taboption("advanced", Value, "max_ports_per_client", translate("Max ports per-client"))
-o.description = translate("max ports can be used for each client, default value is 0 means no limit")
-o.datatype = "uinteger"
-o.default = '0'
-o.placeholder = '0'
-
-o = s:taboption("advanced", Value, "subdomain_host", translate("Subdomain host"))
-o.description = translatef("if subdomain_host is not empty, you can set subdomain when type is http or https in frpc's configure file; when subdomain is test, the host used by routing is test.frps.com")
-o.datatype = "host"
-
-o = s:taboption("dashboard", Value, "dashboard_addr", translate("Dashboard addr"))
-o.description = translatef("dashboard addr's default value is same with bind_addr")
-o.default = "0.0.0.0"
-o.datatype = "host"
-
-o = s:taboption("dashboard", Value, "dashboard_port", translate("Dashboard port"))
-o.description = translatef("dashboard is available only if dashboard_port is set")
-o.default = "7500"
-o.datatype = "port"
-
-o = s:taboption("dashboard", Value, "dashboard_user", translate("Dashboard user"))
-o.description = translatef("dashboard user and passwd for basic auth protect, if not set, both default value is admin")
-
-o = s:taboption("dashboard", Value, "dashboard_pwd", translate("Dashboard password"))
-o.password = true
-
-return m
diff --git a/package/lean/luci-app-frps/luasrc/model/cbi/frps/server.lua b/package/lean/luci-app-frps/luasrc/model/cbi/frps/server.lua
deleted file mode 100644
index 2201468f6..000000000
--- a/package/lean/luci-app-frps/luasrc/model/cbi/frps/server.lua
+++ /dev/null
@@ -1,54 +0,0 @@
--- Copyright 2020 lwz322
--- Licensed to the public under the MIT License.
-
-local m, s, o
-
-m = Map("frps")
-m.title = translate("Frps - Server Settings")
-m.description = translate("FRPS Server Settings")
-
-s = m:section(NamedSection, "main", "frps")
-s.anonymous = true
-s.addremove = false
-
-o = s:option(Value, "bind_port", translate("Bind port"))
-o.datatype = "port"
-o.rmempty = false
-
-o = s:option(Value, "token", translate("Token"))
-o.password = true
-
-o = s:option(Flag, "tcp_mux", translate("TCP mux"))
-o.enabled = "true"
-o.disabled = "false"
-o.default = o.enabled
-o.rmempty = false
-
-o = s:option(Flag, "tls_only", translate("Enforce frps only accept TLS connection"))
-o.description = translatef("Requirements: frpc v0.25.0+, frps v0.32.0+")
-o.enabled = "true"
-o.disabled = "false"
-o.default = o.disabled
-o.rmempty = false
-
-o = s:option(Value, "bind_udp_port", translate("UDP bind port"))
-o.description = translatef("Optional: udp port to help make udp hole to penetrate nat")
-o.datatype = "port"
-
-o = s:option(Value, "kcp_bind_port", translate("KCP bind port"))
-o.description = translatef("Optional: udp port used for kcp protocol, it can be same with 'bind port'; if not set, kcp is disabled in frps")
-o.datatype = "port"
-
-o = s:option(Value, "vhost_http_port", translate("vhost http port"))
-o.description = translatef("Optional: if you want to support virtual host, you must set the http port for listening")
-o.datatype = "port"
-
-o = s:option(Value, "vhost_https_port", translate("vhost https port"))
-o.description = translatef("Optional: Note: http port and https port can be same with bind_port")
-o.datatype = "port"
-
-o = s:option(DynamicList, "extra_setting", translate("Extra Settings"))
-o.description = translatef("List of extra settings will be added to config file. Format: option=value, eg. detailed_errors_to_client=false
.(NO SPACE!)")
-o.placeholder = "option=value"
-
-return m
\ No newline at end of file
diff --git a/package/lean/luci-app-frps/luasrc/view/frps/frps_status.htm b/package/lean/luci-app-frps/luasrc/view/frps/frps_status.htm
deleted file mode 100644
index 47aecceb2..000000000
--- a/package/lean/luci-app-frps/luasrc/view/frps/frps_status.htm
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
diff --git a/package/lean/luci-app-frps/po/zh-cn/frps.po b/package/lean/luci-app-frps/po/zh-cn/frps.po
deleted file mode 100644
index 41cb850b7..000000000
--- a/package/lean/luci-app-frps/po/zh-cn/frps.po
+++ /dev/null
@@ -1,251 +0,0 @@
-msgid ""
-msgstr "Content-Type: text/plain; charset=UTF-8"
-
-#: luasrc/model/cbi/frps/common.lua:54
-# msgid "-- default --"
-# msgstr "-- 默认 --"
-
-#: luasrc/model/cbi/frps/common.lua:44
-msgid "Advanced Options"
-msgstr "高级选项"
-
-#: luasrc/model/cbi/frps/server.lua:14
-msgid "Bind port"
-msgstr "绑定端口"
-
-#: luasrc/model/cbi/frps/common.lua:22
-msgid "Client file"
-msgstr "客户端文件"
-
-#: luasrc/model/cbi/frps/common.lua:7
-msgid "Frps - Common Settings"
-msgstr "Frps - 通用设置"
-
-#: luasrc/model/cbi/frps/common.lua:18
-msgid "Dashboard Options"
-msgstr "面板选项"
-
-#: luasrc/model/cbi/frps/common.lua:78
-msgid "Dashboard addr"
-msgstr "面板绑定地址"
-
-#: luasrc/model/cbi/frps/common.lua:91
-msgid "Dashboard password"
-msgstr "面板登录密码"
-
-#: luasrc/model/cbi/frps/common.lua:83
-msgid "Dashboard port"
-msgstr "面板绑定端口"
-
-#: luasrc/model/cbi/frps/common.lua:88
-msgid "Dashboard user"
-msgstr "面板登录用户名"
-
-#: luasrc/model/cbi/frps/common.lua:69
-# msgid "Debug"
-# msgstr "调试"
-
-#: luasrc/model/cbi/frps/common.lua:80
-msgid "Disable log color"
-msgstr "禁用日志颜色"
-
-#: luasrc/model/cbi/frps/common.lua:60
-msgid "Enable logging"
-msgstr "启用日志"
-
-#: luasrc/model/cbi/frps/common.lua:47
-msgid "Enabled"
-msgstr "已启用"
-
-#: luasrc/model/cbi/frps/common.lua:72
-msgid "Error"
-msgstr "错误"
-
-#: luasrc/model/cbi/frps/server.lua:8
-msgid "Frps - Server Settings"
-msgstr "Frps - 服务器设定"
-
-#: luasrc/model/cbi/frps/server.lua:8
-msgid "FRPS Server Settings"
-msgstr "Frps 服务器设定"
-
-#: luasrc/model/cbi/frps/common.lua:32
-msgid "Frp is a fast reverse proxy to help you expose a local server behind a NAT or firewall to the internet.
Project GitHub URL"
-msgstr "Frp 是一个可用于内网穿透的高性能的反向代理应用。
Github 项目地址"
-
-#: luasrc/model/cbi/frps/common.lua:43
-msgid "General Options"
-msgstr "常规选项"
-
-#: luasrc/model/cbi/frps/common.lua:70
-msgid "Info"
-msgstr "信息"
-
-#: luasrc/model/cbi/frps/server.lua:31
-msgid "KCP bind port"
-msgstr "KCP绑定端口"
-
-#: luasrc/model/cbi/frps/common.lua:62
-msgid "Log file"
-msgstr "日志文件"
-
-#: luasrc/model/cbi/frps/common.lua:66
-msgid "Log level"
-msgstr "日志等级"
-
-#: luasrc/model/cbi/frps/common.lua:48
-msgid "Log max days"
-msgstr "日志保存天数"
-
-#: luasrc/model/cbi/frps/common.lua:49
-msgid "Maximum number of days to keep log files is 3 day."
-msgstr "保留日志文件的最大天数为 3 天。"
-
-#: luasrc/model/cbi/frps/common.lua:85
-msgid "Max pool count"
-msgstr "最大连接数"
-
-#: luasrc/model/cbi/frps/common.lua:89
-msgid "Max ports per-client"
-msgstr "单客户端最大端口映射数"
-
-#: luasrc/view/frps/status_header.htm:26
-msgid "Not Running"
-msgstr "服务未运行"
-
-#: luasrc/model/cbi/frps/server.lua:40
-msgid "Optional: Note: http port and https port can be same with bind_port"
-msgstr "(可选)提示:http/https端口可以和绑定端口设定为一致"
-
-#: luasrc/model/cbi/frps/server.lua:36
-msgid ""
-"Optional: if you want to support virtual host, you must set the http port "
-"for listening"
-msgstr "(可选)如果您希望支持虚拟主机,则必须设定http端口"
-
-#: luasrc/model/cbi/frps/server.lua:28
-msgid "Optional: udp port to help make udp hole to penetrate nat"
-msgstr "(可选)设定UDP端口以帮助UDP协议穿透NAT"
-
-#: luasrc/model/cbi/frps/server.lua:32
-msgid ""
-"Optional: udp port used for kcp protocol, it can be same with 'bind port'; "
-"if not set, kcp is disabled in frps"
-msgstr "(可选)UDP端口用于KCP协议,可与绑定端口设定为一致;留空以禁用KCP"
-
-#: luasrc/model/cbi/frps/common.lua:53
-msgid "Run daemon as user"
-msgstr "以用户身份运行"
-
-#: luasrc/controller/frps.lua:22
-msgid "Server"
-msgstr "服务端"
-
-#: luasrc/controller/frps.lua:19
-msgid "Settings"
-msgstr "设置"
-
-#: luasrc/model/cbi/frps/common.lua:95
-msgid "Subdomain host"
-msgstr "子域名"
-
-#: luasrc/model/cbi/frps/server.lua:21
-msgid "TCP mux"
-msgstr "TCP 复用"
-
-#: luasrc/model/cbi/frps/server.lua:27
-msgid "Enforce frps only accept TLS connection"
-msgstr "强制frps只接受TLS连接"
-
-#: luasrc/model/cbi/frps/server.lua:28
-msgid "Requirements: frpc v0.25.0+, frps v0.32.0+"
-msgstr "要求:frpc版本0.25.0+, frps版本0.32.0+"
-
-#: luasrc/model/cbi/frps/server.lua:18
-msgid "Token"
-msgstr "令牌"
-
-#: luasrc/model/cbi/frps/common.lua:68
-msgid "Trace"
-msgstr "追踪"
-
-#: luasrc/model/cbi/frps/server.lua:27
-msgid "UDP bind port"
-msgstr "UDP绑定端口"
-
-#: luasrc/model/cbi/frps/common.lua:71
-msgid "Log level"
-msgstr "日志等级"
-
-#: luasrc/model/cbi/frps/common.lua:41
-msgid "Trace"
-msgstr "追踪"
-
-#: luasrc/model/cbi/frps/common.lua:42
-msgid "Debug"
-msgstr "调试"
-
-#: luasrc/model/cbi/frps/common.lua:43
-msgid "Info"
-msgstr "信息"
-
-#: luasrc/model/cbi/frps/common.lua:44
-msgid "Warning"
-msgstr "警告"
-
-#: luasrc/model/cbi/frps/common.lua:45
-msgid "Error"
-msgstr "错误"
-
-#: luasrc/model/cbi/frps/common.lua:99
-msgid "dashboard addr's default value is same with bind_addr"
-msgstr "面板地址默认和绑定地址一致"
-
-#: luasrc/model/cbi/frps/common.lua:102
-msgid "dashboard is available only if dashboard_port is set"
-msgstr "仅在设定面板绑定端口后才可使用面板功能"
-
-#: luasrc/model/cbi/frps/common.lua:105
-msgid ""
-"dashboard user and passwd for basic auth protect, if not set, both default "
-"value is admin"
-msgstr "面板用户名/密码用于基本安全认证;若留空,则用户名/密码均为admin"
-
-#: luasrc/model/cbi/frps/common.lua:96
-msgid ""
-"if subdomain_host is not empty, you can set subdomain when type is http or "
-"https in frpc's configure file; when subdomain is test, the host used by "
-"routing is test.frps.com"
-msgstr "如果subdomain_host不为空,可以在frpc配置文件中设置类型为http(s)的subdomain;subdomain为test,路由将使用test.frps.com"
-
-#: luasrc/model/cbi/frps/common.lua:90
-msgid ""
-"max ports can be used for each client, default value is 0 means no limit"
-msgstr "每个客户端最多可映射端口数,留空则默认为0(不限制)"
-
-#: luasrc/model/cbi/frps/common.lua:86
-msgid ""
-"pool_count in each proxy will change to max_pool_count if they exceed the "
-"maximum value"
-msgstr "代理连接数(pool_count)超过最大值时将变更为最大连接数(max_pool_count)"
-
-#: luasrc/model/cbi/frps/server.lua:35
-msgid "vhost http port"
-msgstr "虚拟主机http绑定端口"
-
-#: luasrc/model/cbi/frps/server.lua:39
-msgid "vhost https port"
-msgstr "虚拟主机https绑定端口"
-
-#: luasrc/model/cbi/frps/server.lua:50
-msgid "Extra Settings"
-msgstr "额外设置"
-
-#: luasrc/model/cbi/frps/server.lua:51
-msgid "List of extra settings will be added to config file. Format: option=value, eg. detailed_errors_to_client=false
.(NO SPACE!)"
-msgstr "额外设置列表将会被添加到config文件中。 格式:option=value,如:detailed_errors_to_client=false
.(不含空格!)"
-
-#: luasrc/view/frps/frps_status.htm:7
-msgid "Open Frps page"
-msgstr "打开 Frps 管理面板"
-
diff --git a/package/lean/luci-app-frps/root/etc/config/frps b/package/lean/luci-app-frps/root/etc/config/frps
deleted file mode 100644
index bfdbac22c..000000000
--- a/package/lean/luci-app-frps/root/etc/config/frps
+++ /dev/null
@@ -1,8 +0,0 @@
-config frps 'main'
- option enabled '0'
- option server 'frps'
- option dashboard_addr '0.0.0.0'
- option client_file '/usr/bin/frps'
- option bind_port '7000'
- option dashboard_port '7500'
- option tcp_mux 'true'
diff --git a/package/lean/luci-app-frps/root/etc/init.d/frps b/package/lean/luci-app-frps/root/etc/init.d/frps
deleted file mode 100755
index f9876d265..000000000
--- a/package/lean/luci-app-frps/root/etc/init.d/frps
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/bin/sh /etc/rc.common
-#
-# Copyright 2020 lwz322
-# Licensed to the public under the MIT License.
-#
-
-START=99
-USE_PROCD=1
-
-NAME="frps"
-CONFIG_FOLDER="/var/etc/$NAME"
-
-_log() {
- local level="$1" ; shift
- local msg="$@"
- logger -p "daemon.$level" -t "$NAME" "$msg"
-
- echo "[$level] $msg" >&2
-}
-
-_info() {
- _log "info" $@
-}
-
-_err() {
- _log "err" $@
-}
-
-append_options() {
- local file="$1" ; shift
- local o v
- for o in "$@" ; do
- v="$(eval echo "\$$o")"
- if [ -n "$v" ] ; then
- # add brackets when ipv6 address
- if ( echo "$o" | grep -qE 'addr|ip' ) &&
- ( echo "$v" | grep -q ':' ) ; then
- v="[$v]"
- fi
-
- echo "${o} = $v" >>"$file"
- fi
- done
-}
-
-append_setting() {
- local file="$1" ; shift
- local s="$1"
- if [ -n "$s" ] ; then
- echo "$s" >>"$file"
- fi
-}
-
-frps_scetion_validate() {
- uci_validate_section "$NAME" "frps" "$1" \
- 'enabled:bool:0' \
- 'client_file:file:/usr/bin/frps' \
- 'run_user:string' \
- 'enable_logging:bool:0' \
- 'log_file:string:/var/log/frps.log' \
- 'log_level:or("trace", "debug", "info", "warn", "error"):warn' \
- 'log_max_days:uinteger:3' \
- 'disable_log_color:or("true", "false")' \
- 'max_pool_count:uinteger' \
- 'max_ports_per_client:uinteger:0' \
- 'subdomain_host:host' \
- 'dashboard_addr:host' \
- 'dashboard_port:port' \
- 'dashboard_user:string' \
- 'dashboard_pwd:string' \
- 'bind_port:port' \
- 'token:string' \
- 'tcp_mux:or("true", "false"):true' \
- 'tls_only:or("true", "false"):false' \
- 'bind_udp_port:port' \
- 'kcp_bind_port:port' \
- 'vhost_http_port:port' \
- 'vhost_https_port:port'
-}
-
-client_file_validate() {
- local file="$1"
-
- test -f "$file" || return 1
- test -x "$file" || chmod 755 "$file"
-
- eval "$file" -h | grep -q "$NAME"
- return $?
-}
-
-add_rule_extra_option() {
- append_setting "$2" "$1"
-}
-
-create_config_file() {
- local config_file="$1"
- local tmp_file="$(mktemp /tmp/frps-XXXXXX)"
-
- echo "[common]" > "$tmp_file"
-
- append_options "$tmp_file" \
- "bind_port" "token" "tcp_mux" "tls_only" "bind_udp_port" "kcp_bind_port" "vhost_http_port" "vhost_https_port"
-
-
- if [ "x$enable_logging" = "x1" ] ; then
- if [ -z "$log_file" ]; then
- log_file="/var/log/frps.log"
- fi
-
- append_options "$tmp_file" \
- "log_file" "log_level" "log_max_days" "disable_log_color"
-
- if [ -f "$log_file" ] ; then
- echo > "$log_file"
- else
- local log_folder="$(dirname "$log_file")"
-
- if [ ! -d "$log_folder" ] ; then
- mkdir -p "$log_folder"
- fi
- fi
-
- if [ -n "$run_user" ] && ( user_exists "$run_user" ) ; then
- chmod 644 "$log_file"
- chown "$run_user" "$log_file"
- else
- run_user=""
- fi
- fi
-
- append_options "$tmp_file" \
- "max_pool_count" "max_ports_per_client" "subdomain_host" "dashboard_addr" "dashboard_port" "dashboard_user" "dashboard_pwd"
-
- extra_setting=$(uci get frps.main.extra_setting 2>/dev/null)
- if [ -n "$extra_setting" ] ; then
- for o in $extra_setting ;do
- echo "$o" >> "$tmp_file"
- done
- fi
-
- sed '/^$/d' "$tmp_file" >"$config_file"
-
- if [ "$?" = "0" ] ; then
- rm -f "$tmp_file"
- fi
-}
-
-start_instance() {
- local section="$1"
-
- if ! frps_scetion_validate "$section" ; then
- _err "Config validate failed."
- return 1
- fi
-
- if [ "x$enabled" != "x1" ] ; then
- _info "Instance \"$section\" disabled."
- return 1
- fi
-
- if [ -z "$client_file" ] || ( ! client_file_validate "$client_file" ) ; then
- _err "Client file not valid."
- return 1
- fi
-
- test -d "$CONFIG_FOLDER" || mkdir -p "$CONFIG_FOLDER"
-
- local config_file="$CONFIG_FOLDER/frps.$section.ini"
-
- create_config_file "$config_file"
-
- if [ ! -f "$config_file" ] ; then
- _err "Could not create config file: \"$config_file\""
- return 1
- fi
-
- procd_open_instance "$NAME.$section"
- procd_set_param command "$client_file"
- procd_append_param command -c "$config_file"
- procd_set_param respawn
- procd_set_param file "$config_file"
-
- if [ -n "$run_user" ] ; then
- procd_set_param user "$run_user"
- fi
-
- procd_close_instance
-}
-
-service_triggers() {
- procd_add_reload_trigger "$NAME"
-}
-
-start_service() {
- config_load "$NAME"
- config_foreach start_instance "frps"
-}
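A comparable sketch for the procd-based frps script above (the bind port and token values are placeholders; the 'main' section comes from /etc/config/frps above):

    uci set frps.main.enabled='1'
    uci set frps.main.bind_port='7000'
    uci set frps.main.token='example-token'
    uci commit frps
    /etc/init.d/frps restart
    # start_service() validates the section, renders /var/etc/frps/frps.main.ini
    # and registers frps with procd (respawn, plus a reload trigger on config changes)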
diff --git a/package/lean/luci-app-frps/root/etc/uci-defaults/40_luci-frps b/package/lean/luci-app-frps/root/etc/uci-defaults/40_luci-frps
deleted file mode 100755
index 6f1b2a18b..000000000
--- a/package/lean/luci-app-frps/root/etc/uci-defaults/40_luci-frps
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@frps[-1]
- add ucitrack frps
- set ucitrack.@frps[-1].init=frps
- commit ucitrack
-EOF
-
-rm -rf /tmp/luci-indexcache /tmp/luci-modulecache
-exit 0
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/Makefile b/package/lean/luci-app-go-aliyundrive-webdav/Makefile
deleted file mode 100644
index 1f7f09f71..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/Makefile
+++ /dev/null
@@ -1,15 +0,0 @@
-include $(TOPDIR)/rules.mk
-
-PKG_NAME:=luci-app-go-aliyundrive-webdav
-PKG_VERSION:=1.0.2
-PKG_RELEASE:=20211031
-
-PKG_MAINTAINER:=jerrykuku
-
-LUCI_TITLE:=Luci for go-aliyundrive-webdav
-LUCI_PKGARCH:=all
-LUCI_DEPENDS:=+lua +libuci-lua +go-aliyundrive-webdav
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/controller/go-aliyundrive-webdav.lua b/package/lean/luci-app-go-aliyundrive-webdav/luasrc/controller/go-aliyundrive-webdav.lua
deleted file mode 100644
index 8c62e441e..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/controller/go-aliyundrive-webdav.lua
+++ /dev/null
@@ -1,38 +0,0 @@
--- Copyright (C) 2021 jerryk
--- This is free software, licensed under the APACHE LICENSE, VERSION 2.0.
-
-module("luci.controller.go-aliyundrive-webdav", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/go-aliyundrive-webdav") then
- return
- end
- entry({"admin", "services", "go-aliyundrive-webdav"}, alias("admin", "services", "go-aliyundrive-webdav", "client"), _("Go AliyunDrive WebDAV"), 10).dependent = true -- 首页
- entry({"admin", "services", "go-aliyundrive-webdav", "client"}, cbi("go-aliyundrive-webdav/client"), _("Settings"), 1).leaf = true -- 客户端配置
- entry({"admin", "services", "go-aliyundrive-webdav", "log"}, cbi("go-aliyundrive-webdav/log"), _("Log"), 2).leaf = true -- 日志页面
-
- entry({"admin", "services", "go-aliyundrive-webdav", "status"}, call("action_status")).leaf = true
- entry({"admin", "services", "go-aliyundrive-webdav", "logtail"}, call("action_logtail")).leaf = true
-
-end
-
-function action_status()
- local e = {}
- e.running = luci.sys.call("pidof go-aliyundrive-webdav >/dev/null") == 0
- luci.http.prepare_content("application/json")
- luci.http.write_json(e)
-end
-
-function action_logtail()
- local fs = require "nixio.fs"
- local log_path = "/var/log/go-aliyundrive-webdav.log"
- local e = {}
- e.running = luci.sys.call("pidof go-aliyundrive-webdav >/dev/null") == 0
- if fs.access(log_path) then
- e.log = luci.sys.exec("tail -n 100 %s | sed 's/\\x1b\\[[0-9;]*m//g'" % log_path)
- else
- e.log = ""
- end
- luci.http.prepare_content("application/json")
- luci.http.write_json(e)
-end
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/model/cbi/go-aliyundrive-webdav/client.lua b/package/lean/luci-app-go-aliyundrive-webdav/luasrc/model/cbi/go-aliyundrive-webdav/client.lua
deleted file mode 100644
index 998b1c70d..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/model/cbi/go-aliyundrive-webdav/client.lua
+++ /dev/null
@@ -1,49 +0,0 @@
--- Copyright (C) 2021 jerryk
--- This is free software, licensed under the APACHE LICENSE, VERSION 2.0.
-
-
-local uci = luci.model.uci.cursor()
-local m, e
-
-m = Map("go-aliyundrive-webdav")
-m.title = translate("Go AliyunDrive WebDAV")
-m.description = translate("Project GitHub URL")
-
-m:section(SimpleSection).template = "go-aliyundrive-webdav/go-aliyundrive-webdav_status"
-
-e = m:section(TypedSection, "go-aliyundrive-webdav")
-e.anonymous = true
-
-enable = e:option(Flag, "enabled", translate("Enabled"))
-enable.rmempty = false
-
-rt_token = e:option(Value, "rt", translate("Refresh Token"))
-rt_token.description = translate("How to get refresh token")
-
-port = e:option(Value, "port", translate("Port"))
-port.default = "8085"
-port.datatype = "port"
-
-path = e:option(Value, "path", translate("Mounting directory"))
-path.description = translate("Access aliyundrive directory, defaults /.")
-path.default = "/"
-
-user = e:option(Value, "user", translate("Username"))
-user.default = "admin"
-
-pwd = e:option(Value, "pwd", translate("Password"))
-pwd.password = true
-pwd.default = "123456"
-
-verbose = e:option(Flag, "verbose", translate("Enable detailed logging"))
-verbose.default = "0"
-verbose.rmempty = false
-verbose.optional = false
-
-
-local e = luci.http.formvalue("cbi.apply")
-if e then
- io.popen("/etc/init.d/go-aliyundrive-webdav restart")
-end
-
-return m
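A hypothetical sketch of the UCI options this CBI model maps onto (assumption: the settings live in the first anonymous section of type go-aliyundrive-webdav; the refresh token value is a placeholder):

    uci set go-aliyundrive-webdav.@go-aliyundrive-webdav[0].enabled='1'
    uci set go-aliyundrive-webdav.@go-aliyundrive-webdav[0].rt='YOUR_REFRESH_TOKEN'
    uci set go-aliyundrive-webdav.@go-aliyundrive-webdav[0].port='8085'
    uci set go-aliyundrive-webdav.@go-aliyundrive-webdav[0].path='/'
    uci set go-aliyundrive-webdav.@go-aliyundrive-webdav[0].user='admin'
    uci set go-aliyundrive-webdav.@go-aliyundrive-webdav[0].pwd='123456'
    uci commit go-aliyundrive-webdav
    /etc/init.d/go-aliyundrive-webdav restart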
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/model/cbi/go-aliyundrive-webdav/log.lua b/package/lean/luci-app-go-aliyundrive-webdav/luasrc/model/cbi/go-aliyundrive-webdav/log.lua
deleted file mode 100644
index 4a8959db3..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/model/cbi/go-aliyundrive-webdav/log.lua
+++ /dev/null
@@ -1,9 +0,0 @@
-log = SimpleForm("logview")
-log.submit = false
-log.reset = false
-
-t = log:field(DummyValue, '', '')
-t.rawhtml = true
-t.template = 'go-aliyundrive-webdav/go-aliyundrive-webdav_log'
-
-return log
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/view/go-aliyundrive-webdav/go-aliyundrive-webdav_log.htm b/package/lean/luci-app-go-aliyundrive-webdav/luasrc/view/go-aliyundrive-webdav/go-aliyundrive-webdav_log.htm
deleted file mode 100644
index e7b49f2dc..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/view/go-aliyundrive-webdav/go-aliyundrive-webdav_log.htm
+++ /dev/null
@@ -1,15 +0,0 @@
-<%+cbi/valueheader%>
-
-
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/view/go-aliyundrive-webdav/go-aliyundrive-webdav_status.htm b/package/lean/luci-app-go-aliyundrive-webdav/luasrc/view/go-aliyundrive-webdav/go-aliyundrive-webdav_status.htm
deleted file mode 100644
index 25eb5e73c..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/luasrc/view/go-aliyundrive-webdav/go-aliyundrive-webdav_status.htm
+++ /dev/null
@@ -1,30 +0,0 @@
-
-
-
-
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/po/zh-cn/go-aliyundrive-webdav.po b/package/lean/luci-app-go-aliyundrive-webdav/po/zh-cn/go-aliyundrive-webdav.po
deleted file mode 100644
index a27a9edad..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/po/zh-cn/go-aliyundrive-webdav.po
+++ /dev/null
@@ -1,55 +0,0 @@
-msgid ""
-msgstr "Content-Type: text/plain; charset=UTF-8\n"
-
-msgid "Go AliyunDrive"
-msgstr "Go 阿里云盘"
-
-msgid "Go AliyunDrive WebDAV"
-msgstr "Go 阿里云盘 WebDAV"
-
-msgid "Enabled"
-msgstr "启用"
-
-msgid "Refresh Token"
-msgstr "Refresh Token"
-
-msgid "Port"
-msgstr "监听端口"
-
-msgid "Mounting directory"
-msgstr "挂载目录"
-
-msgid "Access aliyundrive directory, defaults /."
-msgstr "访问 aliyundrive 目录,默认为 / 。"
-
-msgid "Username"
-msgstr "用户名"
-
-msgid "Password"
-msgstr "密码"
-
-msgid "Collecting data..."
-msgstr "获取数据中..."
-
-msgid "RUNNING"
-msgstr "运行中"
-
-msgid "NOT RUNNING"
-msgstr "未运行"
-
-msgid "Settings"
-msgstr "设置"
-
-msgid "Log"
-msgstr "日志"
-
-msgid "Enable detailed logging"
-msgstr "启用详细日志记录"
-
-msgid "Project GitHub URL"
-msgstr "GitHub 项目地址"
-
-msgid "How to get refresh token"
-msgstr "查看获取 refresh token 的方法"
-
-
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/po/zh_Hans b/package/lean/luci-app-go-aliyundrive-webdav/po/zh_Hans
deleted file mode 120000
index 41451e4a1..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/po/zh_Hans
+++ /dev/null
@@ -1 +0,0 @@
-zh-cn
\ No newline at end of file
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/root/etc/uci-defaults/luci-go-aliyundrive-webdav b/package/lean/luci-app-go-aliyundrive-webdav/root/etc/uci-defaults/luci-go-aliyundrive-webdav
deleted file mode 100755
index 4ea0e82a1..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/root/etc/uci-defaults/luci-go-aliyundrive-webdav
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@go-aliyundrive-webdav[-1]
- add ucitrack go-aliyundrive-webdav
- set ucitrack.@go-aliyundrive-webdav[-1].init=go-aliyundrive-webdav
- commit ucitrack
-EOF
-
-rm -f /tmp/luci-indexcache
-exit 0
diff --git a/package/lean/luci-app-go-aliyundrive-webdav/root/usr/share/rpcd/acl.d/luci-app-go-aliyundrive-webdav.json b/package/lean/luci-app-go-aliyundrive-webdav/root/usr/share/rpcd/acl.d/luci-app-go-aliyundrive-webdav.json
deleted file mode 100644
index e623bd798..000000000
--- a/package/lean/luci-app-go-aliyundrive-webdav/root/usr/share/rpcd/acl.d/luci-app-go-aliyundrive-webdav.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "luci-app-go-aliyundrive-webdav": {
- "description": "Grant UCI access for luci-app-go-aliyundrive-webdav",
- "read": {
- "uci": [ "go-aliyundrive-webdav" ]
- },
- "write": {
- "uci": [ "go-aliyundrive-webdav" ]
- }
- }
-}
diff --git a/package/lean/luci-app-guest-wifi/Makefile b/package/lean/luci-app-guest-wifi/Makefile
deleted file mode 100644
index 814bac85d..000000000
--- a/package/lean/luci-app-guest-wifi/Makefile
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2018 By-Ameng Openwrt.org
-#
-# This is free software, licensed under the Apache License, Version 2.0 .
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=luci-app-guest-wifi
-LUCI_DEPENDS:=
-LUCI_PKGARCH:=all
-PKG_VERSION:=1.0
-PKG_RELEASE:=1
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-guest-wifi/luasrc/controller/guest-wifi.lua b/package/lean/luci-app-guest-wifi/luasrc/controller/guest-wifi.lua
deleted file mode 100644
index 27f5d3896..000000000
--- a/package/lean/luci-app-guest-wifi/luasrc/controller/guest-wifi.lua
+++ /dev/null
@@ -1,9 +0,0 @@
-module("luci.controller.guest-wifi", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/guest-wifi") then
- return
- end
-
- entry({"admin", "network", "guest-wifi"}, cbi("guest-wifi"), translate("Guest-wifi"), 19).dependent = true
-end
diff --git a/package/lean/luci-app-guest-wifi/luasrc/model/cbi/guest-wifi.lua b/package/lean/luci-app-guest-wifi/luasrc/model/cbi/guest-wifi.lua
deleted file mode 100644
index 1fbd555ca..000000000
--- a/package/lean/luci-app-guest-wifi/luasrc/model/cbi/guest-wifi.lua
+++ /dev/null
@@ -1,65 +0,0 @@
-
-require("luci.tools.webadmin")
-
-m = Map("guest-wifi", translate("Guest-wifi"))
-
-s = m:section(TypedSection, "guest-wifi", translate("Config"), translate("You can set guest wifi here. The wifi will be disconnected when enabling/disabling. When modifying the password, first disable the guest wifi, and then do the modification, save and apply. Finally check both Enable and Create, save and apply."))
-s.anonymous = true
-s.addremove = false
-
-enable = s:option(Flag, "enable", translate("Enable"), translate("Enable or disable guest wifi"))
-enable.default = false
-enable.optional = false
-enable.rmempty = false
-
-create = s:option(Flag, "create", translate("Create/Remove"), translate("Check to create guest wifi when enabled, or check to remove guest wifi when disabled."))
-create.default = false
-create.optional = false
-create.rmempty = false
-
-device = s:option(ListValue, "device", translate("Define device"), translate("Define device of guest wifi"))
-device:value("radio0", "radio0")
-device:value("radio1", "radio1")
-device:value("radio2", "radio2")
-device.default = "radio0"
-
-wifi_name = s:option(Value, "wifi_name", translate("Wifi name"), translate("Define the name of guest wifi"))
-wifi_name.default = "Guest-WiFi"
-wifi_name.rmempty = true
-
-interface_name = s:option(Value, "interface_name", translate("Interface name"), translate("Define the interface name of guest wifi"))
-interface_name.default = "guest"
-interface_name.rmempty = true
-
-interface_ip = s:option(Value, "interface_ip", translate("Interface IP address"), translate("Define IP address for guest wifi"))
-interface_ip.datatype = "ip4addr"
-interface_ip.default ="192.168.4.1"
-
-encryption = s:option(Value, "encryption", translate("Encryption"), translate("Define encryption of guest wifi"))
-encryption:value("psk", "WPA-PSK")
-encryption:value("psk2", "WPA2-PSK")
-encryption:value("none", "No Encryption")
-encryption.default = "psk2"
-encryption.widget = "select"
-
-passwd = s:option(Value, "passwd", translate("Password"), translate("Define the password of guest wifi"))
-passwd.password = true
-passwd.default = "guestnetwork"
-
-isolate = s:option(ListValue, "isolate", translate("Isolation"), translate("Enalbe or disable isolation"))
-isolate:value("1", translate("YES"))
-isolate:value("0", translate("NO"))
-
-start = s:option(Value, "start", translate("Start address"), translate("Lowest leased address as offset from the network address"))
-start.default = "50"
-start.rmempty = true
-
-limit = s:option(Value, "limit", translate("Client Limit"), translate("Maximum number of leased addresses"))
-limit.default = "200"
-limit.rmempty = true
-
-leasetime = s:option(Value, "leasetime", translate("DHCP lease time"), translate("Expiry time of leased addresses, minimum is 2 minutes (2m)"))
-leasetime.default = "1h"
-leasetime.rmempty = true
-
-return m
diff --git a/package/lean/luci-app-guest-wifi/po/zh-cn/guest-wifi.po b/package/lean/luci-app-guest-wifi/po/zh-cn/guest-wifi.po
deleted file mode 100644
index d5971e7ab..000000000
--- a/package/lean/luci-app-guest-wifi/po/zh-cn/guest-wifi.po
+++ /dev/null
@@ -1,90 +0,0 @@
-msgid "guest-wifi"
-msgstr "访客网络"
-
-msgid "Guest-wifi"
-msgstr "访客网络"
-
-msgid "Config"
-msgstr "设置"
-
-msgid "You can set guest wifi here. The wifi will be disconnected when enabling/disabling. When modifying the password, first disable the guest wifi, and then do the modification, save and apply. Finally check both Enable and Create, save and apply."
-msgstr "在此可以设置访客网络。启用/禁用访客网络时wifi会断开。当修改访客名称密码时,先禁用访客网络并保存应用,然后修改后保存应用,最后同时勾选启用和创建并保存应用。"
-
-msgid "Enable"
-msgstr "启用"
-
-msgid "Enable or disable guest wifi"
-msgstr "启用/禁用访客网络"
-
-msgid "Create/Remove"
-msgstr"创建/删除"
-
-msgid "Check to create guest wifi when enabled, or check to remove guest wifi when disabled."
-msgstr "与启用同时勾选,可创建和启用访客网络;启用不勾选时勾选,可删除访客网络。"
-
-msgid "Define device"
-msgstr "指定设备"
-
-msgid "Define device of guest wifi"
-msgstr "指定用于访客网络的设备"
-
-msgid "Wifi name"
-msgstr "无线名称"
-
-msgid "Define the name of guest wifi"
-msgstr "指定访客网络的无线名称"
-
-msgid "Interface name"
-msgstr "接口名称"
-
-msgid "Define the interface name of guest wifi"
-msgstr "设置访客网络的接口名称"
-
-msgid "Interface IP address"
-msgstr "接口IP地址"
-
-msgid "Define IP address for guest wifi"
-msgstr "指定访客网络的IP地址(不能与其他LAN的IP地址段相同)"
-
-msgid "Encryption"
-msgstr "加密"
-
-msgid "Define encryption of guest wifi"
-msgstr "设置访客网络的无线加密方式"
-
-msgid "Password"
-msgstr "密码"
-
-msgid "Define the password of guest wifi"
-msgstr "设备访客网络的无线密码"
-
-msgid "Isolation"
-msgstr "隔离"
-
-msgid "Enalbe or disable isolation"
-msgstr "开启或关闭与其它LAN网段的隔离"
-
-msgid "Start address"
-msgstr "起始地址"
-
-msgid "Lowest leased address as offset from the network address"
-msgstr "网络地址分配的起始地址"
-
-msgid "Client Limit"
-msgstr "客户端最大数目"
-
-msgid "Maximum number of leased addresses"
-msgstr "最大地址分配数量"
-
-msgid "DHCP lease time"
-msgstr "DHCP地址租用时间"
-
-msgid "Expiry time of leased addresses, minimum is 2 minutes (2m)"
-msgstr "租用地址的到期时间,最短2分钟(2m)"
-
-msgid "YES"
-msgstr "是"
-
-msgid "NO"
-msgstr "否"
-
diff --git a/package/lean/luci-app-guest-wifi/root/etc/config/guest-wifi b/package/lean/luci-app-guest-wifi/root/etc/config/guest-wifi
deleted file mode 100644
index d43f5f5b3..000000000
--- a/package/lean/luci-app-guest-wifi/root/etc/config/guest-wifi
+++ /dev/null
@@ -1,14 +0,0 @@
-
-config guest-wifi
- option enable '0'
- option wifi_name 'Guest-WiFi'
- option interface_name 'guest'
- option encryption 'psk2'
- option passwd 'guestnetwork'
- option interface_ip '192.168.4.1'
- option isolate '1'
- option start '50'
- option limit '200'
- option leasetime '1h'
- option device 'radio0'
- option create '0'
diff --git a/package/lean/luci-app-guest-wifi/root/etc/init.d/guest-wifi b/package/lean/luci-app-guest-wifi/root/etc/init.d/guest-wifi
deleted file mode 100755
index 18ad8b164..000000000
--- a/package/lean/luci-app-guest-wifi/root/etc/init.d/guest-wifi
+++ /dev/null
@@ -1,213 +0,0 @@
-#!/bin/sh /etc/rc.common
-# Copyright (C) 2015
-# Must keep author's information if you use this file.
-
-START=50
-
-HISTORY_DIR="/etc/config/guestwifi"
-[ -e /etc/config/guestwifi ]||mkdir -p /etc/config/guestwifi
-
-enabled=$(uci get guest-wifi.@guest-wifi[0].enable)
-wifi_name=$(uci get guest-wifi.@guest-wifi[0].wifi_name)
-interface_name=$(uci get guest-wifi.@guest-wifi[0].interface_name)
-encryption=$(uci get guest-wifi.@guest-wifi[0].encryption)
-passwd=$(uci get guest-wifi.@guest-wifi[0].passwd)
-interface_ip=$(uci get guest-wifi.@guest-wifi[0].interface_ip)
-isolate=$(uci get guest-wifi.@guest-wifi[0].isolate)
-start=$(uci get guest-wifi.@guest-wifi[0].start)
-limit=$(uci get guest-wifi.@guest-wifi[0].limit)
-leasetime=$(uci get guest-wifi.@guest-wifi[0].leasetime)
-device=$(uci get guest-wifi.@guest-wifi[0].device)
-create=$(uci get guest-wifi.@guest-wifi[0].create)
-
-
-start() {
- [ $enabled = 1 ] && {
- [ $create = 1 ] && {
- [ -f /etc/config/guestwifi/guest_del ] || echo "#! /bin/sh" > ${HISTORY_DIR}/guest_del
- chmod 0755 ${HISTORY_DIR}/guest_del
- add_interface
- add_ssid
- mod_dhcp
- mod_fw
- /etc/init.d/network restart
- }
- uci set guest-wifi.@guest-wifi[0].create='0'
- uci commit guest-wifi
- uci del wireless.$interface_name.disabled
- uci commit wireless
- wifi
- }
-}
-
-
-stop() {
- [ $enabled = 0 ] && {
- [ $create = 1 ] && {
- ${HISTORY_DIR}/guest_del
- rule_c=`uci show firewall |grep "Hide My LAN for $wifi_name"|grep -o "[0-9]*[0-9]"`
- uci del firewall.@rule[$rule_c]
- uci commit firewall
- rule_b=`uci show firewall |grep "Allow DHCP request for $wifi_name"|grep -o "[0-9]*[0-9]"`
- uci del firewall.@rule[$rule_b]
- uci commit firewall
- rule_a=`uci show firewall |grep "Allow DNS Queries for $wifi_name"|grep -o "[0-9]*[0-9]"`
- uci del firewall.@rule[$rule_a]
- uci commit firewall
- /etc/config/guestwifi/guest_del
- rm -rf /etc/config/guestwifi/guest_del
- /etc/init.d/network restart
- }
- uci set guest-wifi.@guest-wifi[0].create='0'
- uci commit guest-wifi
- uci set wireless.$interface_name.disabled='1'
- uci commit wireless
- wifi
- }
-}
-
-restart() {
- stop
- sleep 2
- start
-}
-
-add_interface() {
- name=`uci show network |grep "$interface_ip"`
- if [ $? = 1 ]; then
- uci set network.$interface_name=interface
- uci set network.$interface_name.proto='static'
- uci set network.$interface_name.ipaddr="$interface_ip"
- uci set network.$interface_name.netmask='255.255.255.0'
- echo "uci del network.$interface_name" >> ${HISTORY_DIR}/guest_del
- echo "uci commit network" >> ${HISTORY_DIR}/guest_del
- uci commit network
- fi
-}
-
-add_ssid() {
- check_name=`uci show wireless |grep "$wifi_name"`
- if [ $? = 1 ]; then
- uci set wireless.$interface_name=wifi-iface
- uci set wireless.$interface_name.device="$device"
- uci set wireless.$interface_name.mode='ap'
- uci set wireless.$interface_name.network="$interface_name"
- uci set wireless.$interface_name.ssid="$wifi_name"
- uci set wireless.$interface_name.encryption="$encryption"
- uci set wireless.$interface_name.isolate="$isolate"
- if [ "$encryption" != "none" ]; then
- uci set wireless.$interface_name.key="$passwd"
- fi
- echo "uci del wireless.$interface_name" >> ${HISTORY_DIR}/guest_del
- echo "uci commit wireless" >> ${HISTORY_DIR}/guest_del
- uci commit wireless
- fi
-}
-
-mod_dhcp() {
- check_dhcp=`uci show dhcp |grep "$interface_name=dhcp"`
- if [ $? = 1 ]; then
- uci set dhcp.$interface_name=dhcp
- uci set dhcp.$interface_name.interface="$interface_name"
- uci set dhcp.$interface_name.start="$start"
- uci set dhcp.$interface_name.limit="$limit"
- uci set dhcp.$interface_name.leasetime="$leasetime"
- echo "uci del dhcp.$interface_name" >> ${HISTORY_DIR}/guest_del
- echo "uci commit dhcp" >> ${HISTORY_DIR}/guest_del
- uci commit dhcp
- fi
-}
-
-mod_fw() {
- num_a=`uci show firewall |grep '=zone' |wc -l`
- num_b=`uci show firewall |grep '=forwarding' |wc -l`
-
- check_zone=`uci show firewall |grep "name=\'$interface_name\'"`
- if [ $? = 1 ]; then
- uci add firewall zone
- echo "uci del firewall.@zone[$num_a]" >> ${HISTORY_DIR}/guest_del
- echo "uci commit firewall" >> ${HISTORY_DIR}/guest_del
- uci set firewall.@zone[$num_a]=zone
- uci set firewall.@zone[$num_a].name="$interface_name"
- uci set firewall.@zone[$num_a].network="$interface_name"
- uci set firewall.@zone[$num_a].forward='REJECT'
- uci set firewall.@zone[$num_a].output='ACCEPT'
- uci set firewall.@zone[$num_a].input='REJECT'
- uci commit firewall
- fi
-
- check_forward=`uci show firewall |grep "forwarding\[.*\].src=\'"$interface_name\'""`
- if [ $? = 1 ]; then
- uci add firewall forwarding
- echo "uci del firewall.@forwarding[$num_b]" >> ${HISTORY_DIR}/guest_del
- echo "uci commit firewall" >> ${HISTORY_DIR}/guest_del
- uci set firewall.@forwarding[$num_b]=forwarding
- uci set firewall.@forwarding[$num_b].src="$interface_name"
- uci set firewall.@forwarding[$num_b].dest='wan'
- uci commit firewall
- fi
-
- check_DNS=`uci show firewall |grep "Allow DNS Queries for $wifi_name"`
- if [ $? = 1 ]; then
- num_c=`uci show firewall |grep '=rule' |wc -l`
- uci add firewall rule
- uci set firewall.@rule[$num_c]=rule
- uci set firewall.@rule[$num_c].name="Allow DNS Queries for $wifi_name"
- uci set firewall.@rule[$num_c].src="$interface_name"
- uci set firewall.@rule[$num_c].dest_port='53'
- uci set firewall.@rule[$num_c].proto='tcpudp'
- uci set firewall.@rule[$num_c].target='ACCEPT'
- uci commit firewall
- unset num_c
- fi
-
- check_DHCP=`uci show firewall |grep "Allow DHCP request for $wifi_name"`
- if [ $? = 1 ]; then
- num_c=`uci show firewall |grep '=rule' |wc -l`
- uci add firewall rule
- uci set firewall.@rule[$num_c]=rule
- uci set firewall.@rule[$num_c].name="Allow DHCP request for $wifi_name"
- uci set firewall.@rule[$num_c].src="$interface_name"
- uci set firewall.@rule[$num_c].src_port='67-68'
- uci set firewall.@rule[$num_c].dest_port='67-68'
- uci set firewall.@rule[$num_c].proto='udp'
- uci set firewall.@rule[$num_c].target='ACCEPT'
- uci commit firewall
- unset num_c
- fi
-
- check_HIDE=`uci show firewall |grep "Hide My LAN for $wifi_name"`
- if [ $? = 1 ]; then
- num_c=`uci show firewall |grep '=rule' |wc -l`
- uci add firewall rule
- uci set firewall.@rule[$num_c]=rule
- uci set firewall.@rule[$num_c].enabled='1'
- uci set firewall.@rule[$num_c].name="Hide My LAN for $wifi_name"
- uci set firewall.@rule[$num_c].proto='all'
- uci set firewall.@rule[$num_c].src="$interface_name"
- #convert netmask to cidr
- local lan_netmask=`uci get network.lan.netmask`
- local nbits=0
- local IFS=.
- for netmask_dec in $lan_netmask ; do
- case $netmask_dec in
- 255) let nbits+=8 ;;
- 254) let nbits+=7 ;;
- 252) let nbits+=6 ;;
- 248) let nbits+=5 ;;
- 240) let nbits+=4 ;;
- 224) let nbits+=3 ;;
- 192) let nbits+=2 ;;
- 128) let nbits+=1 ;;
- 0) ;;
- *) echo "Error: $netmask_dec cannot be recognised as a netmask octet." && exit 1 ;;
- esac
- done
- unset netmask_dec
- uci set firewall.@rule[$num_c].dest_ip="`uci get network.lan.ipaddr`/$nbits"
- uci set firewall.@rule[$num_c].target='REJECT'
- uci commit firewall
- unset num_c
- fi
-}
-
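For reference, the netmask-to-CIDR conversion removed in mod_fw() above can be exercised on its own. A minimal standalone sketch, with the netmask hard-coded instead of read from `uci get network.lan.netmask`; the original relies on `let`, while the sketch uses POSIX `$((...))` arithmetic so it runs under any /bin/sh:

```sh
#!/bin/sh
# Standalone sketch of the netmask-to-CIDR loop from mod_fw();
# 255.255.255.0 should print /24.
lan_netmask="255.255.255.0"
nbits=0
IFS=.
for netmask_dec in $lan_netmask; do
    case $netmask_dec in
        255) nbits=$((nbits+8)) ;;
        254) nbits=$((nbits+7)) ;;
        252) nbits=$((nbits+6)) ;;
        248) nbits=$((nbits+5)) ;;
        240) nbits=$((nbits+4)) ;;
        224) nbits=$((nbits+3)) ;;
        192) nbits=$((nbits+2)) ;;
        128) nbits=$((nbits+1)) ;;
        0) ;;
        *) echo "Error: $netmask_dec is not a valid netmask octet" >&2; exit 1 ;;
    esac
done
unset IFS
echo "/$nbits"
```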
diff --git a/package/lean/luci-app-guest-wifi/root/etc/uci-defaults/luci-guest-wifi b/package/lean/luci-app-guest-wifi/root/etc/uci-defaults/luci-guest-wifi
deleted file mode 100755
index 02ad18b3f..000000000
--- a/package/lean/luci-app-guest-wifi/root/etc/uci-defaults/luci-guest-wifi
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@guest-wifi[-1]
- add ucitrack guest-wifi
- set ucitrack.@guest-wifi[-1].init=guest-wifi
- commit ucitrack
-EOF
-
-rm -f /tmp/luci-indexcache
-exit 0
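The deleted uci-defaults script above only registers the package with ucitrack, so that saving the guest-wifi config in LuCI re-runs its init script. The same effect, shown as individual uci calls instead of a batch block (illustrative sketch only):

```sh
uci -q delete ucitrack.@guest-wifi[-1]   # drop a stale entry, if any
uci add ucitrack guest-wifi              # add a new anonymous section
uci set ucitrack.@guest-wifi[-1].init=guest-wifi
uci commit ucitrack
```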
diff --git a/package/lean/luci-app-haproxy-tcp/Makefile b/package/lean/luci-app-haproxy-tcp/Makefile
deleted file mode 100644
index 0be02c161..000000000
--- a/package/lean/luci-app-haproxy-tcp/Makefile
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (C) 2019 Openwrt.org
-#
-# This is free software, licensed under the Apache License 2.0 and the GNU General Public License v3.0.
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=luci for haproxy and shadowsocks
-LUCI_DEPENDS:=+haproxy
-LUCI_PKGARCH:=all
-PKG_NAME:=luci-app-haproxy-tcp
-PKG_VERSION=1.4
-PKG_RELEASE:=2
-PKG_MAINTAINER:=Alex Zhuo <1886090@gmail.com>
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-define Package/$(PKG_NAME)/postinst
-#!/bin/sh
-rm -rf /tmp/luci*
-echo stopping haproxy
-/etc/init.d/haproxy stop
-/etc/init.d/haproxy disable
-echo haproxy disabled
-endef
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-haproxy-tcp/luasrc/controller/haproxy.lua b/package/lean/luci-app-haproxy-tcp/luasrc/controller/haproxy.lua
deleted file mode 100644
index 849209694..000000000
--- a/package/lean/luci-app-haproxy-tcp/luasrc/controller/haproxy.lua
+++ /dev/null
@@ -1,9 +0,0 @@
-module("luci.controller.haproxy", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/haproxy") then
- return
- end
-
- entry({"admin", "services", "haproxy"}, cbi("haproxy"), _("HAProxy")).dependent = true
-end
diff --git a/package/lean/luci-app-haproxy-tcp/luasrc/model/cbi/haproxy.lua b/package/lean/luci-app-haproxy-tcp/luasrc/model/cbi/haproxy.lua
deleted file mode 100644
index 382fa5b02..000000000
--- a/package/lean/luci-app-haproxy-tcp/luasrc/model/cbi/haproxy.lua
+++ /dev/null
@@ -1,68 +0,0 @@
---Alex<1886090@gmail.com>
-local fs = require "nixio.fs"
-
-function sync_value_to_file(value, file) -- helper for writing values to a file; this approach is no longer used
- value = value:gsub("\r\n?", "\n")
- local old_value = nixio.fs.readfile(file)
- if value ~= old_value then
- nixio.fs.writefile(file, value)
- end
-end
-local state_msg = ""
-local haproxy_on = (luci.sys.call("pidof haproxy > /dev/null") == 0)
-local router_ip = luci.sys.exec("uci get network.lan.ipaddr")
-if haproxy_on then
- state_msg = "" .. translate("Running") .. ""
-else
- state_msg = "" .. translate("Not running") .. ""
-end
-m = Map("haproxy", translate("HAProxy"), translate("HAProxy能够检测Shadowsocks服务器的连通情况,从而实现负载均衡和高可用的功能,支持主备用服务器宕机自动切换,并且可以设置多个主服务器用于分流,规定每个分流节点的流量比例等。前提条件是你的所有Shadowsocks服务器的【加密方式】和【密码】一致。使用方法:配置好你的Shadowsocks服务器ip地址和端口,然后开启Shadowsocks服务,将服务器地址填写为【127.0.0.1】,端口【2222】,其他参数和之前一样即可,你可以通过访问【路由器的IP:1111/haproxy】输入用户名admin,密码root来观察各节点健康状况,红色为宕机,绿色正常,使用说明请点击这里") .. "后台监控页面:" .. router_ip .. ":1111/haproxy 用户名admin,密码root" .. "状态 - " .. state_msg)
-s = m:section(TypedSection,"arguments","")
- s.addremove = false
- s.anonymous = true
- view_enable = s:option(Flag, "enabled", translate("Enable"))
- --Controlling HAProxy by reading and writing the config file directly has been deprecated
- --view_cfg = s:option(TextValue, "1", nil)
- --view_cfg.rmempty = false
- --view_cfg.rows = 43
-
- --function view_cfg.cfgvalue()
- -- return nixio.fs.readfile("/etc/haproxy.cfg") or ""
- --end
- --function view_cfg.write(self, section, value)
- -- sync_value_to_file(value, "/etc/haproxy.cfg")
- --end
-s = m:section(TypedSection, "main_server","" .. translate("Main Server List") .. "")
- s.anonymous = true
- s.addremove = true
- o = s:option(Value, "server_name", translate("Display Name"), translate("Only English Characters,No spaces"))
- o.rmempty = false
-
- o = s:option(Flag,"validate",translate("validate"))
-
- o = s:option(Value, "server_ip", translate("Proxy Server IP"))
-
- o = s:option(Value, "server_port", translate("Proxy Server Port"))
- o.datatype = "uinteger"
- o = s:option(Value, "server_weight", translate("Weight"))
- o.datatype = "uinteger"
-
-s = m:section(TypedSection,"backup_server","" .. translate("Backup Server List") .. "")
- s.anonymous = true
- s.addremove = true
- o = s:option(Value, "server_name", translate("Display Name"), translate("Only English Characters,No spaces"))
- o.rmempty = false
-
- o = s:option(Flag, "validate", translate("validate"))
-
- o = s:option(Value, "server_ip", translate("Proxy Server IP"))
-
- o = s:option(Value, "server_port", translate("Proxy Server Port"))
- o.datatype = "uinteger"
--- ---------------------------------------------------
-local apply = luci.http.formvalue("cbi.apply")
-if apply then
- os.execute("/etc/haproxy_init.sh restart >/dev/null 2>&1 &")
-end
-
-return m
diff --git a/package/lean/luci-app-haproxy-tcp/po/zh-cn/haproxy-tcp.po b/package/lean/luci-app-haproxy-tcp/po/zh-cn/haproxy-tcp.po
deleted file mode 100644
index f0f2aa837..000000000
--- a/package/lean/luci-app-haproxy-tcp/po/zh-cn/haproxy-tcp.po
+++ /dev/null
@@ -1,34 +0,0 @@
-msgid "Running"
-msgstr "运行中"
-
-msgid "Not running"
-msgstr "未运行"
-
-msgid "Main Server List"
-msgstr "主服务器列表"
-
-msgid "Display Name"
-msgstr "服务器名称"
-
-msgid "Only English Characters,No spaces"
-msgstr "仅限英文字母,不要有空格"
-
-msgid "Proxy Server IP"
-msgstr "代理服务器IP或域名"
-
-msgid "Proxy Server Port"
-msgstr "代理服务器端口"
-
-msgid "Weight"
-msgstr "分流权重"
-
-msgid "Backup Server List"
-msgstr "备用服务器列表"
-
-msgid "validate"
-msgstr "生效"
-
-
-
-
-
diff --git a/package/lean/luci-app-haproxy-tcp/root/etc/config/haproxy b/package/lean/luci-app-haproxy-tcp/root/etc/config/haproxy
deleted file mode 100644
index 5d324ca3f..000000000
--- a/package/lean/luci-app-haproxy-tcp/root/etc/config/haproxy
+++ /dev/null
@@ -1,29 +0,0 @@
-
-config arguments
- option enabled '0'
-
-config main_server
- option server_weight '10'
- option server_ip '1.2.3.4'
- option server_port '443'
- option server_name 'JP1'
- option validate '1'
-
-config backup_server
- option server_name 'JP2'
- option server_ip '2.2.2.2'
- option server_port '8038'
- option validate '1'
-
-config backup_server
- option server_name 'JP3'
- option server_ip '3.3.3.3'
- option server_port '443'
- option validate '1'
-
-config backup_server
- option server_name 'JP4'
- option server_ip '4.4.4.4'
- option server_port '443'
- option validate '1'
-
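The init scripts that follow walk these anonymous sections by index with `uci get haproxy.@main_server[N]...` until a lookup fails. A small sketch of how the JP1 sample above is read back (expected values in comments):

```sh
ip=$(uci get haproxy.@main_server[0].server_ip)          # 1.2.3.4
port=$(uci get haproxy.@main_server[0].server_port)      # 443
weight=$(uci get haproxy.@main_server[0].server_weight)  # 10
name=$(uci get haproxy.@main_server[0].server_name)      # JP1
echo "$name -> $ip:$port (weight $weight)"
```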
diff --git a/package/lean/luci-app-haproxy-tcp/root/etc/haproxy_init.sh b/package/lean/luci-app-haproxy-tcp/root/etc/haproxy_init.sh
deleted file mode 100755
index c3a01dc2d..000000000
--- a/package/lean/luci-app-haproxy-tcp/root/etc/haproxy_init.sh
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/bin/sh /etc/rc.common
-
-CFG_FILE=/etc/haproxy.cfg
-stop(){
- logger -t alex stopping haproxy
- echo "stopping haproxy"
- /etc/init.d/haproxy disable
- /etc/init.d/haproxy stop
- [ -f /etc/haproxy_backup ] && {
- cp /etc/haproxy_backup /etc/init.d/haproxy
- }
- iptables -t nat -D OUTPUT -j HAPROXY &> /dev/null
- iptables -t nat -F HAPROXY &> /dev/null
- sleep 1
- iptables -t nat -X HAPROXY &> /dev/null
-}
-start(){
- echo "starting haproxy"
- logger -t restarting haproxy
- echo global > $CFG_FILE
- cat >> $CFG_FILE <<EOF
-EOF
- local COUNTER=0
- while true
- do
- local server_ip=`uci get haproxy.@main_server[$COUNTER].server_ip 2>/dev/null`
- local server_name=`uci get haproxy.@main_server[$COUNTER].server_name 2>/dev/null`
- local server_port=`uci get haproxy.@main_server[$COUNTER].server_port 2>/dev/null`
- local server_weight=`uci get haproxy.@main_server[$COUNTER].server_weight 2>/dev/null`
- local validate=`uci get haproxy.@main_server[$COUNTER].validate 2>/dev/null`
- if [ -z "$server_ip" ] || [ -z "$server_name" ] || [ -z "$server_port" ] || [ -z "$server_weight" ]; then
- echo break
- break
- fi
- echo the main server $COUNTER $server_ip $server_name $server_port $server_weight
- [ "$validate" = 1 ] && {
- echo server $server_name $server_ip:$server_port weight $server_weight maxconn 1024 check resolvers mydns inter 1500 rise 3 fall 3 >> $CFG_FILE
- }
- iptables -t nat -A HAPROXY -p tcp -d $server_ip -j ACCEPT
- COUNTER=$(($COUNTER+1))
- done
- COUNTER=0
- #add backup servers
- while true
- do
- local server_ip=`uci get haproxy.@backup_server[$COUNTER].server_ip 2>/dev/null`
- local server_name=`uci get haproxy.@backup_server[$COUNTER].server_name 2>/dev/null`
- local server_port=`uci get haproxy.@backup_server[$COUNTER].server_port 2>/dev/null`
- local validate=`uci get haproxy.@backup_server[$COUNTER].validate 2>/dev/null`
- if [ -z "$server_ip" ] || [ -z "$server_name" ] || [ -z "$server_port" ]; then
- echo break
- break
- fi
- echo the backup server $COUNTER $server_ip $server_name $server_port
- [ "$validate" = 1 ] && {
- echo server $server_name $server_ip:$server_port weight 10 check resolvers mydns backup inter 1500 rise 3 fall 3 >> $CFG_FILE
- }
- iptables -t nat -A HAPROXY -p tcp -d $server_ip -j ACCEPT
- COUNTER=$(($COUNTER+1))
- done
- iptables -t nat -I OUTPUT -j HAPROXY
- /etc/init.d/haproxy enable
- /etc/init.d/haproxy restart
- cp /etc/init.d/haproxy /etc/haproxy_backup
- cp /etc/haproxy_start /etc/init.d/haproxy
-}
-
-restart(){
- echo luci for haproxy
- sleep 1s
- local vt_enabled=`uci get haproxy.@arguments[0].enabled 2>/dev/null`
- logger -t haproxy is initializing enabled is $vt_enabled
- echo $vt_enabled
- if [ "$vt_enabled" = 1 ]; then
- [ -f /etc/haproxy_backup ] && {
- cp /etc/haproxy_backup /etc/init.d/haproxy
- }
- iptables -t nat -D OUTPUT -j HAPROXY &> /dev/null
- iptables -t nat -F HAPROXY &> /dev/null
- sleep 1
- iptables -t nat -X HAPROXY &> /dev/null
- start;
- else
- stop;
- fi
-}
\ No newline at end of file
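With the JP1 sample entry and validate='1', the main_server loop above appends a backend line of roughly this shape to /etc/haproxy.cfg; the global/defaults part of the file comes from the heredoc whose contents are not preserved in this diff. A sketch of the emitted line:

```sh
# What the echo inside the main_server loop produces for the JP1 sample entry:
echo server JP1 1.2.3.4:443 weight 10 maxconn 1024 check resolvers mydns inter 1500 rise 3 fall 3 >> /etc/haproxy.cfg
```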
diff --git a/package/lean/luci-app-haproxy-tcp/root/etc/haproxy_start b/package/lean/luci-app-haproxy-tcp/root/etc/haproxy_start
deleted file mode 100755
index f0c9e52b1..000000000
--- a/package/lean/luci-app-haproxy-tcp/root/etc/haproxy_start
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/sh /etc/rc.common
-# Copyright (C) 2009-2010 OpenWrt.org
-
-START=99
-STOP=80
-
-SERVICE_USE_PID=1
-
-HAPROXY_BIN="/usr/sbin/haproxy"
-HAPROXY_CONFIG="/etc/haproxy.cfg"
-HAPROXY_PID="/var/run/haproxy.pid"
-
-start() {
- service_start $HAPROXY_BIN -q -D -f "$HAPROXY_CONFIG" -p "$HAPROXY_PID"
- local COUNTER=0
- #add main servers
- iptables -t nat -D OUTPUT -j HAPROXY &> /dev/null
- iptables -t nat -X HAPROXY
- iptables -t nat -N HAPROXY
- iptables -t nat -F HAPROXY
-
- while true
- do
- local server_ip=`uci get haproxy.@main_server[$COUNTER].server_ip 2>/dev/null`
- local server_name=`uci get haproxy.@main_server[$COUNTER].server_name 2>/dev/null`
- local server_port=`uci get haproxy.@main_server[$COUNTER].server_port 2>/dev/null`
- local server_weight=`uci get haproxy.@main_server[$COUNTER].server_weight 2>/dev/null`
- local validate=`uci get haproxy.@main_server[$COUNTER].validate 2>/dev/null`
- if [ -z "$server_ip" ] || [ -z "$server_name" ] || [ -z "$server_port" ] || [ -z "$server_weight" ]; then
- echo break
- break
- fi
- echo the main2 server $COUNTER $server_ip $server_name $server_port $server_weight
- [ "$validate" = 1 ] && {
- iptables -t nat -A HAPROXY -p tcp -d $server_ip -j ACCEPT
- }
- COUNTER=$(($COUNTER+1))
- done
- COUNTER=0
- #add backup servers
- while true
- do
- local server_ip=`uci get haproxy.@backup_server[$COUNTER].server_ip 2>/dev/null`
- local server_name=`uci get haproxy.@backup_server[$COUNTER].server_name 2>/dev/null`
- local server_port=`uci get haproxy.@backup_server[$COUNTER].server_port 2>/dev/null`
- local validate=`uci get haproxy.@backup_server[$COUNTER].validate 2>/dev/null`
- if [ -z "$server_ip" ] || [ -z "$server_name" ] || [ -z "$server_port" ]; then
- echo break
- break
- fi
- echo the backup2 server $COUNTER $server_ip $server_name $server_port
- [ "$validate" = 1 ] && {
- iptables -t nat -A HAPROXY -p tcp -d $server_ip -j ACCEPT
- }
- COUNTER=$(($COUNTER+1))
- done
-
- iptables -t nat -I OUTPUT -j HAPROXY
-}
-
-stop() {
- kill -9 $(cat $HAPROXY_PID | tr "\n" " ")
- service_stop $HAPROXY_BIN
- iptables -t nat -D OUTPUT -j HAPROXY &> /dev/null
- iptables -t nat -F HAPROXY &> /dev/null
- sleep 1
- iptables -t nat -X HAPROXY &> /dev/null
-}
-
-reload() {
- $HAPROXY_BIN -D -q -f $HAPROXY_CONFIG -p $HAPROXY_PID -sf $(cat $HAPROXY_PID | tr "\n" " ")
- #$HAPROXY_BIN -D -q -f $HAPROXY_CONFIG -p $HAPROXY_PID -sf $(cat $HAPROXY_PID)
-}
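Both haproxy_init.sh and haproxy_start manage a dedicated HAPROXY chain in the nat table; the per-server ACCEPT entries are added inside the loops above. A minimal sketch of the create and teardown pattern the two scripts share:

```sh
# create (or reset) the chain and hook it into OUTPUT
iptables -t nat -N HAPROXY 2>/dev/null
iptables -t nat -F HAPROXY
iptables -t nat -I OUTPUT -j HAPROXY

# teardown: unhook first, then flush, then delete
iptables -t nat -D OUTPUT -j HAPROXY 2>/dev/null
iptables -t nat -F HAPROXY 2>/dev/null
sleep 1
iptables -t nat -X HAPROXY 2>/dev/null
```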
diff --git a/package/lean/luci-app-haproxy-tcp/root/etc/uci-defaults/99_luci-haproxy b/package/lean/luci-app-haproxy-tcp/root/etc/uci-defaults/99_luci-haproxy
deleted file mode 100755
index 72913f935..000000000
--- a/package/lean/luci-app-haproxy-tcp/root/etc/uci-defaults/99_luci-haproxy
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-/etc/init.d/haproxy disable
-/etc/init.d/haproxy stop
-
-rm -f /tmp/luci-indexcache
-exit 0
diff --git a/package/lean/luci-app-ipsec-vpnd/Makefile b/package/lean/luci-app-ipsec-vpnd/Makefile
deleted file mode 100644
index 8324d6ab3..000000000
--- a/package/lean/luci-app-ipsec-vpnd/Makefile
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (C) 2016 Openwrt.org
-#
-# This is free software, licensed under the Apache License, Version 2.0 .
-#
-
-include $(TOPDIR)/rules.mk
-
-LUCI_TITLE:=LuCI support for IPSec VPN Server (IKEv1 with PSK and Xauth)
-LUCI_DEPENDS:=+strongswan +strongswan-minimal +strongswan-mod-xauth-generic +strongswan-mod-kernel-libipsec +kmod-tun
-LUCI_PKGARCH:=all
-PKG_VERSION:=1.0
-PKG_RELEASE:=11
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-ipsec-vpnd/luasrc/controller/ipsec-server.lua b/package/lean/luci-app-ipsec-vpnd/luasrc/controller/ipsec-server.lua
deleted file mode 100644
index 4594275e6..000000000
--- a/package/lean/luci-app-ipsec-vpnd/luasrc/controller/ipsec-server.lua
+++ /dev/null
@@ -1,18 +0,0 @@
-module("luci.controller.ipsec-server", package.seeall)
-
-function index()
- if not nixio.fs.access("/etc/config/ipsec") then
- return
- end
-
- entry({"admin", "vpn"}, firstchild(), "VPN", 45).dependent = false
- entry({"admin", "vpn", "ipsec-server"}, cbi("ipsec-server"), _("IPSec VPN Server"), 80).dependent = false
- entry({"admin", "vpn", "ipsec-server", "status"}, call("act_status")).leaf = true
-end
-
-function act_status()
- local e = {}
- e.running = luci.sys.call("pgrep ipsec >/dev/null") == 0
- luci.http.prepare_content("application/json")
- luci.http.write_json(e)
-end
diff --git a/package/lean/luci-app-ipsec-vpnd/luasrc/model/cbi/ipsec-server.lua b/package/lean/luci-app-ipsec-vpnd/luasrc/model/cbi/ipsec-server.lua
deleted file mode 100644
index aa11ae1d8..000000000
--- a/package/lean/luci-app-ipsec-vpnd/luasrc/model/cbi/ipsec-server.lua
+++ /dev/null
@@ -1,35 +0,0 @@
-mp = Map("ipsec")
-mp.title = translate("IPSec VPN Server")
-mp.description = translate("IPSec VPN connectivity using the native built-in VPN Client on iOS or Android (IKEv1 with PSK and Xauth)")
-
-mp:section(SimpleSection).template = "ipsec/ipsec_status"
-
-s = mp:section(NamedSection, "ipsec", "service")
-s.anonymous = true
-
-enabled = s:option(Flag, "enabled", translate("Enable"))
-enabled.default = 0
-enabled.rmempty = false
-
-clientip = s:option(Value, "clientip", translate("VPN Client IP"))
-clientip.datatype = "ip4addr"
-clientip.description = translate("Starting IP address reserved for VPN clients, with its subnet mask (e.g. 10.10.10.2/24)")
-clientip.optional = false
-clientip.rmempty = false
-
-clientdns = s:option(Value, "clientdns", translate("VPN Client DNS"))
-clientdns.datatype = "ip4addr"
-clientdns.description = translate("DNS server used inside the VPN tunnel. Setting it to the router's LAN IP is recommended")
-clientdns.optional = false
-clientdns.rmempty = false
-
-account = s:option(Value, "account", translate("Account"))
-account.datatype = "string"
-
-password = s:option(Value, "password", translate("Password"))
-password.password = true
-
-secret = s:option(Value, "secret", translate("Secret Pre-Shared Key"))
-secret.password = true
-
-return mp
diff --git a/package/lean/luci-app-ipsec-vpnd/luasrc/view/ipsec/ipsec_status.htm b/package/lean/luci-app-ipsec-vpnd/luasrc/view/ipsec/ipsec_status.htm
deleted file mode 100644
index 1e256c73b..000000000
--- a/package/lean/luci-app-ipsec-vpnd/luasrc/view/ipsec/ipsec_status.htm
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
-
diff --git a/package/lean/luci-app-ipsec-vpnd/po/zh-cn/ipsec.po b/package/lean/luci-app-ipsec-vpnd/po/zh-cn/ipsec.po
deleted file mode 100644
index e051d341c..000000000
--- a/package/lean/luci-app-ipsec-vpnd/po/zh-cn/ipsec.po
+++ /dev/null
@@ -1,34 +0,0 @@
-msgid "IPSec VPN Server"
-msgstr "IPSec VPN 服务器"
-
-msgid "IPSec VPN connectivity using the native built-in VPN Client on iOS or Android (IKEv1 with PSK and Xauth)"
-msgstr "使用iOS 或者 Android (IKEv1 with PSK and Xauth) 原生内置 IPSec VPN 客户端进行连接"
-
-msgid "VPN Client IP"
-msgstr "VPN客户端地址段"
-
-msgid "Starting IP address reserved for VPN clients, with its subnet mask (e.g. 10.10.10.2/24)"
-msgstr "VPN客户端使用独立子网段,默认为 10.10.10.2/24"
-
-msgid "VPN Client DNS"
-msgstr "VPN客户端DNS服务器"
-
-msgid "DNS server used inside the VPN tunnel. Setting it to the router's LAN IP is recommended"
-msgstr "指定VPN客户端的DNS地址。推荐设置为 ipsec0 虚拟接口地址,默认为 10.10.10.1"
-
-msgid "Account"
-msgstr "账户"
-
-msgid "Secret Pre-Shared Key"
-msgstr "PSK密钥"
-
-msgid "IPSec VPN Server status"
-msgstr "IPSec VPN 服务器运行状态"
-
-msgid "Disable from startup"
-msgstr "禁止开机启动"
-
-msgid "Enable on startup"
-msgstr "允许开机启动"
-
-
diff --git a/package/lean/luci-app-ipsec-vpnd/root/etc/config/ipsec b/package/lean/luci-app-ipsec-vpnd/root/etc/config/ipsec
deleted file mode 100644
index 4cd3f6422..000000000
--- a/package/lean/luci-app-ipsec-vpnd/root/etc/config/ipsec
+++ /dev/null
@@ -1,9 +0,0 @@
-
-config service 'ipsec'
- option clientdns '10.10.10.1'
- option account 'lean'
- option secret 'myopenwrt'
- option enabled '0'
- option password '12345678'
- option clientip '10.10.10.2/24'
-
diff --git a/package/lean/luci-app-ipsec-vpnd/root/etc/init.d/ipsec b/package/lean/luci-app-ipsec-vpnd/root/etc/init.d/ipsec
deleted file mode 100755
index 5a4c6a217..000000000
--- a/package/lean/luci-app-ipsec-vpnd/root/etc/init.d/ipsec
+++ /dev/null
@@ -1,427 +0,0 @@
-#!/bin/sh /etc/rc.common
-
-START=90
-STOP=10
-
-USE_PROCD=1
-PROG=/usr/lib/ipsec/starter
-
-. $IPKG_INSTROOT/lib/functions.sh
-. $IPKG_INSTROOT/lib/functions/network.sh
-
-IPSEC_SECRETS_FILE=/etc/ipsec.secrets
-IPSEC_CONN_FILE=/etc/ipsec.conf
-STRONGSWAN_CONF_FILE=/etc/strongswan.conf
-
-IPSEC_VAR_SECRETS_FILE=/var/ipsec/ipsec.secrets
-IPSEC_VAR_CONN_FILE=/var/ipsec/ipsec.conf
-STRONGSWAN_VAR_CONF_FILE=/var/ipsec/strongswan.conf
-
-WAIT_FOR_INTF=0
-
-file_reset() {
- : > "$1"
-}
-
-xappend() {
- local file="$1"
- shift
-
- echo "${@}" >> "${file}"
-}
-
-remove_include() {
- local file="$1"
- local include="$2"
-
- sed -i "\_${include}_d" "${file}"
-}
-
-remove_includes() {
- remove_include "${IPSEC_CONN_FILE}" "${IPSEC_VAR_CONN_FILE}"
- remove_include "${IPSEC_SECRETS_FILE}" "${IPSEC_VAR_SECRETS_FILE}"
- remove_include "${STRONGSWAN_CONF_FILE}" "${STRONGSWAN_VAR_CONF_FILE}"
-}
-
-do_include() {
- local conf="$1"
- local uciconf="$2"
- local backup=`mktemp -t -p /tmp/ ipsec-init-XXXXXX`
-
- [ ! -f "${conf}" ] && rm -rf "${conf}"
- touch "${conf}"
-
- cat "${conf}" | grep -v "${uciconf}" > "${backup}"
- mv "${backup}" "${conf}"
- xappend "${conf}" "include ${uciconf}"
- file_reset "${uciconf}"
-}
-
-ipsec_reset() {
- do_include "${IPSEC_CONN_FILE}" "${IPSEC_VAR_CONN_FILE}"
-}
-
-ipsec_xappend() {
- xappend "${IPSEC_VAR_CONN_FILE}" "$@"
-}
-
-swan_reset() {
- do_include "${STRONGSWAN_CONF_FILE}" "${STRONGSWAN_VAR_CONF_FILE}"
-}
-
-swan_xappend() {
- xappend "${STRONGSWAN_VAR_CONF_FILE}" "$@"
-}
-
-secret_reset() {
- do_include "${IPSEC_SECRETS_FILE}" "${IPSEC_VAR_SECRETS_FILE}"
-}
-
-secret_xappend() {
- xappend "${IPSEC_VAR_SECRETS_FILE}" "$@"
-}
-
-warning() {
- echo "WARNING: $@" >&2
-}
-
-add_crypto_proposal() {
- local encryption_algorithm
- local hash_algorithm
- local dh_group
-
- config_get encryption_algorithm "$1" encryption_algorithm
- config_get hash_algorithm "$1" hash_algorithm
- config_get dh_group "$1" dh_group
-
- [ -n "${encryption_algorithm}" ] && \
- crypto="${crypto:+${crypto},}${encryption_algorithm}${hash_algorithm:+-${hash_algorithm}}${dh_group:+-${dh_group}}"
-}
-
-set_crypto_proposal() {
- local conf="$1"
- local proposal
-
- crypto=""
-
- config_get crypto_proposal "$conf" crypto_proposal ""
- for proposal in $crypto_proposal; do
- add_crypto_proposal "$proposal"
- done
-
- [ -n "${crypto}" ] && {
- local force_crypto_proposal
-
- config_get_bool force_crypto_proposal "$conf" force_crypto_proposal
-
- [ "${force_crypto_proposal}" = "1" ] && crypto="${crypto}!"
- }
-
- crypto_proposal="${crypto}"
-}
-
-config_conn() {
- # Generic ipsec conn section shared by tunnel and transport
- local mode
- local local_subnet
- local local_nat
- local local_sourceip
- local local_updown
- local local_firewall
- local remote_subnet
- local remote_sourceip
- local remote_updown
- local remote_firewall
- local ikelifetime
- local lifetime
- local margintime
- local keyingtries
- local dpdaction
- local dpddelay
- local inactivity
- local keyexchange
-
- config_get mode "$1" mode "route"
- config_get local_subnet "$1" local_subnet ""
- config_get local_nat "$1" local_nat ""
- config_get local_sourceip "$1" local_sourceip ""
- config_get local_updown "$1" local_updown ""
- config_get local_firewall "$1" local_firewall ""
- config_get remote_subnet "$1" remote_subnet ""
- config_get remote_sourceip "$1" remote_sourceip ""
- config_get remote_updown "$1" remote_updown ""
- config_get remote_firewall "$1" remote_firewall ""
- config_get ikelifetime "$1" ikelifetime "3h"
- config_get lifetime "$1" lifetime "1h"
- config_get margintime "$1" margintime "9m"
- config_get keyingtries "$1" keyingtries "3"
- config_get dpdaction "$1" dpdaction "none"
- config_get dpddelay "$1" dpddelay "30s"
- config_get inactivity "$1" inactivity
- config_get keyexchange "$1" keyexchange "ikev2"
-
- [ -n "$local_nat" ] && local_subnet=$local_nat
-
- ipsec_xappend "conn $config_name-$1"
- ipsec_xappend " left=%any"
- ipsec_xappend " right=$remote_gateway"
-
- [ -n "$local_sourceip" ] && ipsec_xappend " leftsourceip=$local_sourceip"
- [ -n "$local_subnet" ] && ipsec_xappend " leftsubnet=$local_subnet"
-
- [ -n "$local_firewall" ] && ipsec_xappend " leftfirewall=$local_firewall"
- [ -n "$remote_firewall" ] && ipsec_xappend " rightfirewall=$remote_firewall"
-
- ipsec_xappend " ikelifetime=$ikelifetime"
- ipsec_xappend " lifetime=$lifetime"
- ipsec_xappend " margintime=$margintime"
- ipsec_xappend " keyingtries=$keyingtries"
- ipsec_xappend " dpdaction=$dpdaction"
- ipsec_xappend " dpddelay=$dpddelay"
-
- [ -n "$inactivity" ] && ipsec_xappend " inactivity=$inactivity"
-
- if [ "$auth_method" = "psk" ]; then
- ipsec_xappend " leftauth=psk"
- ipsec_xappend " rightauth=psk"
-
- [ "$remote_sourceip" != "" ] && ipsec_xappend " rightsourceip=$remote_sourceip"
- [ "$remote_subnet" != "" ] && ipsec_xappend " rightsubnet=$remote_subnet"
-
- ipsec_xappend " auto=$mode"
- else
- warning "AuthenticationMethod $auth_method not supported"
- fi
-
- [ -n "$local_identifier" ] && ipsec_xappend " leftid=$local_identifier"
- [ -n "$remote_identifier" ] && ipsec_xappend " rightid=$remote_identifier"
- [ -n "$local_updown" ] && ipsec_xappend " leftupdown=$local_updown"
- [ -n "$remote_updown" ] && ipsec_xappend " rightupdown=$remote_updown"
- ipsec_xappend " keyexchange=$keyexchange"
-
- set_crypto_proposal "$1"
- [ -n "${crypto_proposal}" ] && ipsec_xappend " esp=$crypto_proposal"
- [ -n "${ike_proposal}" ] && ipsec_xappend " ike=$ike_proposal"
-}
-
-config_tunnel() {
- config_conn "$1"
-
- # Specific for the tunnel part
- ipsec_xappend " type=tunnel"
-}
-
-config_transport() {
- config_conn "$1"
-
- # Specific for the transport part
- ipsec_xappend " type=transport"
-}
-
-config_remote() {
- local enabled
- local gateway
- local pre_shared_key
- local auth_method
-
- config_name=$1
-
- config_get_bool enabled "$1" enabled 0
- [ $enabled -eq 0 ] && return
-
- config_get gateway "$1" gateway
- config_get pre_shared_key "$1" pre_shared_key
- config_get auth_method "$1" authentication_method
- config_get local_identifier "$1" local_identifier ""
- config_get remote_identifier "$1" remote_identifier ""
-
- [ "$gateway" = "any" ] && remote_gateway="%any" || remote_gateway="$gateway"
-
- [ -z "$local_identifier" ] && {
- local ipdest
-
- [ "$remote_gateway" = "%any" ] && ipdest="1.1.1.1" || ipdest="$remote_gateway"
- local_gateway=`ip route get $ipdest | awk -F"src" '/src/{gsub(/ /,"");print $2}'`
- }
-
- [ -n "$local_identifier" ] && secret_xappend -n "$local_identifier " || secret_xappend -n "$local_gateway "
- [ -n "$remote_identifier" ] && secret_xappend -n "$remote_identifier " || secret_xappend -n "$remote_gateway "
-
- secret_xappend ": PSK \"$pre_shared_key\""
-
- set_crypto_proposal "$1"
- ike_proposal="$crypto_proposal"
-
- config_list_foreach "$1" tunnel config_tunnel
-
- config_list_foreach "$1" transport config_transport
-
- ipsec_xappend ""
-}
-
-config_ipsec() {
- local debug
- local rtinstall_enabled
- local routing_tables_ignored
- local routing_table
- local routing_table_id
- local interface
- local device_list
-
- ipsec_reset
- secret_reset
- swan_reset
-
- ipsec_xappend "# generated by /etc/init.d/ipsec"
- ipsec_xappend "version 2"
- ipsec_xappend ""
-
- secret_xappend "# generated by /etc/init.d/ipsec"
-
- config_get debug "$1" debug 0
- config_get_bool rtinstall_enabled "$1" rtinstall_enabled 1
- [ $rtinstall_enabled -eq 1 ] && install_routes=yes || install_routes=no
-
- # prepare extra charon config option ignore_routing_tables
- for routing_table in $(config_get "$1" "ignore_routing_tables"); do
- if [ "$routing_table" -ge 0 ] 2>/dev/null; then
- routing_table_id=$routing_table
- else
- routing_table_id=$(sed -n '/[ \t]*[0-9]\+[ \t]\+'$routing_table'[ \t]*$/s/[ \t]*\([0-9]\+\).*/\1/p' /etc/iproute2/rt_tables)
- fi
-
- [ -n "$routing_table_id" ] && append routing_tables_ignored "$routing_table_id"
- done
-
- local interface_list=$(config_get "$1" "interface")
- if [ -z "$interface_list" ]; then
- WAIT_FOR_INTF=0
- else
- for interface in $interface_list; do
- network_get_device device $interface
- [ -n "$device" ] && append device_list "$device" ","
- done
- [ -n "$device_list" ] && WAIT_FOR_INTF=0 || WAIT_FOR_INTF=1
- fi
-
- swan_xappend "# generated by /etc/init.d/ipsec"
- swan_xappend "charon {"
- swan_xappend " load_modular = yes"
- swan_xappend " install_routes = $install_routes"
- [ -n "$routing_tables_ignored" ] && swan_xappend " ignore_routing_tables = $routing_tables_ignored"
- [ -n "$device_list" ] && swan_xappend " interfaces_use = $device_list"
- swan_xappend " plugins {"
- swan_xappend " include /etc/strongswan.d/charon/*.conf"
- swan_xappend " }"
- swan_xappend " syslog {"
- swan_xappend " identifier = ipsec"
- swan_xappend " daemon {"
- swan_xappend " default = $debug"
- swan_xappend " }"
- swan_xappend " auth {"
- swan_xappend " default = $debug"
- swan_xappend " }"
- swan_xappend " }"
- swan_xappend "}"
-}
-
-prepare_env() {
- mkdir -p /var/ipsec
- remove_includes
- config_load ipsec
- config_foreach config_ipsec ipsec
- config_foreach config_remote remote
-}
-
-service_running() {
- ipsec status > /dev/null 2>&1
-}
-
-reload_service() {
- local bool vt_enabled=`uci get ipsec.@service[0].enabled 2>/dev/null`
- [ "$vt_enabled" = 0 ] && /etc/init.d/ipsec stop && return
- running && {
- prepare_env
- [ $WAIT_FOR_INTF -eq 0 ] && {
- ipsec rereadall
- ipsec reload
- return
- }
- }
- [ "$vt_enabled" = 1 ] && start
-}
-
-check_ipsec_interface() {
- local intf
-
- for intf in $(config_get "$1" interface); do
- procd_add_interface_trigger "interface.*" "$intf" /etc/init.d/ipsec reload
- done
-}
-
-service_triggers() {
- procd_add_reload_trigger "ipsec"
- config_load "ipsec"
- config_foreach check_ipsec_interface ipsec
-}
-
-start_service() {
- local vt_enabled=`uci get ipsec.@service[0].enabled 2>/dev/null`
- local vt_clientip=`uci get ipsec.@service[0].clientip`
- local vt_clientdns=`uci get ipsec.@service[0].clientdns`
- local vt_account=`uci get ipsec.@service[0].account`
- local vt_password=`uci get ipsec.@service[0].password 2>/dev/null`
- local vt_secret=`uci get ipsec.@service[0].secret 2>/dev/null`
-
- [ "$vt_enabled" = 0 ] && /etc/init.d/ipsec stop && return
-
- cat > /etc/ipsec.conf <<EOF
-EOF
- cat > /etc/ipsec.secrets <<EOF
-EOF
-iptables -D FORWARD -m policy --dir in --pol ipsec --proto esp -j ACCEPT 2>/dev/null
-iptables -D FORWARD -m policy --dir out --pol ipsec --proto esp -j ACCEPT 2>/dev/null
-iptables -D INPUT -m policy --dir in --pol ipsec --proto esp -j ACCEPT 2>/dev/null
-iptables -D OUTPUT -m policy --dir out --pol ipsec --proto esp -j ACCEPT 2>/dev/null
-
-iptables -I FORWARD -m policy --dir in --pol ipsec --proto esp -j ACCEPT
-iptables -I FORWARD -m policy --dir out --pol ipsec --proto esp -j ACCEPT
-iptables -I INPUT -m policy --dir in --pol ipsec --proto esp -j ACCEPT
-iptables -I OUTPUT -m policy --dir out --pol ipsec --proto esp -j ACCEPT
-
-echo 1 > /proc/sys/net/ipv4/conf/br-lan/proxy_arp
diff --git a/package/lean/luci-app-ipsec-vpnd/root/etc/uci-defaults/luci-ipsec b/package/lean/luci-app-ipsec-vpnd/root/etc/uci-defaults/luci-ipsec
deleted file mode 100755
index fd7b33d36..000000000
--- a/package/lean/luci-app-ipsec-vpnd/root/etc/uci-defaults/luci-ipsec
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete firewall.ipsecd
- set firewall.ipsecd=include
- set firewall.ipsecd.type=script
- set firewall.ipsecd.path=/etc/ipsec.include
- set firewall.ipsecd.reload=1
- commit firewall
-EOF
-
-uci -q batch <<-EOF >/dev/null
- delete network.VPN
- set network.VPN=interface
- set network.VPN.ifname="ipsec0"
- set network.VPN.proto="static"
- set network.VPN.ipaddr="10.10.10.1"
- set network.VPN.netmask="255.255.255.0"
-
- commit network
-
- delete firewall.ike
- add firewall rule
- rename firewall.@rule[-1]="ike"
- set firewall.@rule[-1].name="ike"
- set firewall.@rule[-1].target="ACCEPT"
- set firewall.@rule[-1].src="wan"
- set firewall.@rule[-1].proto="udp"
- set firewall.@rule[-1].dest_port="500"
-
- delete firewall.ipsec
- add firewall rule
- rename firewall.@rule[-1]="ipsec"
- set firewall.@rule[-1].name="ipsec"
- set firewall.@rule[-1].target="ACCEPT"
- set firewall.@rule[-1].src="wan"
- set firewall.@rule[-1].proto="udp"
- set firewall.@rule[-1].dest_port="4500"
-
- delete firewall.ah
- add firewall rule
- rename firewall.@rule[-1]="ah"
- set firewall.@rule[-1].name="ah"
- set firewall.@rule[-1].target="ACCEPT"
- set firewall.@rule[-1].src="wan"
- set firewall.@rule[-1].proto="ah"
-
- delete firewall.esp
- add firewall rule
- rename firewall.@rule[-1]="esp"
- set firewall.@rule[-1].name="esp"
- set firewall.@rule[-1].target="ACCEPT"
- set firewall.@rule[-1].src="wan"
- set firewall.@rule[-1].proto="esp"
-
- delete firewall.VPN
- set firewall.VPN=zone
- set firewall.VPN.name="VPN"
- set firewall.VPN.input="ACCEPT"
- set firewall.VPN.forward="ACCEPT"
- set firewall.VPN.output="ACCEPT"
- set firewall.VPN.network="VPN"
-
- delete firewall.vpn
- set firewall.vpn=forwarding
- set firewall.vpn.name="vpn"
- set firewall.vpn.dest="wan"
- set firewall.vpn.src="VPN"
-
- commit firewall
-EOF
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@ipsec[-1]
- add ucitrack ipsec
- set ucitrack.@ipsec[-1].init=ipsec
- commit ucitrack
-EOF
-
-rm -f /tmp/luci-indexcache
-exit 0
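After this uci-defaults script has run, the objects it creates can be inspected from the shell. A short sketch of the checks (comments summarise the expected settings):

```sh
uci show network.VPN      # interface ipsec0, static 10.10.10.1/255.255.255.0
uci show firewall.VPN     # zone VPN, input/forward/output ACCEPT
uci show firewall.vpn     # forwarding VPN -> wan
uci show firewall.ike     # accept UDP 500 from wan
uci show firewall.ipsec   # accept UDP 4500 from wan
uci show firewall.ah      # accept proto ah from wan
uci show firewall.esp     # accept proto esp from wan
uci show firewall.ipsecd  # include script /etc/ipsec.include, reload=1
```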
diff --git a/package/lean/luci-app-jd-dailybonus/Makefile b/package/lean/luci-app-jd-dailybonus/Makefile
deleted file mode 100644
index 079d3b89e..000000000
--- a/package/lean/luci-app-jd-dailybonus/Makefile
+++ /dev/null
@@ -1,17 +0,0 @@
-include $(TOPDIR)/rules.mk
-
-PKG_NAME:=luci-app-jd-dailybonus
-PKG_VERSION:=1.0.6
-PKG_RELEASE:=20211016
-
-LUCI_TITLE:=Luci for JD dailybonus Script
-LUCI_PKGARCH:=all
-LUCI_DEPENDS:=+node +wget +lua +libuci-lua
-
-define Package/$(PKG_NAME)/conffiles
-/etc/config/jd-dailybonus
-endef
-
-include $(TOPDIR)/feeds/luci/luci.mk
-
-# call BuildPackage - OpenWrt buildroot signature
diff --git a/package/lean/luci-app-jd-dailybonus/luasrc/controller/jd-dailybonus.lua b/package/lean/luci-app-jd-dailybonus/luasrc/controller/jd-dailybonus.lua
deleted file mode 100644
index b7e7128ef..000000000
--- a/package/lean/luci-app-jd-dailybonus/luasrc/controller/jd-dailybonus.lua
+++ /dev/null
@@ -1,123 +0,0 @@
--- Copyright (C) 2020 jerrykuku
--- Licensed to the public under the GNU General Public License v3.
-module('luci.controller.jd-dailybonus', package.seeall)
-function index()
- if not nixio.fs.access('/etc/config/jd-dailybonus') then
- return
- end
-
- entry({'admin', 'services', 'jd-dailybonus'}, alias('admin', 'services', 'jd-dailybonus', 'client'), _('京东签到服务'), 10).dependent = true -- landing page
- entry({'admin', 'services', 'jd-dailybonus', 'client'}, cbi('jd-dailybonus/client', {hidesavebtn = true, hideresetbtn = true}), _('客户端'), 10).leaf = true -- basic settings
- entry({'admin', 'services', 'jd-dailybonus', 'log'}, form('jd-dailybonus/log'), _('日志'), 30).leaf = true -- log page
- entry({'admin', 'services', 'jd-dailybonus', 'script'}, form('jd-dailybonus/script'), _('脚本查看'), 20).leaf = true -- view and edit the script directly
- entry({'admin', 'services', 'jd-dailybonus', 'run'}, call('run')) -- run the script
- entry({'admin', 'services', 'jd-dailybonus', 'update'}, call('update')) -- run an update
- entry({'admin', 'services', 'jd-dailybonus', 'check_update'}, call('check_update')) -- check for updates
- entry({'admin', 'services', 'jd-dailybonus', 'qrcode'}, call('qrcode')) -- fetch the login QR code
- entry({'admin', 'services', 'jd-dailybonus', 'check_login'}, call('check_login')) -- check login status
- entry({'admin', 'services', 'jd-dailybonus', 'realtime_log'}, call('get_log')) -- fetch the realtime log
-end
-
--- run the script
-function run()
- local running = luci.sys.call("busybox ps -w | grep JD_DailyBonus.js | grep -v grep >/dev/null") == 0
- if not running then
- luci.sys.call('sh /usr/share/jd-dailybonus/newapp.sh -r')
- end
- luci.http.write('')
-end
-
--- check for updates
-function check_update()
- local jd = 'jd-dailybonus'
- local e = {}
- local new_version = luci.sys.exec('sh /usr/share/jd-dailybonus/newapp.sh -n')
- e.new_version = new_version
- e.error = 0
- luci.http.prepare_content('application/json')
- luci.http.write_json(e)
-end
-
--- run the update
-function update()
- local jd = 'jd-dailybonus'
- local e = {}
- local uci = luci.model.uci.cursor()
- local version = luci.http.formvalue('version')
- -- download the script
- local code = luci.sys.exec('sh /usr/share/jd-dailybonus/newapp.sh -u')
- e.error = code
- luci.http.prepare_content('application/json')
- luci.http.write_json(e)
-end
-
-local User_Agent='Mozilla/5.0 (iPad; CPU OS 12_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1'
-local Host='Host: plogin.m.jd.com'
-local Accept='Accept: application/json, text/plain, */*'
-local Accept_Language='Accept-Language: zh-cn'
-local cookie='/tmp/jd_cookie'
-
-function get_timestamp()
- return os.time()*1000
-end
-
-function get_s_token()
- luci.sys.exec('rm -f ' .. cookie)
- local timestamp = get_timestamp()
- local url =
- 'https://plogin.m.jd.com/cgi-bin/mm/new_login_entrance?lang=chs&appid=300&returnurl=https://wq.jd.com/passport/LoginRedirect?state=' ..
- timestamp .. '&returnurl=https://home.m.jd.com/myJd/newhome.action?sceneval=2&ufc=&/myJd/home.action&source=wq_passport'
- local referer =
- 'https://plogin.m.jd.com/login/login?appid=300&returnurl=https://wq.jd.com/passport/LoginRedirect?state=' ..
- timestamp .. '&returnurl=https://home.m.jd.com/myJd/newhome.action?sceneval=2&ufc=&/myJd/home.action&source=wq_passport'
- local s_token = luci.sys.exec("echo -n $(wget --header='"..Accept.."' --header='"..Accept_Language.."' --header='"..Host.."' --referer='"..referer.."' --user-agent='"..User_Agent.."' --save-cookies="..cookie.." --keep-session-cookies -q -O - '"..url.."' | sed s/[[:space:]]//g | grep -oE '\"s_token\":\"(.+?)\"' | awk -F \\\" '{print $4}')")
- return s_token
-end
-
--- fetch the QR code
-function qrcode()
- local timestamp = get_timestamp()
- local s_token = get_s_token()
- local url = 'https://plogin.m.jd.com/cgi-bin/m/tmauthreflogurl?s_token='..s_token..'&v='..timestamp..'&remember=true'
- local referer = 'https://plogin.m.jd.com/login/login?appid=300&returnurl=https://wq.jd.com/passport/LoginRedirect?state=' .. timestamp .. '&returnurl=https://home.m.jd.com/myJd/newhome.action?sceneval=2&ufc=&/myJd/home.action&source=wq_passport'
- local response = luci.sys.exec("echo -n $(wget --header='"..Accept.."' --header='"..Accept_Language.."' --header='"..Host.."' --referer='"..referer.."' --user-agent='"..User_Agent.."' --load-cookies="..cookie.." --save-cookies="..cookie.." --keep-session-cookies -q -O - '"..url.."')")
- local token = luci.sys.exec("echo -n $(echo \'"..response.."\' | grep -oE '\"token\":\"(.+?)\"' | awk -F \\\" '{print $4}')")
- local ou_state = luci.sys.exec("echo -n $(echo \'"..response.."\' | grep -oE '\"ou_state\":(\\d+)' | awk -F : '{print $2}')")
- local okl_token = luci.sys.exec("echo -n $(cat "..cookie.." | grep okl_token | awk '{print $7}')")
- local return_json = {
- qrcode_url = 'https://plogin.m.jd.com/cgi-bin/m/tmauth?appid=300&client_type=m&token=' .. token,
- check_url = 'https://plogin.m.jd.com/cgi-bin/m/tmauthchecktoken?&token=' .. token .. '&ou_state=' .. ou_state .. '&okl_token=' .. okl_token,
- }
- luci.http.prepare_content('application/json')
- luci.http.write_json(return_json)
-end
-
--- check login status
-function check_login()
- local uci = luci.model.uci.cursor()
- local data = luci.http.formvalue()
- local post_data = 'lang=chs&appid=300&source=wq_passport&returnurl=https://wqlogin2.jd.com/passport/LoginRedirect?state=1100399130787&returnurl=//home.m.jd.com/myJd/newhome.action?sceneval=2&ufc=&/myJd/home.action'
- local referer='https://plogin.m.jd.com/login/login?appid=300&returnurl=https://wqlogin2.jd.com/passport/LoginRedirect?state='
- local response = luci.sys.exec("echo -n $(wget --post-data='"..post_data.."' --header='"..Accept.."' --header='"..Accept_Language.."' --header='"..Host.."' --referer='"..referer.."' --user-agent='"..User_Agent.."' --load-cookies="..cookie.." --save-cookies="..cookie.." --keep-session-cookies -q -O - '"..data.check_url.."')")
- local return_json = {
- error = tonumber(luci.sys.exec("echo -n $(echo \'"..response.."\' | grep -oE '\"errcode\":(\\d+)' | awk -F : '{print $2}')")),
- msg = luci.sys.exec("echo -n $(echo \'"..response.."\' | grep -oE '\"message\":\"(.+?)\"' | awk -F \\\" '{print $4}')"),
- }
- if return_json.error == 0 then
- local pt_key = luci.sys.exec("echo -n $(cat "..cookie.." | grep pt_key | awk '{print $7}')")
- local pt_pin = luci.sys.exec("echo -n $(cat "..cookie.." | grep pt_pin | awk '{print $7}')")
- return_json.cookie = 'pt_key=' .. pt_key .. ';pt_pin=' .. pt_pin .. ';'
- end
-
- luci.http.prepare_content('application/json')
- luci.http.write_json(return_json)
-end
-
-function get_log()
- local fs = require "nixio.fs"
- local e = {}
- e.running = luci.sys.call("busybox ps -w | grep JD_DailyBonus.js | grep -v grep >/dev/null") == 0
- e.log = fs.readfile("/var/log/jd_dailybonus.log") or ""
- luci.http.prepare_content("application/json")
- luci.http.write_json(e)
-end
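The QR-code login flow above boils down to a pair of wget calls against JD's passport endpoints. A rough shell re-expression of get_s_token(), using the same endpoint, headers and cookie jar as the Lua code; the Referer header is omitted for brevity, and GNU wget (a declared package dependency) is assumed:

```sh
#!/bin/sh
UA='Mozilla/5.0 (iPad; CPU OS 12_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1'
COOKIE=/tmp/jd_cookie
TS=$(($(date +%s) * 1000))
URL="https://plogin.m.jd.com/cgi-bin/mm/new_login_entrance?lang=chs&appid=300&returnurl=https://wq.jd.com/passport/LoginRedirect?state=${TS}&returnurl=https://home.m.jd.com/myJd/newhome.action?sceneval=2&ufc=&/myJd/home.action&source=wq_passport"

rm -f "$COOKIE"
# fetch the login entrance page and pull s_token out of the JSON body
wget --header='Accept: application/json, text/plain, */*' \
     --header='Accept-Language: zh-cn' \
     --header='Host: plogin.m.jd.com' \
     --user-agent="$UA" \
     --save-cookies="$COOKIE" --keep-session-cookies \
     -q -O - "$URL" | grep -oE '"s_token":"[^"]*"' | awk -F'"' '{print $4}'
```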
diff --git a/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/client.lua b/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/client.lua
deleted file mode 100644
index 2b74aae1a..000000000
--- a/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/client.lua
+++ /dev/null
@@ -1,120 +0,0 @@
-local uci = luci.model.uci.cursor()
-local sys = require 'luci.sys'
-
-m = Map('jd-dailybonus')
-m.title = translate('京东签到服务')
-m.description = translate(' GitHub 项目地址 ')
-
--- [[ 基本设置 ]]--
-
-s = m:section(TypedSection, 'global')
-s.anonymous = true
-
-o = s:option(DynamicList, "Cookies", translate("账号 Cookie 列表"))
-o.rmempty = false
-o.description = translate('双击输入框可调出二维码,扫码后自动填入。')
-
-o = s:option(DummyValue, '', '')
-o.rawhtml = true
-o.template = 'jd-dailybonus/cookie_tools'
-
-o = s:option(DynamicList, "jrBody", translate('金融 POST Body'))
-o.rmempty = false
-o.default = ''
-o.description = translate('京东金融签到 POST Body(以reqData=开头),与上方的Cookies列表一一对应,没有可不填(可能导致京东金融签到失败)')
-
-o = s:option(Value, 'stop', translate('延迟签到'))
-o.rmempty = false
-o.default = 0
-o.datatype = 'integer'
-o.description = translate('自定义延迟签到,单位毫秒. 默认分批并发无延迟. (延迟作用于每个签到接口, 如填入延迟则切换顺序签到. ) ')
-
-o = s:option(Value, 'out', translate('接口超时'))
-o.rmempty = false
-o.default = 0
-o.datatype = 'integer'
-o.description = translate('接口超时退出,单位毫秒 用于可能发生的网络不稳定, 0则关闭.')
-
--- server chan
-
-o = s:option(ListValue, 'serverurl', translate('Server酱的推送接口地址'))
-o:value('scu', translate('SCU'))
-o:value('sct', translate('SCT'))
-o.default = 'scu'
-o.rmempty = false
-o.description = translate('选择Server酱的推送接口')
-
-o = s:option(Value, 'serverchan', translate('Server酱 SCKEY'))
-o.rmempty = true
-o.description = translate('微信推送,基于Server酱服务,请自行登录 http://sc.ftqq.com/ 绑定并获取 SCKEY。')
-
--- Dingding
-
-o = s:option(Value, 'dd_token', translate('Dingding Bot Token'))
-o.rmempty = true
-o.description = translate('创建一个群机器人并获取API Token,设置安全关键字为:京东')
-
--- pushplus
-
-o = s:option(Value, 'pp_token', translate('pushplus Token'))
-o.rmempty = true
-o.description = translate('微信推送,基于pushplus服务,请自行登录 https://www.pushplus.plus/ 绑定并获取 Token。')
-
--- telegram
-
-o = s:option(Value, 'tg_token', translate('Telegram Bot Token'))
-o.rmempty = true
-o.description = translate('首先在Telegram上搜索BotFather机器人,创建一个属于自己的通知机器人,并获取Token。')
-
-o = s:option(Value, 'tg_userid', translate('Telegram UserID'))
-o.rmempty = true
-o.description = translate('在Telegram上搜索getuserIDbot机器人,获取UserID。')
-
---Auto Run Script Service
-
-o = s:option(Flag, 'auto_run', translate('自动签到'))
-o.rmempty = false
-
-o = s:option(ListValue, 'auto_run_time_h', translate('每天签到时间(小时)'))
-for t = 0, 23 do
- o:value(t, t)
-end
-o.default = 1
-o.rmempty = true
-o:depends('auto_run', '1')
-o = s:option(ListValue, 'auto_run_time_m', translate('每天签到时间(分钟)'))
-for t = 0, 59 do
- o:value(t, t)
-end
-o.default = 1
-o.rmempty = true
-o:depends('auto_run', '1')
-
--- Auto Update Script Service
-
-o = s:option(Flag, 'auto_update', translate('自动更新'))
-o.rmempty = false
-
-o = s:option(ListValue, 'auto_update_time', translate('每天更新时间'))
-for t = 0, 23 do
- o:value(t, t .. ':01')
-end
-o.default = 1
-o.rmempty = true
-o:depends('auto_update', '1')
-
-o = s:option(Value, 'remote_url', translate('更新源地址'))
-o:value('https://raw.githubusercontent.com/NobyDa/Script/master/JD-DailyBonus/JD_DailyBonus.js', translate('GitHub'))
-o:value('https://raw.sevencdn.com/NobyDa/Script/master/JD-DailyBonus/JD_DailyBonus.js', translate('GitHub CDN 01'))
-o:value('https://cdn.jsdelivr.net/gh/NobyDa/Script/JD-DailyBonus/JD_DailyBonus.js', translate('GitHub CDN 02'))
-o:value('https://ghproxy.com/https://raw.githubusercontent.com/NobyDa/Script/master/JD-DailyBonus/JD_DailyBonus.js', translate('韩国首尔'))
-o.default = 'nil'
-o.rmempty = false
-o.description = translate('当GitHub源无法更新时,可以选择使用国内Gitee源,GitHub CDN可能比原地址更晚更新,但速度快')
-
-o = s:option(DummyValue, '', '')
-o.rawhtml = true
-o.version = sys.exec('uci get jd-dailybonus.@global[0].version')
-o.template = 'jd-dailybonus/update_service'
-
-return m
diff --git a/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/log.lua b/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/log.lua
deleted file mode 100644
index 8cedc362d..000000000
--- a/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/log.lua
+++ /dev/null
@@ -1,9 +0,0 @@
-log = SimpleForm("logview")
-log.submit = false
-log.reset = false
-
-t = log:field(DummyValue, '', '')
-t.rawhtml = true
-t.template = 'jd-dailybonus/log'
-
-return log
diff --git a/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/script.lua b/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/script.lua
deleted file mode 100644
index 50162cc5e..000000000
--- a/package/lean/luci-app-jd-dailybonus/luasrc/model/cbi/jd-dailybonus/script.lua
+++ /dev/null
@@ -1,22 +0,0 @@
-local fs = require "nixio.fs"
-
-s = SimpleForm("scriptview")
-
-view_cfg = s:field(TextValue, "conf")
-view_cfg.rmempty = false
-view_cfg.rows = 43
-
-function sync_value_to_file(value, file)
- value = value:gsub("\r\n?", "\n")
- local old_value = fs.readfile(file)
- if value ~= old_value then fs.writefile(file, value) end
-end
-
-function view_cfg.cfgvalue()
- return fs.readfile("/usr/share/jd-dailybonus/JD_DailyBonus.js") or ""
-end
-function view_cfg.write(self, section, value)
- sync_value_to_file(value, "/usr/share/jd-dailybonus/JD_DailyBonus.js")
-end
-
-return s
diff --git a/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/cookie_tools.htm b/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/cookie_tools.htm
deleted file mode 100644
index 76f29855d..000000000
--- a/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/cookie_tools.htm
+++ /dev/null
@@ -1,167 +0,0 @@
-<%+cbi/valueheader%>
-
-
-
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/log.htm b/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/log.htm
deleted file mode 100644
index 5b142c1d0..000000000
--- a/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/log.htm
+++ /dev/null
@@ -1,17 +0,0 @@
-<%+cbi/valueheader%>
-
-
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/update_service.htm b/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/update_service.htm
deleted file mode 100644
index bff88f260..000000000
--- a/package/lean/luci-app-jd-dailybonus/luasrc/view/jd-dailybonus/update_service.htm
+++ /dev/null
@@ -1,62 +0,0 @@
-<%+cbi/valueheader%>
-
-
-
-
-
-
-
-
-
-
-
-<%+cbi/valuefooter%>
diff --git a/package/lean/luci-app-jd-dailybonus/root/etc/config/jd-dailybonus b/package/lean/luci-app-jd-dailybonus/root/etc/config/jd-dailybonus
deleted file mode 100644
index 15a263163..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/etc/config/jd-dailybonus
+++ /dev/null
@@ -1,14 +0,0 @@
-config global
- option version '2.1.3'
- option out '0'
- option stop '100'
- option serverchan ''
- option remote_url 'https://raw.githubusercontent.com/NobyDa/Script/master/JD-DailyBonus/JD_DailyBonus.js'
- option serverurl 'scu'
- option auto_update '1'
- option auto_update_time '23'
- option auto_run '1'
- option auto_run_time '0'
- option auto_run_time_m '1'
- option auto_run_time_h '1'
- list Cookies '双击扫码添加 Cookie 数据'
diff --git a/package/lean/luci-app-jd-dailybonus/root/etc/init.d/jd-dailybonus b/package/lean/luci-app-jd-dailybonus/root/etc/init.d/jd-dailybonus
deleted file mode 100755
index eeea2440f..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/etc/init.d/jd-dailybonus
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/sh /etc/rc.common
-#
-# Copyright (C) 2020 Jerryk
-#
-# This is free software, licensed under the GNU General Public License v3.
-# See /LICENSE for more information.
-#
-
-START=25
-STOP=10
-
-NAME=jd-dailybonus
-CRON_FILE=/etc/crontabs/root
-
-
-del_cron() {
- sed -i '/jd-dailybonus/d' $CRON_FILE
- /etc/init.d/cron restart
-}
-
-start_service(){
- sh /usr/share/jd-dailybonus/newapp.sh -s
-}
-
-stop_service() {
- del_cron
-}
-
-service_triggers() {
- procd_add_reload_trigger "jd-dailybonus"
-}
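del_cron() above removes any crontab line containing "jd-dailybonus"; the matching entries are installed by /usr/share/jd-dailybonus/newapp.sh -s, which is not part of this diff. A purely hypothetical sketch of what such an entry could look like for a daily 01:01 run (the real format is whatever newapp.sh writes):

```sh
# hypothetical: schedule a daily 01:01 run, tagged so del_cron() can find it
echo '1 1 * * * sh /usr/share/jd-dailybonus/newapp.sh -r #jd-dailybonus' >> /etc/crontabs/root
/etc/init.d/cron restart

# what del_cron() does to remove it again
sed -i '/jd-dailybonus/d' /etc/crontabs/root
/etc/init.d/cron restart
```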
diff --git a/package/lean/luci-app-jd-dailybonus/root/etc/uci-defaults/luci-jd-dailybonus b/package/lean/luci-app-jd-dailybonus/root/etc/uci-defaults/luci-jd-dailybonus
deleted file mode 100755
index faf357268..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/etc/uci-defaults/luci-jd-dailybonus
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-uci -q batch <<-EOF >/dev/null
- delete ucitrack.@jd-dailybonus[-1]
- add ucitrack jd-dailybonus
- set ucitrack.@jd-dailybonus[-1].init=jd-dailybonus
- commit ucitrack
-EOF
-
-rm -f /tmp/luci-indexcache
-exit 0
diff --git a/package/lean/luci-app-jd-dailybonus/root/lib/upgrade/keep.d/jd-dailybonus b/package/lean/luci-app-jd-dailybonus/root/lib/upgrade/keep.d/jd-dailybonus
deleted file mode 100644
index b180f45be..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/lib/upgrade/keep.d/jd-dailybonus
+++ /dev/null
@@ -1,2 +0,0 @@
-/usr/share/jd-dailybonus/CookieSet.json
-/usr/share/jd-dailybonus/JD_DailyBonus.js
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/CHANGELOG.md b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/CHANGELOG.md
deleted file mode 100644
index d3ffcd00d..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/CHANGELOG.md
+++ /dev/null
@@ -1,717 +0,0 @@
-## Change Log
-
-### v2.88.0 (2018/08/10)
-- [#2996](https://github.com/request/request/pull/2996) fix(uuid): import versioned uuid (@kwonoj)
-- [#2994](https://github.com/request/request/pull/2994) Update to oauth-sign 0.9.0 (@dlecocq)
-- [#2993](https://github.com/request/request/pull/2993) Fix header tests (@simov)
-- [#2904](https://github.com/request/request/pull/2904) #515, #2894 Strip port suffix from Host header if the protocol is known. (#2904) (@paambaati)
-- [#2791](https://github.com/request/request/pull/2791) Improve AWS SigV4 support. (#2791) (@vikhyat)
-- [#2977](https://github.com/request/request/pull/2977) Update test certificates (@simov)
-
-### v2.87.0 (2018/05/21)
-- [#2943](https://github.com/request/request/pull/2943) Replace hawk dependency with a local implemenation (#2943) (@hueniverse)
-
-### v2.86.0 (2018/05/15)
-- [#2885](https://github.com/request/request/pull/2885) Remove redundant code (for Node.js 0.9.4 and below) and dependency (@ChALkeR)
-- [#2942](https://github.com/request/request/pull/2942) Make Test GREEN Again! (@simov)
-- [#2923](https://github.com/request/request/pull/2923) Alterations for failing CI tests (@gareth-robinson)
-
-### v2.85.0 (2018/03/12)
-- [#2880](https://github.com/request/request/pull/2880) Revert "Update hawk to 7.0.7 (#2880)" (@simov)
-
-### v2.84.0 (2018/03/12)
-- [#2793](https://github.com/request/request/pull/2793) Fixed calculation of oauth_body_hash, issue #2792 (@dvishniakov)
-- [#2880](https://github.com/request/request/pull/2880) Update hawk to 7.0.7 (#2880) (@kornel-kedzierski)
-
-### v2.83.0 (2017/09/27)
-- [#2776](https://github.com/request/request/pull/2776) Updating tough-cookie due to security fix. (#2776) (@karlnorling)
-
-### v2.82.0 (2017/09/19)
-- [#2703](https://github.com/request/request/pull/2703) Add Node.js v8 to Travis CI (@ryysud)
-- [#2751](https://github.com/request/request/pull/2751) Update of hawk and qs to latest version (#2751) (@Olivier-Moreau)
-- [#2658](https://github.com/request/request/pull/2658) Fixed some text in README.md (#2658) (@Marketionist)
-- [#2635](https://github.com/request/request/pull/2635) chore(package): update aws-sign2 to version 0.7.0 (#2635) (@greenkeeperio-bot)
-- [#2641](https://github.com/request/request/pull/2641) Update README to simplify & update convenience methods (#2641) (@FredKSchott)
-- [#2541](https://github.com/request/request/pull/2541) Add convenience method for HTTP OPTIONS (#2541) (@jamesseanwright)
-- [#2605](https://github.com/request/request/pull/2605) Add promise support section to README (#2605) (@FredKSchott)
-- [#2579](https://github.com/request/request/pull/2579) refactor(lint): replace eslint with standard (#2579) (@ahmadnassri)
-- [#2598](https://github.com/request/request/pull/2598) Update codecov to version 2.0.2 🚀 (@greenkeeperio-bot)
-- [#2590](https://github.com/request/request/pull/2590) Adds test-timing keepAlive test (@nicjansma)
-- [#2589](https://github.com/request/request/pull/2589) fix tabulation on request example README.MD (@odykyi)
-- [#2594](https://github.com/request/request/pull/2594) chore(dependencies): har-validator to 5.x [removes babel dep] (@ahmadnassri)
-
-### v2.81.0 (2017/03/09)
-- [#2584](https://github.com/request/request/pull/2584) Security issue: Upgrade qs to version 6.4.0 (@sergejmueller)
-- [#2578](https://github.com/request/request/pull/2578) safe-buffer doesn't zero-fill by default, its just a polyfill. (#2578) (@mikeal)
-- [#2566](https://github.com/request/request/pull/2566) Timings: Tracks 'lookup', adds 'wait' time, fixes connection re-use (#2566) (@nicjansma)
-- [#2574](https://github.com/request/request/pull/2574) Migrating to safe-buffer for improved security. (@mikeal)
-- [#2573](https://github.com/request/request/pull/2573) fixes #2572 (@ahmadnassri)
-
-### v2.80.0 (2017/03/04)
-- [#2571](https://github.com/request/request/pull/2571) Correctly format the Host header for IPv6 addresses (@JamesMGreene)
-- [#2558](https://github.com/request/request/pull/2558) Update README.md example snippet (@FredKSchott)
-- [#2221](https://github.com/request/request/pull/2221) Adding a simple Response object reference in argument specification (@calamarico)
-- [#2452](https://github.com/request/request/pull/2452) Adds .timings array with DNC, TCP, request and response times (@nicjansma)
-- [#2553](https://github.com/request/request/pull/2553) add ISSUE_TEMPLATE, move PR template (@FredKSchott)
-- [#2539](https://github.com/request/request/pull/2539) Create PULL_REQUEST_TEMPLATE.md (@FredKSchott)
-- [#2524](https://github.com/request/request/pull/2524) Update caseless to version 0.12.0 🚀 (@greenkeeperio-bot)
-- [#2460](https://github.com/request/request/pull/2460) Fix wrong MIME type in example (@OwnageIsMagic)
-- [#2514](https://github.com/request/request/pull/2514) Change tags to keywords in package.json (@humphd)
-- [#2492](https://github.com/request/request/pull/2492) More lenient gzip decompression (@addaleax)
-
-### v2.79.0 (2016/11/18)
-- [#2368](https://github.com/request/request/pull/2368) Fix typeof check in test-pool.js (@forivall)
-- [#2394](https://github.com/request/request/pull/2394) Use `files` in package.json (@SimenB)
-- [#2463](https://github.com/request/request/pull/2463) AWS support for session tokens for temporary credentials (@simov)
-- [#2467](https://github.com/request/request/pull/2467) Migrate to uuid (@simov, @antialias)
-- [#2459](https://github.com/request/request/pull/2459) Update taper to version 0.5.0 🚀 (@greenkeeperio-bot)
-- [#2448](https://github.com/request/request/pull/2448) Make other connect timeout test more reliable too (@mscdex)
-
-### v2.78.0 (2016/11/03)
-- [#2447](https://github.com/request/request/pull/2447) Always set request timeout on keep-alive connections (@mscdex)
-
-### v2.77.0 (2016/11/03)
-- [#2439](https://github.com/request/request/pull/2439) Fix socket 'connect' listener handling (@mscdex)
-- [#2442](https://github.com/request/request/pull/2442) 👻😱 Node.js 0.10 is unmaintained 😱👻 (@greenkeeperio-bot)
-- [#2435](https://github.com/request/request/pull/2435) Add followOriginalHttpMethod to redirect to original HTTP method (@kirrg001)
-- [#2414](https://github.com/request/request/pull/2414) Improve test-timeout reliability (@mscdex)
-
-### v2.76.0 (2016/10/25)
-- [#2424](https://github.com/request/request/pull/2424) Handle buffers directly instead of using "bl" (@zertosh)
-- [#2415](https://github.com/request/request/pull/2415) Re-enable timeout tests on Travis + other fixes (@mscdex)
-- [#2431](https://github.com/request/request/pull/2431) Improve timeouts accuracy and node v6.8.0+ compatibility (@mscdex, @greenkeeperio-bot)
-- [#2428](https://github.com/request/request/pull/2428) Update qs to version 6.3.0 🚀 (@greenkeeperio-bot)
-- [#2420](https://github.com/request/request/pull/2420) change .on to .once, remove possible memory leaks (@duereg)
-- [#2426](https://github.com/request/request/pull/2426) Remove "isFunction" helper in favor of "typeof" check (@zertosh)
-- [#2425](https://github.com/request/request/pull/2425) Simplify "defer" helper creation (@zertosh)
-- [#2402](https://github.com/request/request/pull/2402) form-data@2.1.1 breaks build 🚨 (@greenkeeperio-bot)
-- [#2393](https://github.com/request/request/pull/2393) Update form-data to version 2.1.0 🚀 (@greenkeeperio-bot)
-
-### v2.75.0 (2016/09/17)
-- [#2381](https://github.com/request/request/pull/2381) Drop support for Node 0.10 (@simov)
-- [#2377](https://github.com/request/request/pull/2377) Update form-data to version 2.0.0 🚀 (@greenkeeperio-bot)
-- [#2353](https://github.com/request/request/pull/2353) Add greenkeeper ignored packages (@simov)
-- [#2351](https://github.com/request/request/pull/2351) Update karma-tap to version 3.0.1 🚀 (@greenkeeperio-bot)
-- [#2348](https://github.com/request/request/pull/2348) form-data@1.0.1 breaks build 🚨 (@greenkeeperio-bot)
-- [#2349](https://github.com/request/request/pull/2349) Check error type instead of string (@scotttrinh)
-
-### v2.74.0 (2016/07/22)
-- [#2295](https://github.com/request/request/pull/2295) Update tough-cookie to 2.3.0 (@stash-sfdc)
-- [#2280](https://github.com/request/request/pull/2280) Update karma-tap to version 2.0.1 🚀 (@greenkeeperio-bot)
-
-### v2.73.0 (2016/07/09)
-- [#2240](https://github.com/request/request/pull/2240) Remove connectionErrorHandler to fix #1903 (@zarenner)
-- [#2251](https://github.com/request/request/pull/2251) tape@4.6.0 breaks build 🚨 (@greenkeeperio-bot)
-- [#2225](https://github.com/request/request/pull/2225) Update docs (@ArtskydJ)
-- [#2203](https://github.com/request/request/pull/2203) Update browserify to version 13.0.1 🚀 (@greenkeeperio-bot)
-- [#2275](https://github.com/request/request/pull/2275) Update karma to version 1.1.1 🚀 (@greenkeeperio-bot)
-- [#2204](https://github.com/request/request/pull/2204) Add codecov.yml and disable PR comments (@simov)
-- [#2212](https://github.com/request/request/pull/2212) Fix link to http.IncomingMessage documentation (@nazieb)
-- [#2208](https://github.com/request/request/pull/2208) Update to form-data RC4 and pass null values to it (@simov)
-- [#2207](https://github.com/request/request/pull/2207) Move aws4 require statement to the top (@simov)
-- [#2199](https://github.com/request/request/pull/2199) Update karma-coverage to version 1.0.0 🚀 (@greenkeeperio-bot)
-- [#2206](https://github.com/request/request/pull/2206) Update qs to version 6.2.0 🚀 (@greenkeeperio-bot)
-- [#2205](https://github.com/request/request/pull/2205) Use server-destroy to close hanging sockets in tests (@simov)
-- [#2200](https://github.com/request/request/pull/2200) Update karma-cli to version 1.0.0 🚀 (@greenkeeperio-bot)
-
-### v2.72.0 (2016/04/17)
-- [#2176](https://github.com/request/request/pull/2176) Do not try to pipe Gzip responses with no body (@simov)
-- [#2175](https://github.com/request/request/pull/2175) Add 'delete' alias for the 'del' API method (@simov, @MuhanZou)
-- [#2172](https://github.com/request/request/pull/2172) Add support for deflate content encoding (@czardoz)
-- [#2169](https://github.com/request/request/pull/2169) Add callback option (@simov)
-- [#2165](https://github.com/request/request/pull/2165) Check for self.req existence inside the write method (@simov)
-- [#2167](https://github.com/request/request/pull/2167) Fix TravisCI badge reference master branch (@a0viedo)
-
-### v2.71.0 (2016/04/12)
-- [#2164](https://github.com/request/request/pull/2164) Catch errors from the underlying http module (@simov)
-
-### v2.70.0 (2016/04/05)
-- [#2147](https://github.com/request/request/pull/2147) Update eslint to version 2.5.3 🚀 (@simov, @greenkeeperio-bot)
-- [#2009](https://github.com/request/request/pull/2009) Support JSON stringify replacer argument. (@elyobo)
-- [#2142](https://github.com/request/request/pull/2142) Update eslint to version 2.5.1 🚀 (@greenkeeperio-bot)
-- [#2128](https://github.com/request/request/pull/2128) Update browserify-istanbul to version 2.0.0 🚀 (@greenkeeperio-bot)
-- [#2115](https://github.com/request/request/pull/2115) Update eslint to version 2.3.0 🚀 (@simov, @greenkeeperio-bot)
-- [#2089](https://github.com/request/request/pull/2089) Fix badges (@simov)
-- [#2092](https://github.com/request/request/pull/2092) Update browserify-istanbul to version 1.0.0 🚀 (@greenkeeperio-bot)
-- [#2079](https://github.com/request/request/pull/2079) Accept read stream as body option (@simov)
-- [#2070](https://github.com/request/request/pull/2070) Update bl to version 1.1.2 🚀 (@greenkeeperio-bot)
-- [#2063](https://github.com/request/request/pull/2063) Up bluebird and oauth-sign (@simov)
-- [#2058](https://github.com/request/request/pull/2058) Karma fixes for latest versions (@eiriksm)
-- [#2057](https://github.com/request/request/pull/2057) Update contributing guidelines (@simov)
-- [#2054](https://github.com/request/request/pull/2054) Update qs to version 6.1.0 🚀 (@greenkeeperio-bot)
-
-### v2.69.0 (2016/01/27)
-- [#2041](https://github.com/request/request/pull/2041) restore aws4 as regular dependency (@rmg)
-
-### v2.68.0 (2016/01/27)
-- [#2036](https://github.com/request/request/pull/2036) Add AWS Signature Version 4 (@simov, @mirkods)
-- [#2022](https://github.com/request/request/pull/2022) Convert numeric multipart bodies to string (@simov, @feross)
-- [#2024](https://github.com/request/request/pull/2024) Update har-validator dependency for nsp advisory #76 (@TylerDixon)
-- [#2016](https://github.com/request/request/pull/2016) Update qs to version 6.0.2 🚀 (@greenkeeperio-bot)
-- [#2007](https://github.com/request/request/pull/2007) Use the `extend` module instead of util._extend (@simov)
-- [#2003](https://github.com/request/request/pull/2003) Update browserify to version 13.0.0 🚀 (@greenkeeperio-bot)
-- [#1989](https://github.com/request/request/pull/1989) Update buffer-equal to version 1.0.0 🚀 (@greenkeeperio-bot)
-- [#1956](https://github.com/request/request/pull/1956) Check form-data content-length value before setting up the header (@jongyoonlee)
-- [#1958](https://github.com/request/request/pull/1958) Use IncomingMessage.destroy method (@simov)
-- [#1952](https://github.com/request/request/pull/1952) Adds example for Tor proxy (@prometheansacrifice)
-- [#1943](https://github.com/request/request/pull/1943) Update eslint to version 1.10.3 🚀 (@simov, @greenkeeperio-bot)
-- [#1924](https://github.com/request/request/pull/1924) Update eslint to version 1.10.1 🚀 (@greenkeeperio-bot)
-- [#1915](https://github.com/request/request/pull/1915) Remove content-length and transfer-encoding headers from defaultProxyHeaderWhiteList (@yaxia)
-
-### v2.67.0 (2015/11/19)
-- [#1913](https://github.com/request/request/pull/1913) Update http-signature to version 1.1.0 🚀 (@greenkeeperio-bot)
-
-### v2.66.0 (2015/11/18)
-- [#1906](https://github.com/request/request/pull/1906) Update README URLs based on HTTP redirects (@ReadmeCritic)
-- [#1905](https://github.com/request/request/pull/1905) Convert typed arrays into regular buffers (@simov)
-- [#1902](https://github.com/request/request/pull/1902) node-uuid@1.4.7 breaks build 🚨 (@greenkeeperio-bot)
-- [#1894](https://github.com/request/request/pull/1894) Fix tunneling after redirection from https (Original: #1881) (@simov, @falms)
-- [#1893](https://github.com/request/request/pull/1893) Update eslint to version 1.9.0 🚀 (@greenkeeperio-bot)
-- [#1852](https://github.com/request/request/pull/1852) Update eslint to version 1.7.3 🚀 (@simov, @greenkeeperio-bot, @paulomcnally, @michelsalib, @arbaaz, @nsklkn, @LoicMahieu, @JoshWillik, @jzaefferer, @ryanwholey, @djchie, @thisconnect, @mgenereu, @acroca, @Sebmaster, @KoltesDigital)
-- [#1876](https://github.com/request/request/pull/1876) Implement loose matching for har mime types (@simov)
-- [#1875](https://github.com/request/request/pull/1875) Update bluebird to version 3.0.2 🚀 (@simov, @greenkeeperio-bot)
-- [#1871](https://github.com/request/request/pull/1871) Update browserify to version 12.0.1 🚀 (@greenkeeperio-bot)
-- [#1866](https://github.com/request/request/pull/1866) Add missing quotes on x-token property in README (@miguelmota)
-- [#1874](https://github.com/request/request/pull/1874) Fix typo in README.md (@gswalden)
-- [#1860](https://github.com/request/request/pull/1860) Improve referer header tests and docs (@simov)
-- [#1861](https://github.com/request/request/pull/1861) Remove redundant call to Stream constructor (@watson)
-- [#1857](https://github.com/request/request/pull/1857) Fix Referer header to point to the original host name (@simov)
-- [#1850](https://github.com/request/request/pull/1850) Update karma-coverage to version 0.5.3 🚀 (@greenkeeperio-bot)
-- [#1847](https://github.com/request/request/pull/1847) Use node's latest version when building (@simov)
-- [#1836](https://github.com/request/request/pull/1836) Tunnel: fix wrong property name (@KoltesDigital)
-- [#1820](https://github.com/request/request/pull/1820) Set href as request.js uses it (@mgenereu)
-- [#1840](https://github.com/request/request/pull/1840) Update http-signature to version 1.0.2 🚀 (@greenkeeperio-bot)
-- [#1845](https://github.com/request/request/pull/1845) Update istanbul to version 0.4.0 🚀 (@greenkeeperio-bot)
-
-### v2.65.0 (2015/10/11)
-- [#1833](https://github.com/request/request/pull/1833) Update aws-sign2 to version 0.6.0 🚀 (@greenkeeperio-bot)
-- [#1811](https://github.com/request/request/pull/1811) Enable loose cookie parsing in tough-cookie (@Sebmaster)
-- [#1830](https://github.com/request/request/pull/1830) Bring back tilde ranges for all dependencies (@simov)
-- [#1821](https://github.com/request/request/pull/1821) Implement support for RFC 2617 MD5-sess algorithm. (@BigDSK)
-- [#1828](https://github.com/request/request/pull/1828) Updated qs dependency to 5.2.0 (@acroca)
-- [#1818](https://github.com/request/request/pull/1818) Extract `readResponseBody` method out of `onRequestResponse` (@pvoisin)
-- [#1819](https://github.com/request/request/pull/1819) Run stringify once (@mgenereu)
-- [#1814](https://github.com/request/request/pull/1814) Updated har-validator to version 2.0.2 (@greenkeeperio-bot)
-- [#1807](https://github.com/request/request/pull/1807) Updated tough-cookie to version 2.1.0 (@greenkeeperio-bot)
-- [#1800](https://github.com/request/request/pull/1800) Add caret ranges for devDependencies, except eslint (@simov)
-- [#1799](https://github.com/request/request/pull/1799) Updated karma-browserify to version 4.4.0 (@greenkeeperio-bot)
-- [#1797](https://github.com/request/request/pull/1797) Updated tape to version 4.2.0 (@greenkeeperio-bot)
-- [#1788](https://github.com/request/request/pull/1788) Pinned all dependencies (@greenkeeperio-bot)
-
-### v2.64.0 (2015/09/25)
-- [#1787](https://github.com/request/request/pull/1787) npm ignore examples, release.sh and disabled.appveyor.yml (@thisconnect)
-- [#1775](https://github.com/request/request/pull/1775) Fix typo in README.md (@djchie)
-- [#1776](https://github.com/request/request/pull/1776) Changed word 'conjuction' to read 'conjunction' in README.md (@ryanwholey)
-- [#1785](https://github.com/request/request/pull/1785) Revert: Set default application/json content-type when using json option #1772 (@simov)
-
-### v2.63.0 (2015/09/21)
-- [#1772](https://github.com/request/request/pull/1772) Set default application/json content-type when using json option (@jzaefferer)
-
-### v2.62.0 (2015/09/15)
-- [#1768](https://github.com/request/request/pull/1768) Add node 4.0 to the list of build targets (@simov)
-- [#1767](https://github.com/request/request/pull/1767) Query strings now cooperate with unix sockets (@JoshWillik)
-- [#1750](https://github.com/request/request/pull/1750) Revert doc about installation of tough-cookie added in #884 (@LoicMahieu)
-- [#1746](https://github.com/request/request/pull/1746) Missed comma in Readme (@nsklkn)
-- [#1743](https://github.com/request/request/pull/1743) Fix options not being initialized in defaults method (@simov)
-
-### v2.61.0 (2015/08/19)
-- [#1721](https://github.com/request/request/pull/1721) Minor fix in README.md (@arbaaz)
-- [#1733](https://github.com/request/request/pull/1733) Avoid useless Buffer transformation (@michelsalib)
-- [#1726](https://github.com/request/request/pull/1726) Update README.md (@paulomcnally)
-- [#1715](https://github.com/request/request/pull/1715) Fix forever option in node > 0.10 #1709 (@calibr)
-- [#1716](https://github.com/request/request/pull/1716) Do not create Buffer from Object in setContentLength(iojs v3.0 issue) (@calibr)
-- [#1711](https://github.com/request/request/pull/1711) Add ability to detect connect timeouts (@kevinburke)
-- [#1712](https://github.com/request/request/pull/1712) Set certificate expiration to August 2, 2018 (@kevinburke)
-- [#1700](https://github.com/request/request/pull/1700) debug() when JSON.parse() on a response body fails (@phillipj)
-
-### v2.60.0 (2015/07/21)
-- [#1687](https://github.com/request/request/pull/1687) Fix caseless bug - content-type not being set for multipart/form-data (@simov, @garymathews)
-
-### v2.59.0 (2015/07/20)
-- [#1671](https://github.com/request/request/pull/1671) Add tests and docs for using the agent, agentClass, agentOptions and forever options.
- Forever option defaults to using http(s).Agent in node 0.12+ (@simov)
-- [#1679](https://github.com/request/request/pull/1679) Fix - do not remove OAuth param when using OAuth realm (@simov, @jhalickman)
-- [#1668](https://github.com/request/request/pull/1668) updated dependencies (@deamme)
-- [#1656](https://github.com/request/request/pull/1656) Fix form method (@simov)
-- [#1651](https://github.com/request/request/pull/1651) Preserve HEAD method when using followAllRedirects (@simov)
-- [#1652](https://github.com/request/request/pull/1652) Update `encoding` option documentation in README.md (@daniel347x)
-- [#1650](https://github.com/request/request/pull/1650) Allow content-type overriding when using the `form` option (@simov)
-- [#1646](https://github.com/request/request/pull/1646) Clarify the nature of setting `ca` in `agentOptions` (@jeffcharles)
-
-### v2.58.0 (2015/06/16)
-- [#1638](https://github.com/request/request/pull/1638) Use the `extend` module to deep extend in the defaults method (@simov)
-- [#1631](https://github.com/request/request/pull/1631) Move tunnel logic into separate module (@simov)
-- [#1634](https://github.com/request/request/pull/1634) Fix OAuth query transport_method (@simov)
-- [#1603](https://github.com/request/request/pull/1603) Add codecov (@simov)
-
-### v2.57.0 (2015/05/31)
-- [#1615](https://github.com/request/request/pull/1615) Replace '.client' with '.socket' as the former was deprecated in 2.2.0. (@ChALkeR)
-
-### v2.56.0 (2015/05/28)
-- [#1610](https://github.com/request/request/pull/1610) Bump module dependencies (@simov)
-- [#1600](https://github.com/request/request/pull/1600) Extract the querystring logic into separate module (@simov)
-- [#1607](https://github.com/request/request/pull/1607) Re-generate certificates (@simov)
-- [#1599](https://github.com/request/request/pull/1599) Move getProxyFromURI logic below the check for Invalid URI (#1595) (@simov)
-- [#1598](https://github.com/request/request/pull/1598) Fix the way http verbs are defined in order to please intellisense IDEs (@simov, @flannelJesus)
-- [#1591](https://github.com/request/request/pull/1591) A few minor fixes: (@simov)
-- [#1584](https://github.com/request/request/pull/1584) Refactor test-default tests (according to comments in #1430) (@simov)
-- [#1585](https://github.com/request/request/pull/1585) Fixing documentation regarding TLS options (#1583) (@mainakae)
-- [#1574](https://github.com/request/request/pull/1574) Refresh the oauth_nonce on redirect (#1573) (@simov)
-- [#1570](https://github.com/request/request/pull/1570) Discovered tests that weren't properly running (@seanstrom)
-- [#1569](https://github.com/request/request/pull/1569) Fix pause before response arrives (@kevinoid)
-- [#1558](https://github.com/request/request/pull/1558) Emit error instead of throw (@simov)
-- [#1568](https://github.com/request/request/pull/1568) Fix stall when piping gzipped response (@kevinoid)
-- [#1560](https://github.com/request/request/pull/1560) Update combined-stream (@apechimp)
-- [#1543](https://github.com/request/request/pull/1543) Initial support for oauth_body_hash on json payloads (@simov, @aesopwolf)
-- [#1541](https://github.com/request/request/pull/1541) Fix coveralls (@simov)
-- [#1540](https://github.com/request/request/pull/1540) Fix recursive defaults for convenience methods (@simov)
-- [#1536](https://github.com/request/request/pull/1536) More eslint style rules (@froatsnook)
-- [#1533](https://github.com/request/request/pull/1533) Adding dependency status bar to README.md (@YasharF)
-- [#1539](https://github.com/request/request/pull/1539) ensure the latest version of har-validator is included (@ahmadnassri)
-- [#1516](https://github.com/request/request/pull/1516) forever+pool test (@devTristan)
-
-### v2.55.0 (2015/04/05)
-- [#1520](https://github.com/request/request/pull/1520) Refactor defaults (@simov)
-- [#1525](https://github.com/request/request/pull/1525) Delete request headers with undefined value. (@froatsnook)
-- [#1521](https://github.com/request/request/pull/1521) Add promise tests (@simov)
-- [#1518](https://github.com/request/request/pull/1518) Fix defaults (@simov)
-- [#1515](https://github.com/request/request/pull/1515) Allow static invoking of convenience methods (@simov)
-- [#1505](https://github.com/request/request/pull/1505) Fix multipart boundary extraction regexp (@simov)
-- [#1510](https://github.com/request/request/pull/1510) Fix basic auth form data (@simov)
-
-### v2.54.0 (2015/03/24)
-- [#1501](https://github.com/request/request/pull/1501) HTTP Archive 1.2 support (@ahmadnassri)
-- [#1486](https://github.com/request/request/pull/1486) Add a test for the forever agent (@akshayp)
-- [#1500](https://github.com/request/request/pull/1500) Adding handling for no auth method and null bearer (@philberg)
-- [#1498](https://github.com/request/request/pull/1498) Add table of contents in readme (@simov)
-- [#1477](https://github.com/request/request/pull/1477) Add support for qs options via qsOptions key (@simov)
-- [#1496](https://github.com/request/request/pull/1496) Parameters encoded to base 64 should be decoded as UTF-8, not ASCII. (@albanm)
-- [#1494](https://github.com/request/request/pull/1494) Update eslint (@froatsnook)
-- [#1474](https://github.com/request/request/pull/1474) Require Colon in Basic Auth (@erykwalder)
-- [#1481](https://github.com/request/request/pull/1481) Fix baseUrl and redirections. (@burningtree)
-- [#1469](https://github.com/request/request/pull/1469) Feature/base url (@froatsnook)
-- [#1459](https://github.com/request/request/pull/1459) Add option to time request/response cycle (including rollup of redirects) (@aaron-em)
-- [#1468](https://github.com/request/request/pull/1468) Re-enable io.js/node 0.12 build (@simov, @mikeal, @BBB)
-- [#1442](https://github.com/request/request/pull/1442) Fixed the issue with strictSSL tests on 0.12 & io.js by explicitly setting a cipher that matches the cert. (@BBB, @nickmccurdy, @demohi, @simov, @0x4139)
-- [#1460](https://github.com/request/request/pull/1460) localAddress or proxy config is lost when redirecting (@simov, @0x4139)
-- [#1453](https://github.com/request/request/pull/1453) Test on Node.js 0.12 and io.js with allowed failures (@nickmccurdy, @demohi)
-- [#1426](https://github.com/request/request/pull/1426) Fixing tests to pass on io.js and node 0.12 (only test-https.js still failing) (@mikeal)
-- [#1446](https://github.com/request/request/pull/1446) Missing HTTP referer header with redirects. Fixes #1038 (@simov, @guimon)
-- [#1428](https://github.com/request/request/pull/1428) Deprecate Node v0.8.x (@nylen)
-- [#1436](https://github.com/request/request/pull/1436) Add ability to set a requester without setting default options (@tikotzky)
-- [#1435](https://github.com/request/request/pull/1435) dry up verb methods (@sethpollack)
-- [#1423](https://github.com/request/request/pull/1423) Allow fully qualified multipart content-type header (@simov)
-- [#1430](https://github.com/request/request/pull/1430) Fix recursive requester (@tikotzky)
-- [#1429](https://github.com/request/request/pull/1429) Throw error when making HEAD request with a body (@tikotzky)
-- [#1419](https://github.com/request/request/pull/1419) Add note that the project is broken in 0.12.x (@nylen)
-- [#1413](https://github.com/request/request/pull/1413) Fix basic auth (@simov)
-- [#1397](https://github.com/request/request/pull/1397) Improve pipe-from-file tests (@nylen)
-
-### v2.53.0 (2015/02/02)
-- [#1396](https://github.com/request/request/pull/1396) Do not rfc3986 escape JSON bodies (@nylen, @simov)
-- [#1392](https://github.com/request/request/pull/1392) Improve `timeout` option description (@watson)
-
-### v2.52.0 (2015/02/02)
-- [#1383](https://github.com/request/request/pull/1383) Add missing HTTPS options that were not being passed to tunnel (@brichard19) (@nylen)
-- [#1388](https://github.com/request/request/pull/1388) Upgrade mime-types package version (@roderickhsiao)
-- [#1389](https://github.com/request/request/pull/1389) Revise Setup Tunnel Function (@seanstrom)
-- [#1374](https://github.com/request/request/pull/1374) Allow explicitly disabling tunneling for proxied https destinations (@nylen)
-- [#1376](https://github.com/request/request/pull/1376) Use karma-browserify for tests. Add browser test coverage reporter. (@eiriksm)
-- [#1366](https://github.com/request/request/pull/1366) Refactor OAuth into separate module (@simov)
-- [#1373](https://github.com/request/request/pull/1373) Rewrite tunnel test to be pure Node.js (@nylen)
-- [#1371](https://github.com/request/request/pull/1371) Upgrade test reporter (@nylen)
-- [#1360](https://github.com/request/request/pull/1360) Refactor basic, bearer, digest auth logic into separate class (@simov)
-- [#1354](https://github.com/request/request/pull/1354) Remove circular dependency from debugging code (@nylen)
-- [#1351](https://github.com/request/request/pull/1351) Move digest auth into private prototype method (@simov)
-- [#1352](https://github.com/request/request/pull/1352) Update hawk dependency to ~2.3.0 (@mridgway)
-- [#1353](https://github.com/request/request/pull/1353) Correct travis-ci badge (@dogancelik)
-- [#1349](https://github.com/request/request/pull/1349) Make sure we return on errored browser requests. (@eiriksm)
-- [#1346](https://github.com/request/request/pull/1346) getProxyFromURI Extraction Refactor (@seanstrom)
-- [#1337](https://github.com/request/request/pull/1337) Standardize test ports on 6767 (@nylen)
-- [#1341](https://github.com/request/request/pull/1341) Emit FormData error events as Request error events (@nylen, @rwky)
-- [#1343](https://github.com/request/request/pull/1343) Clean up readme badges, and add Travis and Coveralls badges (@nylen)
-- [#1345](https://github.com/request/request/pull/1345) Update README.md (@Aaron-Hartwig)
-- [#1338](https://github.com/request/request/pull/1338) Always wait for server.close() callback in tests (@nylen)
-- [#1342](https://github.com/request/request/pull/1342) Add mock https server and redo start of browser tests for this purpose. (@eiriksm)
-- [#1339](https://github.com/request/request/pull/1339) Improve auth docs (@nylen)
-- [#1335](https://github.com/request/request/pull/1335) Add support for OAuth plaintext signature method (@simov)
-- [#1332](https://github.com/request/request/pull/1332) Add clean script to remove test-browser.js after the tests run (@seanstrom)
-- [#1327](https://github.com/request/request/pull/1327) Fix errors generating coverage reports. (@nylen)
-- [#1330](https://github.com/request/request/pull/1330) Return empty buffer upon empty response body and encoding is set to null (@seanstrom)
-- [#1326](https://github.com/request/request/pull/1326) Use faster container-based infrastructure on Travis (@nylen)
-- [#1315](https://github.com/request/request/pull/1315) Implement rfc3986 option (@simov, @nylen, @apoco, @DullReferenceException, @mmalecki, @oliamb, @cliffcrosland, @LewisJEllis, @eiriksm, @poislagarde)
-- [#1314](https://github.com/request/request/pull/1314) Detect urlencoded form data header via regex (@simov)
-- [#1317](https://github.com/request/request/pull/1317) Improve OAuth1.0 server side flow example (@simov)
-
-### v2.51.0 (2014/12/10)
-- [#1310](https://github.com/request/request/pull/1310) Revert changes introduced in https://github.com/request/request/pull/1282 (@simov)
-
-### v2.50.0 (2014/12/09)
-- [#1308](https://github.com/request/request/pull/1308) Add browser test to keep track of browserify compatibility. (@eiriksm)
-- [#1299](https://github.com/request/request/pull/1299) Add optional support for jsonReviver (@poislagarde)
-- [#1277](https://github.com/request/request/pull/1277) Add Coveralls configuration (@simov)
-- [#1307](https://github.com/request/request/pull/1307) Upgrade form-data, add back browserify compatibility. Fixes #455. (@eiriksm)
-- [#1305](https://github.com/request/request/pull/1305) Fix typo in README.md (@LewisJEllis)
-- [#1288](https://github.com/request/request/pull/1288) Update README.md to explain custom file use case (@cliffcrosland)
-
-### v2.49.0 (2014/11/28)
-- [#1295](https://github.com/request/request/pull/1295) fix(proxy): no-proxy false positive (@oliamb)
-- [#1292](https://github.com/request/request/pull/1292) Upgrade `caseless` to 0.8.1 (@mmalecki)
-- [#1276](https://github.com/request/request/pull/1276) Set transfer encoding for multipart/related to chunked by default (@simov)
-- [#1275](https://github.com/request/request/pull/1275) Fix multipart content-type headers detection (@simov)
-- [#1269](https://github.com/request/request/pull/1269) adds streams example for review (@tbuchok)
-- [#1238](https://github.com/request/request/pull/1238) Add examples README.md (@simov)
-
-### v2.48.0 (2014/11/12)
-- [#1263](https://github.com/request/request/pull/1263) Fixed a syntax error / typo in README.md (@xna2)
-- [#1253](https://github.com/request/request/pull/1253) Add multipart chunked flag (@simov, @nylen)
-- [#1251](https://github.com/request/request/pull/1251) Clarify that defaults() does not modify global defaults (@nylen)
-- [#1250](https://github.com/request/request/pull/1250) Improve documentation for pool and maxSockets options (@nylen)
-- [#1237](https://github.com/request/request/pull/1237) Documenting error handling when using streams (@vmattos)
-- [#1244](https://github.com/request/request/pull/1244) Finalize changelog command (@nylen)
-- [#1241](https://github.com/request/request/pull/1241) Fix typo (@alexanderGugel)
-- [#1223](https://github.com/request/request/pull/1223) Show latest version number instead of "upcoming" in changelog (@nylen)
-- [#1236](https://github.com/request/request/pull/1236) Document how to use custom CA in README (#1229) (@hypesystem)
-- [#1228](https://github.com/request/request/pull/1228) Support for oauth with RSA-SHA1 signing (@nylen)
-- [#1216](https://github.com/request/request/pull/1216) Made json and multipart options coexist (@nylen, @simov)
-- [#1225](https://github.com/request/request/pull/1225) Allow header white/exclusive lists in any case. (@RReverser)
-
-### v2.47.0 (2014/10/26)
-- [#1222](https://github.com/request/request/pull/1222) Move from mikeal/request to request/request (@nylen)
-- [#1220](https://github.com/request/request/pull/1220) update qs dependency to 2.3.1 (@FredKSchott)
-- [#1212](https://github.com/request/request/pull/1212) Improve tests/test-timeout.js (@nylen)
-- [#1219](https://github.com/request/request/pull/1219) remove old globalAgent workaround for node 0.4 (@request)
-- [#1214](https://github.com/request/request/pull/1214) Remove cruft left over from optional dependencies (@nylen)
-- [#1215](https://github.com/request/request/pull/1215) Add proxyHeaderExclusiveList option for proxy-only headers. (@RReverser)
-- [#1211](https://github.com/request/request/pull/1211) Allow 'Host' header instead of 'host' and remember case across redirects (@nylen)
-- [#1208](https://github.com/request/request/pull/1208) Improve release script (@nylen)
-- [#1213](https://github.com/request/request/pull/1213) Support for custom cookie store (@nylen, @mitsuru)
-- [#1197](https://github.com/request/request/pull/1197) Clean up some code around setting the agent (@FredKSchott)
-- [#1209](https://github.com/request/request/pull/1209) Improve multipart form append test (@simov)
-- [#1207](https://github.com/request/request/pull/1207) Update changelog (@nylen)
-- [#1185](https://github.com/request/request/pull/1185) Stream multipart/related bodies (@simov)
-
-### v2.46.0 (2014/10/23)
-- [#1198](https://github.com/request/request/pull/1198) doc for TLS/SSL protocol options (@shawnzhu)
-- [#1200](https://github.com/request/request/pull/1200) Add a Gitter chat badge to README.md (@gitter-badger)
-- [#1196](https://github.com/request/request/pull/1196) Upgrade taper test reporter to v0.3.0 (@nylen)
-- [#1199](https://github.com/request/request/pull/1199) Fix lint error: undeclared var i (@nylen)
-- [#1191](https://github.com/request/request/pull/1191) Move self.proxy decision logic out of init and into a helper (@FredKSchott)
-- [#1190](https://github.com/request/request/pull/1190) Move _buildRequest() logic back into init (@FredKSchott)
-- [#1186](https://github.com/request/request/pull/1186) Support Smarter Unix URL Scheme (@FredKSchott)
-- [#1178](https://github.com/request/request/pull/1178) update form documentation for new usage (@FredKSchott)
-- [#1180](https://github.com/request/request/pull/1180) Enable no-mixed-requires linting rule (@nylen)
-- [#1184](https://github.com/request/request/pull/1184) Don't forward authorization header across redirects to different hosts (@nylen)
-- [#1183](https://github.com/request/request/pull/1183) Correct README about pre and postamble CRLF using multipart and not mult... (@netpoetica)
-- [#1179](https://github.com/request/request/pull/1179) Lint tests directory (@nylen)
-- [#1169](https://github.com/request/request/pull/1169) add metadata for form-data file field (@dotcypress)
-- [#1173](https://github.com/request/request/pull/1173) remove optional dependencies (@seanstrom)
-- [#1165](https://github.com/request/request/pull/1165) Cleanup event listeners and remove function creation from init (@FredKSchott)
-- [#1174](https://github.com/request/request/pull/1174) update the request.cookie docs to have a valid cookie example (@seanstrom)
-- [#1168](https://github.com/request/request/pull/1168) create a detach helper and use detach helper in replace of nextTick (@seanstrom)
-- [#1171](https://github.com/request/request/pull/1171) in post can send form data and use callback (@MiroRadenovic)
-- [#1159](https://github.com/request/request/pull/1159) accept charset for x-www-form-urlencoded content-type (@seanstrom)
-- [#1157](https://github.com/request/request/pull/1157) Update README.md: body with json=true (@Rob--W)
-- [#1164](https://github.com/request/request/pull/1164) Disable tests/test-timeout.js on Travis (@nylen)
-- [#1153](https://github.com/request/request/pull/1153) Document how to run a single test (@nylen)
-- [#1144](https://github.com/request/request/pull/1144) adds documentation for the "response" event within the streaming section (@tbuchok)
-- [#1162](https://github.com/request/request/pull/1162) Update eslintrc file to no longer allow past errors (@FredKSchott)
-- [#1155](https://github.com/request/request/pull/1155) Support/use self everywhere (@seanstrom)
-- [#1161](https://github.com/request/request/pull/1161) fix no-use-before-define lint warnings (@emkay)
-- [#1156](https://github.com/request/request/pull/1156) adding curly brackets to get rid of lint errors (@emkay)
-- [#1151](https://github.com/request/request/pull/1151) Fix localAddress test on OS X (@nylen)
-- [#1145](https://github.com/request/request/pull/1145) documentation: fix outdated reference to setCookieSync old name in README (@FredKSchott)
-- [#1131](https://github.com/request/request/pull/1131) Update pool documentation (@FredKSchott)
-- [#1143](https://github.com/request/request/pull/1143) Rewrite all tests to use tape (@nylen)
-- [#1137](https://github.com/request/request/pull/1137) Add ability to specify querystring lib in options. (@jgrund)
-- [#1138](https://github.com/request/request/pull/1138) allow hostname and port in place of host on uri (@cappslock)
-- [#1134](https://github.com/request/request/pull/1134) Fix multiple redirects and `self.followRedirect` (@blakeembrey)
-- [#1130](https://github.com/request/request/pull/1130) documentation fix: add note about npm test for contributing (@FredKSchott)
-- [#1120](https://github.com/request/request/pull/1120) Support/refactor request setup tunnel (@seanstrom)
-- [#1129](https://github.com/request/request/pull/1129) linting fix: convert double quote strings to use single quotes (@FredKSchott)
-- [#1124](https://github.com/request/request/pull/1124) linting fix: remove unnecessary semi-colons (@FredKSchott)
-
-### v2.45.0 (2014/10/06)
-- [#1128](https://github.com/request/request/pull/1128) Add test for setCookie regression (@nylen)
-- [#1127](https://github.com/request/request/pull/1127) added tests around using objects as values in a query string (@bcoe)
-- [#1103](https://github.com/request/request/pull/1103) Support/refactor request constructor (@nylen, @seanstrom)
-- [#1119](https://github.com/request/request/pull/1119) add basic linting to request library (@FredKSchott)
-- [#1121](https://github.com/request/request/pull/1121) Revert "Explicitly use sync versions of cookie functions" (@nylen)
-- [#1118](https://github.com/request/request/pull/1118) linting fix: Restructure bad empty if statement (@FredKSchott)
-- [#1117](https://github.com/request/request/pull/1117) Fix a bad check for valid URIs (@FredKSchott)
-- [#1113](https://github.com/request/request/pull/1113) linting fix: space out operators (@FredKSchott)
-- [#1116](https://github.com/request/request/pull/1116) Fix typo in `noProxyHost` definition (@FredKSchott)
-- [#1114](https://github.com/request/request/pull/1114) linting fix: Added a `new` operator that was missing when creating and throwing a new error (@FredKSchott)
-- [#1096](https://github.com/request/request/pull/1096) No_proxy support (@samcday)
-- [#1107](https://github.com/request/request/pull/1107) linting-fix: remove unused variables (@FredKSchott)
-- [#1112](https://github.com/request/request/pull/1112) linting fix: Make return values consistent and more straightforward (@FredKSchott)
-- [#1111](https://github.com/request/request/pull/1111) linting fix: authPieces was getting redeclared (@FredKSchott)
-- [#1105](https://github.com/request/request/pull/1105) Use strict mode in request (@FredKSchott)
-- [#1110](https://github.com/request/request/pull/1110) linting fix: replace lazy '==' with more strict '===' (@FredKSchott)
-- [#1109](https://github.com/request/request/pull/1109) linting fix: remove function call from if-else conditional statement (@FredKSchott)
-- [#1102](https://github.com/request/request/pull/1102) Fix to allow setting a `requester` on recursive calls to `request.defaults` (@tikotzky)
-- [#1095](https://github.com/request/request/pull/1095) Tweaking engines in package.json (@pdehaan)
-- [#1082](https://github.com/request/request/pull/1082) Forward the socket event from the httpModule request (@seanstrom)
-- [#972](https://github.com/request/request/pull/972) Clarify gzip handling in the README (@kevinoid)
-- [#1089](https://github.com/request/request/pull/1089) Mention that encoding defaults to utf8, not Buffer (@stuartpb)
-- [#1088](https://github.com/request/request/pull/1088) Fix cookie example in README.md and make it more clear (@pipi32167)
-- [#1027](https://github.com/request/request/pull/1027) Add support for multipart form data in request options. (@crocket)
-- [#1076](https://github.com/request/request/pull/1076) use Request.abort() to abort the request when the request has timed out (@seanstrom)
-- [#1068](https://github.com/request/request/pull/1068) add optional postamble required by .NET multipart requests (@netpoetica)
-
-### v2.43.0 (2014/09/18)
-- [#1057](https://github.com/request/request/pull/1057) Defaults should not overwrite defined options (@davidwood)
-- [#1046](https://github.com/request/request/pull/1046) Propagate datastream errors, useful in case gzip fails. (@ZJONSSON, @Janpot)
-- [#1063](https://github.com/request/request/pull/1063) copy the input headers object #1060 (@finnp)
-- [#1031](https://github.com/request/request/pull/1031) Explicitly use sync versions of cookie functions (@ZJONSSON)
-- [#1056](https://github.com/request/request/pull/1056) Fix redirects when passing url.parse(x) as URL to convenience method (@nylen)
-
-### v2.42.0 (2014/09/04)
-- [#1053](https://github.com/request/request/pull/1053) Fix #1051 Parse auth properly when using non-tunneling proxy (@isaacs)
-
-### v2.41.0 (2014/09/04)
-- [#1050](https://github.com/request/request/pull/1050) Pass whitelisted headers to tunneling proxy. Organize all tunneling logic. (@isaacs, @Feldhacker)
-- [#1035](https://github.com/request/request/pull/1035) souped up nodei.co badge (@rvagg)
-- [#1048](https://github.com/request/request/pull/1048) AWS is now possible over a proxy (@steven-aerts)
-- [#1039](https://github.com/request/request/pull/1039) extract out helper functions to a helper file (@seanstrom)
-- [#1021](https://github.com/request/request/pull/1021) Support/refactor indexjs (@seanstrom)
-- [#1033](https://github.com/request/request/pull/1033) Improve and document debug options (@nylen)
-- [#1034](https://github.com/request/request/pull/1034) Fix readme headings (@nylen)
-- [#1030](https://github.com/request/request/pull/1030) Allow recursive request.defaults (@tikotzky)
-- [#1029](https://github.com/request/request/pull/1029) Fix a couple of typos (@nylen)
-- [#675](https://github.com/request/request/pull/675) Checking for SSL fault on connection before reading SSL properties (@VRMink)
-- [#989](https://github.com/request/request/pull/989) Added allowRedirect function. Should return true if redirect is allowed or false otherwise (@doronin)
-- [#1025](https://github.com/request/request/pull/1025) [fixes #1023] Set self._ended to true once response has ended (@mridgway)
-- [#1020](https://github.com/request/request/pull/1020) Add back removed debug metadata (@FredKSchott)
-- [#1008](https://github.com/request/request/pull/1008) Moving to a module instead of custom buffer concatenation. (@mikeal)
-- [#770](https://github.com/request/request/pull/770) Added dependency badge for README file; (@timgluz, @mafintosh, @lalitkapoor, @stash, @bobyrizov)
-- [#1016](https://github.com/request/request/pull/1016) toJSON no longer results in an infinite loop, returns simple objects (@FredKSchott)
-- [#1018](https://github.com/request/request/pull/1018) Remove pre-0.4.4 HTTPS fix (@mmalecki)
-- [#1006](https://github.com/request/request/pull/1006) Migrate to caseless, fixes #1001 (@mikeal)
-- [#995](https://github.com/request/request/pull/995) Fix parsing array of objects (@sjonnet19)
-- [#999](https://github.com/request/request/pull/999) Fix fallback for browserify for optional modules. (@eiriksm)
-- [#996](https://github.com/request/request/pull/996) Wrong oauth signature when multiple same param keys exist [updated] (@bengl)
-
-### v2.40.0 (2014/08/06)
-- [#992](https://github.com/request/request/pull/992) Fix security vulnerability. Update qs (@poeticninja)
-- [#988](https://github.com/request/request/pull/988) “--” -> “—” (@upisfree)
-- [#987](https://github.com/request/request/pull/987) Show optional modules as being loaded by the module that requested them (@iarna)
-
-### v2.39.0 (2014/07/24)
-- [#976](https://github.com/request/request/pull/976) Update README.md (@pvoznenko)
-
-### v2.38.0 (2014/07/22)
-- [#952](https://github.com/request/request/pull/952) Adding support to client certificate with proxy use case (@ofirshaked)
-- [#884](https://github.com/request/request/pull/884) Documented tough-cookie installation. (@wbyoung)
-- [#935](https://github.com/request/request/pull/935) Correct repository url (@fritx)
-- [#963](https://github.com/request/request/pull/963) Update changelog (@nylen)
-- [#960](https://github.com/request/request/pull/960) Support gzip with encoding on node pre-v0.9.4 (@kevinoid)
-- [#953](https://github.com/request/request/pull/953) Add async Content-Length computation when using form-data (@LoicMahieu)
-- [#844](https://github.com/request/request/pull/844) Add support for HTTP[S]_PROXY environment variables. Fixes #595. (@jvmccarthy)
-- [#946](https://github.com/request/request/pull/946) defaults: merge headers (@aj0strow)
-
-### v2.37.0 (2014/07/07)
-- [#957](https://github.com/request/request/pull/957) Silence EventEmitter memory leak warning #311 (@watson)
-- [#955](https://github.com/request/request/pull/955) check for content-length header before setting it in nextTick (@camilleanne)
-- [#951](https://github.com/request/request/pull/951) Add support for gzip content decoding (@kevinoid)
-- [#949](https://github.com/request/request/pull/949) Manually enter querystring in form option (@charlespwd)
-- [#944](https://github.com/request/request/pull/944) Make request work with browserify (@eiriksm)
-- [#943](https://github.com/request/request/pull/943) New mime module (@eiriksm)
-- [#927](https://github.com/request/request/pull/927) Bump version of hawk dep. (@samccone)
-- [#907](https://github.com/request/request/pull/907) append secureOptions to poolKey (@medovob)
-
-### v2.35.0 (2014/05/17)
-- [#901](https://github.com/request/request/pull/901) Fixes #555 (@pigulla)
-- [#897](https://github.com/request/request/pull/897) merge with default options (@vohof)
-- [#891](https://github.com/request/request/pull/891) fixes 857 - options object is mutated by calling request (@lalitkapoor)
-- [#869](https://github.com/request/request/pull/869) Pipefilter test (@tgohn)
-- [#866](https://github.com/request/request/pull/866) Fix typo (@dandv)
-- [#861](https://github.com/request/request/pull/861) Add support for RFC 6750 Bearer Tokens (@phedny)
-- [#809](https://github.com/request/request/pull/809) upgrade tunnel-proxy to 0.4.0 (@ksato9700)
-- [#850](https://github.com/request/request/pull/850) Fix word consistency in readme (@0xNobody)
-- [#810](https://github.com/request/request/pull/810) add some exposition to mpu example in README.md (@mikermcneil)
-- [#840](https://github.com/request/request/pull/840) improve error reporting for invalid protocols (@FND)
-- [#821](https://github.com/request/request/pull/821) added secureOptions back (@nw)
-- [#815](https://github.com/request/request/pull/815) Create changelog based on pull requests (@lalitkapoor)
-
-### v2.34.0 (2014/02/18)
-- [#516](https://github.com/request/request/pull/516) UNIX Socket URL Support (@lyuzashi)
-- [#801](https://github.com/request/request/pull/801) 794 ignore cookie parsing and domain errors (@lalitkapoor)
-- [#802](https://github.com/request/request/pull/802) Added the Apache license to the package.json. (@keskival)
-- [#793](https://github.com/request/request/pull/793) Adds content-length calculation when submitting forms using form-data li... (@Juul)
-- [#785](https://github.com/request/request/pull/785) Provide ability to override content-type when `json` option used (@vvo)
-- [#781](https://github.com/request/request/pull/781) simpler isReadStream function (@joaojeronimo)
-
-### v2.32.0 (2014/01/16)
-- [#767](https://github.com/request/request/pull/767) Use tough-cookie CookieJar sync API (@stash)
-- [#764](https://github.com/request/request/pull/764) Case-insensitive authentication scheme (@bobyrizov)
-- [#763](https://github.com/request/request/pull/763) Upgrade tough-cookie to 0.10.0 (@stash)
-- [#744](https://github.com/request/request/pull/744) Use Cookie.parse (@lalitkapoor)
-- [#757](https://github.com/request/request/pull/757) require aws-sign2 (@mafintosh)
-
-### v2.31.0 (2014/01/08)
-- [#645](https://github.com/request/request/pull/645) update twitter api url to v1.1 (@mick)
-- [#746](https://github.com/request/request/pull/746) README: Markdown code highlight (@weakish)
-- [#745](https://github.com/request/request/pull/745) updating setCookie example to make it clear that the callback is required (@emkay)
-- [#742](https://github.com/request/request/pull/742) Add note about JSON output body type (@iansltx)
-- [#741](https://github.com/request/request/pull/741) README example is using old cookie jar api (@emkay)
-- [#736](https://github.com/request/request/pull/736) Fix callback arguments documentation (@mmalecki)
-- [#732](https://github.com/request/request/pull/732) JSHINT: Creating global 'for' variable. Should be 'for (var ...'. (@Fritz-Lium)
-- [#730](https://github.com/request/request/pull/730) better HTTP DIGEST support (@dai-shi)
-- [#728](https://github.com/request/request/pull/728) Fix TypeError when calling request.cookie (@scarletmeow)
-- [#727](https://github.com/request/request/pull/727) fix requester bug (@jchris)
-- [#724](https://github.com/request/request/pull/724) README.md: add custom HTTP Headers example. (@tcort)
-- [#719](https://github.com/request/request/pull/719) Made a comment gender neutral. (@unsetbit)
-- [#715](https://github.com/request/request/pull/715) Request.multipart no longer crashes when header 'Content-type' present (@pastaclub)
-- [#710](https://github.com/request/request/pull/710) Fixing listing in callback part of docs. (@lukasz-zak)
-- [#696](https://github.com/request/request/pull/696) Edited README.md for formatting and clarity of phrasing (@Zearin)
-- [#694](https://github.com/request/request/pull/694) Typo in README (@VRMink)
-- [#690](https://github.com/request/request/pull/690) Handle blank password in basic auth. (@diversario)
-- [#682](https://github.com/request/request/pull/682) Optional dependencies (@Turbo87)
-- [#683](https://github.com/request/request/pull/683) Travis CI support (@Turbo87)
-- [#674](https://github.com/request/request/pull/674) Change cookie module to tough-cookie; please check it. (@sxyizhiren)
-- [#666](https://github.com/request/request/pull/666) make `ciphers` and `secureProtocol` to work in https request (@richarddong)
-- [#656](https://github.com/request/request/pull/656) Test case for #304. (@diversario)
-- [#662](https://github.com/request/request/pull/662) option.tunnel to explicitly disable tunneling (@seanmonstar)
-- [#659](https://github.com/request/request/pull/659) fix failure when running with NODE_DEBUG=request, and a test for that (@jrgm)
-- [#630](https://github.com/request/request/pull/630) Send random cnonce for HTTP Digest requests (@wprl)
-- [#619](https://github.com/request/request/pull/619) decouple things a bit (@joaojeronimo)
-- [#613](https://github.com/request/request/pull/613) Fixes #583, moved initialization of self.uri.pathname (@lexander)
-- [#605](https://github.com/request/request/pull/605) Only include ":" + pass in Basic Auth if it's defined (fixes #602) (@bendrucker)
-- [#596](https://github.com/request/request/pull/596) Global agent is being used when pool is specified (@Cauldrath)
-- [#594](https://github.com/request/request/pull/594) Emit complete event when there is no callback (@RomainLK)
-- [#601](https://github.com/request/request/pull/601) Fixed a small typo (@michalstanko)
-- [#589](https://github.com/request/request/pull/589) Prevent setting headers after they are sent (@geek)
-- [#587](https://github.com/request/request/pull/587) Global cookie jar disabled by default (@threepointone)
-- [#544](https://github.com/request/request/pull/544) Update http-signature version. (@davidlehn)
-- [#581](https://github.com/request/request/pull/581) Fix spelling of "ignoring." (@bigeasy)
-- [#568](https://github.com/request/request/pull/568) use agentOptions to create agent when specified in request (@SamPlacette)
-- [#564](https://github.com/request/request/pull/564) Fix redirections (@criloz)
-- [#541](https://github.com/request/request/pull/541) The exported request function doesn't have an auth method (@tschaub)
-- [#542](https://github.com/request/request/pull/542) Expose Request class (@regality)
-- [#536](https://github.com/request/request/pull/536) Allow explicitly empty user field for basic authentication. (@mikeando)
-- [#532](https://github.com/request/request/pull/532) fix typo (@fredericosilva)
-- [#497](https://github.com/request/request/pull/497) Added redirect event (@Cauldrath)
-- [#503](https://github.com/request/request/pull/503) Fix basic auth for passwords that contain colons (@tonistiigi)
-- [#521](https://github.com/request/request/pull/521) Improving test-localAddress.js (@noway)
-- [#529](https://github.com/request/request/pull/529) dependencies versions bump (@jodaka)
-- [#523](https://github.com/request/request/pull/523) Updating dependencies (@noway)
-- [#520](https://github.com/request/request/pull/520) Fixing test-tunnel.js (@noway)
-- [#519](https://github.com/request/request/pull/519) Update internal path state on post-creation QS changes (@jblebrun)
-- [#510](https://github.com/request/request/pull/510) Add HTTP Signature support. (@davidlehn)
-- [#502](https://github.com/request/request/pull/502) Fix POST (and probably other) requests that are retried after 401 Unauthorized (@nylen)
-- [#508](https://github.com/request/request/pull/508) Honor the .strictSSL option when using proxies (tunnel-agent) (@jhs)
-- [#512](https://github.com/request/request/pull/512) Make password optional to support the format: http://username@hostname/ (@pajato1)
-- [#513](https://github.com/request/request/pull/513) add 'localAddress' support (@yyfrankyy)
-- [#498](https://github.com/request/request/pull/498) Moving response emit above setHeaders on destination streams (@kenperkins)
-- [#490](https://github.com/request/request/pull/490) Empty response body (3-rd argument) must be passed to callback as an empty string (@Olegas)
-- [#479](https://github.com/request/request/pull/479) Changing so if Accept header is explicitly set, sending json does not ov... (@RoryH)
-- [#475](https://github.com/request/request/pull/475) Use `unescape` from `querystring` (@shimaore)
-- [#473](https://github.com/request/request/pull/473) V0.10 compat (@isaacs)
-- [#471](https://github.com/request/request/pull/471) Using querystring library from visionmedia (@kbackowski)
-- [#461](https://github.com/request/request/pull/461) Strip the UTF8 BOM from a UTF encoded response (@kppullin)
-- [#460](https://github.com/request/request/pull/460) hawk 0.10.0 (@hueniverse)
-- [#462](https://github.com/request/request/pull/462) if query params are empty, then request path shouldn't end with a '?' (merges cleanly now) (@jaipandya)
-- [#456](https://github.com/request/request/pull/456) hawk 0.9.0 (@hueniverse)
-- [#429](https://github.com/request/request/pull/429) Copy options before adding callback. (@nrn, @nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki)
-- [#454](https://github.com/request/request/pull/454) Destroy the response if present when destroying the request (clean merge) (@mafintosh)
-- [#310](https://github.com/request/request/pull/310) Twitter Oauth Stuff Out of Date; Now Updated (@joemccann, @isaacs, @mscdex)
-- [#413](https://github.com/request/request/pull/413) rename googledoodle.png to .jpg (@nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki)
-- [#448](https://github.com/request/request/pull/448) Convenience method for PATCH (@mloar)
-- [#444](https://github.com/request/request/pull/444) protect against double callbacks on error path (@spollack)
-- [#433](https://github.com/request/request/pull/433) Added support for HTTPS cert & key (@mmalecki)
-- [#430](https://github.com/request/request/pull/430) Respect specified {Host,host} headers, not just {host} (@andrewschaaf)
-- [#415](https://github.com/request/request/pull/415) Fixed a typo. (@jerem)
-- [#338](https://github.com/request/request/pull/338) Add more auth options, including digest support (@nylen)
-- [#403](https://github.com/request/request/pull/403) Optimize environment lookup to happen once only (@mmalecki)
-- [#398](https://github.com/request/request/pull/398) Add more reporting to tests (@mmalecki)
-- [#388](https://github.com/request/request/pull/388) Ensure "safe" toJSON doesn't break EventEmitters (@othiym23)
-- [#381](https://github.com/request/request/pull/381) Resolving "Invalid signature. Expected signature base string: " (@landeiro)
-- [#380](https://github.com/request/request/pull/380) Fixes missing host header on retried request when using forever agent (@mac-)
-- [#376](https://github.com/request/request/pull/376) Headers lost on redirect (@kapetan)
-- [#375](https://github.com/request/request/pull/375) Fix for missing oauth_timestamp parameter (@jplock)
-- [#374](https://github.com/request/request/pull/374) Correct Host header for proxy tunnel CONNECT (@youurayy)
-- [#370](https://github.com/request/request/pull/370) Twitter reverse auth uses x_auth_mode not x_auth_type (@drudge)
-- [#369](https://github.com/request/request/pull/369) Don't remove x_auth_mode for Twitter reverse auth (@drudge)
-- [#344](https://github.com/request/request/pull/344) Make AWS auth signing find headers correctly (@nlf)
-- [#363](https://github.com/request/request/pull/363) rfc3986 on base_uri, now passes tests (@jeffmarshall)
-- [#362](https://github.com/request/request/pull/362) Running `rfc3986` on `base_uri` in `oauth.hmacsign` instead of just `encodeURIComponent` (@jeffmarshall)
-- [#361](https://github.com/request/request/pull/361) Don't create a Content-Length header if we already have it set (@danjenkins)
-- [#360](https://github.com/request/request/pull/360) Delete self._form along with everything else on redirect (@jgautier)
-- [#355](https://github.com/request/request/pull/355) stop sending erroneous headers on redirected requests (@azylman)
-- [#332](https://github.com/request/request/pull/332) Fix #296 - Only set Content-Type if body exists (@Marsup)
-- [#343](https://github.com/request/request/pull/343) Allow AWS to work in more situations, added a note in the README on its usage (@nlf)
-- [#320](https://github.com/request/request/pull/320) request.defaults() doesn't need to wrap jar() (@StuartHarris)
-- [#322](https://github.com/request/request/pull/322) Fix + test for piped into request bumped into redirect. #321 (@alexindigo)
-- [#326](https://github.com/request/request/pull/326) Do not try to remove listener from an undefined connection (@CartoDB)
-- [#318](https://github.com/request/request/pull/318) Pass servername to tunneling secure socket creation (@isaacs)
-- [#317](https://github.com/request/request/pull/317) Workaround for #313 (@isaacs)
-- [#293](https://github.com/request/request/pull/293) Allow parser errors to bubble up to request (@mscdex)
-- [#290](https://github.com/request/request/pull/290) A test for #289 (@isaacs)
-- [#280](https://github.com/request/request/pull/280) Like in node.js print options if NODE_DEBUG contains the word request (@Filirom1)
-- [#207](https://github.com/request/request/pull/207) Fix #206 Change HTTP/HTTPS agent when redirecting between protocols (@isaacs)
-- [#214](https://github.com/request/request/pull/214) documenting additional behavior of json option (@jphaas, @vpulim)
-- [#272](https://github.com/request/request/pull/272) Boundary begins with CRLF? (@elspoono, @timshadel, @naholyr, @nanodocumet, @TehShrike)
-- [#284](https://github.com/request/request/pull/284) Remove stray `console.log()` call in multipart generator. (@bcherry)
-- [#241](https://github.com/request/request/pull/241) Composability updates suggested by issue #239 (@polotek)
-- [#282](https://github.com/request/request/pull/282) OAuth Authorization header contains non-"oauth_" parameters (@jplock)
-- [#279](https://github.com/request/request/pull/279) fix tests with boundary by injecting boundry from header (@benatkin)
-- [#273](https://github.com/request/request/pull/273) Pipe back pressure issue (@mafintosh)
-- [#268](https://github.com/request/request/pull/268) I'm not OCD seriously (@TehShrike)
-- [#263](https://github.com/request/request/pull/263) Bug in OAuth key generation for sha1 (@nanodocumet)
-- [#265](https://github.com/request/request/pull/265) uncaughtException when redirected to invalid URI (@naholyr)
-- [#262](https://github.com/request/request/pull/262) JSON test should check for equality (@timshadel)
-- [#261](https://github.com/request/request/pull/261) Setting 'pool' to 'false' does NOT disable Agent pooling (@timshadel)
-- [#249](https://github.com/request/request/pull/249) Fix for the fix of your (closed) issue #89 where self.headers[content-length] is set to 0 for all methods (@sethbridges, @polotek, @zephrax, @jeromegn)
-- [#255](https://github.com/request/request/pull/255) multipart allow body === '' ( the empty string ) (@Filirom1)
-- [#260](https://github.com/request/request/pull/260) fixed just another leak of 'i' (@sreuter)
-- [#246](https://github.com/request/request/pull/246) Fixing the set-cookie header (@jeromegn)
-- [#243](https://github.com/request/request/pull/243) Dynamic boundary (@zephrax)
-- [#240](https://github.com/request/request/pull/240) don't error when null is passed for options (@polotek)
-- [#211](https://github.com/request/request/pull/211) Replace all occurrences of special chars in RFC3986 (@chriso, @vpulim)
-- [#224](https://github.com/request/request/pull/224) Multipart content-type change (@janjongboom)
-- [#217](https://github.com/request/request/pull/217) need to use Authorization (titlecase) header with Tumblr OAuth (@visnup)
-- [#203](https://github.com/request/request/pull/203) Fix cookie and redirect bugs and add auth support for HTTPS tunnel (@vpulim)
-- [#199](https://github.com/request/request/pull/199) Tunnel (@isaacs)
-- [#198](https://github.com/request/request/pull/198) Bugfix on forever usage of util.inherits (@isaacs)
-- [#197](https://github.com/request/request/pull/197) Make ForeverAgent work with HTTPS (@isaacs)
-- [#193](https://github.com/request/request/pull/193) Fixes GH-119 (@goatslacker)
-- [#188](https://github.com/request/request/pull/188) Add abort support to the returned request (@itay)
-- [#176](https://github.com/request/request/pull/176) Querystring option (@csainty)
-- [#182](https://github.com/request/request/pull/182) Fix request.defaults to support (uri, options, callback) api (@twilson63)
-- [#180](https://github.com/request/request/pull/180) Modified the post, put, head and del shortcuts to support uri optional param (@twilson63)
-- [#179](https://github.com/request/request/pull/179) fix to add opts in .pipe(stream, opts) (@substack)
-- [#177](https://github.com/request/request/pull/177) Issue #173 Support uri as first and optional config as second argument (@twilson63)
-- [#170](https://github.com/request/request/pull/170) can't create a cookie in a wrapped request (defaults) (@fabianonunes)
-- [#168](https://github.com/request/request/pull/168) Picking off an EasyFix by adding some missing mimetypes. (@serby)
-- [#161](https://github.com/request/request/pull/161) Fix cookie jar/headers.cookie collision (#125) (@papandreou)
-- [#162](https://github.com/request/request/pull/162) Fix issue #159 (@dpetukhov)
-- [#90](https://github.com/request/request/pull/90) add option followAllRedirects to follow post/put redirects (@jroes)
-- [#148](https://github.com/request/request/pull/148) Retry Agent (@thejh)
-- [#146](https://github.com/request/request/pull/146) Multipart should respect content-type if previously set (@apeace)
-- [#144](https://github.com/request/request/pull/144) added "form" option to readme (@petejkim)
-- [#133](https://github.com/request/request/pull/133) Fixed cookies parsing (@afanasy)
-- [#135](https://github.com/request/request/pull/135) host vs hostname (@iangreenleaf)
-- [#132](https://github.com/request/request/pull/132) return the body as a Buffer when encoding is set to null (@jahewson)
-- [#112](https://github.com/request/request/pull/112) Support using a custom http-like module (@jhs)
-- [#104](https://github.com/request/request/pull/104) Cookie handling contains bugs (@janjongboom)
-- [#121](https://github.com/request/request/pull/121) Another patch for cookie handling regression (@jhurliman)
-- [#117](https://github.com/request/request/pull/117) Remove the global `i` (@3rd-Eden)
-- [#110](https://github.com/request/request/pull/110) Update to Iris Couch URL (@jhs)
-- [#86](https://github.com/request/request/pull/86) Can't post binary to multipart requests (@kkaefer)
-- [#105](https://github.com/request/request/pull/105) added test for proxy option. (@dominictarr)
-- [#102](https://github.com/request/request/pull/102) Implemented cookies - closes issue 82: https://github.com/mikeal/request/issues/82 (@alessioalex)
-- [#97](https://github.com/request/request/pull/97) Typo in previous pull causes TypeError in non-0.5.11 versions (@isaacs)
-- [#96](https://github.com/request/request/pull/96) Authless parsed url host support (@isaacs)
-- [#81](https://github.com/request/request/pull/81) Enhance redirect handling (@danmactough)
-- [#78](https://github.com/request/request/pull/78) Don't try to do strictSSL for non-ssl connections (@isaacs)
-- [#76](https://github.com/request/request/pull/76) Bug when a request fails and a timeout is set (@Marsup)
-- [#70](https://github.com/request/request/pull/70) add test script to package.json (@isaacs, @aheckmann)
-- [#73](https://github.com/request/request/pull/73) Fix #71 Respect the strictSSL flag (@isaacs)
-- [#69](https://github.com/request/request/pull/69) Flatten chunked requests properly (@isaacs)
-- [#67](https://github.com/request/request/pull/67) fixed global variable leaks (@aheckmann)
-- [#66](https://github.com/request/request/pull/66) Do not overwrite established content-type headers for read stream deliver (@voodootikigod)
-- [#53](https://github.com/request/request/pull/53) Parse json: Issue #51 (@benatkin)
-- [#45](https://github.com/request/request/pull/45) Added timeout option (@mbrevoort)
-- [#35](https://github.com/request/request/pull/35) The "end" event isn't emitted for some responses (@voxpelli)
-- [#31](https://github.com/request/request/pull/31) Error on piping a request to a destination (@tobowers)
\ No newline at end of file
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/LICENSE b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/LICENSE
deleted file mode 100644
index a4a9aee0c..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/LICENSE
+++ /dev/null
@@ -1,55 +0,0 @@
-Apache License
-
-Version 2.0, January 2004
-
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
-
-"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
-
-"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
-
-"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
-
-"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
-
-"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
-
-"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
-
-"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
-
-"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
-
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
-
-You must give any other recipients of the Work or Derivative Works a copy of this License; and
-
-You must cause any modified files to carry prominent notices stating that You changed the files; and
-
-You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
-
-If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
\ No newline at end of file
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/README.md b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/README.md
deleted file mode 100644
index 9da0eb7d8..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/README.md
+++ /dev/null
@@ -1,1133 +0,0 @@
-# Deprecated!
-
-As of Feb 11th 2020, request is fully deprecated. No new changes are expected to land. In fact, none have landed for some time.
-
-For more information about why request is deprecated and possible alternatives refer to
-[this issue](https://github.com/request/request/issues/3142).
-
-# Request - Simplified HTTP client
-
-Badges: [npm](https://nodei.co/npm/request/), [Build Status](https://travis-ci.org/request/request), [Codecov](https://codecov.io/github/request/request?branch=master), [Coveralls](https://coveralls.io/r/request/request), [Dependencies](https://david-dm.org/request/request), [Known Vulnerabilities](https://snyk.io/test/npm/request), [Gitter](https://gitter.im/request/request?utm_source=badge)
-
-
-## Super simple to use
-
-Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.
-
-```js
-const request = require('request');
-request('http://www.google.com', function (error, response, body) {
- console.error('error:', error); // Print the error if one occurred
- console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received
- console.log('body:', body); // Print the HTML for the Google homepage.
-});
-```
-
-
-## Table of contents
-
-- [Streaming](#streaming)
-- [Promises & Async/Await](#promises--asyncawait)
-- [Forms](#forms)
-- [HTTP Authentication](#http-authentication)
-- [Custom HTTP Headers](#custom-http-headers)
-- [OAuth Signing](#oauth-signing)
-- [Proxies](#proxies)
-- [Unix Domain Sockets](#unix-domain-sockets)
-- [TLS/SSL Protocol](#tlsssl-protocol)
-- [Support for HAR 1.2](#support-for-har-12)
-- [**All Available Options**](#requestoptions-callback)
-
-Request also offers [convenience methods](#convenience-methods) like
-`request.defaults` and `request.post`, and there are
-lots of [usage examples](#examples) and several
-[debugging techniques](#debugging).
-
-
----
-
-
-## Streaming
-
-You can stream any response to a file stream.
-
-```js
-request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
-```
-
-You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types (in this case `application/json`) and use the proper `content-type` in the PUT request (if the headers don’t already provide one).
-
-```js
-fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'))
-```
-
-Request can also `pipe` to itself. When doing so, `content-type` and `content-length` are preserved in the PUT headers.
-
-```js
-request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))
-```
-
-Request emits a "response" event when a response is received. The `response` argument will be an instance of [http.IncomingMessage](https://nodejs.org/api/http.html#http_class_http_incomingmessage).
-
-```js
-request
- .get('http://google.com/img.png')
- .on('response', function(response) {
- console.log(response.statusCode) // 200
- console.log(response.headers['content-type']) // 'image/png'
- })
- .pipe(request.put('http://mysite.com/img.png'))
-```
-
-To easily handle errors when streaming requests, listen to the `error` event before piping:
-
-```js
-request
- .get('http://mysite.com/doodle.png')
- .on('error', function(err) {
- console.error(err)
- })
- .pipe(fs.createWriteStream('doodle.png'))
-```
-
-Now let’s get fancy.
-
-```js
-http.createServer(function (req, resp) {
- if (req.url === '/doodle.png') {
- if (req.method === 'PUT') {
- req.pipe(request.put('http://mysite.com/doodle.png'))
- } else if (req.method === 'GET' || req.method === 'HEAD') {
- request.get('http://mysite.com/doodle.png').pipe(resp)
- }
- }
-})
-```
-
-You can also `pipe()` from `http.ServerRequest` instances, as well as to `http.ServerResponse` instances. The HTTP method, headers, and entity-body data will be sent, which means that, if you don't really care about security, you can do:
-
-```js
-http.createServer(function (req, resp) {
- if (req.url === '/doodle.png') {
- const x = request('http://mysite.com/doodle.png')
- req.pipe(x)
- x.pipe(resp)
- }
-})
-```
-
-And since `pipe()` returns the destination stream in Node 0.5.x and later, you can do one-line proxying. :)
-
-```js
-req.pipe(request('http://mysite.com/doodle.png')).pipe(resp)
-```
-
-Also, none of this new functionality conflicts with request's previous features; it just expands them.
-
-```js
-const r = request.defaults({'proxy':'http://localproxy.com'})
-
-http.createServer(function (req, resp) {
- if (req.url === '/doodle.png') {
- r.get('http://google.com/doodle.png').pipe(resp)
- }
-})
-```
-
-You can still use intermediate proxies; the requests will still follow HTTP redirects, etc.
-
-[back to top](#table-of-contents)
-
-
----
-
-
-## Promises & Async/Await
-
-`request` supports both streaming and callback interfaces natively. If you'd like `request` to return a Promise instead, you can use an alternative interface wrapper for `request`. These wrappers can be useful if you prefer to work with Promises, or if you'd like to use `async`/`await` in ES2017.
-
-Several alternative interfaces are provided by the request team, including:
-- [`request-promise`](https://github.com/request/request-promise) (uses [Bluebird](https://github.com/petkaantonov/bluebird) Promises)
-- [`request-promise-native`](https://github.com/request/request-promise-native) (uses native Promises)
-- [`request-promise-any`](https://github.com/request/request-promise-any) (uses [any-promise](https://www.npmjs.com/package/any-promise) Promises)
-
-Also, [`util.promisify`](https://nodejs.org/api/util.html#util_util_promisify_original), which is available from Node.js v8.0, can be used to convert a regular function that takes a callback into one that returns a promise instead.
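-
-For example, a minimal sketch using `util.promisify` (the promisified function resolves with the `response` object, so the body is available as `response.body`; the GitHub URL is just an example):
-
-```js
-const util = require('util');
-const request = util.promisify(require('request'));
-
-(async () => {
-  // resolves with the http.IncomingMessage; rejects if an error occurred
-  const response = await request('https://api.github.com/repos/request/request', {
-    json: true,
-    headers: { 'User-Agent': 'request' }
-  });
-  console.log('statusCode:', response.statusCode);
-  console.log('name:', response.body.name); // body is parsed JSON because of `json: true`
-})().catch(console.error);
-```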
-
-
-[back to top](#table-of-contents)
-
-
----
-
-
-## Forms
-
-`request` supports `application/x-www-form-urlencoded` and `multipart/form-data` form uploads. For `multipart/related` refer to the `multipart` API.
-
-
-#### application/x-www-form-urlencoded (URL-Encoded Forms)
-
-URL-encoded forms are simple.
-
-```js
-request.post('http://service.com/upload', {form:{key:'value'}})
-// or
-request.post('http://service.com/upload').form({key:'value'})
-// or
-request.post({url:'http://service.com/upload', form: {key:'value'}}, function(err,httpResponse,body){ /* ... */ })
-```
-
-
-#### multipart/form-data (Multipart Form Uploads)
-
-For `multipart/form-data` we use the [form-data](https://github.com/form-data/form-data) library by [@felixge](https://github.com/felixge). In most cases, you can pass your upload form data via the `formData` option.
-
-
-```js
-const formData = {
- // Pass a simple key-value pair
- my_field: 'my_value',
- // Pass data via Buffers
- my_buffer: Buffer.from([1, 2, 3]),
- // Pass data via Streams
- my_file: fs.createReadStream(__dirname + '/unicycle.jpg'),
- // Pass multiple values /w an Array
- attachments: [
- fs.createReadStream(__dirname + '/attachment1.jpg'),
- fs.createReadStream(__dirname + '/attachment2.jpg')
- ],
- // Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS}
- // Use case: for some types of streams, you'll need to provide "file"-related information manually.
- // See the `form-data` README for more information about options: https://github.com/form-data/form-data
- custom_file: {
- value: fs.createReadStream('/dev/urandom'),
- options: {
- filename: 'topsecret.jpg',
- contentType: 'image/jpeg'
- }
- }
-};
-request.post({url:'http://service.com/upload', formData: formData}, function optionalCallback(err, httpResponse, body) {
- if (err) {
- return console.error('upload failed:', err);
- }
- console.log('Upload successful! Server responded with:', body);
-});
-```
-
-For advanced cases, you can access the form-data object itself via `r.form()`. This can be modified until the request is fired on the next cycle of the event-loop. (Note that calling `form()` will clear the currently set form data for that request.)
-
-```js
-// NOTE: Advanced use-case, for normal use see 'formData' usage above
-const r = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {...})
-const form = r.form();
-form.append('my_field', 'my_value');
-form.append('my_buffer', Buffer.from([1, 2, 3]));
-form.append('custom_file', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'});
-```
-See the [form-data README](https://github.com/form-data/form-data) for more information & examples.
-
-
-#### multipart/related
-
-Some variations in different HTTP implementations require a newline/CRLF before, after, or both before and after the boundary of a `multipart/related` request (using the multipart option). This has been observed in the .NET WebAPI version 4.0. You can turn on a boundary preamble or postamble CRLF by passing `preambleCRLF` or `postambleCRLF` as `true` in your request options.
-
-```js
- request({
- method: 'PUT',
- preambleCRLF: true,
- postambleCRLF: true,
- uri: 'http://service.com/upload',
- multipart: [
- {
- 'content-type': 'application/json',
- body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
- },
- { body: 'I am an attachment' },
- { body: fs.createReadStream('image.png') }
- ],
- // alternatively pass an object containing additional options
- multipart: {
- chunked: false,
- data: [
- {
- 'content-type': 'application/json',
- body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
- },
- { body: 'I am an attachment' }
- ]
- }
- },
- function (error, response, body) {
- if (error) {
- return console.error('upload failed:', error);
- }
- console.log('Upload successful! Server responded with:', body);
- })
-```
-
-[back to top](#table-of-contents)
-
-
----
-
-
-## HTTP Authentication
-
-```js
-request.get('http://some.server.com/').auth('username', 'password', false);
-// or
-request.get('http://some.server.com/', {
- 'auth': {
- 'user': 'username',
- 'pass': 'password',
- 'sendImmediately': false
- }
-});
-// or
-request.get('http://some.server.com/').auth(null, null, true, 'bearerToken');
-// or
-request.get('http://some.server.com/', {
- 'auth': {
- 'bearer': 'bearerToken'
- }
-});
-```
-
-If passed as an option, `auth` should be a hash containing values:
-
-- `user` || `username`
-- `pass` || `password`
-- `sendImmediately` (optional)
-- `bearer` (optional)
-
-The method form takes parameters
-`auth(username, password, sendImmediately, bearer)`.
-
-`sendImmediately` defaults to `true`, which causes a basic or bearer
-authentication header to be sent. If `sendImmediately` is `false`, then
-`request` will retry with a proper authentication header after receiving a
-`401` response from the server (which must contain a `WWW-Authenticate` header
-indicating the required authentication method).
-
-Note that you can also specify basic authentication using the URL itself, as
-detailed in [RFC 1738](http://www.ietf.org/rfc/rfc1738.txt). Simply pass the
-`user:password` before the host with an `@` sign:
-
-```js
-const username = 'username',
- password = 'password',
- url = 'http://' + username + ':' + password + '@some.server.com';
-
-request({url}, function (error, response, body) {
- // Do more stuff with 'body' here
-});
-```
-
-Digest authentication is supported, but it only works with `sendImmediately`
-set to `false`; otherwise `request` will send basic authentication on the
-initial request, which will probably cause the request to fail.
-
-Bearer authentication is supported, and is activated when the `bearer` value is
-available. The value may be either a `String` or a `Function` returning a
-`String`. Using a function to supply the bearer token is particularly useful if
-used in conjunction with `defaults` to allow a single function to supply the
-last known token at the time of sending a request, or to compute one on the fly.
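-
-As a sketch (the token store and refresh logic here are hypothetical), combining a bearer function with `defaults`:
-
-```js
-// hypothetical in-memory token store; some other code refreshes `currentToken`
-let currentToken = 'initial-token';
-
-const authedRequest = request.defaults({
-  auth: {
-    // the function is invoked each time a request is sent,
-    // so the latest token is always used
-    bearer: function () { return currentToken; }
-  }
-});
-
-authedRequest.get('https://api.some-server.com/me', function (err, res, body) {
-  // handle the response here
-});
-```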
-
-[back to top](#table-of-contents)
-
-
----
-
-
-## Custom HTTP Headers
-
-HTTP Headers, such as `User-Agent`, can be set in the `options` object.
-In the example below, we call the GitHub API to find out the number
-of stars and forks for the request repository. This requires a
-custom `User-Agent` header as well as HTTPS.
-
-```js
-const request = require('request');
-
-const options = {
- url: 'https://api.github.com/repos/request/request',
- headers: {
- 'User-Agent': 'request'
- }
-};
-
-function callback(error, response, body) {
- if (!error && response.statusCode == 200) {
- const info = JSON.parse(body);
- console.log(info.stargazers_count + " Stars");
- console.log(info.forks_count + " Forks");
- }
-}
-
-request(options, callback);
-```
-
-[back to top](#table-of-contents)
-
-
----
-
-
-## OAuth Signing
-
-[OAuth version 1.0](https://tools.ietf.org/html/rfc5849) is supported. The
-default signing algorithm is
-[HMAC-SHA1](https://tools.ietf.org/html/rfc5849#section-3.4.2):
-
-```js
-// OAuth1.0 - 3-legged server side flow (Twitter example)
-// step 1
-const qs = require('querystring')
- , oauth =
- { callback: 'http://mysite.com/callback/'
- , consumer_key: CONSUMER_KEY
- , consumer_secret: CONSUMER_SECRET
- }
- , url = 'https://api.twitter.com/oauth/request_token'
- ;
-request.post({url:url, oauth:oauth}, function (e, r, body) {
- // Ideally, you would take the body in the response
- // and construct a URL that a user clicks on (like a sign in button).
- // The verifier is only available in the response after a user has
- // verified with twitter that they are authorizing your app.
-
- // step 2
- const req_data = qs.parse(body)
- const uri = 'https://api.twitter.com/oauth/authenticate'
- + '?' + qs.stringify({oauth_token: req_data.oauth_token})
- // redirect the user to the authorize uri
-
- // step 3
- // after the user is redirected back to your server
- const auth_data = qs.parse(body)
- , oauth =
- { consumer_key: CONSUMER_KEY
- , consumer_secret: CONSUMER_SECRET
- , token: auth_data.oauth_token
- , token_secret: req_data.oauth_token_secret
- , verifier: auth_data.oauth_verifier
- }
- , url = 'https://api.twitter.com/oauth/access_token'
- ;
- request.post({url:url, oauth:oauth}, function (e, r, body) {
- // ready to make signed requests on behalf of the user
- const perm_data = qs.parse(body)
- , oauth =
- { consumer_key: CONSUMER_KEY
- , consumer_secret: CONSUMER_SECRET
- , token: perm_data.oauth_token
- , token_secret: perm_data.oauth_token_secret
- }
- , url = 'https://api.twitter.com/1.1/users/show.json'
- , qs =
- { screen_name: perm_data.screen_name
- , user_id: perm_data.user_id
- }
- ;
- request.get({url:url, oauth:oauth, qs:qs, json:true}, function (e, r, user) {
- console.log(user)
- })
- })
-})
-```
-
-For [RSA-SHA1 signing](https://tools.ietf.org/html/rfc5849#section-3.4.3), make
-the following changes to the OAuth options object:
-* Pass `signature_method : 'RSA-SHA1'`
-* Instead of `consumer_secret`, specify a `private_key` string in
- [PEM format](http://how2ssl.com/articles/working_with_pem_files/)
-
-For [PLAINTEXT signing](http://oauth.net/core/1.0/#anchor22), make
-the following changes to the OAuth options object:
-* Pass `signature_method : 'PLAINTEXT'`
-
-To send OAuth parameters via query params or in a post body as described in the
-[Consumer Request Parameters](http://oauth.net/core/1.0/#consumer_req_param)
-section of the OAuth 1.0 spec:
-* Pass `transport_method : 'query'` or `transport_method : 'body'` in the OAuth
- options object.
-* `transport_method` defaults to `'header'`
-
-To use [Request Body Hash](https://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html) you can either
-* Manually generate the body hash and pass it as a string `body_hash: '...'`
-* Automatically generate the body hash by passing `body_hash: true`
-
-[back to top](#table-of-contents)
-
-
----
-
-
-## Proxies
-
-If you specify a `proxy` option, then the request (and any subsequent
-redirects) will be sent via a connection to the proxy server.
-
-If your endpoint is an `https` url, and you are using a proxy, then
-request will send a `CONNECT` request to the proxy server *first*, and
-then use the supplied connection to connect to the endpoint.
-
-That is, first it will make a request like:
-
-```
-HTTP/1.1 CONNECT endpoint-server.com:80
-Host: proxy-server.com
-User-Agent: whatever user agent you specify
-```
-
-and then the proxy server will make a TCP connection to `endpoint-server`
-on port `80` and return a response that looks like:
-
-```
-HTTP/1.1 200 OK
-```
-
-At this point, the connection is left open, and the client is
-communicating directly with the `endpoint-server.com` machine.
-
-See [the wikipedia page on HTTP Tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel)
-for more information.
-
-By default, when proxying `http` traffic, request will simply make a
-standard proxied `http` request. This is done by making the `url`
-section of the initial line of the request a fully qualified url to
-the endpoint.
-
-For example, it will make a single request that looks like:
-
-```
-HTTP/1.1 GET http://endpoint-server.com/some-url
-Host: proxy-server.com
-Other-Headers: all go here
-
-request body or whatever
-```
-
-Because a pure "http over http" tunnel offers no additional security
-or other features, it is generally simpler to go with a
-straightforward HTTP proxy in this case. However, if you would like
-to force a tunneling proxy, you may set the `tunnel` option to `true`.
-
-You can also make a standard proxied `http` request by explicitly setting
-`tunnel : false`, but **note that this will allow the proxy to see the traffic
-to/from the destination server**.
-
-If you are using a tunneling proxy, you may set the
-`proxyHeaderWhiteList` to share certain headers with the proxy.
-
-You can also set the `proxyHeaderExclusiveList` to share certain
-headers only with the proxy and not with destination host.
-
-By default, this set is:
-
-```
-accept
-accept-charset
-accept-encoding
-accept-language
-accept-ranges
-cache-control
-content-encoding
-content-language
-content-length
-content-location
-content-md5
-content-range
-content-type
-connection
-date
-expect
-max-forwards
-pragma
-proxy-authorization
-referer
-te
-transfer-encoding
-user-agent
-via
-```
-
-Note that, when using a tunneling proxy, the `proxy-authorization`
-header and any headers from custom `proxyHeaderExclusiveList` are
-*never* sent to the endpoint server, but only to the proxy server.
-
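-For example, a sketch of a tunneled request where hypothetical headers are shared only with the proxy via `proxyHeaderExclusiveList`:
-
-```js
-request.get({
-  url: 'https://internal.example.com/',            // hypothetical destination
-  proxy: 'http://corp-proxy.example.com:8080',     // hypothetical tunneling proxy
-  tunnel: true,
-  // these headers go to the proxy only, never to internal.example.com
-  proxyHeaderExclusiveList: ['proxy-authorization', 'x-proxy-ticket'],
-  headers: {
-    'proxy-authorization': 'Basic dXNlcjpwYXNz',
-    'x-proxy-ticket': 'abc123'                     // hypothetical custom proxy header
-  }
-});
-```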
-
-### Controlling proxy behaviour using environment variables
-
-The following environment variables are respected by `request`:
-
- * `HTTP_PROXY` / `http_proxy`
- * `HTTPS_PROXY` / `https_proxy`
- * `NO_PROXY` / `no_proxy`
-
-When `HTTP_PROXY` / `http_proxy` are set, they will be used to proxy non-SSL requests that do not have an explicit `proxy` configuration option present. Similarly, `HTTPS_PROXY` / `https_proxy` will be respected for SSL requests that do not have an explicit `proxy` configuration option. It is valid to define a proxy in one of the environment variables, but then override it for a specific request, using the `proxy` configuration option. Furthermore, the `proxy` configuration option can be explicitly set to false / null to opt out of proxying altogether for that request.
-
-`request` is also aware of the `NO_PROXY`/`no_proxy` environment variables. These variables provide a granular way to opt out of proxying, on a per-host basis. The variable should contain a comma-separated list of hosts for which proxying is skipped. It is also possible to opt out of proxying when a particular destination port is used. Finally, the variable may be set to `*` to opt out of the implicit proxy configuration of the other environment variables.
-
-Here are some examples of valid `no_proxy` values:
-
- * `google.com` - don't proxy HTTP/HTTPS requests to Google.
- * `google.com:443` - don't proxy HTTPS requests to Google, but *do* proxy HTTP requests to Google.
- * `google.com:443, yahoo.com:80` - don't proxy HTTPS requests to Google, and don't proxy HTTP requests to Yahoo!
- * `*` - ignore `https_proxy`/`http_proxy` environment variables altogether.
-
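-As a sketch (the URLs are hypothetical), overriding or disabling the environment proxy for a single request:
-
-```js
-// assume http_proxy / https_proxy are set in the environment
-
-// route this request through a different proxy than the environment one
-request.get({
-  url: 'http://example.com/',
-  proxy: 'http://other-proxy.example.com:3128'
-});
-
-// opt out of proxying entirely for this request
-request.get({
-  url: 'http://internal.example.com/status',
-  proxy: false
-});
-```
-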
-[back to top](#table-of-contents)
-
-
----
-
-
-## UNIX Domain Sockets
-
-`request` supports making requests to [UNIX Domain Sockets](https://en.wikipedia.org/wiki/Unix_domain_socket). To make one, use the following URL scheme:
-
-```js
-/* Pattern */ 'http://unix:SOCKET:PATH'
-/* Example */ request.get('http://unix:/absolute/path/to/unix.socket:/request/path')
-```
-
-Note: The `SOCKET` path is assumed to be absolute to the root of the host file system.
-
-[back to top](#table-of-contents)
-
-
----
-
-
-## TLS/SSL Protocol
-
-TLS/SSL Protocol options, such as `cert`, `key` and `passphrase`, can be
-set directly in the `options` object, in the `agentOptions` property of the `options` object, or even in `https.globalAgent.options`. Keep in mind that, although `agentOptions` allows for a slightly wider range of configurations, the recommended way is to set them in the `options` object directly, as settings placed in `agentOptions` or `https.globalAgent.options` are not applied in the same way in proxied environments (where data travels through a TLS connection instead of an http/https agent).
-
-```js
-const fs = require('fs')
- , path = require('path')
- , certFile = path.resolve(__dirname, 'ssl/client.crt')
- , keyFile = path.resolve(__dirname, 'ssl/client.key')
- , caFile = path.resolve(__dirname, 'ssl/ca.cert.pem')
- , request = require('request');
-
-const options = {
- url: 'https://api.some-server.com/',
- cert: fs.readFileSync(certFile),
- key: fs.readFileSync(keyFile),
- passphrase: 'password',
- ca: fs.readFileSync(caFile)
-};
-
-request.get(options);
-```
-
-### Using `options.agentOptions`
-
-In the example below, we call an API that requires a client-side SSL certificate
-(in PEM format) with a passphrase-protected private key (in PEM format), and disable the SSLv3 protocol:
-
-```js
-const fs = require('fs')
- , path = require('path')
- , certFile = path.resolve(__dirname, 'ssl/client.crt')
- , keyFile = path.resolve(__dirname, 'ssl/client.key')
- , request = require('request');
-
-const options = {
- url: 'https://api.some-server.com/',
- agentOptions: {
- cert: fs.readFileSync(certFile),
- key: fs.readFileSync(keyFile),
- // Or use `pfx` property replacing `cert` and `key` when using private key, certificate and CA certs in PFX or PKCS12 format:
- // pfx: fs.readFileSync(pfxFilePath),
- passphrase: 'password',
- securityOptions: 'SSL_OP_NO_SSLv3'
- }
-};
-
-request.get(options);
-```
-
-It is also possible to force the use of SSLv3 only by specifying `secureProtocol`:
-
-```js
-request.get({
- url: 'https://api.some-server.com/',
- agentOptions: {
- secureProtocol: 'SSLv3_method'
- }
-});
-```
-
-It is possible to accept certificates other than those signed by generally trusted Certificate Authorities (CAs).
-This can be useful, for example, when using self-signed certificates.
-To require a different root certificate, you can specify the signing CA by adding the contents of the CA's certificate file to the `agentOptions`.
-The certificate the domain presents must be signed by the root certificate specified:
-
-```js
-request.get({
- url: 'https://api.some-server.com/',
- agentOptions: {
- ca: fs.readFileSync('ca.cert.pem')
- }
-});
-```
-
-The `ca` value can be an array of certificates, in the event you have a private or internal corporate public-key infrastructure hierarchy. For example, if you want to connect to https://api.some-server.com which presents a key chain consisting of:
-1. its own public key, which is signed by:
-2. an intermediate "Corp Issuing Server", that is in turn signed by:
-3. a root CA "Corp Root CA";
-
-you can configure your request as follows:
-
-```js
-request.get({
- url: 'https://api.some-server.com/',
- agentOptions: {
- ca: [
- fs.readFileSync('Corp Issuing Server.pem'),
- fs.readFileSync('Corp Root CA.pem')
- ]
- }
-});
-```
-
-[back to top](#table-of-contents)
-
-
----
-
-## Support for HAR 1.2
-
-The `options.har` property will override the values: `url`, `method`, `qs`, `headers`, `form`, `formData`, `body`, `json`, as well as construct multipart data and read files from disk when `request.postData.params[].fileName` is present without a matching `value`.
-
-A validation step will check if the HAR Request format matches the latest spec (v1.2) and will skip parsing if not matching.
-
-```js
- const request = require('request')
- request({
- // will be ignored
- method: 'GET',
- uri: 'http://www.google.com',
-
- // HTTP Archive Request Object
- har: {
- url: 'http://www.mockbin.com/har',
- method: 'POST',
- headers: [
- {
- name: 'content-type',
- value: 'application/x-www-form-urlencoded'
- }
- ],
- postData: {
- mimeType: 'application/x-www-form-urlencoded',
- params: [
- {
- name: 'foo',
- value: 'bar'
- },
- {
- name: 'hello',
- value: 'world'
- }
- ]
- }
- }
- })
-
- // a POST request will be sent to http://www.mockbin.com
- // with body an application/x-www-form-urlencoded body:
- // foo=bar&hello=world
-```
-
-[back to top](#table-of-contents)
-
-
----
-
-## request(options, callback)
-
-The first argument can be either a `url` or an `options` object. The only required option is `uri`; all others are optional.
-
-- `uri` || `url` - fully qualified uri or a parsed url object from `url.parse()`
-- `baseUrl` - fully qualified uri string used as the base url. Most useful with `request.defaults`, for example when you want to do many requests to the same domain. If `baseUrl` is `https://example.com/api/`, then requesting `/end/point?test=true` will fetch `https://example.com/api/end/point?test=true`. When `baseUrl` is given, `uri` must also be a string.
-- `method` - http method (default: `"GET"`)
-- `headers` - http headers (default: `{}`)
-
----
-
-- `qs` - object containing querystring values to be appended to the `uri`
-- `qsParseOptions` - object containing options to pass to the [qs.parse](https://github.com/hapijs/qs#parsing-objects) method. Alternatively pass options to the [querystring.parse](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_parse_str_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`
-- `qsStringifyOptions` - object containing options to pass to the [qs.stringify](https://github.com/hapijs/qs#stringifying) method. Alternatively pass options to the [querystring.stringify](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`. For example, to change the way arrays are converted to query strings using the `qs` module pass the `arrayFormat` option with one of `indices|brackets|repeat`
-- `useQuerystring` - if true, use `querystring` to stringify and parse
- querystrings, otherwise use `qs` (default: `false`). Set this option to
- `true` if you need arrays to be serialized as `foo=bar&foo=baz` instead of the
- default `foo[0]=bar&foo[1]=baz`.
-
----
-
-- `body` - entity body for PATCH, POST and PUT requests. Must be a `Buffer`, `String` or `ReadStream`. If `json` is `true`, then `body` must be a JSON-serializable object.
-- `form` - when passed an object or a querystring, this sets `body` to a querystring representation of the value and adds a `Content-type: application/x-www-form-urlencoded` header. When passed no options, a `FormData` instance is returned (and is piped to request). See the "Forms" section above.
-- `formData` - data to pass for a `multipart/form-data` request. See
- [Forms](#forms) section above.
-- `multipart` - array of objects which contain their own headers and `body`
- attributes. Sends a `multipart/related` request. See [Forms](#forms) section
- above.
- - Alternatively you can pass in an object `{chunked: false, data: []}` where
- `chunked` is used to specify whether the request is sent in
- [chunked transfer encoding](https://en.wikipedia.org/wiki/Chunked_transfer_encoding)
- In non-chunked requests, data items with body streams are not allowed.
-- `preambleCRLF` - append a newline/CRLF before the boundary of your `multipart/form-data` request.
-- `postambleCRLF` - append a newline/CRLF at the end of the boundary of your `multipart/form-data` request.
-- `json` - sets `body` to JSON representation of value and adds `Content-type: application/json` header. Additionally, parses the response body as JSON.
-- `jsonReviver` - a [reviver function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) that will be passed to `JSON.parse()` when parsing a JSON response body.
-- `jsonReplacer` - a [replacer function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) that will be passed to `JSON.stringify()` when stringifying a JSON request body.
-
----
-
-- `auth` - a hash containing values `user` || `username`, `pass` || `password`, and `sendImmediately` (optional). See documentation above.
-- `oauth` - options for OAuth HMAC-SHA1 signing. See documentation above.
-- `hawk` - options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example).
-- `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`, and optionally `session` (note that this only works for services that require session as part of the canonical string). Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services). If you want to use AWS signature version 4, use the parameter `sign_version` with value `4`; otherwise the default is version 2. If you are using SigV4, you can also include a `service` property that specifies the service name. **Note:** you need to `npm install aws4` first.
-- `httpSignature` - options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options.
-
----
-
-- `followRedirect` - follow HTTP 3xx responses as redirects (default: `true`). This property can also be implemented as a function which gets the `response` object as a single argument and should return `true` if redirects should continue or `false` otherwise.
-- `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects (default: `false`)
-- `followOriginalHttpMethod` - by default we redirect to HTTP method GET. You can enable this property to redirect to the original HTTP method (default: `false`)
-- `maxRedirects` - the maximum number of redirects to follow (default: `10`)
-- `removeRefererHeader` - removes the referer header when a redirect happens (default: `false`). **Note:** if `true`, the referer header set in the initial request is preserved during the redirect chain.
-
----
-
-- `encoding` - encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default). (**Note:** if you expect binary data, you should set `encoding: null`.)
-- `gzip` - if `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response. **Note:** Automatic decoding of the response content is performed on the body data returned through `request` (both through the `request` stream and passed to the callback function) but is not performed on the `response` stream (available from the `response` event) which is the unmodified `http.IncomingMessage` object which may contain compressed data. See example below.
-- `jar` - if `true`, remember cookies for future use (or define your custom cookie jar; see examples section)
-
----
-
-- `agent` - `http(s).Agent` instance to use
-- `agentClass` - alternatively specify your agent's class name
-- `agentOptions` - and pass its options. **Note:** for HTTPS see [tls API doc for TLS/SSL options](http://nodejs.org/api/tls.html#tls_tls_connect_options_callback) and the [documentation above](#using-optionsagentoptions).
-- `forever` - set to `true` to use the [forever-agent](https://github.com/request/forever-agent) **Note:** Defaults to `http(s).Agent({keepAlive:true})` in node 0.12+
-- `pool` - an object describing which agents to use for the request. If this option is omitted the request will use the global agent (as long as your options allow for it). Otherwise, request will search the pool for your custom agent. If no custom agent is found, a new agent will be created and added to the pool. **Note:** `pool` is used only when the `agent` option is not specified.
- - A `maxSockets` property can also be provided on the `pool` object to set the max number of sockets for all agents created (ex: `pool: {maxSockets: Infinity}`).
- - Note that if you are sending multiple requests in a loop and creating
- multiple new `pool` objects, `maxSockets` will not work as intended. To
- work around this, either use [`request.defaults`](#requestdefaultsoptions)
- with your pool options or create the pool object with the `maxSockets`
- property outside of the loop.
-- `timeout` - integer containing number of milliseconds, controls two timeouts.
- - **Read timeout**: Time to wait for a server to send response headers (and start the response body) before aborting the request.
- - **Connection timeout**: Sets the socket to timeout after `timeout` milliseconds of inactivity. Note that increasing the timeout beyond the OS-wide TCP connection timeout will not have any effect ([the default in Linux can be anywhere from 20-120 seconds][linux-timeout])
-
-[linux-timeout]: http://www.sekuda.com/overriding_the_default_linux_kernel_20_second_tcp_socket_connect_timeout
-
----
-
-- `localAddress` - local interface to bind for network connections.
-- `proxy` - an HTTP proxy to be used. Supports proxy Auth with Basic Auth, identical to support for the `url` parameter (by embedding the auth info in the `uri`)
-- `strictSSL` - if `true`, requires SSL certificates be valid. **Note:** to use your own certificate authority, you need to specify an agent that was created with that CA as an option.
-- `tunnel` - controls the behavior of
- [HTTP `CONNECT` tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_tunneling)
- as follows:
- - `undefined` (default) - `true` if the destination is `https`, `false` otherwise
- - `true` - always tunnel to the destination by making a `CONNECT` request to
- the proxy
- - `false` - request the destination as a `GET` request.
-- `proxyHeaderWhiteList` - a whitelist of headers to send to a
- tunneling proxy.
-- `proxyHeaderExclusiveList` - a whitelist of headers to send
- exclusively to a tunneling proxy and not to destination.
-
----
-
-- `time` - if `true`, the request-response cycle (including all redirects) is timed at millisecond resolution. When set, the following properties are added to the response object:
- - `elapsedTime` Duration of the entire request/response in milliseconds (*deprecated*).
- - `responseStartTime` Timestamp when the response began (in Unix Epoch milliseconds) (*deprecated*).
- - `timingStart` Timestamp of the start of the request (in Unix Epoch milliseconds).
- - `timings` Contains event timestamps in millisecond resolution relative to `timingStart`. If there were redirects, the properties reflect the timings of the final request in the redirect chain:
- - `socket` Relative timestamp when the [`http`](https://nodejs.org/api/http.html#http_event_socket) module's `socket` event fires. This happens when the socket is assigned to the request.
- - `lookup` Relative timestamp when the [`net`](https://nodejs.org/api/net.html#net_event_lookup) module's `lookup` event fires. This happens when the DNS has been resolved.
- - `connect`: Relative timestamp when the [`net`](https://nodejs.org/api/net.html#net_event_connect) module's `connect` event fires. This happens when the server acknowledges the TCP connection.
- - `response`: Relative timestamp when the [`http`](https://nodejs.org/api/http.html#http_event_response) module's `response` event fires. This happens when the first bytes are received from the server.
- - `end`: Relative timestamp when the last bytes of the response are received.
- - `timingPhases` Contains the durations of each request phase. If there were redirects, the properties reflect the timings of the final request in the redirect chain:
- - `wait`: Duration of socket initialization (`timings.socket`)
- - `dns`: Duration of DNS lookup (`timings.lookup` - `timings.socket`)
- - `tcp`: Duration of TCP connection (`timings.connect` - `timings.socket`)
- - `firstByte`: Duration of HTTP server response (`timings.response` - `timings.connect`)
- - `download`: Duration of HTTP download (`timings.end` - `timings.response`)
- - `total`: Duration entire HTTP round-trip (`timings.end`)
-
-- `har` - a [HAR 1.2 Request Object](http://www.softwareishard.com/blog/har-12-spec/#request), will be processed from HAR format into options overwriting matching values *(see the [HAR 1.2 section](#support-for-har-12) for details)*
-- `callback` - alternatively pass the request's callback in the options object
-
-The callback argument gets 3 arguments:
-
-1. An `error` when applicable (usually from [`http.ClientRequest`](http://nodejs.org/api/http.html#http_class_http_clientrequest) object)
-2. An [`http.IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage) object (Response object)
-3. The `response` body (`String` or `Buffer`, or a JSON object if the `json` option is supplied)
-
-[back to top](#table-of-contents)
-
-
----
-
-## Convenience methods
-
-There are also shorthand methods for the different HTTP methods and some other conveniences.
-
-
-### request.defaults(options)
-
-This method **returns a wrapper** around the normal request API that defaults
-to whatever options you pass to it.
-
-**Note:** `request.defaults()` **does not** modify the global request API;
-instead, it **returns a wrapper** that has your default settings applied to it.
-
-**Note:** You can call `.defaults()` on the wrapper that is returned from
-`request.defaults` to add/override defaults that were previously defaulted.
-
-For example:
-```js
-//requests using baseRequest() will set the 'x-token' header
-const baseRequest = request.defaults({
- headers: {'x-token': 'my-token'}
-})
-
-//requests using specialRequest() will include the 'x-token' header set in
-//baseRequest and will also include the 'special' header
-const specialRequest = baseRequest.defaults({
- headers: {special: 'special value'}
-})
-```
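-
-Another sketch (the base URL is hypothetical), combining `defaults` with the `baseUrl` and `json` options so that only the path needs to be given per request:
-
-```js
-const api = request.defaults({
-  baseUrl: 'https://example.com/api/', // hypothetical API root
-  json: true
-});
-
-// fetches https://example.com/api/end/point?test=true
-api.get('/end/point?test=true', function (err, res, body) {
-  // `body` is already parsed JSON because of `json: true`
-});
-```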
-
-### request.METHOD()
-
-These HTTP method convenience functions act just like `request()` but with a default method already set for you:
-
-- *request.get()*: Defaults to `method: "GET"`.
-- *request.post()*: Defaults to `method: "POST"`.
-- *request.put()*: Defaults to `method: "PUT"`.
-- *request.patch()*: Defaults to `method: "PATCH"`.
-- *request.del() / request.delete()*: Defaults to `method: "DELETE"`.
-- *request.head()*: Defaults to `method: "HEAD"`.
-- *request.options()*: Defaults to `method: "OPTIONS"`.
-
-### request.cookie()
-
-Function that creates a new cookie.
-
-```js
-request.cookie('key1=value1')
-```
-### request.jar()
-
-Function that creates a new cookie jar.
-
-```js
-request.jar()
-```
-
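-A short sketch of putting `request.cookie()` and `request.jar()` together with the `jar` option (the URL is just for illustration):
-
-```js
-const jar = request.jar();
-// seed the jar with a cookie scoped to the target site
-jar.setCookie(request.cookie('key1=value1'), 'http://www.google.com');
-
-request({ url: 'http://www.google.com', jar: jar }, function (err, res, body) {
-  // cookies set by the server are stored in `jar` and
-  // sent again on later requests that use the same jar
-});
-```
-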
-### response.caseless.get('header-name')
-
-Function that returns the specified response header field using a [case-insensitive match](https://tools.ietf.org/html/rfc7230#section-3.2)
-
-```js
-request('http://www.google.com', function (error, response, body) {
- // print the Content-Type header even if the server returned it as 'content-type' (lowercase)
- console.log('Content-Type is:', response.caseless.get('Content-Type'));
-});
-```
-
-[back to top](#table-of-contents)
-
-
----
-
-
-## Debugging
-
-There are at least three ways to debug the operation of `request`:
-
-1. Launch the node process like `NODE_DEBUG=request node script.js`
- (`lib,request,otherlib` works too).
-
-2. Set `require('request').debug = true` at any time (this does the same thing
- as #1).
-
-3. Use the [request-debug module](https://github.com/request/request-debug) to
- view request and response headers and bodies.
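-
-As a quick sketch of option 2 (the URL is a placeholder), debug output is written to stderr once the flag is set:
-
-```js
-const request = require('request')
-request.debug = true  // same effect as launching with NODE_DEBUG=request
-request('http://example.com/', function (err, res) {
-  if (err) return console.error(err)
-  console.log('done with status', res.statusCode)
-})
-```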
-
-[back to top](#table-of-contents)
-
-
----
-
-## Timeouts
-
-Most requests to external servers should have a timeout attached, in case the
-server is not responding in a timely manner. Without a timeout, your code may
-leave a socket open and consume resources for minutes or more.
-
-There are two main types of timeouts: **connection timeouts** and **read
-timeouts**. A connect timeout occurs if the timeout is hit while your client is
-attempting to establish a connection to a remote machine (corresponding to the
-[connect() call][connect] on the socket). A read timeout occurs any time the
-server is too slow to send back a part of the response.
-
-These two situations have widely different implications for what went wrong
-with the request, so it's useful to be able to distinguish them. You can detect
-timeout errors by checking `err.code` for an 'ETIMEDOUT' value. Further, you
-can detect whether the timeout was a connection timeout by checking if the
-`err.connect` property is set to `true`.
-
-```js
-request.get('http://10.255.255.1', {timeout: 1500}, function(err) {
- console.log(err.code === 'ETIMEDOUT');
- // Set to `true` if the timeout was a connection timeout, `false` or
- // `undefined` otherwise.
- console.log(err.connect === true);
- process.exit(0);
-});
-```
-
-[connect]: http://linux.die.net/man/2/connect
-
-## Examples:
-
-```js
- const request = require('request')
- , rand = Math.floor(Math.random()*100000000).toString()
- ;
- request(
- { method: 'PUT'
- , uri: 'http://mikeal.iriscouch.com/testjs/' + rand
- , multipart:
- [ { 'content-type': 'application/json'
- , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
- }
- , { body: 'I am an attachment' }
- ]
- }
- , function (error, response, body) {
- if(response.statusCode == 201){
- console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand)
- } else {
- console.log('error: '+ response.statusCode)
- console.log(body)
- }
- }
- )
-```
-
-For backwards-compatibility, response compression is not supported by default.
-To accept gzip-compressed responses, set the `gzip` option to `true`. Note
-that the body data passed through `request` is automatically decompressed,
-while the response object is left unmodified and will contain compressed data
-if the server sent a compressed response.
-
-```js
- const request = require('request')
- request(
- { method: 'GET'
- , uri: 'http://www.google.com'
- , gzip: true
- }
- , function (error, response, body) {
- // body is the decompressed response body
- console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity'))
- console.log('the decoded data is: ' + body)
- }
- )
- .on('data', function(data) {
- // decompressed data as it is received
- console.log('decoded chunk: ' + data)
- })
- .on('response', function(response) {
- // unmodified http.IncomingMessage object
- response.on('data', function(data) {
- // compressed data as it is received
- console.log('received ' + data.length + ' bytes of compressed data')
- })
- })
-```
-
-Cookies are disabled by default (otherwise they would be used in subsequent requests). To enable cookies, set `jar` to `true` (either in `defaults` or `options`).
-
-```js
-const jarRequest = request.defaults({jar: true})
-jarRequest('http://www.google.com', function () {
-  jarRequest('http://images.google.com')
-})
-```
-
-To use a custom cookie jar (instead of `request`’s global cookie jar), set `jar` to an instance of `request.jar()` (either in `defaults` or `options`).
-
-```js
-const j = request.jar()
-const jarRequest = request.defaults({jar: j})
-jarRequest('http://www.google.com', function () {
-  jarRequest('http://images.google.com')
-})
-```
-
-OR
-
-```js
-const j = request.jar();
-const cookie = request.cookie('key1=value1');
-const url = 'http://www.google.com';
-j.setCookie(cookie, url);
-request({url: url, jar: j}, function () {
- request('http://images.google.com')
-})
-```
-
-To use a custom cookie store (such as a
-[`FileCookieStore`](https://github.com/mitsuru/tough-cookie-filestore)
-which supports saving to and restoring from JSON files), pass it as a parameter
-to `request.jar()`:
-
-```js
-const FileCookieStore = require('tough-cookie-filestore');
-// NOTE - currently the 'cookies.json' file must already exist!
-const j = request.jar(new FileCookieStore('cookies.json'));
-const jarRequest = request.defaults({ jar: j })
-jarRequest('http://www.google.com', function () {
-  jarRequest('http://images.google.com')
-})
-```
-
-The cookie store must be a
-[`tough-cookie`](https://github.com/SalesforceEng/tough-cookie)
-store and it must support synchronous operations; see the
-[`CookieStore` API docs](https://github.com/SalesforceEng/tough-cookie#api)
-for details.
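-
-For instance, a minimal sketch using `tough-cookie`'s built-in `MemoryCookieStore`, which is synchronous and therefore satisfies this requirement (assumes `tough-cookie` is installed; the URL is a placeholder):
-
-```js
-const tough = require('tough-cookie')
-// MemoryCookieStore performs all operations synchronously
-const j = request.jar(new tough.MemoryCookieStore())
-request({ url: 'http://example.com/', jar: j }, function (err) {
-  if (err) return console.error(err)
-  console.log(j.getCookieString('http://example.com/'))
-})
-```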
-
-To inspect your cookie jar after a request:
-
-```js
-const j = request.jar()
-const url = 'http://www.google.com'
-request({url: url, jar: j}, function () {
-  const cookie_string = j.getCookieString(url); // "key1=value1; key2=value2; ..."
-  const cookies = j.getCookies(url);
- // [{key: 'key1', value: 'value1', domain: "www.google.com", ...}, ...]
-})
-```
-
-[back to top](#table-of-contents)
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/index.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/index.js
deleted file mode 100644
index d50f9917b..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/index.js
+++ /dev/null
@@ -1,155 +0,0 @@
-// Copyright 2010-2012 Mikeal Rogers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-'use strict'
-
-var extend = require('extend')
-var cookies = require('./lib/cookies')
-var helpers = require('./lib/helpers')
-
-var paramsHaveRequestBody = helpers.paramsHaveRequestBody
-
-// organize params for patch, post, put, head, del
-function initParams (uri, options, callback) {
- if (typeof options === 'function') {
- callback = options
- }
-
- var params = {}
- if (options !== null && typeof options === 'object') {
- extend(params, options, {uri: uri})
- } else if (typeof uri === 'string') {
- extend(params, {uri: uri})
- } else {
- extend(params, uri)
- }
-
- params.callback = callback || params.callback
- return params
-}
-
-function request (uri, options, callback) {
- if (typeof uri === 'undefined') {
- throw new Error('undefined is not a valid uri or options object.')
- }
-
- var params = initParams(uri, options, callback)
-
- if (params.method === 'HEAD' && paramsHaveRequestBody(params)) {
- throw new Error('HTTP HEAD requests MUST NOT include a request body.')
- }
-
- return new request.Request(params)
-}
-
-function verbFunc (verb) {
- var method = verb.toUpperCase()
- return function (uri, options, callback) {
- var params = initParams(uri, options, callback)
- params.method = method
- return request(params, params.callback)
- }
-}
-
-// define like this to please codeintel/intellisense IDEs
-request.get = verbFunc('get')
-request.head = verbFunc('head')
-request.options = verbFunc('options')
-request.post = verbFunc('post')
-request.put = verbFunc('put')
-request.patch = verbFunc('patch')
-request.del = verbFunc('delete')
-request['delete'] = verbFunc('delete')
-
-request.jar = function (store) {
- return cookies.jar(store)
-}
-
-request.cookie = function (str) {
- return cookies.parse(str)
-}
-
-function wrapRequestMethod (method, options, requester, verb) {
- return function (uri, opts, callback) {
- var params = initParams(uri, opts, callback)
-
- var target = {}
- extend(true, target, options, params)
-
- target.pool = params.pool || options.pool
-
- if (verb) {
- target.method = verb.toUpperCase()
- }
-
- if (typeof requester === 'function') {
- method = requester
- }
-
- return method(target, target.callback)
- }
-}
-
-request.defaults = function (options, requester) {
- var self = this
-
- options = options || {}
-
- if (typeof options === 'function') {
- requester = options
- options = {}
- }
-
- var defaults = wrapRequestMethod(self, options, requester)
-
- var verbs = ['get', 'head', 'post', 'put', 'patch', 'del', 'delete']
- verbs.forEach(function (verb) {
- defaults[verb] = wrapRequestMethod(self[verb], options, requester, verb)
- })
-
- defaults.cookie = wrapRequestMethod(self.cookie, options, requester)
- defaults.jar = self.jar
- defaults.defaults = self.defaults
- return defaults
-}
-
-request.forever = function (agentOptions, optionsArg) {
- var options = {}
- if (optionsArg) {
- extend(options, optionsArg)
- }
- if (agentOptions) {
- options.agentOptions = agentOptions
- }
-
- options.forever = true
- return request.defaults(options)
-}
-
-// Exports
-
-module.exports = request
-request.Request = require('./request')
-request.initParams = initParams
-
-// Backwards compatibility for request.debug
-Object.defineProperty(request, 'debug', {
- enumerable: true,
- get: function () {
- return request.Request.debug
- },
- set: function (debug) {
- request.Request.debug = debug
- }
-})
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/auth.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/auth.js
deleted file mode 100644
index 02f203869..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/auth.js
+++ /dev/null
@@ -1,167 +0,0 @@
-'use strict'
-
-var caseless = require('caseless')
-var uuid = require('uuid/v4')
-var helpers = require('./helpers')
-
-var md5 = helpers.md5
-var toBase64 = helpers.toBase64
-
-function Auth (request) {
- // define all public properties here
- this.request = request
- this.hasAuth = false
- this.sentAuth = false
- this.bearerToken = null
- this.user = null
- this.pass = null
-}
-
-Auth.prototype.basic = function (user, pass, sendImmediately) {
- var self = this
- if (typeof user !== 'string' || (pass !== undefined && typeof pass !== 'string')) {
- self.request.emit('error', new Error('auth() received invalid user or password'))
- }
- self.user = user
- self.pass = pass
- self.hasAuth = true
- var header = user + ':' + (pass || '')
- if (sendImmediately || typeof sendImmediately === 'undefined') {
- var authHeader = 'Basic ' + toBase64(header)
- self.sentAuth = true
- return authHeader
- }
-}
-
-Auth.prototype.bearer = function (bearer, sendImmediately) {
- var self = this
- self.bearerToken = bearer
- self.hasAuth = true
- if (sendImmediately || typeof sendImmediately === 'undefined') {
- if (typeof bearer === 'function') {
- bearer = bearer()
- }
- var authHeader = 'Bearer ' + (bearer || '')
- self.sentAuth = true
- return authHeader
- }
-}
-
-Auth.prototype.digest = function (method, path, authHeader) {
- // TODO: More complete implementation of RFC 2617.
- // - handle challenge.domain
- // - support qop="auth-int" only
- // - handle Authentication-Info (not necessarily?)
- // - check challenge.stale (not necessarily?)
- // - increase nc (not necessarily?)
- // For reference:
- // http://tools.ietf.org/html/rfc2617#section-3
- // https://github.com/bagder/curl/blob/master/lib/http_digest.c
-
- var self = this
-
- var challenge = {}
- var re = /([a-z0-9_-]+)=(?:"([^"]+)"|([a-z0-9_-]+))/gi
- while (true) {
- var match = re.exec(authHeader)
- if (!match) {
- break
- }
- challenge[match[1]] = match[2] || match[3]
- }
-
- /**
- * RFC 2617: handle both MD5 and MD5-sess algorithms.
- *
- * If the algorithm directive's value is "MD5" or unspecified, then HA1 is
- * HA1=MD5(username:realm:password)
- * If the algorithm directive's value is "MD5-sess", then HA1 is
- * HA1=MD5(MD5(username:realm:password):nonce:cnonce)
- */
- var ha1Compute = function (algorithm, user, realm, pass, nonce, cnonce) {
- var ha1 = md5(user + ':' + realm + ':' + pass)
- if (algorithm && algorithm.toLowerCase() === 'md5-sess') {
- return md5(ha1 + ':' + nonce + ':' + cnonce)
- } else {
- return ha1
- }
- }
-
- var qop = /(^|,)\s*auth\s*($|,)/.test(challenge.qop) && 'auth'
- var nc = qop && '00000001'
- var cnonce = qop && uuid().replace(/-/g, '')
- var ha1 = ha1Compute(challenge.algorithm, self.user, challenge.realm, self.pass, challenge.nonce, cnonce)
- var ha2 = md5(method + ':' + path)
- var digestResponse = qop
- ? md5(ha1 + ':' + challenge.nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + ha2)
- : md5(ha1 + ':' + challenge.nonce + ':' + ha2)
- var authValues = {
- username: self.user,
- realm: challenge.realm,
- nonce: challenge.nonce,
- uri: path,
- qop: qop,
- response: digestResponse,
- nc: nc,
- cnonce: cnonce,
- algorithm: challenge.algorithm,
- opaque: challenge.opaque
- }
-
- authHeader = []
- for (var k in authValues) {
- if (authValues[k]) {
- if (k === 'qop' || k === 'nc' || k === 'algorithm') {
- authHeader.push(k + '=' + authValues[k])
- } else {
- authHeader.push(k + '="' + authValues[k] + '"')
- }
- }
- }
- authHeader = 'Digest ' + authHeader.join(', ')
- self.sentAuth = true
- return authHeader
-}
-
-Auth.prototype.onRequest = function (user, pass, sendImmediately, bearer) {
- var self = this
- var request = self.request
-
- var authHeader
- if (bearer === undefined && user === undefined) {
- self.request.emit('error', new Error('no auth mechanism defined'))
- } else if (bearer !== undefined) {
- authHeader = self.bearer(bearer, sendImmediately)
- } else {
- authHeader = self.basic(user, pass, sendImmediately)
- }
- if (authHeader) {
- request.setHeader('authorization', authHeader)
- }
-}
-
-Auth.prototype.onResponse = function (response) {
- var self = this
- var request = self.request
-
- if (!self.hasAuth || self.sentAuth) { return null }
-
- var c = caseless(response.headers)
-
- var authHeader = c.get('www-authenticate')
- var authVerb = authHeader && authHeader.split(' ')[0].toLowerCase()
- request.debug('reauth', authVerb)
-
- switch (authVerb) {
- case 'basic':
- return self.basic(self.user, self.pass, true)
-
- case 'bearer':
- return self.bearer(self.bearerToken, true)
-
- case 'digest':
- return self.digest(request.method, request.path, authHeader)
- }
-}
-
-exports.Auth = Auth
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/cookies.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/cookies.js
deleted file mode 100644
index bd5d46bea..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/cookies.js
+++ /dev/null
@@ -1,38 +0,0 @@
-'use strict'
-
-var tough = require('tough-cookie')
-
-var Cookie = tough.Cookie
-var CookieJar = tough.CookieJar
-
-exports.parse = function (str) {
- if (str && str.uri) {
- str = str.uri
- }
- if (typeof str !== 'string') {
- throw new Error('The cookie function only accepts STRING as param')
- }
- return Cookie.parse(str, {loose: true})
-}
-
-// Adapt the sometimes-Async api of tough.CookieJar to our requirements
-function RequestJar (store) {
- var self = this
- self._jar = new CookieJar(store, {looseMode: true})
-}
-RequestJar.prototype.setCookie = function (cookieOrStr, uri, options) {
- var self = this
- return self._jar.setCookieSync(cookieOrStr, uri, options || {})
-}
-RequestJar.prototype.getCookieString = function (uri) {
- var self = this
- return self._jar.getCookieStringSync(uri)
-}
-RequestJar.prototype.getCookies = function (uri) {
- var self = this
- return self._jar.getCookiesSync(uri)
-}
-
-exports.jar = function (store) {
- return new RequestJar(store)
-}
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/getProxyFromURI.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/getProxyFromURI.js
deleted file mode 100644
index 0b9b18e5a..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/getProxyFromURI.js
+++ /dev/null
@@ -1,79 +0,0 @@
-'use strict'
-
-function formatHostname (hostname) {
- // canonicalize the hostname, so that 'oogle.com' won't match 'google.com'
- return hostname.replace(/^\.*/, '.').toLowerCase()
-}
-
-function parseNoProxyZone (zone) {
- zone = zone.trim().toLowerCase()
-
- var zoneParts = zone.split(':', 2)
- var zoneHost = formatHostname(zoneParts[0])
- var zonePort = zoneParts[1]
- var hasPort = zone.indexOf(':') > -1
-
- return {hostname: zoneHost, port: zonePort, hasPort: hasPort}
-}
-
-function uriInNoProxy (uri, noProxy) {
- var port = uri.port || (uri.protocol === 'https:' ? '443' : '80')
- var hostname = formatHostname(uri.hostname)
- var noProxyList = noProxy.split(',')
-
- // iterate through the noProxyList until it finds a match.
- return noProxyList.map(parseNoProxyZone).some(function (noProxyZone) {
- var isMatchedAt = hostname.indexOf(noProxyZone.hostname)
- var hostnameMatched = (
- isMatchedAt > -1 &&
- (isMatchedAt === hostname.length - noProxyZone.hostname.length)
- )
-
- if (noProxyZone.hasPort) {
- return (port === noProxyZone.port) && hostnameMatched
- }
-
- return hostnameMatched
- })
-}
-
-function getProxyFromURI (uri) {
- // Decide the proper request proxy to use based on the request URI object and the
- // environmental variables (NO_PROXY, HTTP_PROXY, etc.)
- // respect NO_PROXY environment variables (see: https://lynx.invisible-island.net/lynx2.8.7/breakout/lynx_help/keystrokes/environments.html)
-
- var noProxy = process.env.NO_PROXY || process.env.no_proxy || ''
-
- // if the noProxy is a wildcard then return null
-
- if (noProxy === '*') {
- return null
- }
-
- // if the noProxy is not empty and the uri is found return null
-
- if (noProxy !== '' && uriInNoProxy(uri, noProxy)) {
- return null
- }
-
- // Check for an HTTP or HTTPS proxy in the environment, else default to null
-
- if (uri.protocol === 'http:') {
- return process.env.HTTP_PROXY ||
- process.env.http_proxy || null
- }
-
- if (uri.protocol === 'https:') {
- return process.env.HTTPS_PROXY ||
- process.env.https_proxy ||
- process.env.HTTP_PROXY ||
- process.env.http_proxy || null
- }
-
- // if none of that works, return null
- // (What uri protocol are you using then?)
-
- return null
-}
-
-module.exports = getProxyFromURI
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/har.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/har.js
deleted file mode 100644
index 0dedee444..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/har.js
+++ /dev/null
@@ -1,205 +0,0 @@
-'use strict'
-
-var fs = require('fs')
-var qs = require('querystring')
-var validate = require('har-validator')
-var extend = require('extend')
-
-function Har (request) {
- this.request = request
-}
-
-Har.prototype.reducer = function (obj, pair) {
- // new property ?
- if (obj[pair.name] === undefined) {
- obj[pair.name] = pair.value
- return obj
- }
-
- // existing? convert to array
- var arr = [
- obj[pair.name],
- pair.value
- ]
-
- obj[pair.name] = arr
-
- return obj
-}
-
-Har.prototype.prep = function (data) {
- // construct utility properties
- data.queryObj = {}
- data.headersObj = {}
- data.postData.jsonObj = false
- data.postData.paramsObj = false
-
- // construct query objects
- if (data.queryString && data.queryString.length) {
- data.queryObj = data.queryString.reduce(this.reducer, {})
- }
-
- // construct headers objects
- if (data.headers && data.headers.length) {
- // lowercase header keys
- data.headersObj = data.headers.reduceRight(function (headers, header) {
- headers[header.name] = header.value
- return headers
- }, {})
- }
-
- // construct Cookie header
- if (data.cookies && data.cookies.length) {
- var cookies = data.cookies.map(function (cookie) {
- return cookie.name + '=' + cookie.value
- })
-
- if (cookies.length) {
- data.headersObj.cookie = cookies.join('; ')
- }
- }
-
- // prep body
- function some (arr) {
- return arr.some(function (type) {
- return data.postData.mimeType.indexOf(type) === 0
- })
- }
-
- if (some([
- 'multipart/mixed',
- 'multipart/related',
- 'multipart/form-data',
- 'multipart/alternative'])) {
- // reset values
- data.postData.mimeType = 'multipart/form-data'
- } else if (some([
- 'application/x-www-form-urlencoded'])) {
- if (!data.postData.params) {
- data.postData.text = ''
- } else {
- data.postData.paramsObj = data.postData.params.reduce(this.reducer, {})
-
- // always overwrite
- data.postData.text = qs.stringify(data.postData.paramsObj)
- }
- } else if (some([
- 'text/json',
- 'text/x-json',
- 'application/json',
- 'application/x-json'])) {
- data.postData.mimeType = 'application/json'
-
- if (data.postData.text) {
- try {
- data.postData.jsonObj = JSON.parse(data.postData.text)
- } catch (e) {
- this.request.debug(e)
-
- // force back to text/plain
- data.postData.mimeType = 'text/plain'
- }
- }
- }
-
- return data
-}
-
-Har.prototype.options = function (options) {
- // skip if no har property defined
- if (!options.har) {
- return options
- }
-
- var har = {}
- extend(har, options.har)
-
- // only process the first entry
- if (har.log && har.log.entries) {
- har = har.log.entries[0]
- }
-
- // add optional properties to make validation successful
- har.url = har.url || options.url || options.uri || options.baseUrl || '/'
- har.httpVersion = har.httpVersion || 'HTTP/1.1'
- har.queryString = har.queryString || []
- har.headers = har.headers || []
- har.cookies = har.cookies || []
- har.postData = har.postData || {}
- har.postData.mimeType = har.postData.mimeType || 'application/octet-stream'
-
- har.bodySize = 0
- har.headersSize = 0
- har.postData.size = 0
-
- if (!validate.request(har)) {
- return options
- }
-
- // clean up and get some utility properties
- var req = this.prep(har)
-
- // construct new options
- if (req.url) {
- options.url = req.url
- }
-
- if (req.method) {
- options.method = req.method
- }
-
- if (Object.keys(req.queryObj).length) {
- options.qs = req.queryObj
- }
-
- if (Object.keys(req.headersObj).length) {
- options.headers = req.headersObj
- }
-
- function test (type) {
- return req.postData.mimeType.indexOf(type) === 0
- }
- if (test('application/x-www-form-urlencoded')) {
- options.form = req.postData.paramsObj
- } else if (test('application/json')) {
- if (req.postData.jsonObj) {
- options.body = req.postData.jsonObj
- options.json = true
- }
- } else if (test('multipart/form-data')) {
- options.formData = {}
-
- req.postData.params.forEach(function (param) {
- var attachment = {}
-
- if (!param.fileName && !param.contentType) {
- options.formData[param.name] = param.value
- return
- }
-
- // attempt to read from disk!
- if (param.fileName && !param.value) {
- attachment.value = fs.createReadStream(param.fileName)
- } else if (param.value) {
- attachment.value = param.value
- }
-
- if (param.fileName) {
- attachment.options = {
- filename: param.fileName,
- contentType: param.contentType ? param.contentType : null
- }
- }
-
- options.formData[param.name] = attachment
- })
- } else {
- if (req.postData.text) {
- options.body = req.postData.text
- }
- }
-
- return options
-}
-
-exports.Har = Har
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/hawk.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/hawk.js
deleted file mode 100644
index de48a9851..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/hawk.js
+++ /dev/null
@@ -1,89 +0,0 @@
-'use strict'
-
-var crypto = require('crypto')
-
-function randomString (size) {
- var bits = (size + 1) * 6
- var buffer = crypto.randomBytes(Math.ceil(bits / 8))
- var string = buffer.toString('base64').replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, '')
- return string.slice(0, size)
-}
-
-function calculatePayloadHash (payload, algorithm, contentType) {
- var hash = crypto.createHash(algorithm)
- hash.update('hawk.1.payload\n')
- hash.update((contentType ? contentType.split(';')[0].trim().toLowerCase() : '') + '\n')
- hash.update(payload || '')
- hash.update('\n')
- return hash.digest('base64')
-}
-
-exports.calculateMac = function (credentials, opts) {
- var normalized = 'hawk.1.header\n' +
- opts.ts + '\n' +
- opts.nonce + '\n' +
- (opts.method || '').toUpperCase() + '\n' +
- opts.resource + '\n' +
- opts.host.toLowerCase() + '\n' +
- opts.port + '\n' +
- (opts.hash || '') + '\n'
-
- if (opts.ext) {
- normalized = normalized + opts.ext.replace('\\', '\\\\').replace('\n', '\\n')
- }
-
- normalized = normalized + '\n'
-
- if (opts.app) {
- normalized = normalized + opts.app + '\n' + (opts.dlg || '') + '\n'
- }
-
- var hmac = crypto.createHmac(credentials.algorithm, credentials.key).update(normalized)
- var digest = hmac.digest('base64')
- return digest
-}
-
-exports.header = function (uri, method, opts) {
- var timestamp = opts.timestamp || Math.floor((Date.now() + (opts.localtimeOffsetMsec || 0)) / 1000)
- var credentials = opts.credentials
- if (!credentials || !credentials.id || !credentials.key || !credentials.algorithm) {
- return ''
- }
-
- if (['sha1', 'sha256'].indexOf(credentials.algorithm) === -1) {
- return ''
- }
-
- var artifacts = {
- ts: timestamp,
- nonce: opts.nonce || randomString(6),
- method: method,
- resource: uri.pathname + (uri.search || ''),
- host: uri.hostname,
- port: uri.port || (uri.protocol === 'http:' ? 80 : 443),
- hash: opts.hash,
- ext: opts.ext,
- app: opts.app,
- dlg: opts.dlg
- }
-
- if (!artifacts.hash && (opts.payload || opts.payload === '')) {
- artifacts.hash = calculatePayloadHash(opts.payload, credentials.algorithm, opts.contentType)
- }
-
- var mac = exports.calculateMac(credentials, artifacts)
-
- var hasExt = artifacts.ext !== null && artifacts.ext !== undefined && artifacts.ext !== ''
- var header = 'Hawk id="' + credentials.id +
- '", ts="' + artifacts.ts +
- '", nonce="' + artifacts.nonce +
- (artifacts.hash ? '", hash="' + artifacts.hash : '') +
- (hasExt ? '", ext="' + artifacts.ext.replace(/\\/g, '\\\\').replace(/"/g, '\\"') : '') +
- '", mac="' + mac + '"'
-
- if (artifacts.app) {
- header = header + ', app="' + artifacts.app + (artifacts.dlg ? '", dlg="' + artifacts.dlg : '') + '"'
- }
-
- return header
-}
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/helpers.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/helpers.js
deleted file mode 100644
index 8b2a7e6eb..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/helpers.js
+++ /dev/null
@@ -1,66 +0,0 @@
-'use strict'
-
-var jsonSafeStringify = require('json-stringify-safe')
-var crypto = require('crypto')
-var Buffer = require('safe-buffer').Buffer
-
-var defer = typeof setImmediate === 'undefined'
- ? process.nextTick
- : setImmediate
-
-function paramsHaveRequestBody (params) {
- return (
- params.body ||
- params.requestBodyStream ||
- (params.json && typeof params.json !== 'boolean') ||
- params.multipart
- )
-}
-
-function safeStringify (obj, replacer) {
- var ret
- try {
- ret = JSON.stringify(obj, replacer)
- } catch (e) {
- ret = jsonSafeStringify(obj, replacer)
- }
- return ret
-}
-
-function md5 (str) {
- return crypto.createHash('md5').update(str).digest('hex')
-}
-
-function isReadStream (rs) {
- return rs.readable && rs.path && rs.mode
-}
-
-function toBase64 (str) {
- return Buffer.from(str || '', 'utf8').toString('base64')
-}
-
-function copy (obj) {
- var o = {}
- Object.keys(obj).forEach(function (i) {
- o[i] = obj[i]
- })
- return o
-}
-
-function version () {
- var numbers = process.version.replace('v', '').split('.')
- return {
- major: parseInt(numbers[0], 10),
- minor: parseInt(numbers[1], 10),
- patch: parseInt(numbers[2], 10)
- }
-}
-
-exports.paramsHaveRequestBody = paramsHaveRequestBody
-exports.safeStringify = safeStringify
-exports.md5 = md5
-exports.isReadStream = isReadStream
-exports.toBase64 = toBase64
-exports.copy = copy
-exports.version = version
-exports.defer = defer
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/multipart.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/multipart.js
deleted file mode 100644
index 6a009bc13..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/multipart.js
+++ /dev/null
@@ -1,112 +0,0 @@
-'use strict'
-
-var uuid = require('uuid/v4')
-var CombinedStream = require('combined-stream')
-var isstream = require('isstream')
-var Buffer = require('safe-buffer').Buffer
-
-function Multipart (request) {
- this.request = request
- this.boundary = uuid()
- this.chunked = false
- this.body = null
-}
-
-Multipart.prototype.isChunked = function (options) {
- var self = this
- var chunked = false
- var parts = options.data || options
-
- if (!parts.forEach) {
- self.request.emit('error', new Error('Argument error, options.multipart.'))
- }
-
- if (options.chunked !== undefined) {
- chunked = options.chunked
- }
-
- if (self.request.getHeader('transfer-encoding') === 'chunked') {
- chunked = true
- }
-
- if (!chunked) {
- parts.forEach(function (part) {
- if (typeof part.body === 'undefined') {
- self.request.emit('error', new Error('Body attribute missing in multipart.'))
- }
- if (isstream(part.body)) {
- chunked = true
- }
- })
- }
-
- return chunked
-}
-
-Multipart.prototype.setHeaders = function (chunked) {
- var self = this
-
- if (chunked && !self.request.hasHeader('transfer-encoding')) {
- self.request.setHeader('transfer-encoding', 'chunked')
- }
-
- var header = self.request.getHeader('content-type')
-
- if (!header || header.indexOf('multipart') === -1) {
- self.request.setHeader('content-type', 'multipart/related; boundary=' + self.boundary)
- } else {
- if (header.indexOf('boundary') !== -1) {
- self.boundary = header.replace(/.*boundary=([^\s;]+).*/, '$1')
- } else {
- self.request.setHeader('content-type', header + '; boundary=' + self.boundary)
- }
- }
-}
-
-Multipart.prototype.build = function (parts, chunked) {
- var self = this
- var body = chunked ? new CombinedStream() : []
-
- function add (part) {
- if (typeof part === 'number') {
- part = part.toString()
- }
- return chunked ? body.append(part) : body.push(Buffer.from(part))
- }
-
- if (self.request.preambleCRLF) {
- add('\r\n')
- }
-
- parts.forEach(function (part) {
- var preamble = '--' + self.boundary + '\r\n'
- Object.keys(part).forEach(function (key) {
- if (key === 'body') { return }
- preamble += key + ': ' + part[key] + '\r\n'
- })
- preamble += '\r\n'
- add(preamble)
- add(part.body)
- add('\r\n')
- })
- add('--' + self.boundary + '--')
-
- if (self.request.postambleCRLF) {
- add('\r\n')
- }
-
- return body
-}
-
-Multipart.prototype.onRequest = function (options) {
- var self = this
-
- var chunked = self.isChunked(options)
- var parts = options.data || options
-
- self.setHeaders(chunked)
- self.chunked = chunked
- self.body = self.build(parts, chunked)
-}
-
-exports.Multipart = Multipart
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/oauth.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/oauth.js
deleted file mode 100644
index 96de72b8e..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/oauth.js
+++ /dev/null
@@ -1,148 +0,0 @@
-'use strict'
-
-var url = require('url')
-var qs = require('qs')
-var caseless = require('caseless')
-var uuid = require('uuid/v4')
-var oauth = require('oauth-sign')
-var crypto = require('crypto')
-var Buffer = require('safe-buffer').Buffer
-
-function OAuth (request) {
- this.request = request
- this.params = null
-}
-
-OAuth.prototype.buildParams = function (_oauth, uri, method, query, form, qsLib) {
- var oa = {}
- for (var i in _oauth) {
- oa['oauth_' + i] = _oauth[i]
- }
- if (!oa.oauth_version) {
- oa.oauth_version = '1.0'
- }
- if (!oa.oauth_timestamp) {
- oa.oauth_timestamp = Math.floor(Date.now() / 1000).toString()
- }
- if (!oa.oauth_nonce) {
- oa.oauth_nonce = uuid().replace(/-/g, '')
- }
- if (!oa.oauth_signature_method) {
- oa.oauth_signature_method = 'HMAC-SHA1'
- }
-
- var consumer_secret_or_private_key = oa.oauth_consumer_secret || oa.oauth_private_key // eslint-disable-line camelcase
- delete oa.oauth_consumer_secret
- delete oa.oauth_private_key
-
- var token_secret = oa.oauth_token_secret // eslint-disable-line camelcase
- delete oa.oauth_token_secret
-
- var realm = oa.oauth_realm
- delete oa.oauth_realm
- delete oa.oauth_transport_method
-
- var baseurl = uri.protocol + '//' + uri.host + uri.pathname
- var params = qsLib.parse([].concat(query, form, qsLib.stringify(oa)).join('&'))
-
- oa.oauth_signature = oauth.sign(
- oa.oauth_signature_method,
- method,
- baseurl,
- params,
- consumer_secret_or_private_key, // eslint-disable-line camelcase
- token_secret // eslint-disable-line camelcase
- )
-
- if (realm) {
- oa.realm = realm
- }
-
- return oa
-}
-
-OAuth.prototype.buildBodyHash = function (_oauth, body) {
- if (['HMAC-SHA1', 'RSA-SHA1'].indexOf(_oauth.signature_method || 'HMAC-SHA1') < 0) {
- this.request.emit('error', new Error('oauth: ' + _oauth.signature_method +
- ' signature_method not supported with body_hash signing.'))
- }
-
- var shasum = crypto.createHash('sha1')
- shasum.update(body || '')
- var sha1 = shasum.digest('hex')
-
- return Buffer.from(sha1, 'hex').toString('base64')
-}
-
-OAuth.prototype.concatParams = function (oa, sep, wrap) {
- wrap = wrap || ''
-
- var params = Object.keys(oa).filter(function (i) {
- return i !== 'realm' && i !== 'oauth_signature'
- }).sort()
-
- if (oa.realm) {
- params.splice(0, 0, 'realm')
- }
- params.push('oauth_signature')
-
- return params.map(function (i) {
- return i + '=' + wrap + oauth.rfc3986(oa[i]) + wrap
- }).join(sep)
-}
-
-OAuth.prototype.onRequest = function (_oauth) {
- var self = this
- self.params = _oauth
-
- var uri = self.request.uri || {}
- var method = self.request.method || ''
- var headers = caseless(self.request.headers)
- var body = self.request.body || ''
- var qsLib = self.request.qsLib || qs
-
- var form
- var query
- var contentType = headers.get('content-type') || ''
- var formContentType = 'application/x-www-form-urlencoded'
- var transport = _oauth.transport_method || 'header'
-
- if (contentType.slice(0, formContentType.length) === formContentType) {
- contentType = formContentType
- form = body
- }
- if (uri.query) {
- query = uri.query
- }
- if (transport === 'body' && (method !== 'POST' || contentType !== formContentType)) {
- self.request.emit('error', new Error('oauth: transport_method of body requires POST ' +
- 'and content-type ' + formContentType))
- }
-
- if (!form && typeof _oauth.body_hash === 'boolean') {
- _oauth.body_hash = self.buildBodyHash(_oauth, self.request.body.toString())
- }
-
- var oa = self.buildParams(_oauth, uri, method, query, form, qsLib)
-
- switch (transport) {
- case 'header':
- self.request.setHeader('Authorization', 'OAuth ' + self.concatParams(oa, ',', '"'))
- break
-
- case 'query':
- var href = self.request.uri.href += (query ? '&' : '?') + self.concatParams(oa, '&')
- self.request.uri = url.parse(href)
- self.request.path = self.request.uri.path
- break
-
- case 'body':
- self.request.body = (form ? form + '&' : '') + self.concatParams(oa, '&')
- break
-
- default:
- self.request.emit('error', new Error('oauth: transport_method invalid'))
- }
-}
-
-exports.OAuth = OAuth
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/querystring.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/querystring.js
deleted file mode 100644
index 4a32cd149..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/querystring.js
+++ /dev/null
@@ -1,50 +0,0 @@
-'use strict'
-
-var qs = require('qs')
-var querystring = require('querystring')
-
-function Querystring (request) {
- this.request = request
- this.lib = null
- this.useQuerystring = null
- this.parseOptions = null
- this.stringifyOptions = null
-}
-
-Querystring.prototype.init = function (options) {
- if (this.lib) { return }
-
- this.useQuerystring = options.useQuerystring
- this.lib = (this.useQuerystring ? querystring : qs)
-
- this.parseOptions = options.qsParseOptions || {}
- this.stringifyOptions = options.qsStringifyOptions || {}
-}
-
-Querystring.prototype.stringify = function (obj) {
- return (this.useQuerystring)
- ? this.rfc3986(this.lib.stringify(obj,
- this.stringifyOptions.sep || null,
- this.stringifyOptions.eq || null,
- this.stringifyOptions))
- : this.lib.stringify(obj, this.stringifyOptions)
-}
-
-Querystring.prototype.parse = function (str) {
- return (this.useQuerystring)
- ? this.lib.parse(str,
- this.parseOptions.sep || null,
- this.parseOptions.eq || null,
- this.parseOptions)
- : this.lib.parse(str, this.parseOptions)
-}
-
-Querystring.prototype.rfc3986 = function (str) {
- return str.replace(/[!'()*]/g, function (c) {
- return '%' + c.charCodeAt(0).toString(16).toUpperCase()
- })
-}
-
-Querystring.prototype.unescape = querystring.unescape
-
-exports.Querystring = Querystring
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/redirect.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/redirect.js
deleted file mode 100644
index b9150e77c..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/redirect.js
+++ /dev/null
@@ -1,154 +0,0 @@
-'use strict'
-
-var url = require('url')
-var isUrl = /^https?:/
-
-function Redirect (request) {
- this.request = request
- this.followRedirect = true
- this.followRedirects = true
- this.followAllRedirects = false
- this.followOriginalHttpMethod = false
- this.allowRedirect = function () { return true }
- this.maxRedirects = 10
- this.redirects = []
- this.redirectsFollowed = 0
- this.removeRefererHeader = false
-}
-
-Redirect.prototype.onRequest = function (options) {
- var self = this
-
- if (options.maxRedirects !== undefined) {
- self.maxRedirects = options.maxRedirects
- }
- if (typeof options.followRedirect === 'function') {
- self.allowRedirect = options.followRedirect
- }
- if (options.followRedirect !== undefined) {
- self.followRedirects = !!options.followRedirect
- }
- if (options.followAllRedirects !== undefined) {
- self.followAllRedirects = options.followAllRedirects
- }
- if (self.followRedirects || self.followAllRedirects) {
- self.redirects = self.redirects || []
- }
- if (options.removeRefererHeader !== undefined) {
- self.removeRefererHeader = options.removeRefererHeader
- }
- if (options.followOriginalHttpMethod !== undefined) {
- self.followOriginalHttpMethod = options.followOriginalHttpMethod
- }
-}
-
-Redirect.prototype.redirectTo = function (response) {
- var self = this
- var request = self.request
-
- var redirectTo = null
- if (response.statusCode >= 300 && response.statusCode < 400 && response.caseless.has('location')) {
- var location = response.caseless.get('location')
- request.debug('redirect', location)
-
- if (self.followAllRedirects) {
- redirectTo = location
- } else if (self.followRedirects) {
- switch (request.method) {
- case 'PATCH':
- case 'PUT':
- case 'POST':
- case 'DELETE':
- // Do not follow redirects
- break
- default:
- redirectTo = location
- break
- }
- }
- } else if (response.statusCode === 401) {
- var authHeader = request._auth.onResponse(response)
- if (authHeader) {
- request.setHeader('authorization', authHeader)
- redirectTo = request.uri
- }
- }
- return redirectTo
-}
-
-Redirect.prototype.onResponse = function (response) {
- var self = this
- var request = self.request
-
- var redirectTo = self.redirectTo(response)
- if (!redirectTo || !self.allowRedirect.call(request, response)) {
- return false
- }
-
- request.debug('redirect to', redirectTo)
-
- // ignore any potential response body. it cannot possibly be useful
- // to us at this point.
- // response.resume should be defined, but check anyway before calling. Workaround for browserify.
- if (response.resume) {
- response.resume()
- }
-
- if (self.redirectsFollowed >= self.maxRedirects) {
- request.emit('error', new Error('Exceeded maxRedirects. Probably stuck in a redirect loop ' + request.uri.href))
- return false
- }
- self.redirectsFollowed += 1
-
- if (!isUrl.test(redirectTo)) {
- redirectTo = url.resolve(request.uri.href, redirectTo)
- }
-
- var uriPrev = request.uri
- request.uri = url.parse(redirectTo)
-
- // handle the case where we change protocol from https to http or vice versa
- if (request.uri.protocol !== uriPrev.protocol) {
- delete request.agent
- }
-
- self.redirects.push({ statusCode: response.statusCode, redirectUri: redirectTo })
-
- if (self.followAllRedirects && request.method !== 'HEAD' &&
- response.statusCode !== 401 && response.statusCode !== 307) {
- request.method = self.followOriginalHttpMethod ? request.method : 'GET'
- }
- // request.method = 'GET' // Force all redirects to use GET || commented out fixes #215
- delete request.src
- delete request.req
- delete request._started
- if (response.statusCode !== 401 && response.statusCode !== 307) {
- // Remove parameters from the previous response, unless this is the second request
- // for a server that requires digest authentication.
- delete request.body
- delete request._form
- if (request.headers) {
- request.removeHeader('host')
- request.removeHeader('content-type')
- request.removeHeader('content-length')
- if (request.uri.hostname !== request.originalHost.split(':')[0]) {
- // Remove authorization if changing hostnames (but not if just
- // changing ports or protocols). This matches the behavior of curl:
- // https://github.com/bagder/curl/blob/6beb0eee/lib/http.c#L710
- request.removeHeader('authorization')
- }
- }
- }
-
- if (!self.removeRefererHeader) {
- request.setHeader('referer', uriPrev.href)
- }
-
- request.emit('redirect')
-
- request.init()
-
- return true
-}
-
-exports.Redirect = Redirect
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/tunnel.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/tunnel.js
deleted file mode 100644
index 4479003f6..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/lib/tunnel.js
+++ /dev/null
@@ -1,175 +0,0 @@
-'use strict'
-
-var url = require('url')
-var tunnel = require('tunnel-agent')
-
-var defaultProxyHeaderWhiteList = [
- 'accept',
- 'accept-charset',
- 'accept-encoding',
- 'accept-language',
- 'accept-ranges',
- 'cache-control',
- 'content-encoding',
- 'content-language',
- 'content-location',
- 'content-md5',
- 'content-range',
- 'content-type',
- 'connection',
- 'date',
- 'expect',
- 'max-forwards',
- 'pragma',
- 'referer',
- 'te',
- 'user-agent',
- 'via'
-]
-
-var defaultProxyHeaderExclusiveList = [
- 'proxy-authorization'
-]
-
-function constructProxyHost (uriObject) {
- var port = uriObject.port
- var protocol = uriObject.protocol
- var proxyHost = uriObject.hostname + ':'
-
- if (port) {
- proxyHost += port
- } else if (protocol === 'https:') {
- proxyHost += '443'
- } else {
- proxyHost += '80'
- }
-
- return proxyHost
-}
-
-function constructProxyHeaderWhiteList (headers, proxyHeaderWhiteList) {
- var whiteList = proxyHeaderWhiteList
- .reduce(function (set, header) {
- set[header.toLowerCase()] = true
- return set
- }, {})
-
- return Object.keys(headers)
- .filter(function (header) {
- return whiteList[header.toLowerCase()]
- })
- .reduce(function (set, header) {
- set[header] = headers[header]
- return set
- }, {})
-}
-
-function constructTunnelOptions (request, proxyHeaders) {
- var proxy = request.proxy
-
- var tunnelOptions = {
- proxy: {
- host: proxy.hostname,
- port: +proxy.port,
- proxyAuth: proxy.auth,
- headers: proxyHeaders
- },
- headers: request.headers,
- ca: request.ca,
- cert: request.cert,
- key: request.key,
- passphrase: request.passphrase,
- pfx: request.pfx,
- ciphers: request.ciphers,
- rejectUnauthorized: request.rejectUnauthorized,
- secureOptions: request.secureOptions,
- secureProtocol: request.secureProtocol
- }
-
- return tunnelOptions
-}
-
-function constructTunnelFnName (uri, proxy) {
- var uriProtocol = (uri.protocol === 'https:' ? 'https' : 'http')
- var proxyProtocol = (proxy.protocol === 'https:' ? 'Https' : 'Http')
- return [uriProtocol, proxyProtocol].join('Over')
-}
-
-function getTunnelFn (request) {
- var uri = request.uri
- var proxy = request.proxy
- var tunnelFnName = constructTunnelFnName(uri, proxy)
- return tunnel[tunnelFnName]
-}
-
-function Tunnel (request) {
- this.request = request
- this.proxyHeaderWhiteList = defaultProxyHeaderWhiteList
- this.proxyHeaderExclusiveList = []
- if (typeof request.tunnel !== 'undefined') {
- this.tunnelOverride = request.tunnel
- }
-}
-
-Tunnel.prototype.isEnabled = function () {
- var self = this
- var request = self.request
- // Tunnel HTTPS by default. Allow the user to override this setting.
-
- // If self.tunnelOverride is set (the user specified a value), use it.
- if (typeof self.tunnelOverride !== 'undefined') {
- return self.tunnelOverride
- }
-
- // If the destination is HTTPS, tunnel.
- if (request.uri.protocol === 'https:') {
- return true
- }
-
- // Otherwise, do not use tunnel.
- return false
-}
-
-Tunnel.prototype.setup = function (options) {
- var self = this
- var request = self.request
-
- options = options || {}
-
- if (typeof request.proxy === 'string') {
- request.proxy = url.parse(request.proxy)
- }
-
- if (!request.proxy || !request.tunnel) {
- return false
- }
-
- // Setup Proxy Header Exclusive List and White List
- if (options.proxyHeaderWhiteList) {
- self.proxyHeaderWhiteList = options.proxyHeaderWhiteList
- }
- if (options.proxyHeaderExclusiveList) {
- self.proxyHeaderExclusiveList = options.proxyHeaderExclusiveList
- }
-
- var proxyHeaderExclusiveList = self.proxyHeaderExclusiveList.concat(defaultProxyHeaderExclusiveList)
- var proxyHeaderWhiteList = self.proxyHeaderWhiteList.concat(proxyHeaderExclusiveList)
-
- // Setup Proxy Headers and Proxy Headers Host
- // Only send the Proxy White Listed Header names
- var proxyHeaders = constructProxyHeaderWhiteList(request.headers, proxyHeaderWhiteList)
- proxyHeaders.host = constructProxyHost(request.uri)
-
- proxyHeaderExclusiveList.forEach(request.removeHeader, request)
-
- // Set Agent from Tunnel Data
- var tunnelFn = getTunnelFn(request)
- var tunnelOptions = constructTunnelOptions(request, proxyHeaders)
- request.agent = tunnelFn(tunnelOptions)
-
- return true
-}
-
-Tunnel.defaultProxyHeaderWhiteList = defaultProxyHeaderWhiteList
-Tunnel.defaultProxyHeaderExclusiveList = defaultProxyHeaderExclusiveList
-exports.Tunnel = Tunnel
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-conv b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-conv
deleted file mode 100755
index e839ede5b..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-conv
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env node
-// -*- mode: js -*-
-// vim: set filetype=javascript :
-// Copyright 2018 Joyent, Inc. All rights reserved.
-
-var dashdash = require('dashdash');
-var sshpk = require('../lib/index');
-var fs = require('fs');
-var path = require('path');
-var tty = require('tty');
-var readline = require('readline');
-var getPassword = require('getpass').getPass;
-
-var options = [
- {
- names: ['outformat', 't'],
- type: 'string',
- help: 'Output format'
- },
- {
- names: ['informat', 'T'],
- type: 'string',
- help: 'Input format'
- },
- {
- names: ['file', 'f'],
- type: 'string',
- help: 'Input file name (default stdin)'
- },
- {
- names: ['out', 'o'],
- type: 'string',
- help: 'Output file name (default stdout)'
- },
- {
- names: ['private', 'p'],
- type: 'bool',
- help: 'Produce a private key as output'
- },
- {
- names: ['derive', 'd'],
- type: 'string',
- help: 'Output a new key derived from this one, with given algo'
- },
- {
- names: ['identify', 'i'],
- type: 'bool',
- help: 'Print key metadata instead of converting'
- },
- {
- names: ['fingerprint', 'F'],
- type: 'bool',
- help: 'Output key fingerprint'
- },
- {
- names: ['hash', 'H'],
- type: 'string',
- help: 'Hash function to use for key fingerprint with -F'
- },
- {
- names: ['spki', 's'],
- type: 'bool',
- help: 'With -F, generates an SPKI fingerprint instead of SSH'
- },
- {
- names: ['comment', 'c'],
- type: 'string',
- help: 'Set key comment, if output format supports'
- },
- {
- names: ['help', 'h'],
- type: 'bool',
- help: 'Shows this help text'
- }
-];
-
-if (require.main === module) {
- var parser = dashdash.createParser({
- options: options
- });
-
- try {
- var opts = parser.parse(process.argv);
- } catch (e) {
- console.error('sshpk-conv: error: %s', e.message);
- process.exit(1);
- }
-
- if (opts.help || opts._args.length > 1) {
- var help = parser.help({}).trimRight();
- console.error('sshpk-conv: converts between SSH key formats\n');
- console.error(help);
- console.error('\navailable key formats:');
- console.error(' - pem, pkcs1 eg id_rsa');
- console.error(' - ssh eg id_rsa.pub');
- console.error(' - pkcs8 format you want for openssl');
- console.error(' - openssh like output of ssh-keygen -o');
- console.error(' - rfc4253 raw OpenSSH wire format');
- console.error(' - dnssec dnssec-keygen format');
- console.error(' - putty PuTTY ppk format');
- console.error('\navailable fingerprint formats:');
- console.error(' - hex colon-separated hex for SSH');
- console.error(' straight hex for SPKI');
- console.error(' - base64 SHA256:* format from OpenSSH');
- process.exit(1);
- }
-
- /*
- * Key derivation can only be done on private keys, so use of the -d
- * option necessarily implies -p.
- */
- if (opts.derive)
- opts.private = true;
-
- var inFile = process.stdin;
- var inFileName = 'stdin';
-
- var inFilePath;
- if (opts.file) {
- inFilePath = opts.file;
- } else if (opts._args.length === 1) {
- inFilePath = opts._args[0];
- }
-
- if (inFilePath)
- inFileName = path.basename(inFilePath);
-
- try {
- if (inFilePath) {
- fs.accessSync(inFilePath, fs.R_OK);
- inFile = fs.createReadStream(inFilePath);
- }
- } catch (e) {
- ifError(e, 'error opening input file');
- }
-
- var outFile = process.stdout;
-
- try {
- if (opts.out && !opts.identify) {
- fs.accessSync(path.dirname(opts.out), fs.W_OK);
- outFile = fs.createWriteStream(opts.out);
- }
- } catch (e) {
- ifError(e, 'error opening output file');
- }
-
- var bufs = [];
- inFile.on('readable', function () {
- var data;
- while ((data = inFile.read()))
- bufs.push(data);
- });
- var parseOpts = {};
- parseOpts.filename = inFileName;
- inFile.on('end', function processKey() {
- var buf = Buffer.concat(bufs);
- var fmt = 'auto';
- if (opts.informat)
- fmt = opts.informat;
- var f = sshpk.parseKey;
- if (opts.private)
- f = sshpk.parsePrivateKey;
- try {
- var key = f(buf, fmt, parseOpts);
- } catch (e) {
- if (e.name === 'KeyEncryptedError') {
- getPassword(function (err, pw) {
- if (err)
- ifError(err);
- parseOpts.passphrase = pw;
- processKey();
- });
- return;
- }
- ifError(e);
- }
-
- if (opts.derive)
- key = key.derive(opts.derive);
-
- if (opts.comment)
- key.comment = opts.comment;
-
- if (opts.identify) {
- var kind = 'public';
- if (sshpk.PrivateKey.isPrivateKey(key))
- kind = 'private';
- console.log('%s: a %d bit %s %s key', inFileName,
- key.size, key.type.toUpperCase(), kind);
- if (key.type === 'ecdsa')
- console.log('ECDSA curve: %s', key.curve);
- if (key.comment)
- console.log('Comment: %s', key.comment);
- console.log('SHA256 fingerprint: ' +
- key.fingerprint('sha256').toString());
- console.log('MD5 fingerprint: ' +
- key.fingerprint('md5').toString());
- console.log('SPKI-SHA256 fingerprint: ' +
- key.fingerprint('sha256', 'spki').toString());
- process.exit(0);
- return;
- }
-
- if (opts.fingerprint) {
- var hash = opts.hash;
- var type = opts.spki ? 'spki' : 'ssh';
- var format = opts.outformat;
- var fp = key.fingerprint(hash, type).toString(format);
- outFile.write(fp);
- outFile.write('\n');
- outFile.once('drain', function () {
- process.exit(0);
- });
- return;
- }
-
- fmt = undefined;
- if (opts.outformat)
- fmt = opts.outformat;
- outFile.write(key.toBuffer(fmt));
- if (fmt === 'ssh' ||
- (!opts.private && fmt === undefined))
- outFile.write('\n');
- outFile.once('drain', function () {
- process.exit(0);
- });
- });
-}
-
-function ifError(e, txt) {
- if (txt)
- txt = txt + ': ';
- else
- txt = '';
- console.error('sshpk-conv: ' + txt + e.name + ': ' + e.message);
- if (process.env['DEBUG'] || process.env['V']) {
- console.error(e.stack);
- if (e.innerErr)
- console.error(e.innerErr.stack);
- }
- process.exit(1);
-}
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-sign b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-sign
deleted file mode 100755
index 673fc9864..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-sign
+++ /dev/null
@@ -1,191 +0,0 @@
-#!/usr/bin/env node
-// -*- mode: js -*-
-// vim: set filetype=javascript :
-// Copyright 2015 Joyent, Inc. All rights reserved.
-
-var dashdash = require('dashdash');
-var sshpk = require('../lib/index');
-var fs = require('fs');
-var path = require('path');
-var getPassword = require('getpass').getPass;
-
-var options = [
- {
- names: ['hash', 'H'],
- type: 'string',
- help: 'Hash algorithm (sha1, sha256, sha384, sha512)'
- },
- {
- names: ['verbose', 'v'],
- type: 'bool',
- help: 'Display verbose info about key and hash used'
- },
- {
- names: ['identity', 'i'],
- type: 'string',
- help: 'Path to key to use'
- },
- {
- names: ['file', 'f'],
- type: 'string',
- help: 'Input filename'
- },
- {
- names: ['out', 'o'],
- type: 'string',
- help: 'Output filename'
- },
- {
- names: ['format', 't'],
- type: 'string',
- help: 'Signature format (asn1, ssh, raw)'
- },
- {
- names: ['binary', 'b'],
- type: 'bool',
- help: 'Output raw binary instead of base64'
- },
- {
- names: ['help', 'h'],
- type: 'bool',
- help: 'Shows this help text'
- }
-];
-
-var parseOpts = {};
-
-if (require.main === module) {
- var parser = dashdash.createParser({
- options: options
- });
-
- try {
- var opts = parser.parse(process.argv);
- } catch (e) {
- console.error('sshpk-sign: error: %s', e.message);
- process.exit(1);
- }
-
- if (opts.help || opts._args.length > 1) {
- var help = parser.help({}).trimRight();
- console.error('sshpk-sign: sign data using an SSH key\n');
- console.error(help);
- process.exit(1);
- }
-
- if (!opts.identity) {
- var help = parser.help({}).trimRight();
- console.error('sshpk-sign: the -i or --identity option ' +
- 'is required\n');
- console.error(help);
- process.exit(1);
- }
-
- var keyData = fs.readFileSync(opts.identity);
- parseOpts.filename = opts.identity;
-
- run();
-}
-
-function run() {
- var key;
- try {
- key = sshpk.parsePrivateKey(keyData, 'auto', parseOpts);
- } catch (e) {
- if (e.name === 'KeyEncryptedError') {
- getPassword(function (err, pw) {
- parseOpts.passphrase = pw;
- run();
- });
- return;
- }
- console.error('sshpk-sign: error loading private key "' +
- opts.identity + '": ' + e.name + ': ' + e.message);
- process.exit(1);
- }
-
- var hash = opts.hash || key.defaultHashAlgorithm();
-
- var signer;
- try {
- signer = key.createSign(hash);
- } catch (e) {
- console.error('sshpk-sign: error creating signer: ' +
- e.name + ': ' + e.message);
- process.exit(1);
- }
-
- if (opts.verbose) {
- console.error('sshpk-sign: using %s-%s with a %d bit key',
- key.type, hash, key.size);
- }
-
- var inFile = process.stdin;
- var inFileName = 'stdin';
-
- var inFilePath;
- if (opts.file) {
- inFilePath = opts.file;
- } else if (opts._args.length === 1) {
- inFilePath = opts._args[0];
- }
-
- if (inFilePath)
- inFileName = path.basename(inFilePath);
-
- try {
- if (inFilePath) {
- fs.accessSync(inFilePath, fs.R_OK);
- inFile = fs.createReadStream(inFilePath);
- }
- } catch (e) {
- console.error('sshpk-sign: error opening input file' +
- ': ' + e.name + ': ' + e.message);
- process.exit(1);
- }
-
- var outFile = process.stdout;
-
- try {
- if (opts.out && !opts.identify) {
- fs.accessSync(path.dirname(opts.out), fs.W_OK);
- outFile = fs.createWriteStream(opts.out);
- }
- } catch (e) {
- console.error('sshpk-sign: error opening output file' +
- ': ' + e.name + ': ' + e.message);
- process.exit(1);
- }
-
- inFile.pipe(signer);
- inFile.on('end', function () {
- var sig;
- try {
- sig = signer.sign();
- } catch (e) {
- console.error('sshpk-sign: error signing data: ' +
- e.name + ': ' + e.message);
- process.exit(1);
- }
-
- var fmt = opts.format || 'asn1';
- var output;
- try {
- output = sig.toBuffer(fmt);
- if (!opts.binary)
- output = output.toString('base64');
- } catch (e) {
- console.error('sshpk-sign: error converting signature' +
- ' to ' + fmt + ' format: ' + e.name + ': ' +
- e.message);
- process.exit(1);
- }
-
- outFile.write(output);
- if (!opts.binary)
- outFile.write('\n');
- outFile.once('drain', function () {
- process.exit(0);
- });
- });
-}
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-verify b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-verify
deleted file mode 100755
index fc71a82c8..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/sshpk-verify
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/env node
-// -*- mode: js -*-
-// vim: set filetype=javascript :
-// Copyright 2015 Joyent, Inc. All rights reserved.
-
-var dashdash = require('dashdash');
-var sshpk = require('../lib/index');
-var fs = require('fs');
-var path = require('path');
-var Buffer = require('safer-buffer').Buffer;
-
-var options = [
- {
- names: ['hash', 'H'],
- type: 'string',
- help: 'Hash algorithm (sha1, sha256, sha384, sha512)'
- },
- {
- names: ['verbose', 'v'],
- type: 'bool',
- help: 'Display verbose info about key and hash used'
- },
- {
- names: ['identity', 'i'],
- type: 'string',
- help: 'Path to (public) key to use'
- },
- {
- names: ['file', 'f'],
- type: 'string',
- help: 'Input filename'
- },
- {
- names: ['format', 't'],
- type: 'string',
- help: 'Signature format (asn1, ssh, raw)'
- },
- {
- names: ['signature', 's'],
- type: 'string',
- help: 'base64-encoded signature data'
- },
- {
- names: ['help', 'h'],
- type: 'bool',
- help: 'Shows this help text'
- }
-];
-
-if (require.main === module) {
- var parser = dashdash.createParser({
- options: options
- });
-
- try {
- var opts = parser.parse(process.argv);
- } catch (e) {
- console.error('sshpk-verify: error: %s', e.message);
- process.exit(3);
- }
-
- if (opts.help || opts._args.length > 1) {
- var help = parser.help({}).trimRight();
- console.error('sshpk-verify: verify data signed using an SSH key\n');
- console.error(help);
- process.exit(3);
- }
-
- if (!opts.identity) {
- var help = parser.help({}).trimRight();
- console.error('sshpk-verify: the -i or --identity option ' +
- 'is required\n');
- console.error(help);
- process.exit(3);
- }
-
- if (!opts.signature) {
- var help = parser.help({}).trimRight();
- console.error('sshpk-verify: the -s or --signature option ' +
- 'is required\n');
- console.error(help);
- process.exit(3);
- }
-
- var keyData = fs.readFileSync(opts.identity);
-
- var key;
- try {
- key = sshpk.parseKey(keyData);
- } catch (e) {
- console.error('sshpk-verify: error loading key "' +
- opts.identity + '": ' + e.name + ': ' + e.message);
- process.exit(2);
- }
-
- var fmt = opts.format || 'asn1';
- var sigData = Buffer.from(opts.signature, 'base64');
-
- var sig;
- try {
- sig = sshpk.parseSignature(sigData, key.type, fmt);
- } catch (e) {
- console.error('sshpk-verify: error parsing signature: ' +
- e.name + ': ' + e.message);
- process.exit(2);
- }
-
- var hash = opts.hash || key.defaultHashAlgorithm();
-
- var verifier;
- try {
- verifier = key.createVerify(hash);
- } catch (e) {
- console.error('sshpk-verify: error creating verifier: ' +
- e.name + ': ' + e.message);
- process.exit(2);
- }
-
- if (opts.verbose) {
- console.error('sshpk-verify: using %s-%s with a %d bit key',
- key.type, hash, key.size);
- }
-
- var inFile = process.stdin;
- var inFileName = 'stdin';
-
- var inFilePath;
- if (opts.file) {
- inFilePath = opts.file;
- } else if (opts._args.length === 1) {
- inFilePath = opts._args[0];
- }
-
- if (inFilePath)
- inFileName = path.basename(inFilePath);
-
- try {
- if (inFilePath) {
- fs.accessSync(inFilePath, fs.R_OK);
- inFile = fs.createReadStream(inFilePath);
- }
- } catch (e) {
- console.error('sshpk-verify: error opening input file' +
- ': ' + e.name + ': ' + e.message);
- process.exit(2);
- }
-
- inFile.pipe(verifier);
- inFile.on('end', function () {
- var ret;
- try {
- ret = verifier.verify(sig);
- } catch (e) {
- console.error('sshpk-verify: error verifying data: ' +
- e.name + ': ' + e.message);
- process.exit(1);
- }
-
- if (ret) {
- console.error('OK');
- process.exit(0);
- }
-
- console.error('NOT OK');
- process.exit(1);
- });
-}
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/uuid b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/uuid
deleted file mode 100755
index 502626e60..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/.bin/uuid
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env node
-var assert = require('assert');
-
-function usage() {
- console.log('Usage:');
- console.log(' uuid');
- console.log(' uuid v1');
- console.log(' uuid v3 <name> <namespace uuid>');
- console.log(' uuid v4');
- console.log(' uuid v5 <name> <namespace uuid>');
- console.log(' uuid --help');
- console.log('\nNote: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122');
-}
-
-var args = process.argv.slice(2);
-
-if (args.indexOf('--help') >= 0) {
- usage();
- process.exit(0);
-}
-var version = args.shift() || 'v4';
-
-switch (version) {
- case 'v1':
- var uuidV1 = require('../v1');
- console.log(uuidV1());
- break;
-
- case 'v3':
- var uuidV3 = require('../v3');
-
- var name = args.shift();
- var namespace = args.shift();
- assert(name != null, 'v3 name not specified');
- assert(namespace != null, 'v3 namespace not specified');
-
- if (namespace == 'URL') namespace = uuidV3.URL;
- if (namespace == 'DNS') namespace = uuidV3.DNS;
-
- console.log(uuidV3(name, namespace));
- break;
-
- case 'v4':
- var uuidV4 = require('../v4');
- console.log(uuidV4());
- break;
-
- case 'v5':
- var uuidV5 = require('../v5');
-
- var name = args.shift();
- var namespace = args.shift();
- assert(name != null, 'v5 name not specified');
- assert(namespace != null, 'v5 namespace not specified');
-
- if (namespace == 'URL') namespace = uuidV5.URL;
- if (namespace == 'DNS') namespace = uuidV5.DNS;
-
- console.log(uuidV5(name, namespace));
- break;
-
- default:
- usage();
- process.exit(1);
-}
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/.tonic_example.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/.tonic_example.js
deleted file mode 100644
index aa11812d8..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/.tonic_example.js
+++ /dev/null
@@ -1,20 +0,0 @@
-var Ajv = require('ajv');
-var ajv = new Ajv({allErrors: true});
-
-var schema = {
- "properties": {
- "foo": { "type": "string" },
- "bar": { "type": "number", "maximum": 3 }
- }
-};
-
-var validate = ajv.compile(schema);
-
-test({"foo": "abc", "bar": 2});
-test({"foo": 2, "bar": 4});
-
-function test(data) {
- var valid = validate(data);
- if (valid) console.log('Valid!');
- else console.log('Invalid: ' + ajv.errorsText(validate.errors));
-}
\ No newline at end of file
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/LICENSE b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/LICENSE
deleted file mode 100644
index 96ee71998..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/LICENSE
+++ /dev/null
@@ -1,22 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015-2017 Evgeny Poberezkin
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/README.md b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/README.md
deleted file mode 100644
index e13fdec29..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/README.md
+++ /dev/null
@@ -1,1452 +0,0 @@
-
-
-# Ajv: Another JSON Schema Validator
-
-The fastest JSON Schema validator for Node.js and browser. Supports draft-04/06/07.
-
-[Build status](https://travis-ci.org/epoberezkin/ajv)
-[npm version](https://www.npmjs.com/package/ajv)
-[npm downloads](https://www.npmjs.com/package/ajv)
-[Coverage status](https://coveralls.io/github/epoberezkin/ajv?branch=master)
-[Gitter chat](https://gitter.im/ajv-validator/ajv)
-[GitHub Sponsors](https://github.com/sponsors/epoberezkin)
-
-## Please [sponsor Ajv](https://github.com/sponsors/epoberezkin)
-
-Dear Ajv users! ❤️
-
-I ask you to support the development of Ajv with donations. 🙏
-
-Since 2015 Ajv has become widely used, thanks to your help and contributions:
-
-- **90** contributors 🏗
-- **5,000** dependent npm packages ⚙️
-- **7,000** github stars, from GitHub users [all over the world](https://www.google.com/maps/d/u/0/viewer?mid=1MGRV8ciFUGIbO1l0EKFWNJGYE7iSkDxP&ll=-3.81666561775622e-14%2C4.821737100000007&z=2) ⭐️
-- **5,000,000** dependent repositories on GitHub 🚀
-- **120,000,000** npm downloads per month! 💯
-
-Your donations will fund further development - small and large improvements, support of the next versions of the JSON Schema specification, and, possibly, migrating the code to TypeScript to make it more maintainable.
-
-I will greatly appreciate anything you can help with to make it happen:
-
-- a **personal** donation - from $2 ☕️
-- your **company** donation - from $10 🍔
-- a **sponsorship** to get promoted on Ajv or related packages - from $50 💰
-- an **introduction** to a sponsor who would benefit from the promotion on Ajv page 🤝
-
-| Please [make donations via my GitHub sponsors page](https://github.com/sponsors/epoberezkin) ‼️ **GitHub will DOUBLE them** ‼️ |
-|---|
-
-#### Open Collective sponsors
-
-(Open Collective sponsor logos were displayed here as linked images.)
-
-## Using version 6
-
-[JSON Schema draft-07](http://json-schema.org/latest/json-schema-validation.html) is published.
-
-[Ajv version 6.0.0](https://github.com/epoberezkin/ajv/releases/tag/v6.0.0) that supports draft-07 is released. It may require either migrating your schemas or updating your code (to continue using draft-04 and v5 schemas, draft-06 schemas will be supported without changes).
-
-__Please note__: To use Ajv with draft-06 schemas you need to explicitly add the meta-schema to the validator instance:
-
-```javascript
-ajv.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json'));
-```
-
-To use Ajv with draft-04 schemas in addition to explicitly adding meta-schema you also need to use option schemaId:
-
-```javascript
-var ajv = new Ajv({schemaId: 'id'});
-// If you want to use both draft-04 and draft-06/07 schemas:
-// var ajv = new Ajv({schemaId: 'auto'});
-ajv.addMetaSchema(require('ajv/lib/refs/json-schema-draft-04.json'));
-```
-
-
-## Contents
-
-- [Performance](#performance)
-- [Features](#features)
-- [Getting started](#getting-started)
-- [Frequently Asked Questions](https://github.com/epoberezkin/ajv/blob/master/FAQ.md)
-- [Using in browser](#using-in-browser)
-- [Command line interface](#command-line-interface)
-- Validation
- - [Keywords](#validation-keywords)
- - [Annotation keywords](#annotation-keywords)
- - [Formats](#formats)
- - [Combining schemas with $ref](#ref)
- - [$data reference](#data-reference)
- - NEW: [$merge and $patch keywords](#merge-and-patch-keywords)
- - [Defining custom keywords](#defining-custom-keywords)
- - [Asynchronous schema compilation](#asynchronous-schema-compilation)
- - [Asynchronous validation](#asynchronous-validation)
-- [Security considerations](#security-considerations)
- - [Security contact](#security-contact)
- - [Untrusted schemas](#untrusted-schemas)
- - [Circular references in objects](#circular-references-in-javascript-objects)
- - [Trusted schemas](#security-risks-of-trusted-schemas)
- - [ReDoS attack](#redos-attack)
-- Modifying data during validation
- - [Filtering data](#filtering-data)
- - [Assigning defaults](#assigning-defaults)
- - [Coercing data types](#coercing-data-types)
-- API
- - [Methods](#api)
- - [Options](#options)
- - [Validation errors](#validation-errors)
-- [Plugins](#plugins)
-- [Related packages](#related-packages)
-- [Some packages using Ajv](#some-packages-using-ajv)
-- [Tests, Contributing, History, Support, License](#tests)
-
-
-## Performance
-
-Ajv generates code using [doT templates](https://github.com/olado/doT) to turn JSON Schemas into super-fast validation functions that are efficient for v8 optimization.
-
-Currently Ajv is the fastest and the most standard compliant validator according to these benchmarks:
-
-- [json-schema-benchmark](https://github.com/ebdrup/json-schema-benchmark) - 50% faster than the second place
-- [jsck benchmark](https://github.com/pandastrike/jsck#benchmarks) - 20-190% faster
-- [z-schema benchmark](https://rawgit.com/zaggino/z-schema/master/benchmark/results.html)
-- [themis benchmark](https://cdn.rawgit.com/playlyfe/themis/master/benchmark/results.html)
-
-
-Performance of different validators by [json-schema-benchmark](https://github.com/ebdrup/json-schema-benchmark):
-
-[Performance of different validators (chart)](https://github.com/ebdrup/json-schema-benchmark/blob/master/README.md#performance)
-
-
-## Features
-
-- Ajv implements full JSON Schema [draft-06/07](http://json-schema.org/) and draft-04 standards:
- - all validation keywords (see [JSON Schema validation keywords](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md))
- - full support of remote refs (remote schemas have to be added with `addSchema` or compiled to be available)
- - support of circular references between schemas
- - correct string lengths for strings with unicode pairs (can be turned off)
- - [formats](#formats) defined by JSON Schema draft-07 standard and custom formats (can be turned off)
- - [validates schemas against meta-schema](#api-validateschema)
-- supports [browsers](#using-in-browser) and Node.js 0.10-8.x
-- [asynchronous loading](#asynchronous-schema-compilation) of referenced schemas during compilation
-- "All errors" validation mode with [option allErrors](#options)
-- [error messages with parameters](#validation-errors) describing error reasons to allow creating custom error messages
-- i18n error messages support with [ajv-i18n](https://github.com/epoberezkin/ajv-i18n) package
-- [filtering data](#filtering-data) from additional properties
-- [assigning defaults](#assigning-defaults) to missing properties and items
-- [coercing data](#coercing-data-types) to the types specified in `type` keywords
-- [custom keywords](#defining-custom-keywords)
-- draft-06/07 keywords `const`, `contains`, `propertyNames` and `if/then/else`
-- draft-06 boolean schemas (`true`/`false` as a schema to always pass/fail).
-- keywords `switch`, `patternRequired`, `formatMaximum` / `formatMinimum` and `formatExclusiveMaximum` / `formatExclusiveMinimum` from [JSON Schema extension proposals](https://github.com/json-schema/json-schema/wiki/v5-Proposals) with [ajv-keywords](https://github.com/epoberezkin/ajv-keywords) package
-- [$data reference](#data-reference) to use values from the validated data as values for the schema keywords
-- [asynchronous validation](#asynchronous-validation) of custom formats and keywords
-
-Currently Ajv is the only validator that passes all the tests from [JSON Schema Test Suite](https://github.com/json-schema/JSON-Schema-Test-Suite) (according to [json-schema-benchmark](https://github.com/ebdrup/json-schema-benchmark), apart from the test that requires that `1.0` is not an integer, which is impossible to satisfy in JavaScript).
-
-
-## Install
-
-```
-npm install ajv
-```
-
-
-## Getting started
-
-Try it in the Node.js REPL: https://tonicdev.com/npm/ajv
-
-
-The fastest validation call:
-
-```javascript
-// Node.js require:
-var Ajv = require('ajv');
-// or ESM/TypeScript import
-import Ajv from 'ajv';
-
-var ajv = new Ajv(); // options can be passed, e.g. {allErrors: true}
-var validate = ajv.compile(schema);
-var valid = validate(data);
-if (!valid) console.log(validate.errors);
-```
-
-or with less code
-
-```javascript
-// ...
-var valid = ajv.validate(schema, data);
-if (!valid) console.log(ajv.errors);
-// ...
-```
-
-or
-
-```javascript
-// ...
-var valid = ajv.addSchema(schema, 'mySchema')
- .validate('mySchema', data);
-if (!valid) console.log(ajv.errorsText());
-// ...
-```
-
-See [API](#api) and [Options](#options) for more details.
-
-Ajv compiles schemas to functions and caches them in all cases (using schema serialized with [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) or a custom function as a key), so that the next time the same schema is used (not necessarily the same object instance) it won't be compiled again.
-
-The best performance is achieved when using compiled functions returned by `compile` or `getSchema` methods (there is no additional function call).
-
-__Please note__: every time a validation function or `ajv.validate` are called `errors` property is overwritten. You need to copy `errors` array reference to another variable if you want to use it later (e.g., in the callback). See [Validation errors](#validation-errors)
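-
-A minimal sketch of keeping the errors from a particular call (variable names are illustrative):
-
-```javascript
-var valid = validate(data);
-// copy the reference before the next validation call overwrites validate.errors
-var errorsOfThisCall = validate.errors;
-if (!valid) console.log(errorsOfThisCall);
-```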
-
-__Note for TypeScript users__: `ajv` provides its own TypeScript declarations
-out of the box, so you don't need to install the deprecated `@types/ajv`
-module.
-
-
-## Using in browser
-
-You can require Ajv directly from the code you browserify - in this case Ajv will be a part of your bundle.
-
-If you need to use Ajv in several bundles you can create a separate UMD bundle using `npm run bundle` script (thanks to [siddo420](https://github.com/siddo420)).
-
-Then you need to load Ajv in the browser:
-```html
-<script src="ajv.min.js"></script>
-```
-
-This bundle can be used with different module systems; it creates global `Ajv` if no module system is found.
-
-The browser bundle is available on [cdnjs](https://cdnjs.com/libraries/ajv).
-
-Ajv is tested with these browsers:
-
-[Browsers tested on Sauce Labs](https://saucelabs.com/u/epoberezkin)
-
-__Please note__: some frameworks, e.g. Dojo, may redefine global require in such way that is not compatible with CommonJS module format. In such case Ajv bundle has to be loaded before the framework and then you can use global Ajv (see issue [#234](https://github.com/epoberezkin/ajv/issues/234)).
-
-
-## Command line interface
-
-CLI is available as a separate npm package [ajv-cli](https://github.com/jessedc/ajv-cli). It supports:
-
-- compiling JSON Schemas to test their validity
-- BETA: generating standalone module exporting a validation function to be used without Ajv (using [ajv-pack](https://github.com/epoberezkin/ajv-pack))
-- migrate schemas to draft-07 (using [json-schema-migrate](https://github.com/epoberezkin/json-schema-migrate))
-- validating data file(s) against JSON Schema
-- testing expected validity of data against JSON Schema
-- referenced schemas
-- custom meta-schemas
-- files in JSON and JavaScript format
-- all Ajv options
-- reporting changes in data after validation in [JSON-patch](https://tools.ietf.org/html/rfc6902) format
-
-
-## Validation keywords
-
-Ajv supports all validation keywords from draft-07 of JSON Schema standard:
-
-- [type](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#type)
-- [for numbers](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#keywords-for-numbers) - maximum, minimum, exclusiveMaximum, exclusiveMinimum, multipleOf
-- [for strings](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#keywords-for-strings) - maxLength, minLength, pattern, format
-- [for arrays](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#keywords-for-arrays) - maxItems, minItems, uniqueItems, items, additionalItems, [contains](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#contains)
-- [for objects](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#keywords-for-objects) - maxProperties, minProperties, required, properties, patternProperties, additionalProperties, dependencies, [propertyNames](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#propertynames)
-- [for all types](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#keywords-for-all-types) - enum, [const](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#const)
-- [compound keywords](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#compound-keywords) - not, oneOf, anyOf, allOf, [if/then/else](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#ifthenelse)
-
-With [ajv-keywords](https://github.com/epoberezkin/ajv-keywords) package Ajv also supports validation keywords from [JSON Schema extension proposals](https://github.com/json-schema/json-schema/wiki/v5-Proposals) for JSON Schema standard:
-
-- [patternRequired](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#patternrequired-proposed) - like `required` but with patterns that some property should match.
-- [formatMaximum, formatMinimum, formatExclusiveMaximum, formatExclusiveMinimum](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md#formatmaximum--formatminimum-and-exclusiveformatmaximum--exclusiveformatminimum-proposed) - setting limits for date, time, etc.
-
-See [JSON Schema validation keywords](https://github.com/epoberezkin/ajv/blob/master/KEYWORDS.md) for more details.
-
-
-## Annotation keywords
-
-JSON Schema specification defines several annotation keywords that describe the schema itself but do not perform any validation.
-
-- `title` and `description`: information about the data represented by that schema
-- `$comment` (NEW in draft-07): information for developers. With option `$comment` Ajv logs or passes the comment string to the user-supplied function. See [Options](#options).
-- `default`: a default value of the data instance, see [Assigning defaults](#assigning-defaults).
-- `examples` (NEW in draft-06): an array of data instances. Ajv does not check the validity of these instances against the schema.
-- `readOnly` and `writeOnly` (NEW in draft-07): marks data-instance as read-only or write-only in relation to the source of the data (database, api, etc.).
-- `contentEncoding`: [RFC 2045](https://tools.ietf.org/html/rfc2045#section-6.1 ), e.g., "base64".
-- `contentMediaType`: [RFC 2046](https://tools.ietf.org/html/rfc2046), e.g., "image/png".
-
-__Please note__: Ajv does not implement validation of the keywords `examples`, `contentEncoding` and `contentMediaType` but it reserves them. If you want to create a plugin that implements some of them, it should remove these keywords from the instance.
-
-
-## Formats
-
-Ajv implements formats defined by JSON Schema specification and several other formats. It is recommended NOT to use "format" keyword implementations with untrusted data, as they use potentially unsafe regular expressions - see [ReDoS attack](#redos-attack).
-
-__Please note__: if you need to use "format" keyword to validate untrusted data, you MUST assess their suitability and safety for your validation scenarios.
-
-The following formats are implemented for string validation with "format" keyword:
-
-- _date_: full-date according to [RFC3339](http://tools.ietf.org/html/rfc3339#section-5.6).
-- _time_: time with optional time-zone.
-- _date-time_: date-time from the same source (time-zone is mandatory). `date`, `time` and `date-time` validate ranges in `full` mode and only regexp in `fast` mode (see [options](#options)).
-- _uri_: full URI.
-- _uri-reference_: URI reference, including full and relative URIs.
-- _uri-template_: URI template according to [RFC6570](https://tools.ietf.org/html/rfc6570)
-- _url_ (deprecated): [URL record](https://url.spec.whatwg.org/#concept-url).
-- _email_: email address.
-- _hostname_: host name according to [RFC1034](http://tools.ietf.org/html/rfc1034#section-3.5).
-- _ipv4_: IP address v4.
-- _ipv6_: IP address v6.
-- _regex_: tests whether a string is a valid regular expression by passing it to RegExp constructor.
-- _uuid_: Universally Unique IDentifier according to [RFC4122](http://tools.ietf.org/html/rfc4122).
-- _json-pointer_: JSON-pointer according to [RFC6901](https://tools.ietf.org/html/rfc6901).
-- _relative-json-pointer_: relative JSON-pointer according to [this draft](http://tools.ietf.org/html/draft-luff-relative-json-pointer-00).
-
-__Please note__: JSON Schema draft-07 also defines formats `iri`, `iri-reference`, `idn-hostname` and `idn-email` for URLs, hostnames and emails with international characters. Ajv does not implement these formats. If you create Ajv plugin that implements them please make a PR to mention this plugin here.
-
-There are two modes of format validation: `fast` and `full`. This mode affects formats `date`, `time`, `date-time`, `uri`, `uri-reference`, and `email`. See [Options](#options) for details.
-
-You can add additional formats and replace any of the formats above using [addFormat](#api-addformat) method.
-
-The option `unknownFormats` allows changing the default behaviour when an unknown format is encountered. In this case Ajv can either fail schema compilation (default) or ignore it (default in versions before 5.0.0). You also can whitelist specific format(s) to be ignored. See [Options](#options) for details.
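-
-A minimal sketch of the `unknownFormats` option values described above (the format name is illustrative):
-
-```javascript
-var Ajv = require('ajv');
-var ajvDefault = new Ajv();                             // default: unknown formats fail schema compilation
-var ajvIgnore = new Ajv({unknownFormats: 'ignore'});    // pre-5.0.0 behaviour: unknown formats are ignored
-var ajvList = new Ajv({unknownFormats: ['my-format']}); // only the listed format(s) are ignored
-```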
-
-You can find regular expressions used for format validation and the sources that were used in [formats.js](https://github.com/epoberezkin/ajv/blob/master/lib/compile/formats.js).
-
-
-## Combining schemas with $ref
-
-You can structure your validation logic across multiple schema files and have schemas reference each other using `$ref` keyword.
-
-Example:
-
-```javascript
-var schema = {
- "$id": "http://example.com/schemas/schema.json",
- "type": "object",
- "properties": {
- "foo": { "$ref": "defs.json#/definitions/int" },
- "bar": { "$ref": "defs.json#/definitions/str" }
- }
-};
-
-var defsSchema = {
- "$id": "http://example.com/schemas/defs.json",
- "definitions": {
- "int": { "type": "integer" },
- "str": { "type": "string" }
- }
-};
-```
-
-Now to compile your schema you can either pass all schemas to Ajv instance:
-
-```javascript
-var ajv = new Ajv({schemas: [schema, defsSchema]});
-var validate = ajv.getSchema('http://example.com/schemas/schema.json');
-```
-
-or use `addSchema` method:
-
-```javascript
-var ajv = new Ajv;
-var validate = ajv.addSchema(defsSchema)
- .compile(schema);
-```
-
-See [Options](#options) and [addSchema](#api) method.
-
-__Please note__:
-- `$ref` is resolved as the uri-reference using schema $id as the base URI (see the example).
-- References can be recursive (and mutually recursive) to implement the schemas for different data structures (such as linked lists, trees, graphs, etc.).
-- You don't have to host your schema files at the URIs that you use as schema $id. These URIs are only used to identify the schemas, and according to JSON Schema specification validators should not expect to be able to download the schemas from these URIs.
-- The actual location of the schema file in the file system is not used.
-- You can pass the identifier of the schema as the second parameter of `addSchema` method or as a property name in `schemas` option. This identifier can be used instead of (or in addition to) schema $id.
-- You cannot have the same $id (or the schema identifier) used for more than one schema - the exception will be thrown.
-- You can implement dynamic resolution of the referenced schemas using `compileAsync` method. In this way you can store schemas in any system (files, web, database, etc.) and reference them without explicitly adding to Ajv instance. See [Asynchronous schema compilation](#asynchronous-schema-compilation).
-
-
-## $data reference
-
-With `$data` option you can use values from the validated data as the values for the schema keywords. See [proposal](https://github.com/json-schema-org/json-schema-spec/issues/51) for more information about how it works.
-
-`$data` reference is supported in the keywords: const, enum, format, maximum/minimum, exclusiveMaximum / exclusiveMinimum, maxLength / minLength, maxItems / minItems, maxProperties / minProperties, formatMaximum / formatMinimum, formatExclusiveMaximum / formatExclusiveMinimum, multipleOf, pattern, required, uniqueItems.
-
-The value of "$data" should be a [JSON-pointer](https://tools.ietf.org/html/rfc6901) to the data (the root is always the top level data object, even if the $data reference is inside a referenced subschema) or a [relative JSON-pointer](http://tools.ietf.org/html/draft-luff-relative-json-pointer-00) (it is relative to the current point in data; if the $data reference is inside a referenced subschema it cannot point to the data outside of the root level for this subschema).
-
-Examples.
-
-This schema requires that the value in the property `smaller` is less than or equal to the value in the property `larger`:
-
-```javascript
-var ajv = new Ajv({$data: true});
-
-var schema = {
- "properties": {
- "smaller": {
- "type": "number",
- "maximum": { "$data": "1/larger" }
- },
- "larger": { "type": "number" }
- }
-};
-
-var validData = {
- smaller: 5,
- larger: 7
-};
-
-ajv.validate(schema, validData); // true
-```
-
-This schema requires that the properties have the same format as their field names:
-
-```javascript
-var schema = {
- "additionalProperties": {
- "type": "string",
- "format": { "$data": "0#" }
- }
-};
-
-var validData = {
- 'date-time': '1963-06-19T08:30:06.283185Z',
- email: 'joe.bloggs@example.com'
-}
-```
-
-`$data` reference is resolved safely - it won't throw even if some property is undefined. If `$data` resolves to `undefined` the validation succeeds (with the exclusion of `const` keyword). If `$data` resolves to incorrect type (e.g. not "number" for maximum keyword) the validation fails.
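-
-Continuing the `smaller`/`larger` example above, a sketch of these resolution rules:
-
-```javascript
-ajv.validate(schema, { smaller: 5 });              // true - "1/larger" resolves to undefined, so "maximum" is skipped
-ajv.validate(schema, { smaller: 5, larger: "7" }); // false - "larger" is not a number, and the resolved "maximum" value has the wrong type
-```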
-
-
-## $merge and $patch keywords
-
-With the package [ajv-merge-patch](https://github.com/epoberezkin/ajv-merge-patch) you can use the keywords `$merge` and `$patch` that allow extending JSON Schemas with patches using formats [JSON Merge Patch (RFC 7396)](https://tools.ietf.org/html/rfc7396) and [JSON Patch (RFC 6902)](https://tools.ietf.org/html/rfc6902).
-
-To add keywords `$merge` and `$patch` to Ajv instance use this code:
-
-```javascript
-require('ajv-merge-patch')(ajv);
-```
-
-Examples.
-
-Using `$merge`:
-
-```json
-{
- "$merge": {
- "source": {
- "type": "object",
- "properties": { "p": { "type": "string" } },
- "additionalProperties": false
- },
- "with": {
- "properties": { "q": { "type": "number" } }
- }
- }
-}
-```
-
-Using `$patch`:
-
-```json
-{
- "$patch": {
- "source": {
- "type": "object",
- "properties": { "p": { "type": "string" } },
- "additionalProperties": false
- },
- "with": [
- { "op": "add", "path": "/properties/q", "value": { "type": "number" } }
- ]
- }
-}
-```
-
-The schemas above are equivalent to this schema:
-
-```json
-{
- "type": "object",
- "properties": {
- "p": { "type": "string" },
- "q": { "type": "number" }
- },
- "additionalProperties": false
-}
-```
-
-The properties `source` and `with` in the keywords `$merge` and `$patch` can use absolute or relative `$ref` to point to other schemas previously added to the Ajv instance or to the fragments of the current schema.
-
-See the package [ajv-merge-patch](https://github.com/epoberezkin/ajv-merge-patch) for more information.
-
-
-## Defining custom keywords
-
-The advantages of using custom keywords are:
-
-- allow creating validation scenarios that cannot be expressed using JSON Schema
-- simplify your schemas
-- help bringing a bigger part of the validation logic to your schemas
-- make your schemas more expressive, less verbose and closer to your application domain
-- implement custom data processors that modify your data (`modifying` option MUST be used in keyword definition) and/or create side effects while the data is being validated
-
-If a keyword is used only for side-effects and its validation result is pre-defined, use option `valid: true/false` in keyword definition to simplify both generated code (no error handling in case of `valid: true`) and your keyword functions (no need to return any validation result).
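-
-A sketch of such a side-effect-only keyword (the keyword name is illustrative):
-
-```javascript
-ajv.addKeyword('logValue', {
-  valid: true, // result is pre-defined, so the function's return value is ignored and no error handling is generated
-  validate: function (schema, data) { console.log('validated value:', data); }
-});
-```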
-
-The concerns you have to be aware of when extending JSON Schema standard with custom keywords are the portability and understanding of your schemas. You will have to support these custom keywords on other platforms and to properly document these keywords so that everybody can understand them in your schemas.
-
-You can define custom keywords with [addKeyword](#api-addkeyword) method. Keywords are defined on the `ajv` instance level - new instances will not have previously defined keywords.
-
-Ajv allows defining keywords with:
-- validation function
-- compilation function
-- macro function
-- inline compilation function that should return code (as string) that will be inlined in the currently compiled schema.
-
-Example. `range` and `exclusiveRange` keywords using compiled schema:
-
-```javascript
-ajv.addKeyword('range', {
- type: 'number',
- compile: function (sch, parentSchema) {
- var min = sch[0];
- var max = sch[1];
-
- return parentSchema.exclusiveRange === true
- ? function (data) { return data > min && data < max; }
- : function (data) { return data >= min && data <= max; }
- }
-});
-
-var schema = { "range": [2, 4], "exclusiveRange": true };
-var validate = ajv.compile(schema);
-console.log(validate(2.01)); // true
-console.log(validate(3.99)); // true
-console.log(validate(2)); // false
-console.log(validate(4)); // false
-```
-
-Several custom keywords (typeof, instanceof, range and propertyNames) are defined in [ajv-keywords](https://github.com/epoberezkin/ajv-keywords) package - they can be used for your schemas and as a starting point for your own custom keywords.
-
-See [Defining custom keywords](https://github.com/epoberezkin/ajv/blob/master/CUSTOM.md) for more details.
-
-
-## Asynchronous schema compilation
-
-During asynchronous compilation remote references are loaded using supplied function. See `compileAsync` [method](#api-compileAsync) and `loadSchema` [option](#options).
-
-Example:
-
-```javascript
-var ajv = new Ajv({ loadSchema: loadSchema });
-
-ajv.compileAsync(schema).then(function (validate) {
- var valid = validate(data);
- // ...
-});
-
-function loadSchema(uri) {
- return request.json(uri).then(function (res) {
- if (res.statusCode >= 400)
- throw new Error('Loading error: ' + res.statusCode);
- return res.body;
- });
-}
-```
-
-__Please note__: [Option](#options) `missingRefs` should NOT be set to `"ignore"` or `"fail"` for asynchronous compilation to work.
-
-
-## Asynchronous validation
-
-Example in Node.js REPL: https://tonicdev.com/esp/ajv-asynchronous-validation
-
-You can define custom formats and keywords that perform validation asynchronously by accessing database or some other service. You should add `async: true` in the keyword or format definition (see [addFormat](#api-addformat), [addKeyword](#api-addkeyword) and [Defining custom keywords](#defining-custom-keywords)).
-
-If your schema uses asynchronous formats/keywords or refers to some schema that contains them it should have `"$async": true` keyword so that Ajv can compile it correctly. If asynchronous format/keyword or reference to asynchronous schema is used in the schema without `$async` keyword Ajv will throw an exception during schema compilation.
-
-__Please note__: all asynchronous subschemas that are referenced from the current or other schemas should have `"$async": true` keyword as well, otherwise the schema compilation will fail.
-
-Validation function for an asynchronous custom format/keyword should return a promise that resolves with `true` or `false` (or rejects with `new Ajv.ValidationError(errors)` if you want to return custom errors from the keyword function).
-
-Ajv compiles asynchronous schemas to [es7 async functions](http://tc39.github.io/ecmascript-asyncawait/) that can optionally be transpiled with [nodent](https://github.com/MatAtBread/nodent). Async functions are supported in Node.js 7+ and all modern browsers. You can also supply any other transpiler as a function via `processCode` option. See [Options](#options).
-
-The compiled validation function has `$async: true` property (if the schema is asynchronous), so you can differentiate these functions if you are using both synchronous and asynchronous schemas.
-
-Validation result will be a promise that resolves with validated data or rejects with an exception `Ajv.ValidationError` that contains the array of validation errors in `errors` property.
-
-
-Example:
-
-```javascript
-var ajv = new Ajv;
-// require('ajv-async')(ajv);
-
-ajv.addKeyword('idExists', {
- async: true,
- type: 'number',
- validate: checkIdExists
-});
-
-
-function checkIdExists(schema, data) {
- return knex(schema.table)
- .select('id')
- .where('id', data)
- .then(function (rows) {
- return !!rows.length; // true if record is found
- });
-}
-
-var schema = {
- "$async": true,
- "properties": {
- "userId": {
- "type": "integer",
- "idExists": { "table": "users" }
- },
- "postId": {
- "type": "integer",
- "idExists": { "table": "posts" }
- }
- }
-};
-
-var validate = ajv.compile(schema);
-
-validate({ userId: 1, postId: 19 })
-.then(function (data) {
- console.log('Data is valid', data); // { userId: 1, postId: 19 }
-})
-.catch(function (err) {
- if (!(err instanceof Ajv.ValidationError)) throw err;
- // data is invalid
- console.log('Validation errors:', err.errors);
-});
-```
-
-### Using transpilers with asynchronous validation functions.
-
-[ajv-async](https://github.com/epoberezkin/ajv-async) uses [nodent](https://github.com/MatAtBread/nodent) to transpile async functions. To use another transpiler you should separately install it (or load its bundle in the browser).
-
-
-#### Using nodent
-
-```javascript
-var ajv = new Ajv;
-require('ajv-async')(ajv);
-// in the browser if you want to load ajv-async bundle separately you can:
-// window.ajvAsync(ajv);
-var validate = ajv.compile(schema); // transpiled es7 async function
-validate(data).then(successFunc).catch(errorFunc);
-```
-
-
-#### Using other transpilers
-
-```javascript
-var ajv = new Ajv({ processCode: transpileFunc });
-var validate = ajv.compile(schema); // transpiled es7 async function
-validate(data).then(successFunc).catch(errorFunc);
-```
-
-See [Options](#options).
-
-
-## Security considerations
-
-JSON Schema, if properly used, can replace data sanitisation. It doesn't replace other API security considerations. It also introduces additional security aspects to consider.
-
-
-##### Security contact
-
-To report a security vulnerability, please use the
-[Tidelift security contact](https://tidelift.com/security).
-Tidelift will coordinate the fix and disclosure. Please do NOT report security vulnerabilities via GitHub issues.
-
-
-##### Untrusted schemas
-
-Ajv treats JSON schemas as being as trusted as your application code. This security model is based on the most common use case, when the schemas are static and bundled together with the application.
-
-If your schemas are received from untrusted sources (or generated from untrusted data) there are several scenarios you need to prevent:
-- compiling schemas can cause stack overflow (if they are too deep)
-- compiling schemas can be slow (e.g. [#557](https://github.com/epoberezkin/ajv/issues/557))
-- validating certain data can be slow
-
-It is difficult to predict all the scenarios, but at the very least it may help to limit the size of untrusted schemas (e.g. limit JSON string length) and also the maximum schema object depth (that can be high for relatively small JSON strings). You also may want to mitigate slow regular expressions in `pattern` and `patternProperties` keywords.
-
-Regardless of the measures you take, using untrusted schemas increases security risks.
-
-
-##### Circular references in JavaScript objects
-
-Ajv does not support schemas and validated data that have circular references in objects. See [issue #802](https://github.com/epoberezkin/ajv/issues/802).
-
-An attempt to compile such schemas or validate such data would cause stack overflow (or will not complete in case of asynchronous validation). Depending on the parser you use, untrusted data can lead to circular references.
-
-
-##### Security risks of trusted schemas
-
-Some keywords in JSON Schemas can lead to very slow validation for certain data. These keywords include (but may be not limited to):
-
-- `pattern` and `format` for large strings - in some cases using `maxLength` can help mitigate it, but certain regular expressions can lead to exponential validation time even with relatively short strings (see [ReDoS attack](#redos-attack)).
-- `patternProperties` for large property names - use `propertyNames` to mitigate, but some regular expressions can have exponential evaluation time as well.
-- `uniqueItems` for large non-scalar arrays - use `maxItems` to mitigate
-
-__Please note__: The suggestions above to prevent slow validation would only work if you do NOT use `allErrors: true` in production code (using it would continue validation after validation errors).
-
-You can validate your JSON schemas against [this meta-schema](https://github.com/epoberezkin/ajv/blob/master/lib/refs/json-schema-secure.json) to check that these recommendations are followed:
-
-```javascript
-const isSchemaSecure = ajv.compile(require('ajv/lib/refs/json-schema-secure.json'));
-
-const schema1 = {format: 'email'};
-isSchemaSecure(schema1); // false
-
-const schema2 = {format: 'email', maxLength: MAX_LENGTH};
-isSchemaSecure(schema2); // true
-```
-
-__Please note__: following all these recommendations is not a guarantee that validation of untrusted data is safe - it can still lead to some undesirable results.
-
-
-## ReDoS attack
-
-Certain regular expressions can lead to exponential evaluation time even with relatively short strings.
-
-Please assess the regular expressions you use in the schemas on their vulnerability to this attack - see [safe-regex](https://github.com/substack/safe-regex), for example.
-
-__Please note__: some formats that Ajv implements use [regular expressions](https://github.com/epoberezkin/ajv/blob/master/lib/compile/formats.js) that can be vulnerable to ReDoS attack, so if you use Ajv to validate data from untrusted sources __it is strongly recommended__ to consider the following:
-
-- making assessment of "format" implementations in Ajv.
-- using `format: 'fast'` option that simplifies some of the regular expressions (although it does not guarantee that they are safe).
-- replacing format implementations provided by Ajv with your own implementations of "format" keyword that either uses different regular expressions or another approach to format validation. Please see [addFormat](#api-addformat) method.
-- disabling format validation by ignoring "format" keyword with option `format: false`
-
-Whatever mitigation you choose, please assume all formats provided by Ajv as potentially unsafe and make your own assessment of their suitability for your validation scenarios.
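-
-A sketch of the format-related options mentioned in the list above:
-
-```javascript
-var ajvFast = new Ajv({format: 'fast'});    // simplified (faster, but less strict) format regular expressions
-var ajvNoFormat = new Ajv({format: false}); // the "format" keyword is ignored entirely
-```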
-
-
-## Filtering data
-
-With [option `removeAdditional`](#options) (added by [andyscott](https://github.com/andyscott)) you can filter data during the validation.
-
-This option modifies original data.
-
-Example:
-
-```javascript
-var ajv = new Ajv({ removeAdditional: true });
-var schema = {
- "additionalProperties": false,
- "properties": {
- "foo": { "type": "number" },
- "bar": {
- "additionalProperties": { "type": "number" },
- "properties": {
- "baz": { "type": "string" }
- }
- }
- }
-}
-
-var data = {
- "foo": 0,
- "additional1": 1, // will be removed; `additionalProperties` == false
- "bar": {
- "baz": "abc",
- "additional2": 2 // will NOT be removed; `additionalProperties` != false
- },
-}
-
-var validate = ajv.compile(schema);
-
-console.log(validate(data)); // true
-console.log(data); // { "foo": 0, "bar": { "baz": "abc", "additional2": 2 }
-```
-
-If `removeAdditional` option in the example above were `"all"` then both `additional1` and `additional2` properties would have been removed.
-
-If the option were `"failing"` then property `additional1` would have been removed regardless of its value and property `additional2` would have been removed only if its value were failing the schema in the inner `additionalProperties` (so in the example above it would have stayed because it passes the schema, but any non-number would have been removed).
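-
-A sketch of the other `removeAdditional` modes described above:
-
-```javascript
-var ajvRemoveAll = new Ajv({removeAdditional: 'all'});         // removes all additional properties
-var ajvRemoveFailing = new Ajv({removeAdditional: 'failing'}); // removes additional properties that fail their schema
-```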
-
-__Please note__: If you use `removeAdditional` option with `additionalProperties` keyword inside `anyOf`/`oneOf` keywords your validation can fail with this schema, for example:
-
-```json
-{
- "type": "object",
- "oneOf": [
- {
- "properties": {
- "foo": { "type": "string" }
- },
- "required": [ "foo" ],
- "additionalProperties": false
- },
- {
- "properties": {
- "bar": { "type": "integer" }
- },
- "required": [ "bar" ],
- "additionalProperties": false
- }
- ]
-}
-```
-
-The intention of the schema above is to allow objects with either the string property "foo" or the integer property "bar", but not with both and not with any other properties.
-
-With the option `removeAdditional: true` the validation will pass for the object `{ "foo": "abc"}` but will fail for the object `{"bar": 1}`. It happens because while the first subschema in `oneOf` is validated, the property `bar` is removed because it is an additional property according to the standard (because it is not included in `properties` keyword in the same schema).
-
-While this behaviour is unexpected (issues [#129](https://github.com/epoberezkin/ajv/issues/129), [#134](https://github.com/epoberezkin/ajv/issues/134)), it is correct. To have the expected behaviour (both objects are allowed and additional properties are removed) the schema has to be refactored in this way:
-
-```json
-{
- "type": "object",
- "properties": {
- "foo": { "type": "string" },
- "bar": { "type": "integer" }
- },
- "additionalProperties": false,
- "oneOf": [
- { "required": [ "foo" ] },
- { "required": [ "bar" ] }
- ]
-}
-```
-
-The schema above is also more efficient - it will compile into a faster function.
-
-
-## Assigning defaults
-
-With [option `useDefaults`](#options) Ajv will assign values from `default` keyword in the schemas of `properties` and `items` (when it is the array of schemas) to the missing properties and items.
-
-With the option value `"empty"` properties and items equal to `null` or `""` (empty string) will be considered missing and assigned defaults.
-
-This option modifies original data.
-
-__Please note__: the default value is inserted in the generated validation code as a literal, so the value inserted in the data will be the deep clone of the default in the schema.
-
-
-Example 1 (`default` in `properties`):
-
-```javascript
-var ajv = new Ajv({ useDefaults: true });
-var schema = {
- "type": "object",
- "properties": {
- "foo": { "type": "number" },
- "bar": { "type": "string", "default": "baz" }
- },
- "required": [ "foo", "bar" ]
-};
-
-var data = { "foo": 1 };
-
-var validate = ajv.compile(schema);
-
-console.log(validate(data)); // true
-console.log(data); // { "foo": 1, "bar": "baz" }
-```
-
-Example 2 (`default` in `items`):
-
-```javascript
-var schema = {
- "type": "array",
- "items": [
- { "type": "number" },
- { "type": "string", "default": "foo" }
- ]
-}
-
-var data = [ 1 ];
-
-var validate = ajv.compile(schema);
-
-console.log(validate(data)); // true
-console.log(data); // [ 1, "foo" ]
-```
-
-`default` keywords in other cases are ignored:
-
-- not in `properties` or `items` subschemas
-- in schemas inside `anyOf`, `oneOf` and `not` (see [#42](https://github.com/epoberezkin/ajv/issues/42))
-- in `if` subschema of `switch` keyword
-- in schemas generated by custom macro keywords
-
-The [`strictDefaults` option](#options) customizes Ajv's behavior for the defaults that Ajv ignores (`true` raises an error, and `"log"` outputs a warning).
-
-
-## Coercing data types
-
-When you are validating user inputs all your data properties are usually strings. The option `coerceTypes` allows you to have your data types coerced to the types specified in your schema `type` keywords, both to pass the validation and to use the correctly typed data afterwards.
-
-This option modifies original data.
-
-__Please note__: if you pass a scalar value to the validating function its type will be coerced and it will pass the validation, but the value of the variable you pass won't be updated because scalars are passed by value.
-
-
-Example 1:
-
-```javascript
-var ajv = new Ajv({ coerceTypes: true });
-var schema = {
- "type": "object",
- "properties": {
- "foo": { "type": "number" },
- "bar": { "type": "boolean" }
- },
- "required": [ "foo", "bar" ]
-};
-
-var data = { "foo": "1", "bar": "false" };
-
-var validate = ajv.compile(schema);
-
-console.log(validate(data)); // true
-console.log(data); // { "foo": 1, "bar": false }
-```
-
-Example 2 (array coercions):
-
-```javascript
-var ajv = new Ajv({ coerceTypes: 'array' });
-var schema = {
- "properties": {
- "foo": { "type": "array", "items": { "type": "number" } },
- "bar": { "type": "boolean" }
- }
-};
-
-var data = { "foo": "1", "bar": ["false"] };
-
-var validate = ajv.compile(schema);
-
-console.log(validate(data)); // true
-console.log(data); // { "foo": [1], "bar": false }
-```
-
-The coercion rules, as you can see from the example, are different from JavaScript both to validate user input as expected and to have the coercion reversible (to correctly validate cases where different types are defined in subschemas of "anyOf" and other compound keywords).
-
-See [Coercion rules](https://github.com/epoberezkin/ajv/blob/master/COERCION.md) for details.
-
-
-## API
-
-##### new Ajv(Object options) -> Object
-
-Create Ajv instance.
-
-
-##### .compile(Object schema) -> Function<Object data>
-
-Generate validating function and cache the compiled schema for future use.
-
-Validating function returns a boolean value. This function has properties `errors` and `schema`. Errors encountered during the last validation are assigned to `errors` property (it is assigned `null` if there were no errors). `schema` property contains the reference to the original schema.
-
-The schema passed to this method will be validated against meta-schema unless `validateSchema` option is false. If schema is invalid, an error will be thrown. See [options](#options).
-
-
-##### .compileAsync(Object schema [, Boolean meta] [, Function callback]) -> Promise
-
-Asynchronous version of `compile` method that loads missing remote schemas using asynchronous function in `options.loadSchema`. This function returns a Promise that resolves to a validation function. An optional callback passed to `compileAsync` will be called with 2 parameters: error (or null) and validating function. The returned promise will reject (and the callback will be called with an error) when:
-
-- missing schema can't be loaded (`loadSchema` returns a Promise that rejects).
-- a schema containing a missing reference is loaded, but the reference cannot be resolved.
-- schema (or some loaded/referenced schema) is invalid.
-
-The function compiles schema and loads the first missing schema (or meta-schema) until all missing schemas are loaded.
-
-You can asynchronously compile meta-schema by passing `true` as the second parameter.
-
-See example in [Asynchronous compilation](#asynchronous-schema-compilation).
-
-
-##### .validate(Object schema|String key|String ref, data) -> Boolean
-
-Validate data using passed schema (it will be compiled and cached).
-
-Instead of the schema you can use the key that was previously passed to `addSchema`, the schema id if it was present in the schema or any previously resolved reference.
-
-Validation errors will be available in the `errors` property of Ajv instance (`null` if there were no errors).
-
-__Please note__: every time this method is called the errors are overwritten so you need to copy them to another variable if you want to use them later.
-
-If the schema is asynchronous (has `$async` keyword on the top level) this method returns a Promise. See [Asynchronous validation](#asynchronous-validation).
-
-
-##### .addSchema(Array<Object>|Object schema [, String key]) -> Ajv
-
-Add schema(s) to validator instance. This method does not compile schemas (but it still validates them). Because of that dependencies can be added in any order and circular dependencies are supported. It also prevents unnecessary compilation of schemas that are containers for other schemas but not used as a whole.
-
-Array of schemas can be passed (schemas should have ids), the second parameter will be ignored.
-
-Key can be passed that can be used to reference the schema and will be used as the schema id if there is no id inside the schema. If the key is not passed, the schema id will be used as the key.
-
-
-Once the schema is added, it (and all the references inside it) can be referenced in other schemas and used to validate data.
-
-Although `addSchema` does not compile schemas, explicit compilation is not required - the schema will be compiled when it is used first time.
-
-By default the schema is validated against meta-schema before it is added, and if the schema does not pass validation the exception is thrown. This behaviour is controlled by `validateSchema` option.
-
-__Please note__: Ajv uses the [method chaining syntax](https://en.wikipedia.org/wiki/Method_chaining) for all methods with the prefix `add*` and `remove*`.
-This allows you to do nice things like the following.
-
-```javascript
-var validate = new Ajv().addSchema(schema).addFormat(name, regex).getSchema(uri);
-```
-
-##### .addMetaSchema(Array<Object>|Object schema [, String key]) -> Ajv
-
-Adds meta schema(s) that can be used to validate other schemas. That function should be used instead of `addSchema` because there may be instance options that would compile a meta schema incorrectly (at the moment it is `removeAdditional` option).
-
-There is no need to explicitly add draft-07 meta schema (http://json-schema.org/draft-07/schema) - it is added by default, unless option `meta` is set to `false`. You only need to use it if you have a changed meta-schema that you want to use to validate your schemas. See `validateSchema`.
-
-
-##### .validateSchema(Object schema) -> Boolean
-
-Validates schema. This method should be used to validate schemas rather than `validate` due to the inconsistency of `uri` format in JSON Schema standard.
-
-By default this method is called automatically when the schema is added, so you rarely need to use it directly.
-
-If schema doesn't have `$schema` property, it is validated against draft 6 meta-schema (option `meta` should not be false).
-
-If schema has `$schema` property, then the schema with this id (that should be previously added) is used to validate passed schema.
-
-Errors will be available at `ajv.errors`.
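-
-A minimal sketch:
-
-```javascript
-if (!ajv.validateSchema(schema)) console.log(ajv.errors);
-```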
-
-
-##### .getSchema(String key) -> Function<Object data>
-
-Retrieve compiled schema previously added with `addSchema` by the key passed to `addSchema` or by its full reference (id). The returned validating function has `schema` property with the reference to the original schema.
-
-
-##### .removeSchema([Object schema|String key|String ref|RegExp pattern]) -> Ajv
-
-Remove added/cached schema. Even if schema is referenced by other schemas it can be safely removed as dependent schemas have local references.
-
-Schema can be removed using:
-- key passed to `addSchema`
-- its full reference (id)
-- RegExp that should match schema id or key (meta-schemas won't be removed)
-- actual schema object that will be stable-stringified to remove schema from cache
-
-If no parameter is passed all schemas but meta-schemas will be removed and the cache will be cleared.
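-
-A sketch of the removal options listed above (ids and keys are illustrative):
-
-```javascript
-ajv.removeSchema('mySchema');                               // by the key passed to addSchema
-ajv.removeSchema('http://example.com/schemas/schema.json'); // by full reference (id)
-ajv.removeSchema(/^http:\/\/example\.com\/schemas\//);      // by RegExp matching schema id or key
-ajv.removeSchema();                                         // remove all schemas except meta-schemas
-```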
-
-
-##### .addFormat(String name, String|RegExp|Function|Object format) -> Ajv
-
-Add a custom format to validate strings or numbers. It can also be used to replace pre-defined formats for the Ajv instance.
-
-Strings are converted to RegExp.
-
-A function should return the validation result as `true` or `false`.
-
-If an object is passed, it can have the properties `validate`, `compare`, `async` and `type`:
-
-- _validate_: a string, RegExp or a function as described above.
-- _compare_: an optional comparison function that accepts two strings and compares them according to the format meaning. This function is used with keywords `formatMaximum`/`formatMinimum` (defined in [ajv-keywords](https://github.com/epoberezkin/ajv-keywords) package). It should return `1` if the first value is bigger than the second value, `-1` if it is smaller and `0` if it is equal.
-- _async_: an optional `true` value if `validate` is an asynchronous function; in this case it should return a promise that resolves with a value `true` or `false`.
-- _type_: an optional type of data that the format applies to. It can be `"string"` (default) or `"number"` (see https://github.com/epoberezkin/ajv/issues/291#issuecomment-259923858). If the type of data is different, the validation will pass.
-
-Custom formats can be also added via `formats` option.
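-
-A minimal sketch of a number format defined as an object ("even-number" is an illustrative name, not a pre-defined format):
-
-```javascript
-ajv.addFormat('even-number', {
-  type: 'number',
-  validate: function (n) { return n % 2 === 0; }
-});
-
-var validate = ajv.compile({ type: 'number', format: 'even-number' });
-console.log(validate(4)); // true
-console.log(validate(5)); // false
-```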
-
-
-##### .addKeyword(String keyword, Object definition) -> Ajv
-
-Add custom validation keyword to Ajv instance.
-
-Keyword should be different from all standard JSON Schema keywords and different from previously defined keywords. A keyword cannot be redefined while its definition exists - remove it with `removeKeyword` first if you need to change it.
-
-Keyword must start with a letter, `_` or `$`, and may continue with letters, numbers, `_`, `$`, or `-`.
-It is recommended to use an application-specific prefix for keywords to avoid current and future name collisions.
-
-Example Keywords:
-- `"xyz-example"`: valid, and uses prefix for the xyz project to avoid name collisions.
-- `"example"`: valid, but not recommended as it could collide with future versions of JSON Schema etc.
-- `"3-example"`: invalid as numbers are not allowed to be the first character in a keyword
-
-Keyword definition is an object with the following properties:
-
-- _type_: optional string or array of strings with data type(s) that the keyword applies to. If not present, the keyword will apply to all types.
-- _validate_: validating function
-- _compile_: compiling function
-- _macro_: macro function
-- _inline_: compiling function that returns code (as string)
-- _schema_: an optional `false` value used with "validate" keyword to not pass schema
-- _metaSchema_: an optional meta-schema for keyword schema
-- _dependencies_: an optional list of properties that must be present in the parent schema - it will be checked during schema compilation
-- _modifying_: `true` MUST be passed if keyword modifies data
-- _statements_: `true` can be passed in case inline keyword generates statements (as opposed to expression)
-- _valid_: pass `true`/`false` to pre-define validation result, the result returned from validation function will be ignored. This option cannot be used with macro keywords.
-- _$data_: an optional `true` value to support [$data reference](#data-reference) as the value of custom keyword. The reference will be resolved at validation time. If the keyword has meta-schema it would be extended to allow $data and it will be used to validate the resolved value. Supporting $data reference requires that keyword has validating function (as the only option or in addition to compile, macro or inline function).
-- _async_: an optional `true` value if the validation function is asynchronous (whether it is compiled or passed in _validate_ property); in this case it should return a promise that resolves with a value `true` or `false`. This option is ignored in case of "macro" and "inline" keywords.
-- _errors_: an optional boolean or string `"full"` indicating whether keyword returns errors. If this property is not set Ajv will determine if the errors were set in case of failed validation.
-
-_compile_, _macro_ and _inline_ are mutually exclusive, only one should be used at a time. _validate_ can be used separately or in addition to them to support $data reference.
-
-__Please note__: If the keyword is validating data type that is different from the type(s) in its definition, the validation function will not be called (and expanded macro will not be used), so there is no need to check for data type inside validation function or inside schema returned by macro function (unless you want to enforce a specific type and for some reason do not want to use a separate `type` keyword for that). In the same way as standard keywords work, if the keyword does not apply to the data type being validated, the validation of this keyword will succeed.
-
-See [Defining custom keywords](#defining-custom-keywords) for more details.
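-
-A minimal sketch of a compiled keyword ("xyz-range" is an illustrative, application-prefixed name):
-
-```javascript
-ajv.addKeyword('xyz-range', {
-  type: 'number',
-  metaSchema: { type: 'array', items: { type: 'number' }, minItems: 2, maxItems: 2 },
-  compile: function (schema) {
-    // schema is the keyword value, e.g. [1, 10]
-    return function (data) { return data >= schema[0] && data <= schema[1]; };
-  }
-});
-
-var validate = ajv.compile({ type: 'number', 'xyz-range': [1, 10] });
-console.log(validate(5));  // true
-console.log(validate(11)); // false
-```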
-
-
-##### .getKeyword(String keyword) -> Object|Boolean
-
-Returns custom keyword definition, `true` for pre-defined keywords and `false` if the keyword is unknown.
-
-
-##### .removeKeyword(String keyword) -> Ajv
-
-Removes a custom or pre-defined keyword so that you can redefine it.
-
-While this method can be used to extend pre-defined keywords, it can also be used to completely change their meaning, which may lead to unexpected results.
-
-__Please note__: schemas compiled before the keyword is removed will continue to work without changes. To recompile schemas use `removeSchema` method and compile them again.
-
-
-##### .errorsText([Array<Object> errors [, Object options]]) -> String
-
-Returns the text with all errors in a String.
-
-Options can have properties `separator` (string used to separate errors, ", " by default) and `dataVar` (the variable name that dataPaths are prefixed with, "data" by default).
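-
-For example (a minimal sketch):
-
-```javascript
-var validate = ajv.compile({ type: 'object', required: ['name'] });
-if (!validate({})) {
-  console.log(ajv.errorsText(validate.errors, { separator: '\n', dataVar: 'config' }));
-  // config should have required property 'name'
-}
-```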
-
-
-## Options
-
-Defaults:
-
-```javascript
-{
- // validation and reporting options:
- $data: false,
- allErrors: false,
- verbose: false,
- $comment: false, // NEW in Ajv version 6.0
- jsonPointers: false,
- uniqueItems: true,
- unicode: true,
- nullable: false,
- format: 'fast',
- formats: {},
- unknownFormats: true,
- schemas: {},
- logger: undefined,
- // referenced schema options:
- schemaId: '$id',
- missingRefs: true,
- extendRefs: 'ignore', // recommended 'fail'
- loadSchema: undefined, // function(uri: string): Promise {}
- // options to modify validated data:
- removeAdditional: false,
- useDefaults: false,
- coerceTypes: false,
- // strict mode options
- strictDefaults: false,
- strictKeywords: false,
- // asynchronous validation options:
- transpile: undefined, // requires ajv-async package
- // advanced options:
- meta: true,
- validateSchema: true,
- addUsedSchema: true,
- inlineRefs: true,
- passContext: false,
- loopRequired: Infinity,
- ownProperties: false,
- multipleOfPrecision: false,
- errorDataPath: 'object', // deprecated
- messages: true,
- sourceCode: false,
- processCode: undefined, // function (str: string): string {}
- cache: new Cache,
- serialize: undefined
-}
-```
-
-##### Validation and reporting options
-
-- _$data_: support [$data references](#data-reference). The draft-07 meta-schema that is added by default will be extended to allow them. If you want to use another meta-schema you need to use the `$dataMetaSchema` method to add support for $data references. See [API](#api).
-- _allErrors_: check all rules collecting all errors. Default is to return after the first error.
-- _verbose_: include the reference to the part of the schema (`schema` and `parentSchema`) and validated data in errors (false by default).
-- _$comment_ (NEW in Ajv version 6.0): log or pass the value of `$comment` keyword to a function. Option values:
- - `false` (default): ignore $comment keyword.
- - `true`: log the keyword value to console.
- - function: pass the keyword value, its schema path and root schema to the specified function
-- _jsonPointers_: set `dataPath` property of errors using [JSON Pointers](https://tools.ietf.org/html/rfc6901) instead of JavaScript property access notation.
-- _uniqueItems_: validate `uniqueItems` keyword (true by default).
-- _unicode_: calculate correct length of strings with unicode pairs (true by default). Pass `false` to use `.length` of strings that is faster, but gives "incorrect" lengths of strings with unicode pairs - each unicode pair is counted as two characters.
-- _nullable_: support keyword "nullable" from [Open API 3 specification](https://swagger.io/docs/specification/data-models/data-types/).
-- _format_: formats validation mode. Option values:
- - `"fast"` (default) - simplified and fast validation (see [Formats](#formats) for details of which formats are available and affected by this option).
- - `"full"` - more restrictive and slow validation. E.g., 25:00:00 and 2015/14/33 will be invalid time and date in 'full' mode but it will be valid in 'fast' mode.
- - `false` - ignore all format keywords.
-- _formats_: an object with custom formats. Keys and values will be passed to `addFormat` method.
-- _keywords_: an object with custom keywords. Keys and values will be passed to `addKeyword` method.
-- _unknownFormats_: handling of unknown formats. Option values:
- - `true` (default) - if an unknown format is encountered the exception is thrown during schema compilation. If `format` keyword value is [$data reference](#data-reference) and it is unknown the validation will fail.
- - `[String]` - an array of unknown format names that will be ignored. This option can be used to allow usage of third party schemas with format(s) for which you don't have definitions, but still fail if another unknown format is used. If `format` keyword value is [$data reference](#data-reference) and it is not in this array the validation will fail.
- - `"ignore"` - to log warning during schema compilation and always pass validation (the default behaviour in versions before 5.0.0). This option is not recommended, as it allows to mistype format name and it won't be validated without any error message. This behaviour is required by JSON Schema specification.
-- _schemas_: an array or object of schemas that will be added to the instance. In case you pass the array the schemas must have IDs in them. When the object is passed the method `addSchema(value, key)` will be called for each schema in this object.
-- _logger_: sets the logging method. Default is the global `console` object that should have methods `log`, `warn` and `error`. See [Error logging](#error-logging). Option values:
- - custom logger - it should have methods `log`, `warn` and `error`. If any of these methods is missing an exception will be thrown.
- - `false` - logging is disabled.
-
-
-##### Referenced schema options
-
-- _schemaId_: this option defines which keywords are used as schema URI. Option value:
- - `"$id"` (default) - only use `$id` keyword as schema URI (as specified in JSON Schema draft-06/07), ignore `id` keyword (if it is present a warning will be logged).
- - `"id"` - only use `id` keyword as schema URI (as specified in JSON Schema draft-04), ignore `$id` keyword (if it is present a warning will be logged).
- - `"auto"` - use both `$id` and `id` keywords as schema URI. If both are present (in the same schema object) and different the exception will be thrown during schema compilation.
-- _missingRefs_: handling of missing referenced schemas. Option values:
- - `true` (default) - if the reference cannot be resolved during compilation the exception is thrown. The thrown error has properties `missingRef` (with hash fragment) and `missingSchema` (without it). Both properties are resolved relative to the current base id (usually schema id, unless it was substituted).
- - `"ignore"` - to log error during compilation and always pass validation.
- - `"fail"` - to log error and successfully compile schema but fail validation if this rule is checked.
-- _extendRefs_: validation of other keywords when `$ref` is present in the schema. Option values:
- - `"ignore"` (default) - when `$ref` is used other keywords are ignored (as per [JSON Reference](https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03#section-3) standard). A warning will be logged during the schema compilation.
- - `"fail"` (recommended) - if other validation keywords are used together with `$ref` the exception will be thrown when the schema is compiled. This option is recommended to make sure schema has no keywords that are ignored, which can be confusing.
- - `true` - validate all keywords in the schemas with `$ref` (the default behaviour in versions before 5.0.0).
-- _loadSchema_: asynchronous function that will be used to load remote schemas when `compileAsync` [method](#api-compileAsync) is used and some reference is missing (option `missingRefs` should NOT be 'fail' or 'ignore'). This function should accept remote schema uri as a parameter and return a Promise that resolves to a schema. See example in [Asynchronous compilation](#asynchronous-schema-compilation).
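-
-For illustration, a sketch of asynchronous compilation with a `loadSchema` function (the URL and the use of `fetch` are assumptions):
-
-```javascript
-var ajv = new Ajv({
-  loadSchema: function (uri) {
-    // return a Promise that resolves to the remote schema, e.g. via any HTTP client
-    return fetch(uri).then(function (res) { return res.json(); });
-  }
-});
-
-ajv.compileAsync({ $ref: 'https://example.com/remote-schema.json' })
-  .then(function (validate) { console.log(validate({ foo: 1 })); });
-```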
-
-
-##### Options to modify validated data
-
-- _removeAdditional_: remove additional properties - see example in [Filtering data](#filtering-data). This option is not used if schema is added with `addMetaSchema` method. Option values:
- - `false` (default) - not to remove additional properties
- - `"all"` - all additional properties are removed, regardless of `additionalProperties` keyword in schema (and no validation is made for them).
- - `true` - only additional properties with `additionalProperties` keyword equal to `false` are removed.
- - `"failing"` - additional properties that fail schema validation will be removed (where `additionalProperties` keyword is `false` or schema).
-- _useDefaults_: replace missing or undefined properties and items with the values from corresponding `default` keywords. Default behaviour is to ignore `default` keywords. This option is not used if schema is added with `addMetaSchema` method. See examples in [Assigning defaults](#assigning-defaults). Option values:
- - `false` (default) - do not use defaults
- - `true` - insert defaults by value (object literal is used).
- - `"empty"` - in addition to missing or undefined, use defaults for properties and items that are equal to `null` or `""` (an empty string).
- - `"shared"` (deprecated) - insert defaults by reference. If the default is an object, it will be shared by all instances of validated data. If you modify the inserted default in the validated data, it will be modified in the schema as well.
-- _coerceTypes_: change data type of data to match `type` keyword. See the example in [Coercing data types](#coercing-data-types) and [coercion rules](https://github.com/epoberezkin/ajv/blob/master/COERCION.md). Option values:
- - `false` (default) - no type coercion.
- - `true` - coerce scalar data types.
- - `"array"` - in addition to coercions between scalar types, coerce scalar data to an array with one element and vice versa (as required by the schema).
-
-
-##### Strict mode options
-
-- _strictDefaults_: report ignored `default` keywords in schemas. Option values:
- - `false` (default) - ignored defaults are not reported
- - `true` - if an ignored default is present, throw an error
- - `"log"` - if an ignored default is present, log warning
-- _strictKeywords_: report unknown keywords in schemas. Option values:
- - `false` (default) - unknown keywords are not reported
- - `true` - if an unknown keyword is present, throw an error
- - `"log"` - if an unknown keyword is present, log warning
-
-
-##### Asynchronous validation options
-
-- _transpile_: Requires [ajv-async](https://github.com/epoberezkin/ajv-async) package. It determines whether Ajv transpiles compiled asynchronous validation function. Option values:
- - `undefined` (default) - transpile with [nodent](https://github.com/MatAtBread/nodent) if async functions are not supported.
- - `true` - always transpile with nodent.
- - `false` - do not transpile; if async functions are not supported an exception will be thrown.
-
-
-##### Advanced options
-
-- _meta_: add [meta-schema](http://json-schema.org/documentation.html) so it can be used by other schemas (true by default). If an object is passed, it will be used as the default meta-schema for schemas that have no `$schema` keyword. This default meta-schema MUST have `$schema` keyword.
-- _validateSchema_: validate added/compiled schemas against meta-schema (true by default). `$schema` property in the schema can be http://json-schema.org/draft-07/schema or absent (draft-07 meta-schema will be used) or can be a reference to the schema previously added with `addMetaSchema` method. Option values:
- - `true` (default) - if the validation fails, throw the exception.
- - `"log"` - if the validation fails, log error.
- - `false` - skip schema validation.
-- _addUsedSchema_: by default methods `compile` and `validate` add schemas to the instance if they have `$id` (or `id`) property that doesn't start with "#". If `$id` is present and it is not unique the exception will be thrown. Set this option to `false` to skip adding schemas to the instance and the `$id` uniqueness check when these methods are used. This option does not affect `addSchema` method.
-- _inlineRefs_: Affects compilation of referenced schemas. Option values:
- - `true` (default) - the referenced schemas that don't have refs in them are inlined, regardless of their size - that substantially improves performance at the cost of the bigger size of compiled schema functions.
- - `false` - to not inline referenced schemas (they will be compiled as separate functions).
- - integer number - to limit the maximum number of keywords of the schema that will be inlined.
-- _passContext_: pass validation context to custom keyword functions. If this option is `true` and you pass some context to the compiled validation function with `validate.call(context, data)`, the `context` will be available as `this` in your custom keywords. By default `this` is Ajv instance.
-- _loopRequired_: by default `required` keyword is compiled into a single expression (or a sequence of statements in `allErrors` mode). In case of a very large number of properties in this keyword it may result in a very big validation function. Pass integer to set the number of properties above which `required` keyword will be validated in a loop - smaller validation function size but also worse performance.
-- _ownProperties_: by default Ajv iterates over all enumerable object properties; when this option is `true` only own enumerable object properties (i.e. found directly on the object rather than on its prototype) are iterated. Contributed by @mbroadst.
-- _multipleOfPrecision_: by default `multipleOf` keyword is validated by comparing the result of division with parseInt() of that result. It works for dividers that are bigger than 1. For small dividers such as 0.01 the result of the division is usually not an integer (even when it should be, see issue [#84](https://github.com/epoberezkin/ajv/issues/84)). If you need to use fractional dividers set this option to some positive integer N to have `multipleOf` validated using this formula: `Math.abs(Math.round(division) - division) < 1e-N` (it is slower but tolerates floating-point arithmetic deviations).
-- _errorDataPath_ (deprecated): set `dataPath` to point to 'object' (default) or to 'property' when validating keywords `required`, `additionalProperties` and `dependencies`.
-- _messages_: Include human-readable messages in errors. `true` by default. `false` can be passed when custom messages are used (e.g. with [ajv-i18n](https://github.com/epoberezkin/ajv-i18n)).
-- _sourceCode_: add `sourceCode` property to validating function (for debugging; this code can be different from the result of toString call).
-- _processCode_: an optional function to process generated code before it is passed to Function constructor. It can be used to either beautify (the validating function is generated without line-breaks) or to transpile code. Starting from version 5.0.0 this option replaced options:
- - `beautify` that formatted the generated function using [js-beautify](https://github.com/beautify-web/js-beautify). If you want to beautify the generated code pass `require('js-beautify').js_beautify`.
- - `transpile` that transpiled asynchronous validation function. You can still use `transpile` option with [ajv-async](https://github.com/epoberezkin/ajv-async) package. See [Asynchronous validation](#asynchronous-validation) for more information.
-- _cache_: an optional instance of cache to store compiled schemas using stable-stringified schema as a key. For example, set-associative cache [sacjs](https://github.com/epoberezkin/sacjs) can be used. If not passed then a simple hash is used which is good enough for the common use case (a limited number of statically defined schemas). Cache should have methods `put(key, value)`, `get(key)`, `del(key)` and `clear()`.
-- _serialize_: an optional function to serialize schema to cache key. Pass `false` to use schema itself as a key (e.g., if WeakMap used as a cache). By default [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used.
-
-
-## Validation errors
-
-In case of validation failure, Ajv assigns the array of errors to `errors` property of validation function (or to `errors` property of Ajv instance when `validate` or `validateSchema` methods were called). In case of [asynchronous validation](#asynchronous-validation), the returned promise is rejected with exception `Ajv.ValidationError` that has `errors` property.
-
-
-### Error objects
-
-Each error is an object with the following properties:
-
-- _keyword_: validation keyword.
-- _dataPath_: the path to the part of the data that was validated. By default `dataPath` uses JavaScript property access notation (e.g., `".prop[1].subProp"`). When the option `jsonPointers` is true (see [Options](#options)) `dataPath` will be set using JSON pointer standard (e.g., `"/prop/1/subProp"`).
-- _schemaPath_: the path (JSON-pointer as a URI fragment) to the schema of the keyword that failed validation.
-- _params_: the object with the additional information about error that can be used to create custom error messages (e.g., using [ajv-i18n](https://github.com/epoberezkin/ajv-i18n) package). See below for parameters set by all keywords.
-- _message_: the standard error message (can be excluded with option `messages` set to false).
-- _schema_: the schema of the keyword (added with `verbose` option).
-- _parentSchema_: the schema containing the keyword (added with `verbose` option)
-- _data_: the data validated by the keyword (added with `verbose` option).
-
-__Please note__: `propertyNames` keyword schema validation errors have an additional property `propertyName`; `dataPath` points to the object. After schema validation of each property name, if it is invalid an additional error is added with the property `keyword` equal to `"propertyNames"`.
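-
-For illustration, an error produced by a failed `minimum` keyword might look like this (the values are illustrative):
-
-```javascript
-{
-  keyword: 'minimum',
-  dataPath: '.age',
-  schemaPath: '#/properties/age/minimum',
-  params: { comparison: '>=', limit: 0, exclusive: false },
-  message: 'should be >= 0'
-}
-```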
-
-
-### Error parameters
-
-Properties of `params` object in errors depend on the keyword that failed validation.
-
-- `maxItems`, `minItems`, `maxLength`, `minLength`, `maxProperties`, `minProperties` - property `limit` (number, the schema of the keyword).
-- `additionalItems` - property `limit` (the maximum number of allowed items in case when `items` keyword is an array of schemas and `additionalItems` is false).
-- `additionalProperties` - property `additionalProperty` (the property not used in `properties` and `patternProperties` keywords).
-- `dependencies` - properties:
- - `property` (dependent property),
- - `missingProperty` (required missing dependency - only the first one is reported currently)
- - `deps` (required dependencies, comma separated list as a string),
- - `depsCount` (the number of required dependencies).
-- `format` - property `format` (the schema of the keyword).
-- `maximum`, `minimum` - properties:
- - `limit` (number, the schema of the keyword),
- - `exclusive` (boolean, the schema of `exclusiveMaximum` or `exclusiveMinimum`),
- - `comparison` (string, comparison operation to compare the data to the limit, with the data on the left and the limit on the right; can be "<", "<=", ">", ">=")
-- `multipleOf` - property `multipleOf` (the schema of the keyword)
-- `pattern` - property `pattern` (the schema of the keyword)
-- `required` - property `missingProperty` (required property that is missing).
-- `propertyNames` - property `propertyName` (an invalid property name).
-- `patternRequired` (in ajv-keywords) - property `missingPattern` (required pattern that did not match any property).
-- `type` - property `type` (required type(s), a string, can be a comma-separated list)
-- `uniqueItems` - properties `i` and `j` (indices of duplicate items).
-- `const` - property `allowedValue` pointing to the value (the schema of the keyword).
-- `enum` - property `allowedValues` pointing to the array of values (the schema of the keyword).
-- `$ref` - property `ref` with the referenced schema URI.
-- `oneOf` - property `passingSchemas` (array of indices of passing schemas, null if no schema passes).
-- custom keywords (in case keyword definition doesn't create errors) - property `keyword` (the keyword name).
-
-
-### Error logging
-
-Using the `logger` option when initializing Ajv allows you to define custom logging. Here you can build upon the existing logging. The use of other logging packages is supported as long as the package or its associated wrapper exposes the required methods. If any of the required methods are missing an exception will be thrown.
-- **Required Methods**: `log`, `warn`, `error`
-
-```javascript
-var otherLogger = new OtherLogger();
-var ajv = new Ajv({
- logger: {
- log: console.log.bind(console),
- warn: function warn() {
- otherLogger.logWarn.apply(otherLogger, arguments);
- },
- error: function error() {
- otherLogger.logError.apply(otherLogger, arguments);
- console.error.apply(console, arguments);
- }
- }
-});
-```
-
-
-## Plugins
-
-Ajv can be extended with plugins that add custom keywords, formats or functions to process generated code. When such a plugin is published as an npm package, it is recommended that it follows these conventions (a minimal skeleton is sketched after the list):
-
-- it exports a function
-- this function accepts ajv instance as the first parameter and returns the same instance to allow chaining
-- this function can accept an optional configuration as the second parameter
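-
-A minimal plugin skeleton following these conventions (the keyword name is hypothetical):
-
-```javascript
-module.exports = function myPlugin(ajv, options) {
-  // "xyz-positive" is an illustrative, application-prefixed keyword name
-  ajv.addKeyword('xyz-positive', {
-    type: 'number',
-    validate: function (schema, data) { return data > 0; }
-  });
-  return ajv; // return the instance to allow chaining
-};
-```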
-
-If you have published a useful plugin please submit a PR to add it to the next section.
-
-
-## Related packages
-
-- [ajv-async](https://github.com/epoberezkin/ajv-async) - plugin to configure async validation mode
-- [ajv-bsontype](https://github.com/BoLaMN/ajv-bsontype) - plugin to validate mongodb's bsonType formats
-- [ajv-cli](https://github.com/jessedc/ajv-cli) - command line interface
-- [ajv-errors](https://github.com/epoberezkin/ajv-errors) - plugin for custom error messages
-- [ajv-i18n](https://github.com/epoberezkin/ajv-i18n) - internationalised error messages
-- [ajv-istanbul](https://github.com/epoberezkin/ajv-istanbul) - plugin to instrument generated validation code to measure test coverage of your schemas
-- [ajv-keywords](https://github.com/epoberezkin/ajv-keywords) - plugin with custom validation keywords (select, typeof, etc.)
-- [ajv-merge-patch](https://github.com/epoberezkin/ajv-merge-patch) - plugin with keywords $merge and $patch
-- [ajv-pack](https://github.com/epoberezkin/ajv-pack) - produces a compact module exporting validation functions
-- [ajv-formats-draft2019](https://github.com/luzlab/ajv-formats-draft2019) - format validators for draft2019 that aren't already included in ajv (i.e. `idn-hostname`, `idn-email`, `iri`, `iri-reference` and `duration`).
-
-## Some packages using Ajv
-
-- [webpack](https://github.com/webpack/webpack) - a module bundler. Its main purpose is to bundle JavaScript files for usage in a browser
-- [jsonscript-js](https://github.com/JSONScript/jsonscript-js) - the interpreter for [JSONScript](http://www.jsonscript.org) - scripted processing of existing endpoints and services
-- [osprey-method-handler](https://github.com/mulesoft-labs/osprey-method-handler) - Express middleware for validating requests and responses based on a RAML method object, used in [osprey](https://github.com/mulesoft/osprey) - validating API proxy generated from a RAML definition
-- [har-validator](https://github.com/ahmadnassri/har-validator) - HTTP Archive (HAR) validator
-- [jsoneditor](https://github.com/josdejong/jsoneditor) - a web-based tool to view, edit, format, and validate JSON http://jsoneditoronline.org
-- [JSON Schema Lint](https://github.com/nickcmaynard/jsonschemalint) - a web tool to validate JSON/YAML document against a single JSON Schema http://jsonschemalint.com
-- [objection](https://github.com/vincit/objection.js) - SQL-friendly ORM for Node.js
-- [table](https://github.com/gajus/table) - formats data into a string table
-- [ripple-lib](https://github.com/ripple/ripple-lib) - a JavaScript API for interacting with [Ripple](https://ripple.com) in Node.js and the browser
-- [restbase](https://github.com/wikimedia/restbase) - distributed storage with REST API & dispatcher for backend services built to provide a low-latency & high-throughput API for Wikipedia / Wikimedia content
-- [hippie-swagger](https://github.com/CacheControl/hippie-swagger) - [Hippie](https://github.com/vesln/hippie) wrapper that provides end to end API testing with swagger validation
-- [react-form-controlled](https://github.com/seeden/react-form-controlled) - React controlled form components with validation
-- [rabbitmq-schema](https://github.com/tjmehta/rabbitmq-schema) - a schema definition module for RabbitMQ graphs and messages
-- [@query/schema](https://www.npmjs.com/package/@query/schema) - stream filtering with a URI-safe query syntax parsing to JSON Schema
-- [chai-ajv-json-schema](https://github.com/peon374/chai-ajv-json-schema) - chai plugin to use JSON Schema with expect in mocha tests
-- [grunt-jsonschema-ajv](https://github.com/SignpostMarv/grunt-jsonschema-ajv) - Grunt plugin for validating files against JSON Schema
-- [extract-text-webpack-plugin](https://github.com/webpack-contrib/extract-text-webpack-plugin) - extract text from bundle into a file
-- [electron-builder](https://github.com/electron-userland/electron-builder) - a solution to package and build a ready for distribution Electron app
-- [addons-linter](https://github.com/mozilla/addons-linter) - Mozilla Add-ons Linter
-- [gh-pages-generator](https://github.com/epoberezkin/gh-pages-generator) - multi-page site generator converting markdown files to GitHub pages
-- [ESLint](https://github.com/eslint/eslint) - the pluggable linting utility for JavaScript and JSX
-
-
-## Tests
-
-```
-npm install
-git submodule update --init
-npm test
-```
-
-## Contributing
-
-All validation functions are generated using doT templates in [dot](https://github.com/epoberezkin/ajv/tree/master/lib/dot) folder. Templates are precompiled so doT is not a run-time dependency.
-
-`npm run build` - compiles templates to [dotjs](https://github.com/epoberezkin/ajv/tree/master/lib/dotjs) folder.
-
-`npm run watch` - automatically compiles templates when files in dot folder change
-
-Please see [Contributing guidelines](https://github.com/epoberezkin/ajv/blob/master/CONTRIBUTING.md)
-
-
-## Changes history
-
-See https://github.com/epoberezkin/ajv/releases
-
-__Please note__: [Changes in version 6.0.0](https://github.com/epoberezkin/ajv/releases/tag/v6.0.0).
-
-[Version 5.0.0](https://github.com/epoberezkin/ajv/releases/tag/5.0.0).
-
-[Version 4.0.0](https://github.com/epoberezkin/ajv/releases/tag/4.0.0).
-
-[Version 3.0.0](https://github.com/epoberezkin/ajv/releases/tag/3.0.0).
-
-[Version 2.0.0](https://github.com/epoberezkin/ajv/releases/tag/2.0.0).
-
-
-## Open-source software support
-
-Ajv is a part of [Tidelift subscription](https://tidelift.com/subscription/pkg/npm-ajv?utm_source=npm-ajv&utm_medium=referral&utm_campaign=readme) - it provides centralised support to open-source software users, in addition to the support provided by software maintainers.
-
-
-## License
-
-[MIT](https://github.com/epoberezkin/ajv/blob/master/LICENSE)
diff --git a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/dist/ajv.bundle.js b/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/dist/ajv.bundle.js
deleted file mode 100644
index dd956301f..000000000
--- a/package/lean/luci-app-jd-dailybonus/root/usr/lib/node/request/node_modules/ajv/dist/ajv.bundle.js
+++ /dev/null
@@ -1,7165 +0,0 @@
-(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Ajv = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i%\\^`{|}]|%[0-9a-f]{2})|\{[+#./;?&=,!@|]?(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?(?:,(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?)*\})*$/i;
-// For the source: https://gist.github.com/dperini/729294
-// For test cases: https://mathiasbynens.be/demo/url-regex
-// @todo Delete current URL in favour of the commented out URL rule when this issue is fixed https://github.com/eslint/eslint/issues/7983.
-// var URL = /^(?:(?:https?|ftp):\/\/)(?:\S+(?::\S*)?@)?(?:(?!10(?:\.\d{1,3}){3})(?!127(?:\.\d{1,3}){3})(?!169\.254(?:\.\d{1,3}){2})(?!192\.168(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z\u{00a1}-\u{ffff}0-9]+-?)*[a-z\u{00a1}-\u{ffff}0-9]+)(?:\.(?:[a-z\u{00a1}-\u{ffff}0-9]+-?)*[a-z\u{00a1}-\u{ffff}0-9]+)*(?:\.(?:[a-z\u{00a1}-\u{ffff}]{2,})))(?::\d{2,5})?(?:\/[^\s]*)?$/iu;
-var URL = /^(?:(?:http[s\u017F]?|ftp):\/\/)(?:(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+(?::(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])*)?@)?(?:(?!10(?:\.[0-9]{1,3}){3})(?!127(?:\.[0-9]{1,3}){3})(?!169\.254(?:\.[0-9]{1,3}){2})(?!192\.168(?:\.[0-9]{1,3}){2})(?!172\.(?:1[6-9]|2[0-9]|3[01])(?:\.[0-9]{1,3}){2})(?:[1-9][0-9]?|1[0-9][0-9]|2[01][0-9]|22[0-3])(?:\.(?:1?[0-9]{1,2}|2[0-4][0-9]|25[0-5])){2}(?:\.(?:[1-9][0-9]?|1[0-9][0-9]|2[0-4][0-9]|25[0-4]))|(?:(?:(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+-?)*(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+)(?:\.(?:(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+-?)*(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+)*(?:\.(?:(?:[KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]){2,})))(?::[0-9]{2,5})?(?:\/(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])*)?$/i;
-var UUID = /^(?:urn:uuid:)?[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/i;
-var JSON_POINTER = /^(?:\/(?:[^~/]|~0|~1)*)*$/;
-var JSON_POINTER_URI_FRAGMENT = /^#(?:\/(?:[a-z0-9_\-.!$&'()*+,;:=@]|%[0-9a-f]{2}|~0|~1)*)*$/i;
-var RELATIVE_JSON_POINTER = /^(?:0|[1-9][0-9]*)(?:#|(?:\/(?:[^~/]|~0|~1)*)*)$/;
-
-
-module.exports = formats;
-
-function formats(mode) {
- mode = mode == 'full' ? 'full' : 'fast';
- return util.copy(formats[mode]);
-}
-
-
-formats.fast = {
- // date: http://tools.ietf.org/html/rfc3339#section-5.6
- date: /^\d\d\d\d-[0-1]\d-[0-3]\d$/,
- // date-time: http://tools.ietf.org/html/rfc3339#section-5.6
- time: /^(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)?$/i,
- 'date-time': /^\d\d\d\d-[0-1]\d-[0-3]\d[t\s](?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)$/i,
- // uri: https://github.com/mafintosh/is-my-json-valid/blob/master/formats.js
- uri: /^(?:[a-z][a-z0-9+-.]*:)(?:\/?\/)?[^\s]*$/i,
- 'uri-reference': /^(?:(?:[a-z][a-z0-9+-.]*:)?\/?\/)?(?:[^\\\s#][^\s#]*)?(?:#[^\\\s]*)?$/i,
- 'uri-template': URITEMPLATE,
- url: URL,
- // email (sources from jsen validator):
- // http://stackoverflow.com/questions/201323/using-a-regular-expression-to-validate-an-email-address#answer-8829363
- // http://www.w3.org/TR/html5/forms.html#valid-e-mail-address (search for 'willful violation')
- email: /^[a-z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$/i,
- hostname: HOSTNAME,
- // optimized https://www.safaribooksonline.com/library/view/regular-expressions-cookbook/9780596802837/ch07s16.html
- ipv4: /^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/,
- // optimized http://stackoverflow.com/questions/53497/regular-expression-that-matches-valid-ipv6-addresses
- ipv6: /^\s*(?:(?:(?:[0-9a-f]{1,4}:){7}(?:[0-9a-f]{1,4}|:))|(?:(?:[0-9a-f]{1,4}:){6}(?::[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){5}(?:(?:(?::[0-9a-f]{1,4}){1,2})|:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){4}(?:(?:(?::[0-9a-f]{1,4}){1,3})|(?:(?::[0-9a-f]{1,4})?:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){3}(?:(?:(?::[0-9a-f]{1,4}){1,4})|(?:(?::[0-9a-f]{1,4}){0,2}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){2}(?:(?:(?::[0-9a-f]{1,4}){1,5})|(?:(?::[0-9a-f]{1,4}){0,3}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){1}(?:(?:(?::[0-9a-f]{1,4}){1,6})|(?:(?::[0-9a-f]{1,4}){0,4}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?::(?:(?:(?::[0-9a-f]{1,4}){1,7})|(?:(?::[0-9a-f]{1,4}){0,5}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(?:%.+)?\s*$/i,
- regex: regex,
- // uuid: http://tools.ietf.org/html/rfc4122
- uuid: UUID,
- // JSON-pointer: https://tools.ietf.org/html/rfc6901
- // uri fragment: https://tools.ietf.org/html/rfc3986#appendix-A
- 'json-pointer': JSON_POINTER,
- 'json-pointer-uri-fragment': JSON_POINTER_URI_FRAGMENT,
- // relative JSON-pointer: http://tools.ietf.org/html/draft-luff-relative-json-pointer-00
- 'relative-json-pointer': RELATIVE_JSON_POINTER
-};
-
-
-formats.full = {
- date: date,
- time: time,
- 'date-time': date_time,
- uri: uri,
- 'uri-reference': URIREF,
- 'uri-template': URITEMPLATE,
- url: URL,
- email: /^[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i,
- hostname: HOSTNAME,
- ipv4: /^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/,
- ipv6: /^\s*(?:(?:(?:[0-9a-f]{1,4}:){7}(?:[0-9a-f]{1,4}|:))|(?:(?:[0-9a-f]{1,4}:){6}(?::[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){5}(?:(?:(?::[0-9a-f]{1,4}){1,2})|:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){4}(?:(?:(?::[0-9a-f]{1,4}){1,3})|(?:(?::[0-9a-f]{1,4})?:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){3}(?:(?:(?::[0-9a-f]{1,4}){1,4})|(?:(?::[0-9a-f]{1,4}){0,2}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){2}(?:(?:(?::[0-9a-f]{1,4}){1,5})|(?:(?::[0-9a-f]{1,4}){0,3}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){1}(?:(?:(?::[0-9a-f]{1,4}){1,6})|(?:(?::[0-9a-f]{1,4}){0,4}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?::(?:(?:(?::[0-9a-f]{1,4}){1,7})|(?:(?::[0-9a-f]{1,4}){0,5}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(?:%.+)?\s*$/i,
- regex: regex,
- uuid: UUID,
- 'json-pointer': JSON_POINTER,
- 'json-pointer-uri-fragment': JSON_POINTER_URI_FRAGMENT,
- 'relative-json-pointer': RELATIVE_JSON_POINTER
-};
-
-
-function isLeapYear(year) {
- // https://tools.ietf.org/html/rfc3339#appendix-C
- return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0);
-}
-
-
-function date(str) {
- // full-date from http://tools.ietf.org/html/rfc3339#section-5.6
- var matches = str.match(DATE);
- if (!matches) return false;
-
- var year = +matches[1];
- var month = +matches[2];
- var day = +matches[3];
-
- return month >= 1 && month <= 12 && day >= 1 &&
- day <= (month == 2 && isLeapYear(year) ? 29 : DAYS[month]);
-}
-
-
-function time(str, full) {
- var matches = str.match(TIME);
- if (!matches) return false;
-
- var hour = matches[1];
- var minute = matches[2];
- var second = matches[3];
- var timeZone = matches[5];
- return ((hour <= 23 && minute <= 59 && second <= 59) ||
- (hour == 23 && minute == 59 && second == 60)) &&
- (!full || timeZone);
-}
-
-
-var DATE_TIME_SEPARATOR = /t|\s/i;
-function date_time(str) {
- // http://tools.ietf.org/html/rfc3339#section-5.6
- var dateTime = str.split(DATE_TIME_SEPARATOR);
- return dateTime.length == 2 && date(dateTime[0]) && time(dateTime[1], true);
-}
-
-
-var NOT_URI_FRAGMENT = /\/|:/;
-function uri(str) {
- // http://jmrware.com/articles/2009/uri_regexp/URI_regex.html + optional protocol + required "."
- return NOT_URI_FRAGMENT.test(str) && URI.test(str);
-}
-
-
-var Z_ANCHOR = /[^\\]\\Z/;
-function regex(str) {
- if (Z_ANCHOR.test(str)) return false;
- try {
- new RegExp(str);
- return true;
- } catch(e) {
- return false;
- }
-}
-
-},{"./util":10}],5:[function(require,module,exports){
-'use strict';
-
-var resolve = require('./resolve')
- , util = require('./util')
- , errorClasses = require('./error_classes')
- , stableStringify = require('fast-json-stable-stringify');
-
-var validateGenerator = require('../dotjs/validate');
-
-/**
- * Functions below are used inside compiled validations function
- */
-
-var ucs2length = util.ucs2length;
-var equal = require('fast-deep-equal');
-
-// this error is thrown by async schemas to return validation errors via exception
-var ValidationError = errorClasses.Validation;
-
-module.exports = compile;
-
-
-/**
- * Compiles schema to validation function
- * @this Ajv
- * @param {Object} schema schema object
- * @param {Object} root object with information about the root schema for this schema
- * @param {Object} localRefs the hash of local references inside the schema (created by resolve.id), used for inline resolution
- * @param {String} baseId base ID for IDs in the schema
- * @return {Function} validation function
- */
-function compile(schema, root, localRefs, baseId) {
- /* jshint validthis: true, evil: true */
- /* eslint no-shadow: 0 */
- var self = this
- , opts = this._opts
- , refVal = [ undefined ]
- , refs = {}
- , patterns = []
- , patternsHash = {}
- , defaults = []
- , defaultsHash = {}
- , customRules = [];
-
- root = root || { schema: schema, refVal: refVal, refs: refs };
-
- var c = checkCompiling.call(this, schema, root, baseId);
- var compilation = this._compilations[c.index];
- if (c.compiling) return (compilation.callValidate = callValidate);
-
- var formats = this._formats;
- var RULES = this.RULES;
-
- try {
- var v = localCompile(schema, root, localRefs, baseId);
- compilation.validate = v;
- var cv = compilation.callValidate;
- if (cv) {
- cv.schema = v.schema;
- cv.errors = null;
- cv.refs = v.refs;
- cv.refVal = v.refVal;
- cv.root = v.root;
- cv.$async = v.$async;
- if (opts.sourceCode) cv.source = v.source;
- }
- return v;
- } finally {
- endCompiling.call(this, schema, root, baseId);
- }
-
- /* @this {*} - custom context, see passContext option */
- function callValidate() {
- /* jshint validthis: true */
- var validate = compilation.validate;
- var result = validate.apply(this, arguments);
- callValidate.errors = validate.errors;
- return result;
- }
-
- function localCompile(_schema, _root, localRefs, baseId) {
- var isRoot = !_root || (_root && _root.schema == _schema);
- if (_root.schema != root.schema)
- return compile.call(self, _schema, _root, localRefs, baseId);
-
- var $async = _schema.$async === true;
-
- var sourceCode = validateGenerator({
- isTop: true,
- schema: _schema,
- isRoot: isRoot,
- baseId: baseId,
- root: _root,
- schemaPath: '',
- errSchemaPath: '#',
- errorPath: '""',
- MissingRefError: errorClasses.MissingRef,
- RULES: RULES,
- validate: validateGenerator,
- util: util,
- resolve: resolve,
- resolveRef: resolveRef,
- usePattern: usePattern,
- useDefault: useDefault,
- useCustomRule: useCustomRule,
- opts: opts,
- formats: formats,
- logger: self.logger,
- self: self
- });
-
- sourceCode = vars(refVal, refValCode) + vars(patterns, patternCode)
- + vars(defaults, defaultCode) + vars(customRules, customRuleCode)
- + sourceCode;
-
- if (opts.processCode) sourceCode = opts.processCode(sourceCode);
- // console.log('\n\n\n *** \n', JSON.stringify(sourceCode));
- var validate;
- try {
- var makeValidate = new Function(
- 'self',
- 'RULES',
- 'formats',
- 'root',
- 'refVal',
- 'defaults',
- 'customRules',
- 'equal',
- 'ucs2length',
- 'ValidationError',
- sourceCode
- );
-
- validate = makeValidate(
- self,
- RULES,
- formats,
- root,
- refVal,
- defaults,
- customRules,
- equal,
- ucs2length,
- ValidationError
- );
-
- refVal[0] = validate;
- } catch(e) {
- self.logger.error('Error compiling schema, function code:', sourceCode);
- throw e;
- }
-
- validate.schema = _schema;
- validate.errors = null;
- validate.refs = refs;
- validate.refVal = refVal;
- validate.root = isRoot ? validate : _root;
- if ($async) validate.$async = true;
- if (opts.sourceCode === true) {
- validate.source = {
- code: sourceCode,
- patterns: patterns,
- defaults: defaults
- };
- }
-
- return validate;
- }
-
- function resolveRef(baseId, ref, isRoot) {
- ref = resolve.url(baseId, ref);
- var refIndex = refs[ref];
- var _refVal, refCode;
- if (refIndex !== undefined) {
- _refVal = refVal[refIndex];
- refCode = 'refVal[' + refIndex + ']';
- return resolvedRef(_refVal, refCode);
- }
- if (!isRoot && root.refs) {
- var rootRefId = root.refs[ref];
- if (rootRefId !== undefined) {
- _refVal = root.refVal[rootRefId];
- refCode = addLocalRef(ref, _refVal);
- return resolvedRef(_refVal, refCode);
- }
- }
-
- refCode = addLocalRef(ref);
- var v = resolve.call(self, localCompile, root, ref);
- if (v === undefined) {
- var localSchema = localRefs && localRefs[ref];
- if (localSchema) {
- v = resolve.inlineRef(localSchema, opts.inlineRefs)
- ? localSchema
- : compile.call(self, localSchema, root, localRefs, baseId);
- }
- }
-
- if (v === undefined) {
- removeLocalRef(ref);
- } else {
- replaceLocalRef(ref, v);
- return resolvedRef(v, refCode);
- }
- }
-
- function addLocalRef(ref, v) {
- var refId = refVal.length;
- refVal[refId] = v;
- refs[ref] = refId;
- return 'refVal' + refId;
- }
-
- function removeLocalRef(ref) {
- delete refs[ref];
- }
-
- function replaceLocalRef(ref, v) {
- var refId = refs[ref];
- refVal[refId] = v;
- }
-
- function resolvedRef(refVal, code) {
- return typeof refVal == 'object' || typeof refVal == 'boolean'
- ? { code: code, schema: refVal, inline: true }
- : { code: code, $async: refVal && !!refVal.$async };
- }
-
- function usePattern(regexStr) {
- var index = patternsHash[regexStr];
- if (index === undefined) {
- index = patternsHash[regexStr] = patterns.length;
- patterns[index] = regexStr;
- }
- return 'pattern' + index;
- }
-
- function useDefault(value) {
- switch (typeof value) {
- case 'boolean':
- case 'number':
- return '' + value;
- case 'string':
- return util.toQuotedString(value);
- case 'object':
- if (value === null) return 'null';
- var valueStr = stableStringify(value);
- var index = defaultsHash[valueStr];
- if (index === undefined) {
- index = defaultsHash[valueStr] = defaults.length;
- defaults[index] = value;
- }
- return 'default' + index;
- }
- }
-
- function useCustomRule(rule, schema, parentSchema, it) {
- if (self._opts.validateSchema !== false) {
- var deps = rule.definition.dependencies;
- if (deps && !deps.every(function(keyword) {
- return Object.prototype.hasOwnProperty.call(parentSchema, keyword);
- }))
- throw new Error('parent schema must have all required keywords: ' + deps.join(','));
-
- var validateSchema = rule.definition.validateSchema;
- if (validateSchema) {
- var valid = validateSchema(schema);
- if (!valid) {
- var message = 'keyword schema is invalid: ' + self.errorsText(validateSchema.errors);
- if (self._opts.validateSchema == 'log') self.logger.error(message);
- else throw new Error(message);
- }
- }
- }
-
- var compile = rule.definition.compile
- , inline = rule.definition.inline
- , macro = rule.definition.macro;
-
- var validate;
- if (compile) {
- validate = compile.call(self, schema, parentSchema, it);
- } else if (macro) {
- validate = macro.call(self, schema, parentSchema, it);
- if (opts.validateSchema !== false) self.validateSchema(validate, true);
- } else if (inline) {
- validate = inline.call(self, it, rule.keyword, schema, parentSchema);
- } else {
- validate = rule.definition.validate;
- if (!validate) return;
- }
-
- if (validate === undefined)
- throw new Error('custom keyword "' + rule.keyword + '"failed to compile');
-
- var index = customRules.length;
- customRules[index] = validate;
-
- return {
- code: 'customRule' + index,
- validate: validate
- };
- }
-}
-
-
-/**
- * Checks if the schema is currently compiled
- * @this Ajv
- * @param {Object} schema schema to compile
- * @param {Object} root root object
- * @param {String} baseId base schema ID
- * @return {Object} object with properties "index" (compilation index) and "compiling" (boolean)
- */
-function checkCompiling(schema, root, baseId) {
- /* jshint validthis: true */
- var index = compIndex.call(this, schema, root, baseId);
- if (index >= 0) return { index: index, compiling: true };
- index = this._compilations.length;
- this._compilations[index] = {
- schema: schema,
- root: root,
- baseId: baseId
- };
- return { index: index, compiling: false };
-}
-
-
-/**
- * Removes the schema from the currently compiled list
- * @this Ajv
- * @param {Object} schema schema to compile
- * @param {Object} root root object
- * @param {String} baseId base schema ID
- */
-function endCompiling(schema, root, baseId) {
- /* jshint validthis: true */
- var i = compIndex.call(this, schema, root, baseId);
- if (i >= 0) this._compilations.splice(i, 1);
-}
-
-
-/**
- * Index of schema compilation in the currently compiled list
- * @this Ajv
- * @param {Object} schema schema to compile
- * @param {Object} root root object
- * @param {String} baseId base schema ID
- * @return {Integer} compilation index
- */
-function compIndex(schema, root, baseId) {
- /* jshint validthis: true */
- for (var i=0; i= 0xD800 && value <= 0xDBFF && pos < len) {
- // high surrogate, and there is a next character
- value = str.charCodeAt(pos);
- if ((value & 0xFC00) == 0xDC00) pos++; // low surrogate
- }
- }
- return length;
-};
-
-},{}],10:[function(require,module,exports){
-'use strict';
-
-
-module.exports = {
- copy: copy,
- checkDataType: checkDataType,
- checkDataTypes: checkDataTypes,
- coerceToTypes: coerceToTypes,
- toHash: toHash,
- getProperty: getProperty,
- escapeQuotes: escapeQuotes,
- equal: require('fast-deep-equal'),
- ucs2length: require('./ucs2length'),
- varOccurences: varOccurences,
- varReplace: varReplace,
- cleanUpCode: cleanUpCode,
- finalCleanUpCode: finalCleanUpCode,
- schemaHasRules: schemaHasRules,
- schemaHasRulesExcept: schemaHasRulesExcept,
- schemaUnknownRules: schemaUnknownRules,
- toQuotedString: toQuotedString,
- getPathExpr: getPathExpr,
- getPath: getPath,
- getData: getData,
- unescapeFragment: unescapeFragment,
- unescapeJsonPointer: unescapeJsonPointer,
- escapeFragment: escapeFragment,
- escapeJsonPointer: escapeJsonPointer
-};
-
-
-function copy(o, to) {
- to = to || {};
- for (var key in o) to[key] = o[key];
- return to;
-}
-
-
-function checkDataType(dataType, data, negate) {
- var EQUAL = negate ? ' !== ' : ' === '
- , AND = negate ? ' || ' : ' && '
- , OK = negate ? '!' : ''
- , NOT = negate ? '' : '!';
- switch (dataType) {
- case 'null': return data + EQUAL + 'null';
- case 'array': return OK + 'Array.isArray(' + data + ')';
- case 'object': return '(' + OK + data + AND +
- 'typeof ' + data + EQUAL + '"object"' + AND +
- NOT + 'Array.isArray(' + data + '))';
- case 'integer': return '(typeof ' + data + EQUAL + '"number"' + AND +
- NOT + '(' + data + ' % 1)' +
- AND + data + EQUAL + data + ')';
- default: return 'typeof ' + data + EQUAL + '"' + dataType + '"';
- }
-}
-
-
-function checkDataTypes(dataTypes, data) {
- switch (dataTypes.length) {
- case 1: return checkDataType(dataTypes[0], data, true);
- default:
- var code = '';
- var types = toHash(dataTypes);
- if (types.array && types.object) {
- code = types.null ? '(': '(!' + data + ' || ';
- code += 'typeof ' + data + ' !== "object")';
- delete types.null;
- delete types.array;
- delete types.object;
- }
- if (types.number) delete types.integer;
- for (var t in types)
- code += (code ? ' && ' : '' ) + checkDataType(t, data, true);
-
- return code;
- }
-}
-
-
-var COERCE_TO_TYPES = toHash([ 'string', 'number', 'integer', 'boolean', 'null' ]);
-function coerceToTypes(optionCoerceTypes, dataTypes) {
- if (Array.isArray(dataTypes)) {
- var types = [];
- for (var i=0; i= lvl) throw new Error('Cannot access property/index ' + up + ' levels up, current level is ' + lvl);
- return paths[lvl - up];
- }
-
- if (up > lvl) throw new Error('Cannot access data ' + up + ' levels up, current level is ' + lvl);
- data = 'data' + ((lvl - up) || '');
- if (!jsonPointer) return data;
- }
-
- var expr = data;
- var segments = jsonPointer.split('/');
- for (var i=0; i',
- $notOp = $isMax ? '>' : '<',
- $errorKeyword = undefined;
- if ($isDataExcl) {
- var $schemaValueExcl = it.util.getData($schemaExcl.$data, $dataLvl, it.dataPathArr),
- $exclusive = 'exclusive' + $lvl,
- $exclType = 'exclType' + $lvl,
- $exclIsNumber = 'exclIsNumber' + $lvl,
- $opExpr = 'op' + $lvl,
- $opStr = '\' + ' + $opExpr + ' + \'';
- out += ' var schemaExcl' + ($lvl) + ' = ' + ($schemaValueExcl) + '; ';
- $schemaValueExcl = 'schemaExcl' + $lvl;
- out += ' var ' + ($exclusive) + '; var ' + ($exclType) + ' = typeof ' + ($schemaValueExcl) + '; if (' + ($exclType) + ' != \'boolean\' && ' + ($exclType) + ' != \'undefined\' && ' + ($exclType) + ' != \'number\') { ';
- var $errorKeyword = $exclusiveKeyword;
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || '_exclusiveLimit') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} ';
- if (it.opts.messages !== false) {
- out += ' , message: \'' + ($exclusiveKeyword) + ' should be boolean\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } else if ( ';
- if ($isData) {
- out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || ';
- }
- out += ' ' + ($exclType) + ' == \'number\' ? ( (' + ($exclusive) + ' = ' + ($schemaValue) + ' === undefined || ' + ($schemaValueExcl) + ' ' + ($op) + '= ' + ($schemaValue) + ') ? ' + ($data) + ' ' + ($notOp) + '= ' + ($schemaValueExcl) + ' : ' + ($data) + ' ' + ($notOp) + ' ' + ($schemaValue) + ' ) : ( (' + ($exclusive) + ' = ' + ($schemaValueExcl) + ' === true) ? ' + ($data) + ' ' + ($notOp) + '= ' + ($schemaValue) + ' : ' + ($data) + ' ' + ($notOp) + ' ' + ($schemaValue) + ' ) || ' + ($data) + ' !== ' + ($data) + ') { var op' + ($lvl) + ' = ' + ($exclusive) + ' ? \'' + ($op) + '\' : \'' + ($op) + '=\'; ';
- if ($schema === undefined) {
- $errorKeyword = $exclusiveKeyword;
- $errSchemaPath = it.errSchemaPath + '/' + $exclusiveKeyword;
- $schemaValue = $schemaValueExcl;
- $isData = $isDataExcl;
- }
- } else {
- var $exclIsNumber = typeof $schemaExcl == 'number',
- $opStr = $op;
- if ($exclIsNumber && $isData) {
- var $opExpr = '\'' + $opStr + '\'';
- out += ' if ( ';
- if ($isData) {
- out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || ';
- }
- out += ' ( ' + ($schemaValue) + ' === undefined || ' + ($schemaExcl) + ' ' + ($op) + '= ' + ($schemaValue) + ' ? ' + ($data) + ' ' + ($notOp) + '= ' + ($schemaExcl) + ' : ' + ($data) + ' ' + ($notOp) + ' ' + ($schemaValue) + ' ) || ' + ($data) + ' !== ' + ($data) + ') { ';
- } else {
- if ($exclIsNumber && $schema === undefined) {
- $exclusive = true;
- $errorKeyword = $exclusiveKeyword;
- $errSchemaPath = it.errSchemaPath + '/' + $exclusiveKeyword;
- $schemaValue = $schemaExcl;
- $notOp += '=';
- } else {
- if ($exclIsNumber) $schemaValue = Math[$isMax ? 'min' : 'max']($schemaExcl, $schema);
- if ($schemaExcl === ($exclIsNumber ? $schemaValue : true)) {
- $exclusive = true;
- $errorKeyword = $exclusiveKeyword;
- $errSchemaPath = it.errSchemaPath + '/' + $exclusiveKeyword;
- $notOp += '=';
- } else {
- $exclusive = false;
- $opStr += '=';
- }
- }
- var $opExpr = '\'' + $opStr + '\'';
- out += ' if ( ';
- if ($isData) {
- out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || ';
- }
- out += ' ' + ($data) + ' ' + ($notOp) + ' ' + ($schemaValue) + ' || ' + ($data) + ' !== ' + ($data) + ') { ';
- }
- }
- $errorKeyword = $errorKeyword || $keyword;
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || '_limit') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { comparison: ' + ($opExpr) + ', limit: ' + ($schemaValue) + ', exclusive: ' + ($exclusive) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should be ' + ($opStr) + ' ';
- if ($isData) {
- out += '\' + ' + ($schemaValue);
- } else {
- out += '' + ($schemaValue) + '\'';
- }
- }
- if (it.opts.verbose) {
- out += ' , schema: ';
- if ($isData) {
- out += 'validate.schema' + ($schemaPath);
- } else {
- out += '' + ($schema);
- }
- out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } ';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],14:[function(require,module,exports){
-'use strict';
-module.exports = function generate__limitItems(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $errorKeyword;
- var $data = 'data' + ($dataLvl || '');
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- var $op = $keyword == 'maxItems' ? '>' : '<';
- out += 'if ( ';
- if ($isData) {
- out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || ';
- }
- out += ' ' + ($data) + '.length ' + ($op) + ' ' + ($schemaValue) + ') { ';
- var $errorKeyword = $keyword;
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || '_limitItems') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schemaValue) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should NOT have ';
- if ($keyword == 'maxItems') {
- out += 'more';
- } else {
- out += 'fewer';
- }
- out += ' than ';
- if ($isData) {
- out += '\' + ' + ($schemaValue) + ' + \'';
- } else {
- out += '' + ($schema);
- }
- out += ' items\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: ';
- if ($isData) {
- out += 'validate.schema' + ($schemaPath);
- } else {
- out += '' + ($schema);
- }
- out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += '} ';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],15:[function(require,module,exports){
-'use strict';
-module.exports = function generate__limitLength(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $errorKeyword;
- var $data = 'data' + ($dataLvl || '');
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- var $op = $keyword == 'maxLength' ? '>' : '<';
- out += 'if ( ';
- if ($isData) {
- out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || ';
- }
- if (it.opts.unicode === false) {
- out += ' ' + ($data) + '.length ';
- } else {
- out += ' ucs2length(' + ($data) + ') ';
- }
- out += ' ' + ($op) + ' ' + ($schemaValue) + ') { ';
- var $errorKeyword = $keyword;
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || '_limitLength') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schemaValue) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should NOT be ';
- if ($keyword == 'maxLength') {
- out += 'longer';
- } else {
- out += 'shorter';
- }
- out += ' than ';
- if ($isData) {
- out += '\' + ' + ($schemaValue) + ' + \'';
- } else {
- out += '' + ($schema);
- }
- out += ' characters\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: ';
- if ($isData) {
- out += 'validate.schema' + ($schemaPath);
- } else {
- out += '' + ($schema);
- }
- out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += '} ';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],16:[function(require,module,exports){
-'use strict';
-module.exports = function generate__limitProperties(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $errorKeyword;
- var $data = 'data' + ($dataLvl || '');
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- var $op = $keyword == 'maxProperties' ? '>' : '<';
- out += 'if ( ';
- if ($isData) {
- out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || ';
- }
- out += ' Object.keys(' + ($data) + ').length ' + ($op) + ' ' + ($schemaValue) + ') { ';
- var $errorKeyword = $keyword;
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || '_limitProperties') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schemaValue) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should NOT have ';
- if ($keyword == 'maxProperties') {
- out += 'more';
- } else {
- out += 'fewer';
- }
- out += ' than ';
- if ($isData) {
- out += '\' + ' + ($schemaValue) + ' + \'';
- } else {
- out += '' + ($schema);
- }
- out += ' properties\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: ';
- if ($isData) {
- out += 'validate.schema' + ($schemaPath);
- } else {
- out += '' + ($schema);
- }
- out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += '} ';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],17:[function(require,module,exports){
-'use strict';
-module.exports = function generate_allOf(it, $keyword, $ruleType) {
- var out = ' ';
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $it = it.util.copy(it);
- var $closingBraces = '';
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- var $currentBaseId = $it.baseId,
- $allSchemasEmpty = true;
- var arr1 = $schema;
- if (arr1) {
- var $sch, $i = -1,
- l1 = arr1.length - 1;
- while ($i < l1) {
- $sch = arr1[$i += 1];
- if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) {
- $allSchemasEmpty = false;
- $it.schema = $sch;
- $it.schemaPath = $schemaPath + '[' + $i + ']';
- $it.errSchemaPath = $errSchemaPath + '/' + $i;
- out += ' ' + (it.validate($it)) + ' ';
- $it.baseId = $currentBaseId;
- if ($breakOnError) {
- out += ' if (' + ($nextValid) + ') { ';
- $closingBraces += '}';
- }
- }
- }
- }
- if ($breakOnError) {
- if ($allSchemasEmpty) {
- out += ' if (true) { ';
- } else {
- out += ' ' + ($closingBraces.slice(0, -1)) + ' ';
- }
- }
- out = it.util.cleanUpCode(out);
- return out;
-}
-
-},{}],18:[function(require,module,exports){
-'use strict';
-module.exports = function generate_anyOf(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $errs = 'errs__' + $lvl;
- var $it = it.util.copy(it);
- var $closingBraces = '';
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- var $noEmptySchema = $schema.every(function($sch) {
- return (it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all));
- });
- if ($noEmptySchema) {
- var $currentBaseId = $it.baseId;
- out += ' var ' + ($errs) + ' = errors; var ' + ($valid) + ' = false; ';
- var $wasComposite = it.compositeRule;
- it.compositeRule = $it.compositeRule = true;
- var arr1 = $schema;
- if (arr1) {
- var $sch, $i = -1,
- l1 = arr1.length - 1;
- while ($i < l1) {
- $sch = arr1[$i += 1];
- $it.schema = $sch;
- $it.schemaPath = $schemaPath + '[' + $i + ']';
- $it.errSchemaPath = $errSchemaPath + '/' + $i;
- out += ' ' + (it.validate($it)) + ' ';
- $it.baseId = $currentBaseId;
- out += ' ' + ($valid) + ' = ' + ($valid) + ' || ' + ($nextValid) + '; if (!' + ($valid) + ') { ';
- $closingBraces += '}';
- }
- }
- it.compositeRule = $it.compositeRule = $wasComposite;
- out += ' ' + ($closingBraces) + ' if (!' + ($valid) + ') { var err = '; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('anyOf') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should match some schema in anyOf\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError(vErrors); ';
- } else {
- out += ' validate.errors = vErrors; return false; ';
- }
- }
- out += ' } else { errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } ';
- if (it.opts.allErrors) {
- out += ' } ';
- }
- out = it.util.cleanUpCode(out);
- } else {
- if ($breakOnError) {
- out += ' if (true) { ';
- }
- }
- return out;
-}
-
-},{}],19:[function(require,module,exports){
-'use strict';
-module.exports = function generate_comment(it, $keyword, $ruleType) {
- var out = ' ';
- var $schema = it.schema[$keyword];
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $comment = it.util.toQuotedString($schema);
- if (it.opts.$comment === true) {
- out += ' console.log(' + ($comment) + ');';
- } else if (typeof it.opts.$comment == 'function') {
- out += ' self._opts.$comment(' + ($comment) + ', ' + (it.util.toQuotedString($errSchemaPath)) + ', validate.root.schema);';
- }
- return out;
-}
-
-},{}],20:[function(require,module,exports){
-'use strict';
-module.exports = function generate_const(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- if (!$isData) {
- out += ' var schema' + ($lvl) + ' = validate.schema' + ($schemaPath) + ';';
- }
- out += 'var ' + ($valid) + ' = equal(' + ($data) + ', schema' + ($lvl) + '); if (!' + ($valid) + ') { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('const') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { allowedValue: schema' + ($lvl) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should be equal to constant\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' }';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],21:[function(require,module,exports){
-'use strict';
-module.exports = function generate_contains(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $errs = 'errs__' + $lvl;
- var $it = it.util.copy(it);
- var $closingBraces = '';
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- var $idx = 'i' + $lvl,
- $dataNxt = $it.dataLevel = it.dataLevel + 1,
- $nextData = 'data' + $dataNxt,
- $currentBaseId = it.baseId,
- $nonEmptySchema = (it.opts.strictKeywords ? typeof $schema == 'object' && Object.keys($schema).length > 0 : it.util.schemaHasRules($schema, it.RULES.all));
- out += 'var ' + ($errs) + ' = errors;var ' + ($valid) + ';';
- if ($nonEmptySchema) {
- var $wasComposite = it.compositeRule;
- it.compositeRule = $it.compositeRule = true;
- $it.schema = $schema;
- $it.schemaPath = $schemaPath;
- $it.errSchemaPath = $errSchemaPath;
- out += ' var ' + ($nextValid) + ' = false; for (var ' + ($idx) + ' = 0; ' + ($idx) + ' < ' + ($data) + '.length; ' + ($idx) + '++) { ';
- $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true);
- var $passData = $data + '[' + $idx + ']';
- $it.dataPathArr[$dataNxt] = $idx;
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
- } else {
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
- }
- out += ' if (' + ($nextValid) + ') break; } ';
- it.compositeRule = $it.compositeRule = $wasComposite;
- out += ' ' + ($closingBraces) + ' if (!' + ($nextValid) + ') {';
- } else {
- out += ' if (' + ($data) + '.length == 0) {';
- }
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('contains') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should contain a valid item\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } else { ';
- if ($nonEmptySchema) {
- out += ' errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } ';
- }
- if (it.opts.allErrors) {
- out += ' } ';
- }
- out = it.util.cleanUpCode(out);
- return out;
-}
-
-},{}],22:[function(require,module,exports){
-'use strict';
-module.exports = function generate_custom(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $errorKeyword;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $errs = 'errs__' + $lvl;
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- var $rule = this,
- $definition = 'definition' + $lvl,
- $rDef = $rule.definition,
- $closingBraces = '';
- var $compile, $inline, $macro, $ruleValidate, $validateCode;
- if ($isData && $rDef.$data) {
- $validateCode = 'keywordValidate' + $lvl;
- var $validateSchema = $rDef.validateSchema;
- out += ' var ' + ($definition) + ' = RULES.custom[\'' + ($keyword) + '\'].definition; var ' + ($validateCode) + ' = ' + ($definition) + '.validate;';
- } else {
- $ruleValidate = it.useCustomRule($rule, $schema, it.schema, it);
- if (!$ruleValidate) return;
- $schemaValue = 'validate.schema' + $schemaPath;
- $validateCode = $ruleValidate.code;
- $compile = $rDef.compile;
- $inline = $rDef.inline;
- $macro = $rDef.macro;
- }
- var $ruleErrs = $validateCode + '.errors',
- $i = 'i' + $lvl,
- $ruleErr = 'ruleErr' + $lvl,
- $asyncKeyword = $rDef.async;
- if ($asyncKeyword && !it.async) throw new Error('async keyword in sync schema');
- if (!($inline || $macro)) {
- out += '' + ($ruleErrs) + ' = null;';
- }
- out += 'var ' + ($errs) + ' = errors;var ' + ($valid) + ';';
- if ($isData && $rDef.$data) {
- $closingBraces += '}';
- out += ' if (' + ($schemaValue) + ' === undefined) { ' + ($valid) + ' = true; } else { ';
- if ($validateSchema) {
- $closingBraces += '}';
- out += ' ' + ($valid) + ' = ' + ($definition) + '.validateSchema(' + ($schemaValue) + '); if (' + ($valid) + ') { ';
- }
- }
- if ($inline) {
- if ($rDef.statements) {
- out += ' ' + ($ruleValidate.validate) + ' ';
- } else {
- out += ' ' + ($valid) + ' = ' + ($ruleValidate.validate) + '; ';
- }
- } else if ($macro) {
- var $it = it.util.copy(it);
- var $closingBraces = '';
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- $it.schema = $ruleValidate.validate;
- $it.schemaPath = '';
- var $wasComposite = it.compositeRule;
- it.compositeRule = $it.compositeRule = true;
- var $code = it.validate($it).replace(/validate\.schema/g, $validateCode);
- it.compositeRule = $it.compositeRule = $wasComposite;
- out += ' ' + ($code);
- } else {
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = '';
- out += ' ' + ($validateCode) + '.call( ';
- if (it.opts.passContext) {
- out += 'this';
- } else {
- out += 'self';
- }
- if ($compile || $rDef.schema === false) {
- out += ' , ' + ($data) + ' ';
- } else {
- out += ' , ' + ($schemaValue) + ' , ' + ($data) + ' , validate.schema' + (it.schemaPath) + ' ';
- }
- out += ' , (dataPath || \'\')';
- if (it.errorPath != '""') {
- out += ' + ' + (it.errorPath);
- }
- var $parentData = $dataLvl ? 'data' + (($dataLvl - 1) || '') : 'parentData',
- $parentDataProperty = $dataLvl ? it.dataPathArr[$dataLvl] : 'parentDataProperty';
- out += ' , ' + ($parentData) + ' , ' + ($parentDataProperty) + ' , rootData ) ';
- var def_callRuleValidate = out;
- out = $$outStack.pop();
- if ($rDef.errors === false) {
- out += ' ' + ($valid) + ' = ';
- if ($asyncKeyword) {
- out += 'await ';
- }
- out += '' + (def_callRuleValidate) + '; ';
- } else {
- if ($asyncKeyword) {
- $ruleErrs = 'customErrors' + $lvl;
- out += ' var ' + ($ruleErrs) + ' = null; try { ' + ($valid) + ' = await ' + (def_callRuleValidate) + '; } catch (e) { ' + ($valid) + ' = false; if (e instanceof ValidationError) ' + ($ruleErrs) + ' = e.errors; else throw e; } ';
- } else {
- out += ' ' + ($ruleErrs) + ' = null; ' + ($valid) + ' = ' + (def_callRuleValidate) + '; ';
- }
- }
- }
- if ($rDef.modifying) {
- out += ' if (' + ($parentData) + ') ' + ($data) + ' = ' + ($parentData) + '[' + ($parentDataProperty) + '];';
- }
- out += '' + ($closingBraces);
- if ($rDef.valid) {
- if ($breakOnError) {
- out += ' if (true) { ';
- }
- } else {
- out += ' if ( ';
- if ($rDef.valid === undefined) {
- out += ' !';
- if ($macro) {
- out += '' + ($nextValid);
- } else {
- out += '' + ($valid);
- }
- } else {
- out += ' ' + (!$rDef.valid) + ' ';
- }
- out += ') { ';
- $errorKeyword = $rule.keyword;
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = '';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || 'custom') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { keyword: \'' + ($rule.keyword) + '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should pass "' + ($rule.keyword) + '" keyword validation\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- var def_customError = out;
- out = $$outStack.pop();
- if ($inline) {
- if ($rDef.errors) {
- if ($rDef.errors != 'full') {
- out += ' for (var ' + ($i) + '=' + ($errs) + '; ' + ($i) + '<errors; ' + ($i) + '++) { var ' + ($ruleErr) + ' = vErrors[' + ($i) + ']; if (' + ($ruleErr) + '.dataPath === undefined) ' + ($ruleErr) + '.dataPath = (dataPath || \'\') + ' + (it.errorPath) + ';  if (' + ($ruleErr) + '.schemaPath === undefined) { ' + ($ruleErr) + '.schemaPath = "' + ($errSchemaPath) + '"; } ';
- if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) {
- out += ' ' + ($nextValid) + ' = true; if ( ' + ($data) + (it.util.getProperty($property)) + ' !== undefined ';
- if ($ownProperties) {
- out += ' && Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($property)) + '\') ';
- }
- out += ') { ';
- $it.schema = $sch;
- $it.schemaPath = $schemaPath + it.util.getProperty($property);
- $it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($property);
- out += ' ' + (it.validate($it)) + ' ';
- $it.baseId = $currentBaseId;
- out += ' } ';
- if ($breakOnError) {
- out += ' if (' + ($nextValid) + ') { ';
- $closingBraces += '}';
- }
- }
- }
- if ($breakOnError) {
- out += ' ' + ($closingBraces) + ' if (' + ($errs) + ' == errors) {';
- }
- out = it.util.cleanUpCode(out);
- return out;
-}
-
-},{}],24:[function(require,module,exports){
-'use strict';
-module.exports = function generate_enum(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- var $i = 'i' + $lvl,
- $vSchema = 'schema' + $lvl;
- if (!$isData) {
- out += ' var ' + ($vSchema) + ' = validate.schema' + ($schemaPath) + ';';
- }
- out += 'var ' + ($valid) + ';';
- if ($isData) {
- out += ' if (schema' + ($lvl) + ' === undefined) ' + ($valid) + ' = true; else if (!Array.isArray(schema' + ($lvl) + ')) ' + ($valid) + ' = false; else {';
- }
- out += '' + ($valid) + ' = false;for (var ' + ($i) + '=0; ' + ($i) + '<' + ($vSchema) + '.length; ' + ($i) + '++) if (equal(' + ($data) + ', ' + ($vSchema) + '[' + ($i) + '])) { ' + ($valid) + ' = true; break; }';
- if ($isData) {
- out += ' } ';
- }
- out += ' if (!' + ($valid) + ') { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('enum') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { allowedValues: schema' + ($lvl) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should be equal to one of the allowed values\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' }';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],25:[function(require,module,exports){
-'use strict';
-module.exports = function generate_format(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- if (it.opts.format === false) {
- if ($breakOnError) {
- out += ' if (true) { ';
- }
- return out;
- }
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- var $unknownFormats = it.opts.unknownFormats,
- $allowUnknown = Array.isArray($unknownFormats);
- if ($isData) {
- var $format = 'format' + $lvl,
- $isObject = 'isObject' + $lvl,
- $formatType = 'formatType' + $lvl;
- out += ' var ' + ($format) + ' = formats[' + ($schemaValue) + ']; var ' + ($isObject) + ' = typeof ' + ($format) + ' == \'object\' && !(' + ($format) + ' instanceof RegExp) && ' + ($format) + '.validate; var ' + ($formatType) + ' = ' + ($isObject) + ' && ' + ($format) + '.type || \'string\'; if (' + ($isObject) + ') { ';
- if (it.async) {
- out += ' var async' + ($lvl) + ' = ' + ($format) + '.async; ';
- }
- out += ' ' + ($format) + ' = ' + ($format) + '.validate; } if ( ';
- if ($isData) {
- out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'string\') || ';
- }
- out += ' (';
- if ($unknownFormats != 'ignore') {
- out += ' (' + ($schemaValue) + ' && !' + ($format) + ' ';
- if ($allowUnknown) {
- out += ' && self._opts.unknownFormats.indexOf(' + ($schemaValue) + ') == -1 ';
- }
- out += ') || ';
- }
- out += ' (' + ($format) + ' && ' + ($formatType) + ' == \'' + ($ruleType) + '\' && !(typeof ' + ($format) + ' == \'function\' ? ';
- if (it.async) {
- out += ' (async' + ($lvl) + ' ? await ' + ($format) + '(' + ($data) + ') : ' + ($format) + '(' + ($data) + ')) ';
- } else {
- out += ' ' + ($format) + '(' + ($data) + ') ';
- }
- out += ' : ' + ($format) + '.test(' + ($data) + '))))) {';
- } else {
- var $format = it.formats[$schema];
- if (!$format) {
- if ($unknownFormats == 'ignore') {
- it.logger.warn('unknown format "' + $schema + '" ignored in schema at path "' + it.errSchemaPath + '"');
- if ($breakOnError) {
- out += ' if (true) { ';
- }
- return out;
- } else if ($allowUnknown && $unknownFormats.indexOf($schema) >= 0) {
- if ($breakOnError) {
- out += ' if (true) { ';
- }
- return out;
- } else {
- throw new Error('unknown format "' + $schema + '" is used in schema at path "' + it.errSchemaPath + '"');
- }
- }
- var $isObject = typeof $format == 'object' && !($format instanceof RegExp) && $format.validate;
- var $formatType = $isObject && $format.type || 'string';
- if ($isObject) {
- var $async = $format.async === true;
- $format = $format.validate;
- }
- if ($formatType != $ruleType) {
- if ($breakOnError) {
- out += ' if (true) { ';
- }
- return out;
- }
- if ($async) {
- if (!it.async) throw new Error('async format in sync schema');
- var $formatRef = 'formats' + it.util.getProperty($schema) + '.validate';
- out += ' if (!(await ' + ($formatRef) + '(' + ($data) + '))) { ';
- } else {
- out += ' if (! ';
- var $formatRef = 'formats' + it.util.getProperty($schema);
- if ($isObject) $formatRef += '.validate';
- if (typeof $format == 'function') {
- out += ' ' + ($formatRef) + '(' + ($data) + ') ';
- } else {
- out += ' ' + ($formatRef) + '.test(' + ($data) + ') ';
- }
- out += ') { ';
- }
- }
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('format') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { format: ';
- if ($isData) {
- out += '' + ($schemaValue);
- } else {
- out += '' + (it.util.toQuotedString($schema));
- }
- out += ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should match format "';
- if ($isData) {
- out += '\' + ' + ($schemaValue) + ' + \'';
- } else {
- out += '' + (it.util.escapeQuotes($schema));
- }
- out += '"\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: ';
- if ($isData) {
- out += 'validate.schema' + ($schemaPath);
- } else {
- out += '' + (it.util.toQuotedString($schema));
- }
- out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } ';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],26:[function(require,module,exports){
-'use strict';
-module.exports = function generate_if(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $errs = 'errs__' + $lvl;
- var $it = it.util.copy(it);
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- var $thenSch = it.schema['then'],
- $elseSch = it.schema['else'],
- $thenPresent = $thenSch !== undefined && (it.opts.strictKeywords ? typeof $thenSch == 'object' && Object.keys($thenSch).length > 0 : it.util.schemaHasRules($thenSch, it.RULES.all)),
- $elsePresent = $elseSch !== undefined && (it.opts.strictKeywords ? typeof $elseSch == 'object' && Object.keys($elseSch).length > 0 : it.util.schemaHasRules($elseSch, it.RULES.all)),
- $currentBaseId = $it.baseId;
- if ($thenPresent || $elsePresent) {
- var $ifClause;
- $it.createErrors = false;
- $it.schema = $schema;
- $it.schemaPath = $schemaPath;
- $it.errSchemaPath = $errSchemaPath;
- out += ' var ' + ($errs) + ' = errors; var ' + ($valid) + ' = true; ';
- var $wasComposite = it.compositeRule;
- it.compositeRule = $it.compositeRule = true;
- out += ' ' + (it.validate($it)) + ' ';
- $it.baseId = $currentBaseId;
- $it.createErrors = true;
- out += ' errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } ';
- it.compositeRule = $it.compositeRule = $wasComposite;
- if ($thenPresent) {
- out += ' if (' + ($nextValid) + ') { ';
- $it.schema = it.schema['then'];
- $it.schemaPath = it.schemaPath + '.then';
- $it.errSchemaPath = it.errSchemaPath + '/then';
- out += ' ' + (it.validate($it)) + ' ';
- $it.baseId = $currentBaseId;
- out += ' ' + ($valid) + ' = ' + ($nextValid) + '; ';
- if ($thenPresent && $elsePresent) {
- $ifClause = 'ifClause' + $lvl;
- out += ' var ' + ($ifClause) + ' = \'then\'; ';
- } else {
- $ifClause = '\'then\'';
- }
- out += ' } ';
- if ($elsePresent) {
- out += ' else { ';
- }
- } else {
- out += ' if (!' + ($nextValid) + ') { ';
- }
- if ($elsePresent) {
- $it.schema = it.schema['else'];
- $it.schemaPath = it.schemaPath + '.else';
- $it.errSchemaPath = it.errSchemaPath + '/else';
- out += ' ' + (it.validate($it)) + ' ';
- $it.baseId = $currentBaseId;
- out += ' ' + ($valid) + ' = ' + ($nextValid) + '; ';
- if ($thenPresent && $elsePresent) {
- $ifClause = 'ifClause' + $lvl;
- out += ' var ' + ($ifClause) + ' = \'else\'; ';
- } else {
- $ifClause = '\'else\'';
- }
- out += ' } ';
- }
- out += ' if (!' + ($valid) + ') { var err = '; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('if') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { failingKeyword: ' + ($ifClause) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should match "\' + ' + ($ifClause) + ' + \'" schema\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError(vErrors); ';
- } else {
- out += ' validate.errors = vErrors; return false; ';
- }
- }
- out += ' } ';
- if ($breakOnError) {
- out += ' else { ';
- }
- out = it.util.cleanUpCode(out);
- } else {
- if ($breakOnError) {
- out += ' if (true) { ';
- }
- }
- return out;
-}
-
-},{}],27:[function(require,module,exports){
-'use strict';
-
-//all requires must be explicit because browserify won't work with dynamic requires
-module.exports = {
- '$ref': require('./ref'),
- allOf: require('./allOf'),
- anyOf: require('./anyOf'),
- '$comment': require('./comment'),
- const: require('./const'),
- contains: require('./contains'),
- dependencies: require('./dependencies'),
- 'enum': require('./enum'),
- format: require('./format'),
- 'if': require('./if'),
- items: require('./items'),
- maximum: require('./_limit'),
- minimum: require('./_limit'),
- maxItems: require('./_limitItems'),
- minItems: require('./_limitItems'),
- maxLength: require('./_limitLength'),
- minLength: require('./_limitLength'),
- maxProperties: require('./_limitProperties'),
- minProperties: require('./_limitProperties'),
- multipleOf: require('./multipleOf'),
- not: require('./not'),
- oneOf: require('./oneOf'),
- pattern: require('./pattern'),
- properties: require('./properties'),
- propertyNames: require('./propertyNames'),
- required: require('./required'),
- uniqueItems: require('./uniqueItems'),
- validate: require('./validate')
-};
-
-},{"./_limit":13,"./_limitItems":14,"./_limitLength":15,"./_limitProperties":16,"./allOf":17,"./anyOf":18,"./comment":19,"./const":20,"./contains":21,"./dependencies":23,"./enum":24,"./format":25,"./if":26,"./items":28,"./multipleOf":29,"./not":30,"./oneOf":31,"./pattern":32,"./properties":33,"./propertyNames":34,"./ref":35,"./required":36,"./uniqueItems":37,"./validate":38}],28:[function(require,module,exports){
-'use strict';
-module.exports = function generate_items(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $errs = 'errs__' + $lvl;
- var $it = it.util.copy(it);
- var $closingBraces = '';
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- var $idx = 'i' + $lvl,
- $dataNxt = $it.dataLevel = it.dataLevel + 1,
- $nextData = 'data' + $dataNxt,
- $currentBaseId = it.baseId;
- out += 'var ' + ($errs) + ' = errors;var ' + ($valid) + ';';
- if (Array.isArray($schema)) {
- var $additionalItems = it.schema.additionalItems;
- if ($additionalItems === false) {
- out += ' ' + ($valid) + ' = ' + ($data) + '.length <= ' + ($schema.length) + '; ';
- var $currErrSchemaPath = $errSchemaPath;
- $errSchemaPath = it.errSchemaPath + '/additionalItems';
- out += ' if (!' + ($valid) + ') { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('additionalItems') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schema.length) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should NOT have more than ' + ($schema.length) + ' items\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: false , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } ';
- $errSchemaPath = $currErrSchemaPath;
- if ($breakOnError) {
- $closingBraces += '}';
- out += ' else { ';
- }
- }
- var arr1 = $schema;
- if (arr1) {
- var $sch, $i = -1,
- l1 = arr1.length - 1;
- while ($i < l1) {
- $sch = arr1[$i += 1];
- if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) {
- out += ' ' + ($nextValid) + ' = true; if (' + ($data) + '.length > ' + ($i) + ') { ';
- var $passData = $data + '[' + $i + ']';
- $it.schema = $sch;
- $it.schemaPath = $schemaPath + '[' + $i + ']';
- $it.errSchemaPath = $errSchemaPath + '/' + $i;
- $it.errorPath = it.util.getPathExpr(it.errorPath, $i, it.opts.jsonPointers, true);
- $it.dataPathArr[$dataNxt] = $i;
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
- } else {
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
- }
- out += ' } ';
- if ($breakOnError) {
- out += ' if (' + ($nextValid) + ') { ';
- $closingBraces += '}';
- }
- }
- }
- }
- if (typeof $additionalItems == 'object' && (it.opts.strictKeywords ? typeof $additionalItems == 'object' && Object.keys($additionalItems).length > 0 : it.util.schemaHasRules($additionalItems, it.RULES.all))) {
- $it.schema = $additionalItems;
- $it.schemaPath = it.schemaPath + '.additionalItems';
- $it.errSchemaPath = it.errSchemaPath + '/additionalItems';
- out += ' ' + ($nextValid) + ' = true; if (' + ($data) + '.length > ' + ($schema.length) + ') { for (var ' + ($idx) + ' = ' + ($schema.length) + '; ' + ($idx) + ' < ' + ($data) + '.length; ' + ($idx) + '++) { ';
- $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true);
- var $passData = $data + '[' + $idx + ']';
- $it.dataPathArr[$dataNxt] = $idx;
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
- } else {
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
- }
- if ($breakOnError) {
- out += ' if (!' + ($nextValid) + ') break; ';
- }
- out += ' } } ';
- if ($breakOnError) {
- out += ' if (' + ($nextValid) + ') { ';
- $closingBraces += '}';
- }
- }
- } else if ((it.opts.strictKeywords ? typeof $schema == 'object' && Object.keys($schema).length > 0 : it.util.schemaHasRules($schema, it.RULES.all))) {
- $it.schema = $schema;
- $it.schemaPath = $schemaPath;
- $it.errSchemaPath = $errSchemaPath;
- out += ' for (var ' + ($idx) + ' = ' + (0) + '; ' + ($idx) + ' < ' + ($data) + '.length; ' + ($idx) + '++) { ';
- $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true);
- var $passData = $data + '[' + $idx + ']';
- $it.dataPathArr[$dataNxt] = $idx;
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
- } else {
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
- }
- if ($breakOnError) {
- out += ' if (!' + ($nextValid) + ') break; ';
- }
- out += ' }';
- }
- if ($breakOnError) {
- out += ' ' + ($closingBraces) + ' if (' + ($errs) + ' == errors) {';
- }
- out = it.util.cleanUpCode(out);
- return out;
-}
-
-},{}],29:[function(require,module,exports){
-'use strict';
-module.exports = function generate_multipleOf(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- out += 'var division' + ($lvl) + ';if (';
- if ($isData) {
- out += ' ' + ($schemaValue) + ' !== undefined && ( typeof ' + ($schemaValue) + ' != \'number\' || ';
- }
- out += ' (division' + ($lvl) + ' = ' + ($data) + ' / ' + ($schemaValue) + ', ';
- if (it.opts.multipleOfPrecision) {
- out += ' Math.abs(Math.round(division' + ($lvl) + ') - division' + ($lvl) + ') > 1e-' + (it.opts.multipleOfPrecision) + ' ';
- } else {
- out += ' division' + ($lvl) + ' !== parseInt(division' + ($lvl) + ') ';
- }
- out += ' ) ';
- if ($isData) {
- out += ' ) ';
- }
- out += ' ) { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('multipleOf') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { multipleOf: ' + ($schemaValue) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should be multiple of ';
- if ($isData) {
- out += '\' + ' + ($schemaValue);
- } else {
- out += '' + ($schemaValue) + '\'';
- }
- }
- if (it.opts.verbose) {
- out += ' , schema: ';
- if ($isData) {
- out += 'validate.schema' + ($schemaPath);
- } else {
- out += '' + ($schema);
- }
- out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += '} ';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],30:[function(require,module,exports){
-'use strict';
-module.exports = function generate_not(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $errs = 'errs__' + $lvl;
- var $it = it.util.copy(it);
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- if ((it.opts.strictKeywords ? typeof $schema == 'object' && Object.keys($schema).length > 0 : it.util.schemaHasRules($schema, it.RULES.all))) {
- $it.schema = $schema;
- $it.schemaPath = $schemaPath;
- $it.errSchemaPath = $errSchemaPath;
- out += ' var ' + ($errs) + ' = errors; ';
- var $wasComposite = it.compositeRule;
- it.compositeRule = $it.compositeRule = true;
- $it.createErrors = false;
- var $allErrorsOption;
- if ($it.opts.allErrors) {
- $allErrorsOption = $it.opts.allErrors;
- $it.opts.allErrors = false;
- }
- out += ' ' + (it.validate($it)) + ' ';
- $it.createErrors = true;
- if ($allErrorsOption) $it.opts.allErrors = $allErrorsOption;
- it.compositeRule = $it.compositeRule = $wasComposite;
- out += ' if (' + ($nextValid) + ') { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('not') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should NOT be valid\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } else { errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; } ';
- if (it.opts.allErrors) {
- out += ' } ';
- }
- } else {
- out += ' var err = '; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('not') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should NOT be valid\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- if ($breakOnError) {
- out += ' if (false) { ';
- }
- }
- return out;
-}
-
-},{}],31:[function(require,module,exports){
-'use strict';
-module.exports = function generate_oneOf(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $errs = 'errs__' + $lvl;
- var $it = it.util.copy(it);
- var $closingBraces = '';
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- var $currentBaseId = $it.baseId,
- $prevValid = 'prevValid' + $lvl,
- $passingSchemas = 'passingSchemas' + $lvl;
- out += 'var ' + ($errs) + ' = errors , ' + ($prevValid) + ' = false , ' + ($valid) + ' = false , ' + ($passingSchemas) + ' = null; ';
- var $wasComposite = it.compositeRule;
- it.compositeRule = $it.compositeRule = true;
- var arr1 = $schema;
- if (arr1) {
- var $sch, $i = -1,
- l1 = arr1.length - 1;
- while ($i < l1) {
- $sch = arr1[$i += 1];
- if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) {
- $it.schema = $sch;
- $it.schemaPath = $schemaPath + '[' + $i + ']';
- $it.errSchemaPath = $errSchemaPath + '/' + $i;
- out += ' ' + (it.validate($it)) + ' ';
- $it.baseId = $currentBaseId;
- } else {
- out += ' var ' + ($nextValid) + ' = true; ';
- }
- if ($i) {
- out += ' if (' + ($nextValid) + ' && ' + ($prevValid) + ') { ' + ($valid) + ' = false; ' + ($passingSchemas) + ' = [' + ($passingSchemas) + ', ' + ($i) + ']; } else { ';
- $closingBraces += '}';
- }
- out += ' if (' + ($nextValid) + ') { ' + ($valid) + ' = ' + ($prevValid) + ' = true; ' + ($passingSchemas) + ' = ' + ($i) + '; }';
- }
- }
- it.compositeRule = $it.compositeRule = $wasComposite;
- out += '' + ($closingBraces) + 'if (!' + ($valid) + ') { var err = '; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('oneOf') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { passingSchemas: ' + ($passingSchemas) + ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should match exactly one schema in oneOf\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError(vErrors); ';
- } else {
- out += ' validate.errors = vErrors; return false; ';
- }
- }
- out += '} else { errors = ' + ($errs) + '; if (vErrors !== null) { if (' + ($errs) + ') vErrors.length = ' + ($errs) + '; else vErrors = null; }';
- if (it.opts.allErrors) {
- out += ' } ';
- }
- return out;
-}
-
-},{}],32:[function(require,module,exports){
-'use strict';
-module.exports = function generate_pattern(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- var $regexp = $isData ? '(new RegExp(' + $schemaValue + '))' : it.usePattern($schema);
- out += 'if ( ';
- if ($isData) {
- out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'string\') || ';
- }
- out += ' !' + ($regexp) + '.test(' + ($data) + ') ) { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('pattern') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { pattern: ';
- if ($isData) {
- out += '' + ($schemaValue);
- } else {
- out += '' + (it.util.toQuotedString($schema));
- }
- out += ' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should match pattern "';
- if ($isData) {
- out += '\' + ' + ($schemaValue) + ' + \'';
- } else {
- out += '' + (it.util.escapeQuotes($schema));
- }
- out += '"\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: ';
- if ($isData) {
- out += 'validate.schema' + ($schemaPath);
- } else {
- out += '' + (it.util.toQuotedString($schema));
- }
- out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += '} ';
- if ($breakOnError) {
- out += ' else { ';
- }
- return out;
-}
-
-},{}],33:[function(require,module,exports){
-'use strict';
-module.exports = function generate_properties(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $errs = 'errs__' + $lvl;
- var $it = it.util.copy(it);
- var $closingBraces = '';
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- var $key = 'key' + $lvl,
- $idx = 'idx' + $lvl,
- $dataNxt = $it.dataLevel = it.dataLevel + 1,
- $nextData = 'data' + $dataNxt,
- $dataProperties = 'dataProperties' + $lvl;
- var $schemaKeys = Object.keys($schema || {}),
- $pProperties = it.schema.patternProperties || {},
- $pPropertyKeys = Object.keys($pProperties),
- $aProperties = it.schema.additionalProperties,
- $someProperties = $schemaKeys.length || $pPropertyKeys.length,
- $noAdditional = $aProperties === false,
- $additionalIsSchema = typeof $aProperties == 'object' && Object.keys($aProperties).length,
- $removeAdditional = it.opts.removeAdditional,
- $checkAdditional = $noAdditional || $additionalIsSchema || $removeAdditional,
- $ownProperties = it.opts.ownProperties,
- $currentBaseId = it.baseId;
- var $required = it.schema.required;
- if ($required && !(it.opts.$data && $required.$data) && $required.length < it.opts.loopRequired) var $requiredHash = it.util.toHash($required);
- out += 'var ' + ($errs) + ' = errors;var ' + ($nextValid) + ' = true;';
- if ($ownProperties) {
- out += ' var ' + ($dataProperties) + ' = undefined;';
- }
- if ($checkAdditional) {
- if ($ownProperties) {
- out += ' ' + ($dataProperties) + ' = ' + ($dataProperties) + ' || Object.keys(' + ($data) + '); for (var ' + ($idx) + '=0; ' + ($idx) + '<' + ($dataProperties) + '.length; ' + ($idx) + '++) { var ' + ($key) + ' = ' + ($dataProperties) + '[' + ($idx) + ']; ';
- } else {
- out += ' for (var ' + ($key) + ' in ' + ($data) + ') { ';
- }
- if ($someProperties) {
- out += ' var isAdditional' + ($lvl) + ' = !(false ';
- if ($schemaKeys.length) {
- if ($schemaKeys.length > 8) {
- out += ' || validate.schema' + ($schemaPath) + '.hasOwnProperty(' + ($key) + ') ';
- } else {
- var arr1 = $schemaKeys;
- if (arr1) {
- var $propertyKey, i1 = -1,
- l1 = arr1.length - 1;
- while (i1 < l1) {
- $propertyKey = arr1[i1 += 1];
- out += ' || ' + ($key) + ' == ' + (it.util.toQuotedString($propertyKey)) + ' ';
- }
- }
- }
- }
- if ($pPropertyKeys.length) {
- var arr2 = $pPropertyKeys;
- if (arr2) {
- var $pProperty, $i = -1,
- l2 = arr2.length - 1;
- while ($i < l2) {
- $pProperty = arr2[$i += 1];
- out += ' || ' + (it.usePattern($pProperty)) + '.test(' + ($key) + ') ';
- }
- }
- }
- out += ' ); if (isAdditional' + ($lvl) + ') { ';
- }
- if ($removeAdditional == 'all') {
- out += ' delete ' + ($data) + '[' + ($key) + ']; ';
- } else {
- var $currentErrorPath = it.errorPath;
- var $additionalProperty = '\' + ' + $key + ' + \'';
- if (it.opts._errorDataPathProperty) {
- it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
- }
- if ($noAdditional) {
- if ($removeAdditional) {
- out += ' delete ' + ($data) + '[' + ($key) + ']; ';
- } else {
- out += ' ' + ($nextValid) + ' = false; ';
- var $currErrSchemaPath = $errSchemaPath;
- $errSchemaPath = it.errSchemaPath + '/additionalProperties';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('additionalProperties') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { additionalProperty: \'' + ($additionalProperty) + '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'';
- if (it.opts._errorDataPathProperty) {
- out += 'is an invalid additional property';
- } else {
- out += 'should NOT have additional properties';
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: false , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- $errSchemaPath = $currErrSchemaPath;
- if ($breakOnError) {
- out += ' break; ';
- }
- }
- } else if ($additionalIsSchema) {
- if ($removeAdditional == 'failing') {
- out += ' var ' + ($errs) + ' = errors; ';
- var $wasComposite = it.compositeRule;
- it.compositeRule = $it.compositeRule = true;
- $it.schema = $aProperties;
- $it.schemaPath = it.schemaPath + '.additionalProperties';
- $it.errSchemaPath = it.errSchemaPath + '/additionalProperties';
- $it.errorPath = it.opts._errorDataPathProperty ? it.errorPath : it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
- var $passData = $data + '[' + $key + ']';
- $it.dataPathArr[$dataNxt] = $key;
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
- } else {
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
- }
- out += ' if (!' + ($nextValid) + ') { errors = ' + ($errs) + '; if (validate.errors !== null) { if (errors) validate.errors.length = errors; else validate.errors = null; } delete ' + ($data) + '[' + ($key) + ']; } ';
- it.compositeRule = $it.compositeRule = $wasComposite;
- } else {
- $it.schema = $aProperties;
- $it.schemaPath = it.schemaPath + '.additionalProperties';
- $it.errSchemaPath = it.errSchemaPath + '/additionalProperties';
- $it.errorPath = it.opts._errorDataPathProperty ? it.errorPath : it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
- var $passData = $data + '[' + $key + ']';
- $it.dataPathArr[$dataNxt] = $key;
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
- } else {
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
- }
- if ($breakOnError) {
- out += ' if (!' + ($nextValid) + ') break; ';
- }
- }
- }
- it.errorPath = $currentErrorPath;
- }
- if ($someProperties) {
- out += ' } ';
- }
- out += ' } ';
- if ($breakOnError) {
- out += ' if (' + ($nextValid) + ') { ';
- $closingBraces += '}';
- }
- }
- var $useDefaults = it.opts.useDefaults && !it.compositeRule;
- if ($schemaKeys.length) {
- var arr3 = $schemaKeys;
- if (arr3) {
- var $propertyKey, i3 = -1,
- l3 = arr3.length - 1;
- while (i3 < l3) {
- $propertyKey = arr3[i3 += 1];
- var $sch = $schema[$propertyKey];
- if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) {
- var $prop = it.util.getProperty($propertyKey),
- $passData = $data + $prop,
- $hasDefault = $useDefaults && $sch.default !== undefined;
- $it.schema = $sch;
- $it.schemaPath = $schemaPath + $prop;
- $it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($propertyKey);
- $it.errorPath = it.util.getPath(it.errorPath, $propertyKey, it.opts.jsonPointers);
- $it.dataPathArr[$dataNxt] = it.util.toQuotedString($propertyKey);
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- $code = it.util.varReplace($code, $nextData, $passData);
- var $useData = $passData;
- } else {
- var $useData = $nextData;
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ';
- }
- if ($hasDefault) {
- out += ' ' + ($code) + ' ';
- } else {
- if ($requiredHash && $requiredHash[$propertyKey]) {
- out += ' if ( ' + ($useData) + ' === undefined ';
- if ($ownProperties) {
- out += ' || ! Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') ';
- }
- out += ') { ' + ($nextValid) + ' = false; ';
- var $currentErrorPath = it.errorPath,
- $currErrSchemaPath = $errSchemaPath,
- $missingProperty = it.util.escapeQuotes($propertyKey);
- if (it.opts._errorDataPathProperty) {
- it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers);
- }
- $errSchemaPath = it.errSchemaPath + '/required';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'';
- if (it.opts._errorDataPathProperty) {
- out += 'is a required property';
- } else {
- out += 'should have required property \\\'' + ($missingProperty) + '\\\'';
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- $errSchemaPath = $currErrSchemaPath;
- it.errorPath = $currentErrorPath;
- out += ' } else { ';
- } else {
- if ($breakOnError) {
- out += ' if ( ' + ($useData) + ' === undefined ';
- if ($ownProperties) {
- out += ' || ! Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') ';
- }
- out += ') { ' + ($nextValid) + ' = true; } else { ';
- } else {
- out += ' if (' + ($useData) + ' !== undefined ';
- if ($ownProperties) {
- out += ' && Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') ';
- }
- out += ' ) { ';
- }
- }
- out += ' ' + ($code) + ' } ';
- }
- }
- if ($breakOnError) {
- out += ' if (' + ($nextValid) + ') { ';
- $closingBraces += '}';
- }
- }
- }
- }
- if ($pPropertyKeys.length) {
- var arr4 = $pPropertyKeys;
- if (arr4) {
- var $pProperty, i4 = -1,
- l4 = arr4.length - 1;
- while (i4 < l4) {
- $pProperty = arr4[i4 += 1];
- var $sch = $pProperties[$pProperty];
- if ((it.opts.strictKeywords ? typeof $sch == 'object' && Object.keys($sch).length > 0 : it.util.schemaHasRules($sch, it.RULES.all))) {
- $it.schema = $sch;
- $it.schemaPath = it.schemaPath + '.patternProperties' + it.util.getProperty($pProperty);
- $it.errSchemaPath = it.errSchemaPath + '/patternProperties/' + it.util.escapeFragment($pProperty);
- if ($ownProperties) {
- out += ' ' + ($dataProperties) + ' = ' + ($dataProperties) + ' || Object.keys(' + ($data) + '); for (var ' + ($idx) + '=0; ' + ($idx) + '<' + ($dataProperties) + '.length; ' + ($idx) + '++) { var ' + ($key) + ' = ' + ($dataProperties) + '[' + ($idx) + ']; ';
- } else {
- out += ' for (var ' + ($key) + ' in ' + ($data) + ') { ';
- }
- out += ' if (' + (it.usePattern($pProperty)) + '.test(' + ($key) + ')) { ';
- $it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
- var $passData = $data + '[' + $key + ']';
- $it.dataPathArr[$dataNxt] = $key;
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
- } else {
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
- }
- if ($breakOnError) {
- out += ' if (!' + ($nextValid) + ') break; ';
- }
- out += ' } ';
- if ($breakOnError) {
- out += ' else ' + ($nextValid) + ' = true; ';
- }
- out += ' } ';
- if ($breakOnError) {
- out += ' if (' + ($nextValid) + ') { ';
- $closingBraces += '}';
- }
- }
- }
- }
- }
- if ($breakOnError) {
- out += ' ' + ($closingBraces) + ' if (' + ($errs) + ' == errors) {';
- }
- out = it.util.cleanUpCode(out);
- return out;
-}
-
-},{}],34:[function(require,module,exports){
-'use strict';
-module.exports = function generate_propertyNames(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $errs = 'errs__' + $lvl;
- var $it = it.util.copy(it);
- var $closingBraces = '';
- $it.level++;
- var $nextValid = 'valid' + $it.level;
- out += 'var ' + ($errs) + ' = errors;';
- if ((it.opts.strictKeywords ? typeof $schema == 'object' && Object.keys($schema).length > 0 : it.util.schemaHasRules($schema, it.RULES.all))) {
- $it.schema = $schema;
- $it.schemaPath = $schemaPath;
- $it.errSchemaPath = $errSchemaPath;
- var $key = 'key' + $lvl,
- $idx = 'idx' + $lvl,
- $i = 'i' + $lvl,
- $invalidName = '\' + ' + $key + ' + \'',
- $dataNxt = $it.dataLevel = it.dataLevel + 1,
- $nextData = 'data' + $dataNxt,
- $dataProperties = 'dataProperties' + $lvl,
- $ownProperties = it.opts.ownProperties,
- $currentBaseId = it.baseId;
- if ($ownProperties) {
- out += ' var ' + ($dataProperties) + ' = undefined; ';
- }
- if ($ownProperties) {
- out += ' ' + ($dataProperties) + ' = ' + ($dataProperties) + ' || Object.keys(' + ($data) + '); for (var ' + ($idx) + '=0; ' + ($idx) + '<' + ($dataProperties) + '.length; ' + ($idx) + '++) { var ' + ($key) + ' = ' + ($dataProperties) + '[' + ($idx) + ']; ';
- } else {
- out += ' for (var ' + ($key) + ' in ' + ($data) + ') { ';
- }
- out += ' var startErrs' + ($lvl) + ' = errors; ';
- var $passData = $key;
- var $wasComposite = it.compositeRule;
- it.compositeRule = $it.compositeRule = true;
- var $code = it.validate($it);
- $it.baseId = $currentBaseId;
- if (it.util.varOccurences($code, $nextData) < 2) {
- out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
- } else {
- out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
- }
- it.compositeRule = $it.compositeRule = $wasComposite;
- out += ' if (!' + ($nextValid) + ') { for (var ' + ($i) + '=startErrs' + ($lvl) + '; ' + ($i) + '<errors; ' + ($i) + '++) { vErrors[' + ($i) + '].propertyName = ' + ($key) + '; } var err = '; /* istanbul ignore else */
- if (!($propertySch && (it.opts.strictKeywords ? typeof $propertySch == 'object' && Object.keys($propertySch).length > 0 : it.util.schemaHasRules($propertySch, it.RULES.all)))) {
- $required[$required.length] = $property;
- }
- }
- }
- } else {
- var $required = $schema;
- }
- }
- if ($isData || $required.length) {
- var $currentErrorPath = it.errorPath,
- $loopRequired = $isData || $required.length >= it.opts.loopRequired,
- $ownProperties = it.opts.ownProperties;
- if ($breakOnError) {
- out += ' var missing' + ($lvl) + '; ';
- if ($loopRequired) {
- if (!$isData) {
- out += ' var ' + ($vSchema) + ' = validate.schema' + ($schemaPath) + '; ';
- }
- var $i = 'i' + $lvl,
- $propertyPath = 'schema' + $lvl + '[' + $i + ']',
- $missingProperty = '\' + ' + $propertyPath + ' + \'';
- if (it.opts._errorDataPathProperty) {
- it.errorPath = it.util.getPathExpr($currentErrorPath, $propertyPath, it.opts.jsonPointers);
- }
- out += ' var ' + ($valid) + ' = true; ';
- if ($isData) {
- out += ' if (schema' + ($lvl) + ' === undefined) ' + ($valid) + ' = true; else if (!Array.isArray(schema' + ($lvl) + ')) ' + ($valid) + ' = false; else {';
- }
- out += ' for (var ' + ($i) + ' = 0; ' + ($i) + ' < ' + ($vSchema) + '.length; ' + ($i) + '++) { ' + ($valid) + ' = ' + ($data) + '[' + ($vSchema) + '[' + ($i) + ']] !== undefined ';
- if ($ownProperties) {
- out += ' && Object.prototype.hasOwnProperty.call(' + ($data) + ', ' + ($vSchema) + '[' + ($i) + ']) ';
- }
- out += '; if (!' + ($valid) + ') break; } ';
- if ($isData) {
- out += ' } ';
- }
- out += ' if (!' + ($valid) + ') { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'';
- if (it.opts._errorDataPathProperty) {
- out += 'is a required property';
- } else {
- out += 'should have required property \\\'' + ($missingProperty) + '\\\'';
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } else { ';
- } else {
- out += ' if ( ';
- var arr2 = $required;
- if (arr2) {
- var $propertyKey, $i = -1,
- l2 = arr2.length - 1;
- while ($i < l2) {
- $propertyKey = arr2[$i += 1];
- if ($i) {
- out += ' || ';
- }
- var $prop = it.util.getProperty($propertyKey),
- $useData = $data + $prop;
- out += ' ( ( ' + ($useData) + ' === undefined ';
- if ($ownProperties) {
- out += ' || ! Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') ';
- }
- out += ') && (missing' + ($lvl) + ' = ' + (it.util.toQuotedString(it.opts.jsonPointers ? $propertyKey : $prop)) + ') ) ';
- }
- }
- out += ') { ';
- var $propertyPath = 'missing' + $lvl,
- $missingProperty = '\' + ' + $propertyPath + ' + \'';
- if (it.opts._errorDataPathProperty) {
- it.errorPath = it.opts.jsonPointers ? it.util.getPathExpr($currentErrorPath, $propertyPath, true) : $currentErrorPath + ' + ' + $propertyPath;
- }
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'';
- if (it.opts._errorDataPathProperty) {
- out += 'is a required property';
- } else {
- out += 'should have required property \\\'' + ($missingProperty) + '\\\'';
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } else { ';
- }
- } else {
- if ($loopRequired) {
- if (!$isData) {
- out += ' var ' + ($vSchema) + ' = validate.schema' + ($schemaPath) + '; ';
- }
- var $i = 'i' + $lvl,
- $propertyPath = 'schema' + $lvl + '[' + $i + ']',
- $missingProperty = '\' + ' + $propertyPath + ' + \'';
- if (it.opts._errorDataPathProperty) {
- it.errorPath = it.util.getPathExpr($currentErrorPath, $propertyPath, it.opts.jsonPointers);
- }
- if ($isData) {
- out += ' if (' + ($vSchema) + ' && !Array.isArray(' + ($vSchema) + ')) { var err = '; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'';
- if (it.opts._errorDataPathProperty) {
- out += 'is a required property';
- } else {
- out += 'should have required property \\\'' + ($missingProperty) + '\\\'';
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; } else if (' + ($vSchema) + ' !== undefined) { ';
- }
- out += ' for (var ' + ($i) + ' = 0; ' + ($i) + ' < ' + ($vSchema) + '.length; ' + ($i) + '++) { if (' + ($data) + '[' + ($vSchema) + '[' + ($i) + ']] === undefined ';
- if ($ownProperties) {
- out += ' || ! Object.prototype.hasOwnProperty.call(' + ($data) + ', ' + ($vSchema) + '[' + ($i) + ']) ';
- }
- out += ') { var err = '; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'';
- if (it.opts._errorDataPathProperty) {
- out += 'is a required property';
- } else {
- out += 'should have required property \\\'' + ($missingProperty) + '\\\'';
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; } } ';
- if ($isData) {
- out += ' } ';
- }
- } else {
- var arr3 = $required;
- if (arr3) {
- var $propertyKey, i3 = -1,
- l3 = arr3.length - 1;
- while (i3 < l3) {
- $propertyKey = arr3[i3 += 1];
- var $prop = it.util.getProperty($propertyKey),
- $missingProperty = it.util.escapeQuotes($propertyKey),
- $useData = $data + $prop;
- if (it.opts._errorDataPathProperty) {
- it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers);
- }
- out += ' if ( ' + ($useData) + ' === undefined ';
- if ($ownProperties) {
- out += ' || ! Object.prototype.hasOwnProperty.call(' + ($data) + ', \'' + (it.util.escapeQuotes($propertyKey)) + '\') ';
- }
- out += ') { var err = '; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'';
- if (it.opts._errorDataPathProperty) {
- out += 'is a required property';
- } else {
- out += 'should have required property \\\'' + ($missingProperty) + '\\\'';
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- out += '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; } ';
- }
- }
- }
- }
- it.errorPath = $currentErrorPath;
- } else if ($breakOnError) {
- out += ' if (true) {';
- }
- return out;
-}
-
-},{}],37:[function(require,module,exports){
-'use strict';
-module.exports = function generate_uniqueItems(it, $keyword, $ruleType) {
- var out = ' ';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- var $isData = it.opts.$data && $schema && $schema.$data,
- $schemaValue;
- if ($isData) {
- out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
- $schemaValue = 'schema' + $lvl;
- } else {
- $schemaValue = $schema;
- }
- if (($schema || $isData) && it.opts.uniqueItems !== false) {
- if ($isData) {
- out += ' var ' + ($valid) + '; if (' + ($schemaValue) + ' === false || ' + ($schemaValue) + ' === undefined) ' + ($valid) + ' = true; else if (typeof ' + ($schemaValue) + ' != \'boolean\') ' + ($valid) + ' = false; else { ';
- }
- out += ' var i = ' + ($data) + '.length , ' + ($valid) + ' = true , j; if (i > 1) { ';
- var $itemType = it.schema.items && it.schema.items.type,
- $typeIsArray = Array.isArray($itemType);
- if (!$itemType || $itemType == 'object' || $itemType == 'array' || ($typeIsArray && ($itemType.indexOf('object') >= 0 || $itemType.indexOf('array') >= 0))) {
- out += ' outer: for (;i--;) { for (j = i; j--;) { if (equal(' + ($data) + '[i], ' + ($data) + '[j])) { ' + ($valid) + ' = false; break outer; } } } ';
- } else {
- out += ' var itemIndices = {}, item; for (;i--;) { var item = ' + ($data) + '[i]; ';
- var $method = 'checkDataType' + ($typeIsArray ? 's' : '');
- out += ' if (' + (it.util[$method]($itemType, 'item', true)) + ') continue; ';
- if ($typeIsArray) {
- out += ' if (typeof item == \'string\') item = \'"\' + item; ';
- }
- out += ' if (typeof itemIndices[item] == \'number\') { ' + ($valid) + ' = false; j = itemIndices[item]; break; } itemIndices[item] = i; } ';
- }
- out += ' } ';
- if ($isData) {
- out += ' } ';
- }
- out += ' if (!' + ($valid) + ') { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ('uniqueItems') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { i: i, j: j } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should NOT have duplicate items (items ## \' + j + \' and \' + i + \' are identical)\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: ';
- if ($isData) {
- out += 'validate.schema' + ($schemaPath);
- } else {
- out += '' + ($schema);
- }
- out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } ';
- if ($breakOnError) {
- out += ' else { ';
- }
- } else {
- if ($breakOnError) {
- out += ' if (true) { ';
- }
- }
- return out;
-}
-
-},{}],38:[function(require,module,exports){
-'use strict';
-module.exports = function generate_validate(it, $keyword, $ruleType) {
- var out = '';
- var $async = it.schema.$async === true,
- $refKeywords = it.util.schemaHasRulesExcept(it.schema, it.RULES.all, '$ref'),
- $id = it.self._getId(it.schema);
- if (it.opts.strictKeywords) {
- var $unknownKwd = it.util.schemaUnknownRules(it.schema, it.RULES.keywords);
- if ($unknownKwd) {
- var $keywordsMsg = 'unknown keyword: ' + $unknownKwd;
- if (it.opts.strictKeywords === 'log') it.logger.warn($keywordsMsg);
- else throw new Error($keywordsMsg);
- }
- }
- if (it.isTop) {
- out += ' var validate = ';
- if ($async) {
- it.async = true;
- out += 'async ';
- }
- out += 'function(data, dataPath, parentData, parentDataProperty, rootData) { \'use strict\'; ';
- if ($id && (it.opts.sourceCode || it.opts.processCode)) {
- out += ' ' + ('/\*# sourceURL=' + $id + ' */') + ' ';
- }
- }
- if (typeof it.schema == 'boolean' || !($refKeywords || it.schema.$ref)) {
- var $keyword = 'false schema';
- var $lvl = it.level;
- var $dataLvl = it.dataLevel;
- var $schema = it.schema[$keyword];
- var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
- var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
- var $breakOnError = !it.opts.allErrors;
- var $errorKeyword;
- var $data = 'data' + ($dataLvl || '');
- var $valid = 'valid' + $lvl;
- if (it.schema === false) {
- if (it.isTop) {
- $breakOnError = true;
- } else {
- out += ' var ' + ($valid) + ' = false; ';
- }
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || 'false schema') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: {} ';
- if (it.opts.messages !== false) {
- out += ' , message: \'boolean schema is false\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: false , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- } else {
- if (it.isTop) {
- if ($async) {
- out += ' return data; ';
- } else {
- out += ' validate.errors = null; return true; ';
- }
- } else {
- out += ' var ' + ($valid) + ' = true; ';
- }
- }
- if (it.isTop) {
- out += ' }; return validate; ';
- }
- return out;
- }
- if (it.isTop) {
- var $top = it.isTop,
- $lvl = it.level = 0,
- $dataLvl = it.dataLevel = 0,
- $data = 'data';
- it.rootId = it.resolve.fullPath(it.self._getId(it.root.schema));
- it.baseId = it.baseId || it.rootId;
- delete it.isTop;
- it.dataPathArr = [undefined];
- if (it.schema.default !== undefined && it.opts.useDefaults && it.opts.strictDefaults) {
- var $defaultMsg = 'default is ignored in the schema root';
- if (it.opts.strictDefaults === 'log') it.logger.warn($defaultMsg);
- else throw new Error($defaultMsg);
- }
- out += ' var vErrors = null; ';
- out += ' var errors = 0; ';
- out += ' if (rootData === undefined) rootData = data; ';
- } else {
- var $lvl = it.level,
- $dataLvl = it.dataLevel,
- $data = 'data' + ($dataLvl || '');
- if ($id) it.baseId = it.resolve.url(it.baseId, $id);
- if ($async && !it.async) throw new Error('async schema in sync schema');
- out += ' var errs_' + ($lvl) + ' = errors;';
- }
- var $valid = 'valid' + $lvl,
- $breakOnError = !it.opts.allErrors,
- $closingBraces1 = '',
- $closingBraces2 = '';
- var $errorKeyword;
- var $typeSchema = it.schema.type,
- $typeIsArray = Array.isArray($typeSchema);
- if ($typeSchema && it.opts.nullable && it.schema.nullable === true) {
- if ($typeIsArray) {
- if ($typeSchema.indexOf('null') == -1) $typeSchema = $typeSchema.concat('null');
- } else if ($typeSchema != 'null') {
- $typeSchema = [$typeSchema, 'null'];
- $typeIsArray = true;
- }
- }
- if ($typeIsArray && $typeSchema.length == 1) {
- $typeSchema = $typeSchema[0];
- $typeIsArray = false;
- }
- if (it.schema.$ref && $refKeywords) {
- if (it.opts.extendRefs == 'fail') {
- throw new Error('$ref: validation keywords used in schema at path "' + it.errSchemaPath + '" (see option extendRefs)');
- } else if (it.opts.extendRefs !== true) {
- $refKeywords = false;
- it.logger.warn('$ref: keywords ignored in schema at path "' + it.errSchemaPath + '"');
- }
- }
- if (it.schema.$comment && it.opts.$comment) {
- out += ' ' + (it.RULES.all.$comment.code(it, '$comment'));
- }
- if ($typeSchema) {
- if (it.opts.coerceTypes) {
- var $coerceToTypes = it.util.coerceToTypes(it.opts.coerceTypes, $typeSchema);
- }
- var $rulesGroup = it.RULES.types[$typeSchema];
- if ($coerceToTypes || $typeIsArray || $rulesGroup === true || ($rulesGroup && !$shouldUseGroup($rulesGroup))) {
- var $schemaPath = it.schemaPath + '.type',
- $errSchemaPath = it.errSchemaPath + '/type';
- var $schemaPath = it.schemaPath + '.type',
- $errSchemaPath = it.errSchemaPath + '/type',
- $method = $typeIsArray ? 'checkDataTypes' : 'checkDataType';
- out += ' if (' + (it.util[$method]($typeSchema, $data, true)) + ') { ';
- if ($coerceToTypes) {
- var $dataType = 'dataType' + $lvl,
- $coerced = 'coerced' + $lvl;
- out += ' var ' + ($dataType) + ' = typeof ' + ($data) + '; ';
- if (it.opts.coerceTypes == 'array') {
- out += ' if (' + ($dataType) + ' == \'object\' && Array.isArray(' + ($data) + ')) ' + ($dataType) + ' = \'array\'; ';
- }
- out += ' var ' + ($coerced) + ' = undefined; ';
- var $bracesCoercion = '';
- var arr1 = $coerceToTypes;
- if (arr1) {
- var $type, $i = -1,
- l1 = arr1.length - 1;
- while ($i < l1) {
- $type = arr1[$i += 1];
- if ($i) {
- out += ' if (' + ($coerced) + ' === undefined) { ';
- $bracesCoercion += '}';
- }
- if (it.opts.coerceTypes == 'array' && $type != 'array') {
- out += ' if (' + ($dataType) + ' == \'array\' && ' + ($data) + '.length == 1) { ' + ($coerced) + ' = ' + ($data) + ' = ' + ($data) + '[0]; ' + ($dataType) + ' = typeof ' + ($data) + '; } ';
- }
- if ($type == 'string') {
- out += ' if (' + ($dataType) + ' == \'number\' || ' + ($dataType) + ' == \'boolean\') ' + ($coerced) + ' = \'\' + ' + ($data) + '; else if (' + ($data) + ' === null) ' + ($coerced) + ' = \'\'; ';
- } else if ($type == 'number' || $type == 'integer') {
- out += ' if (' + ($dataType) + ' == \'boolean\' || ' + ($data) + ' === null || (' + ($dataType) + ' == \'string\' && ' + ($data) + ' && ' + ($data) + ' == +' + ($data) + ' ';
- if ($type == 'integer') {
- out += ' && !(' + ($data) + ' % 1)';
- }
- out += ')) ' + ($coerced) + ' = +' + ($data) + '; ';
- } else if ($type == 'boolean') {
- out += ' if (' + ($data) + ' === \'false\' || ' + ($data) + ' === 0 || ' + ($data) + ' === null) ' + ($coerced) + ' = false; else if (' + ($data) + ' === \'true\' || ' + ($data) + ' === 1) ' + ($coerced) + ' = true; ';
- } else if ($type == 'null') {
- out += ' if (' + ($data) + ' === \'\' || ' + ($data) + ' === 0 || ' + ($data) + ' === false) ' + ($coerced) + ' = null; ';
- } else if (it.opts.coerceTypes == 'array' && $type == 'array') {
- out += ' if (' + ($dataType) + ' == \'string\' || ' + ($dataType) + ' == \'number\' || ' + ($dataType) + ' == \'boolean\' || ' + ($data) + ' == null) ' + ($coerced) + ' = [' + ($data) + ']; ';
- }
- }
- }
- out += ' ' + ($bracesCoercion) + ' if (' + ($coerced) + ' === undefined) { ';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || 'type') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { type: \'';
- if ($typeIsArray) {
- out += '' + ($typeSchema.join(","));
- } else {
- out += '' + ($typeSchema);
- }
- out += '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should be ';
- if ($typeIsArray) {
- out += '' + ($typeSchema.join(","));
- } else {
- out += '' + ($typeSchema);
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } else { ';
- var $parentData = $dataLvl ? 'data' + (($dataLvl - 1) || '') : 'parentData',
- $parentDataProperty = $dataLvl ? it.dataPathArr[$dataLvl] : 'parentDataProperty';
- out += ' ' + ($data) + ' = ' + ($coerced) + '; ';
- if (!$dataLvl) {
- out += 'if (' + ($parentData) + ' !== undefined)';
- }
- out += ' ' + ($parentData) + '[' + ($parentDataProperty) + '] = ' + ($coerced) + '; } ';
- } else {
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || 'type') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { type: \'';
- if ($typeIsArray) {
- out += '' + ($typeSchema.join(","));
- } else {
- out += '' + ($typeSchema);
- }
- out += '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should be ';
- if ($typeIsArray) {
- out += '' + ($typeSchema.join(","));
- } else {
- out += '' + ($typeSchema);
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- }
- out += ' } ';
- }
- }
- if (it.schema.$ref && !$refKeywords) {
- out += ' ' + (it.RULES.all.$ref.code(it, '$ref')) + ' ';
- if ($breakOnError) {
- out += ' } if (errors === ';
- if ($top) {
- out += '0';
- } else {
- out += 'errs_' + ($lvl);
- }
- out += ') { ';
- $closingBraces2 += '}';
- }
- } else {
- var arr2 = it.RULES;
- if (arr2) {
- var $rulesGroup, i2 = -1,
- l2 = arr2.length - 1;
- while (i2 < l2) {
- $rulesGroup = arr2[i2 += 1];
- if ($shouldUseGroup($rulesGroup)) {
- if ($rulesGroup.type) {
- out += ' if (' + (it.util.checkDataType($rulesGroup.type, $data)) + ') { ';
- }
- if (it.opts.useDefaults) {
- if ($rulesGroup.type == 'object' && it.schema.properties) {
- var $schema = it.schema.properties,
- $schemaKeys = Object.keys($schema);
- var arr3 = $schemaKeys;
- if (arr3) {
- var $propertyKey, i3 = -1,
- l3 = arr3.length - 1;
- while (i3 < l3) {
- $propertyKey = arr3[i3 += 1];
- var $sch = $schema[$propertyKey];
- if ($sch.default !== undefined) {
- var $passData = $data + it.util.getProperty($propertyKey);
- if (it.compositeRule) {
- if (it.opts.strictDefaults) {
- var $defaultMsg = 'default is ignored for: ' + $passData;
- if (it.opts.strictDefaults === 'log') it.logger.warn($defaultMsg);
- else throw new Error($defaultMsg);
- }
- } else {
- out += ' if (' + ($passData) + ' === undefined ';
- if (it.opts.useDefaults == 'empty') {
- out += ' || ' + ($passData) + ' === null || ' + ($passData) + ' === \'\' ';
- }
- out += ' ) ' + ($passData) + ' = ';
- if (it.opts.useDefaults == 'shared') {
- out += ' ' + (it.useDefault($sch.default)) + ' ';
- } else {
- out += ' ' + (JSON.stringify($sch.default)) + ' ';
- }
- out += '; ';
- }
- }
- }
- }
- } else if ($rulesGroup.type == 'array' && Array.isArray(it.schema.items)) {
- var arr4 = it.schema.items;
- if (arr4) {
- var $sch, $i = -1,
- l4 = arr4.length - 1;
- while ($i < l4) {
- $sch = arr4[$i += 1];
- if ($sch.default !== undefined) {
- var $passData = $data + '[' + $i + ']';
- if (it.compositeRule) {
- if (it.opts.strictDefaults) {
- var $defaultMsg = 'default is ignored for: ' + $passData;
- if (it.opts.strictDefaults === 'log') it.logger.warn($defaultMsg);
- else throw new Error($defaultMsg);
- }
- } else {
- out += ' if (' + ($passData) + ' === undefined ';
- if (it.opts.useDefaults == 'empty') {
- out += ' || ' + ($passData) + ' === null || ' + ($passData) + ' === \'\' ';
- }
- out += ' ) ' + ($passData) + ' = ';
- if (it.opts.useDefaults == 'shared') {
- out += ' ' + (it.useDefault($sch.default)) + ' ';
- } else {
- out += ' ' + (JSON.stringify($sch.default)) + ' ';
- }
- out += '; ';
- }
- }
- }
- }
- }
- }
- var arr5 = $rulesGroup.rules;
- if (arr5) {
- var $rule, i5 = -1,
- l5 = arr5.length - 1;
- while (i5 < l5) {
- $rule = arr5[i5 += 1];
- if ($shouldUseRule($rule)) {
- var $code = $rule.code(it, $rule.keyword, $rulesGroup.type);
- if ($code) {
- out += ' ' + ($code) + ' ';
- if ($breakOnError) {
- $closingBraces1 += '}';
- }
- }
- }
- }
- }
- if ($breakOnError) {
- out += ' ' + ($closingBraces1) + ' ';
- $closingBraces1 = '';
- }
- if ($rulesGroup.type) {
- out += ' } ';
- if ($typeSchema && $typeSchema === $rulesGroup.type && !$coerceToTypes) {
- out += ' else { ';
- var $schemaPath = it.schemaPath + '.type',
- $errSchemaPath = it.errSchemaPath + '/type';
- var $$outStack = $$outStack || [];
- $$outStack.push(out);
- out = ''; /* istanbul ignore else */
- if (it.createErrors !== false) {
- out += ' { keyword: \'' + ($errorKeyword || 'type') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { type: \'';
- if ($typeIsArray) {
- out += '' + ($typeSchema.join(","));
- } else {
- out += '' + ($typeSchema);
- }
- out += '\' } ';
- if (it.opts.messages !== false) {
- out += ' , message: \'should be ';
- if ($typeIsArray) {
- out += '' + ($typeSchema.join(","));
- } else {
- out += '' + ($typeSchema);
- }
- out += '\' ';
- }
- if (it.opts.verbose) {
- out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
- }
- out += ' } ';
- } else {
- out += ' {} ';
- }
- var __err = out;
- out = $$outStack.pop();
- if (!it.compositeRule && $breakOnError) {
- /* istanbul ignore if */
- if (it.async) {
- out += ' throw new ValidationError([' + (__err) + ']); ';
- } else {
- out += ' validate.errors = [' + (__err) + ']; return false; ';
- }
- } else {
- out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
- }
- out += ' } ';
- }
- }
- if ($breakOnError) {
- out += ' if (errors === ';
- if ($top) {
- out += '0';
- } else {
- out += 'errs_' + ($lvl);
- }
- out += ') { ';
- $closingBraces2 += '}';
- }
- }
- }
- }
- }
- if ($breakOnError) {
- out += ' ' + ($closingBraces2) + ' ';
- }
- if ($top) {
- if ($async) {
- out += ' if (errors === 0) return data; ';
- out += ' else throw new ValidationError(vErrors); ';
- } else {
- out += ' validate.errors = vErrors; ';
- out += ' return errors === 0; ';
- }
- out += ' }; return validate;';
- } else {
- out += ' var ' + ($valid) + ' = errors === errs_' + ($lvl) + ';';
- }
- out = it.util.cleanUpCode(out);
- if ($top) {
- out = it.util.finalCleanUpCode(out, $async);
- }
-
- function $shouldUseGroup($rulesGroup) {
- var rules = $rulesGroup.rules;
- for (var i = 0; i < rules.length; i++)
- if ($shouldUseRule(rules[i])) return true;
- }
-
- function $shouldUseRule($rule) {
- return it.schema[$rule.keyword] !== undefined || ($rule.implements && $ruleImplementsSomeKeyword($rule));
- }
-
- function $ruleImplementsSomeKeyword($rule) {
- var impl = $rule.implements;
- for (var i = 0; i < impl.length; i++)
- if (it.schema[impl[i]] !== undefined) return true;
- }
- return out;
-}
-
-},{}],39:[function(require,module,exports){
-'use strict';
-
-var IDENTIFIER = /^[a-z_$][a-z0-9_$-]*$/i;
-var customRuleCode = require('./dotjs/custom');
-var definitionSchema = require('./definition_schema');
-
-module.exports = {
- add: addKeyword,
- get: getKeyword,
- remove: removeKeyword,
- validate: validateKeyword
-};
-
-
-/**
- * Define custom keyword
- * @this Ajv
- * @param {String} keyword custom keyword, should be unique (including different from all standard, custom and macro keywords).
- * @param {Object} definition keyword definition object with properties `type` (type(s) which the keyword applies to), `validate` or `compile`.
- * @return {Ajv} this for method chaining
- */
-function addKeyword(keyword, definition) {
- /* jshint validthis: true */
- /* eslint no-shadow: 0 */
- var RULES = this.RULES;
- if (RULES.keywords[keyword])
- throw new Error('Keyword ' + keyword + ' is already defined');
-
- if (!IDENTIFIER.test(keyword))
- throw new Error('Keyword ' + keyword + ' is not a valid identifier');
-
- if (definition) {
- this.validateKeyword(definition, true);
-
- var dataType = definition.type;
- if (Array.isArray(dataType)) {
- for (var i=0; i<dataType.length; i++) _addRule(keyword, dataType[i], definition);
- if (sets.length > 1) {
- sets[0] = sets[0].slice(0, -1);
- var xl = sets.length - 1;
- for (var x = 1; x < xl; ++x) {
- sets[x] = sets[x].slice(1, -1);
- }
- sets[xl] = sets[xl].slice(1);
- return sets.join('');
- } else {
- return sets[0];
- }
-}
-function subexp(str) {
- return "(?:" + str + ")";
-}
-function typeOf(o) {
- return o === undefined ? "undefined" : o === null ? "null" : Object.prototype.toString.call(o).split(" ").pop().split("]").shift().toLowerCase();
-}
-function toUpperCase(str) {
- return str.toUpperCase();
-}
-function toArray(obj) {
- return obj !== undefined && obj !== null ? obj instanceof Array ? obj : typeof obj.length !== "number" || obj.split || obj.setInterval || obj.call ? [obj] : Array.prototype.slice.call(obj) : [];
-}
-function assign(target, source) {
- var obj = target;
- if (source) {
- for (var key in source) {
- obj[key] = source[key];
- }
- }
- return obj;
-}
-
-function buildExps(isIRI) {
- var ALPHA$$ = "[A-Za-z]",
- CR$ = "[\\x0D]",
- DIGIT$$ = "[0-9]",
- DQUOTE$$ = "[\\x22]",
- HEXDIG$$ = merge(DIGIT$$, "[A-Fa-f]"),
- //case-insensitive
- LF$$ = "[\\x0A]",
- SP$$ = "[\\x20]",
- PCT_ENCODED$ = subexp(subexp("%[EFef]" + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$) + "|" + subexp("%[89A-Fa-f]" + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$) + "|" + subexp("%" + HEXDIG$$ + HEXDIG$$)),
- //expanded
- GEN_DELIMS$$ = "[\\:\\/\\?\\#\\[\\]\\@]",
- SUB_DELIMS$$ = "[\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\=]",
- RESERVED$$ = merge(GEN_DELIMS$$, SUB_DELIMS$$),
- UCSCHAR$$ = isIRI ? "[\\xA0-\\u200D\\u2010-\\u2029\\u202F-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]" : "[]",
- //subset, excludes bidi control characters
- IPRIVATE$$ = isIRI ? "[\\uE000-\\uF8FF]" : "[]",
- //subset
- UNRESERVED$$ = merge(ALPHA$$, DIGIT$$, "[\\-\\.\\_\\~]", UCSCHAR$$),
- SCHEME$ = subexp(ALPHA$$ + merge(ALPHA$$, DIGIT$$, "[\\+\\-\\.]") + "*"),
- USERINFO$ = subexp(subexp(PCT_ENCODED$ + "|" + merge(UNRESERVED$$, SUB_DELIMS$$, "[\\:]")) + "*"),
- DEC_OCTET$ = subexp(subexp("25[0-5]") + "|" + subexp("2[0-4]" + DIGIT$$) + "|" + subexp("1" + DIGIT$$ + DIGIT$$) + "|" + subexp("[1-9]" + DIGIT$$) + "|" + DIGIT$$),
- DEC_OCTET_RELAXED$ = subexp(subexp("25[0-5]") + "|" + subexp("2[0-4]" + DIGIT$$) + "|" + subexp("1" + DIGIT$$ + DIGIT$$) + "|" + subexp("0?[1-9]" + DIGIT$$) + "|0?0?" + DIGIT$$),
- //relaxed parsing rules
- IPV4ADDRESS$ = subexp(DEC_OCTET_RELAXED$ + "\\." + DEC_OCTET_RELAXED$ + "\\." + DEC_OCTET_RELAXED$ + "\\." + DEC_OCTET_RELAXED$),
- H16$ = subexp(HEXDIG$$ + "{1,4}"),
- LS32$ = subexp(subexp(H16$ + "\\:" + H16$) + "|" + IPV4ADDRESS$),
- IPV6ADDRESS1$ = subexp(subexp(H16$ + "\\:") + "{6}" + LS32$),
- // 6( h16 ":" ) ls32
- IPV6ADDRESS2$ = subexp("\\:\\:" + subexp(H16$ + "\\:") + "{5}" + LS32$),
- // "::" 5( h16 ":" ) ls32
- IPV6ADDRESS3$ = subexp(subexp(H16$) + "?\\:\\:" + subexp(H16$ + "\\:") + "{4}" + LS32$),
- //[ h16 ] "::" 4( h16 ":" ) ls32
- IPV6ADDRESS4$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,1}" + H16$) + "?\\:\\:" + subexp(H16$ + "\\:") + "{3}" + LS32$),
- //[ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
- IPV6ADDRESS5$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,2}" + H16$) + "?\\:\\:" + subexp(H16$ + "\\:") + "{2}" + LS32$),
- //[ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
- IPV6ADDRESS6$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,3}" + H16$) + "?\\:\\:" + H16$ + "\\:" + LS32$),
- //[ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
- IPV6ADDRESS7$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,4}" + H16$) + "?\\:\\:" + LS32$),
- //[ *4( h16 ":" ) h16 ] "::" ls32
- IPV6ADDRESS8$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,5}" + H16$) + "?\\:\\:" + H16$),
- //[ *5( h16 ":" ) h16 ] "::" h16
- IPV6ADDRESS9$ = subexp(subexp(subexp(H16$ + "\\:") + "{0,6}" + H16$) + "?\\:\\:"),
- //[ *6( h16 ":" ) h16 ] "::"
- IPV6ADDRESS$ = subexp([IPV6ADDRESS1$, IPV6ADDRESS2$, IPV6ADDRESS3$, IPV6ADDRESS4$, IPV6ADDRESS5$, IPV6ADDRESS6$, IPV6ADDRESS7$, IPV6ADDRESS8$, IPV6ADDRESS9$].join("|")),
- ZONEID$ = subexp(subexp(UNRESERVED$$ + "|" + PCT_ENCODED$) + "+"),
- //RFC 6874
- IPV6ADDRZ$ = subexp(IPV6ADDRESS$ + "\\%25" + ZONEID$),
- //RFC 6874
- IPV6ADDRZ_RELAXED$ = subexp(IPV6ADDRESS$ + subexp("\\%25|\\%(?!" + HEXDIG$$ + "{2})") + ZONEID$),
- //RFC 6874, with relaxed parsing rules
- IPVFUTURE$ = subexp("[vV]" + HEXDIG$$ + "+\\." + merge(UNRESERVED$$, SUB_DELIMS$$, "[\\:]") + "+"),
- IP_LITERAL$ = subexp("\\[" + subexp(IPV6ADDRZ_RELAXED$ + "|" + IPV6ADDRESS$ + "|" + IPVFUTURE$) + "\\]"),
- //RFC 6874
- REG_NAME$ = subexp(subexp(PCT_ENCODED$ + "|" + merge(UNRESERVED$$, SUB_DELIMS$$)) + "*"),
- HOST$ = subexp(IP_LITERAL$ + "|" + IPV4ADDRESS$ + "(?!" + REG_NAME$ + ")" + "|" + REG_NAME$),
- PORT$ = subexp(DIGIT$$ + "*"),
- AUTHORITY$ = subexp(subexp(USERINFO$ + "@") + "?" + HOST$ + subexp("\\:" + PORT$) + "?"),
- PCHAR$ = subexp(PCT_ENCODED$ + "|" + merge(UNRESERVED$$, SUB_DELIMS$$, "[\\:\\@]")),
- SEGMENT$ = subexp(PCHAR$ + "*"),
- SEGMENT_NZ$ = subexp(PCHAR$ + "+"),
- SEGMENT_NZ_NC$ = subexp(subexp(PCT_ENCODED$ + "|" + merge(UNRESERVED$$, SUB_DELIMS$$, "[\\@]")) + "+"),
- PATH_ABEMPTY$ = subexp(subexp("\\/" + SEGMENT$) + "*"),
- PATH_ABSOLUTE$ = subexp("\\/" + subexp(SEGMENT_NZ$ + PATH_ABEMPTY$) + "?"),
- //simplified
- PATH_NOSCHEME$ = subexp(SEGMENT_NZ_NC$ + PATH_ABEMPTY$),
- //simplified
- PATH_ROOTLESS$ = subexp(SEGMENT_NZ$ + PATH_ABEMPTY$),
- //simplified
- PATH_EMPTY$ = "(?!" + PCHAR$ + ")",
- PATH$ = subexp(PATH_ABEMPTY$ + "|" + PATH_ABSOLUTE$ + "|" + PATH_NOSCHEME$ + "|" + PATH_ROOTLESS$ + "|" + PATH_EMPTY$),
- QUERY$ = subexp(subexp(PCHAR$ + "|" + merge("[\\/\\?]", IPRIVATE$$)) + "*"),
- FRAGMENT$ = subexp(subexp(PCHAR$ + "|[\\/\\?]") + "*"),
- HIER_PART$ = subexp(subexp("\\/\\/" + AUTHORITY$ + PATH_ABEMPTY$) + "|" + PATH_ABSOLUTE$ + "|" + PATH_ROOTLESS$ + "|" + PATH_EMPTY$),
- URI$ = subexp(SCHEME$ + "\\:" + HIER_PART$ + subexp("\\?" + QUERY$) + "?" + subexp("\\#" + FRAGMENT$) + "?"),
- RELATIVE_PART$ = subexp(subexp("\\/\\/" + AUTHORITY$ + PATH_ABEMPTY$) + "|" + PATH_ABSOLUTE$ + "|" + PATH_NOSCHEME$ + "|" + PATH_EMPTY$),
- RELATIVE$ = subexp(RELATIVE_PART$ + subexp("\\?" + QUERY$) + "?" + subexp("\\#" + FRAGMENT$) + "?"),
- URI_REFERENCE$ = subexp(URI$ + "|" + RELATIVE$),
- ABSOLUTE_URI$ = subexp(SCHEME$ + "\\:" + HIER_PART$ + subexp("\\?" + QUERY$) + "?"),
- GENERIC_REF$ = "^(" + SCHEME$ + ")\\:" + subexp(subexp("\\/\\/(" + subexp("(" + USERINFO$ + ")@") + "?(" + HOST$ + ")" + subexp("\\:(" + PORT$ + ")") + "?)") + "?(" + PATH_ABEMPTY$ + "|" + PATH_ABSOLUTE$ + "|" + PATH_ROOTLESS$ + "|" + PATH_EMPTY$ + ")") + subexp("\\?(" + QUERY$ + ")") + "?" + subexp("\\#(" + FRAGMENT$ + ")") + "?$",
- RELATIVE_REF$ = "^(){0}" + subexp(subexp("\\/\\/(" + subexp("(" + USERINFO$ + ")@") + "?(" + HOST$ + ")" + subexp("\\:(" + PORT$ + ")") + "?)") + "?(" + PATH_ABEMPTY$ + "|" + PATH_ABSOLUTE$ + "|" + PATH_NOSCHEME$ + "|" + PATH_EMPTY$ + ")") + subexp("\\?(" + QUERY$ + ")") + "?" + subexp("\\#(" + FRAGMENT$ + ")") + "?$",
- ABSOLUTE_REF$ = "^(" + SCHEME$ + ")\\:" + subexp(subexp("\\/\\/(" + subexp("(" + USERINFO$ + ")@") + "?(" + HOST$ + ")" + subexp("\\:(" + PORT$ + ")") + "?)") + "?(" + PATH_ABEMPTY$ + "|" + PATH_ABSOLUTE$ + "|" + PATH_ROOTLESS$ + "|" + PATH_EMPTY$ + ")") + subexp("\\?(" + QUERY$ + ")") + "?$",
- SAMEDOC_REF$ = "^" + subexp("\\#(" + FRAGMENT$ + ")") + "?$",
- AUTHORITY_REF$ = "^" + subexp("(" + USERINFO$ + ")@") + "?(" + HOST$ + ")" + subexp("\\:(" + PORT$ + ")") + "?$";
- return {
- NOT_SCHEME: new RegExp(merge("[^]", ALPHA$$, DIGIT$$, "[\\+\\-\\.]"), "g"),
- NOT_USERINFO: new RegExp(merge("[^\\%\\:]", UNRESERVED$$, SUB_DELIMS$$), "g"),
- NOT_HOST: new RegExp(merge("[^\\%\\[\\]\\:]", UNRESERVED$$, SUB_DELIMS$$), "g"),
- NOT_PATH: new RegExp(merge("[^\\%\\/\\:\\@]", UNRESERVED$$, SUB_DELIMS$$), "g"),
- NOT_PATH_NOSCHEME: new RegExp(merge("[^\\%\\/\\@]", UNRESERVED$$, SUB_DELIMS$$), "g"),
- NOT_QUERY: new RegExp(merge("[^\\%]", UNRESERVED$$, SUB_DELIMS$$, "[\\:\\@\\/\\?]", IPRIVATE$$), "g"),
- NOT_FRAGMENT: new RegExp(merge("[^\\%]", UNRESERVED$$, SUB_DELIMS$$, "[\\:\\@\\/\\?]"), "g"),
- ESCAPE: new RegExp(merge("[^]", UNRESERVED$$, SUB_DELIMS$$), "g"),
- UNRESERVED: new RegExp(UNRESERVED$$, "g"),
- OTHER_CHARS: new RegExp(merge("[^\\%]", UNRESERVED$$, RESERVED$$), "g"),
- PCT_ENCODED: new RegExp(PCT_ENCODED$, "g"),
- IPV4ADDRESS: new RegExp("^(" + IPV4ADDRESS$ + ")$"),
- IPV6ADDRESS: new RegExp("^\\[?(" + IPV6ADDRESS$ + ")" + subexp(subexp("\\%25|\\%(?!" + HEXDIG$$ + "{2})") + "(" + ZONEID$ + ")") + "?\\]?$") //RFC 6874, with relaxed parsing rules
- };
-}
-var URI_PROTOCOL = buildExps(false);
-
-var IRI_PROTOCOL = buildExps(true);
-
-var slicedToArray = function () {
- function sliceIterator(arr, i) {
- var _arr = [];
- var _n = true;
- var _d = false;
- var _e = undefined;
-
- try {
- for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {
- _arr.push(_s.value);
-
- if (i && _arr.length === i) break;
- }
- } catch (err) {
- _d = true;
- _e = err;
- } finally {
- try {
- if (!_n && _i["return"]) _i["return"]();
- } finally {
- if (_d) throw _e;
- }
- }
-
- return _arr;
- }
-
- return function (arr, i) {
- if (Array.isArray(arr)) {
- return arr;
- } else if (Symbol.iterator in Object(arr)) {
- return sliceIterator(arr, i);
- } else {
- throw new TypeError("Invalid attempt to destructure non-iterable instance");
- }
- };
-}();
-
-
-
-
-
-
-
-
-
-
-
-
-
-var toConsumableArray = function (arr) {
- if (Array.isArray(arr)) {
- for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];
-
- return arr2;
- } else {
- return Array.from(arr);
- }
-};
-
-/** Highest positive signed 32-bit integer value */
-
-var maxInt = 2147483647; // aka. 0x7FFFFFFF or 2^31-1
-
-/** Bootstring parameters */
-var base = 36;
-var tMin = 1;
-var tMax = 26;
-var skew = 38;
-var damp = 700;
-var initialBias = 72;
-var initialN = 128; // 0x80
-var delimiter = '-'; // '\x2D'
-
-/** Regular expressions */
-var regexPunycode = /^xn--/;
-var regexNonASCII = /[^\0-\x7E]/; // non-ASCII chars
-var regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators
-
-/** Error messages */
-var errors = {
- 'overflow': 'Overflow: input needs wider integers to process',
- 'not-basic': 'Illegal input >= 0x80 (not a basic code point)',
- 'invalid-input': 'Invalid input'
-};
-
-/** Convenience shortcuts */
-var baseMinusTMin = base - tMin;
-var floor = Math.floor;
-var stringFromCharCode = String.fromCharCode;
-
-/*--------------------------------------------------------------------------*/
-
-/**
- * A generic error utility function.
- * @private
- * @param {String} type The error type.
- * @returns {Error} Throws a `RangeError` with the applicable error message.
- */
-function error$1(type) {
- throw new RangeError(errors[type]);
-}
-
-/**
- * A generic `Array#map` utility function.
- * @private
- * @param {Array} array The array to iterate over.
- * @param {Function} callback The function that gets called for every array
- * item.
- * @returns {Array} A new array of values returned by the callback function.
- */
-function map(array, fn) {
- var result = [];
- var length = array.length;
- while (length--) {
- result[length] = fn(array[length]);
- }
- return result;
-}
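// Editor-added illustrative sketch (not part of the original file): although
// the loop above walks the array from the end, indices are preserved.
map(['foo', 'bar'], function (s) { return s.toUpperCase(); }); // -> ['FOO', 'BAR']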
-
-/**
- * A simple `Array#map`-like wrapper to work with domain name strings or email
- * addresses.
- * @private
- * @param {String} domain The domain name or email address.
- * @param {Function} callback The function that gets called for every
- * character.
- * @returns {String} A new string of characters returned by the callback
- * function.
- */
-function mapDomain(string, fn) {
- var parts = string.split('@');
- var result = '';
- if (parts.length > 1) {
- // In email addresses, only the domain name should be punycoded. Leave
- // the local part (i.e. everything up to `@`) intact.
- result = parts[0] + '@';
- string = parts[1];
- }
- // Avoid `split(regex)` for IE8 compatibility. See #17.
- string = string.replace(regexSeparators, '\x2E');
- var labels = string.split('.');
- var encoded = map(labels, fn).join('.');
- return result + encoded;
-}
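// Editor-added illustrative sketch (not part of the original file): only the
// part after "@" is split into labels and run through the callback; the local
// part is left untouched. The variable name is hypothetical.
var mappedDomainExample = mapDomain('user@xn--maana-pta.com', function (label) {
 return label.toUpperCase();
}); // -> 'user@XN--MAANA-PTA.COM'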
-
-/**
- * Creates an array containing the numeric code points of each Unicode
- * character in the string. While JavaScript uses UCS-2 internally,
- * this function will convert a pair of surrogate halves (each of which
- * UCS-2 exposes as separate characters) into a single code point,
- * matching UTF-16.
- * @see `punycode.ucs2.encode`
- * @see <https://mathiasbynens.be/notes/javascript-encoding>
- * @memberOf punycode.ucs2
- * @name decode
- * @param {String} string The Unicode input string (UCS-2).
- * @returns {Array} The new array of code points.
- */
-function ucs2decode(string) {
- var output = [];
- var counter = 0;
- var length = string.length;
- while (counter < length) {
- var value = string.charCodeAt(counter++);
- if (value >= 0xD800 && value <= 0xDBFF && counter < length) {
- // It's a high surrogate, and there is a next character.
- var extra = string.charCodeAt(counter++);
- if ((extra & 0xFC00) == 0xDC00) {
- // Low surrogate.
- output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);
- } else {
- // It's an unmatched surrogate; only append this code unit, in case the
- // next code unit is the high surrogate of a surrogate pair.
- output.push(value);
- counter--;
- }
- } else {
- output.push(value);
- }
- }
- return output;
-}
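// Editor-added illustrative sketch (not part of the original file): a surrogate
// pair such as U+1D306 ('\uD834\uDF06') is combined into a single code point.
ucs2decode('\uD834\uDF06'); // -> [0x1D306]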
-
-/**
- * Creates a string based on an array of numeric code points.
- * @see `punycode.ucs2.decode`
- * @memberOf punycode.ucs2
- * @name encode
- * @param {Array} codePoints The array of numeric code points.
- * @returns {String} The new Unicode string (UCS-2).
- */
-var ucs2encode = function ucs2encode(array) {
- return String.fromCodePoint.apply(String, toConsumableArray(array));
-};
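// Editor-added illustrative sketch (not part of the original file): the inverse
// of `ucs2decode` above.
ucs2encode([0x1D306]); // -> '\uD834\uDF06'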
-
-/**
- * Converts a basic code point into a digit/integer.
- * @see `digitToBasic()`
- * @private
- * @param {Number} codePoint The basic numeric code point value.
- * @returns {Number} The numeric value of a basic code point (for use in
- * representing integers) in the range `0` to `base - 1`, or `base` if
- * the code point does not represent a value.
- */
-var basicToDigit = function basicToDigit(codePoint) {
- if (codePoint - 0x30 < 0x0A) {
- return codePoint - 0x16;
- }
- if (codePoint - 0x41 < 0x1A) {
- return codePoint - 0x41;
- }
- if (codePoint - 0x61 < 0x1A) {
- return codePoint - 0x61;
- }
- return base;
-};
-
-/**
- * Converts a digit/integer into a basic code point.
- * @see `basicToDigit()`
- * @private
- * @param {Number} digit The numeric value of a basic code point.
- * @returns {Number} The basic code point whose value (when used for
- * representing integers) is `digit`, which needs to be in the range
- * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is
- * used; else, the lowercase form is used. The behavior is undefined
- * if `flag` is non-zero and `digit` has no uppercase form.
- */
-var digitToBasic = function digitToBasic(digit, flag) {
- // 0..25 map to ASCII a..z or A..Z
- // 26..35 map to ASCII 0..9
- return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5);
-};
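// Editor-added illustrative sketch (not part of the original file) of the two
// mappings documented above: digits 0..25 map to 'a'..'z' and 26..35 to '0'..'9'
// (lowercase form when `flag` is 0).
basicToDigit(0x61 /* 'a' */); // -> 0
basicToDigit(0x30 /* '0' */); // -> 26
digitToBasic(0, 0); // -> 0x61 ('a')
digitToBasic(26, 0); // -> 0x30 ('0')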
-
-/**
- * Bias adaptation function as per section 3.4 of RFC 3492.
- * https://tools.ietf.org/html/rfc3492#section-3.4
- * @private
- */
-var adapt = function adapt(delta, numPoints, firstTime) {
- var k = 0;
- delta = firstTime ? floor(delta / damp) : delta >> 1;
- delta += floor(delta / numPoints);
- for (; /* no initialization */delta > baseMinusTMin * tMax >> 1; k += base) {
- delta = floor(delta / baseMinusTMin);
- }
- return floor(k + (baseMinusTMin + 1) * delta / (delta + skew));
-};
-
-/**
- * Converts a Punycode string of ASCII-only symbols to a string of Unicode
- * symbols.
- * @memberOf punycode
- * @param {String} input The Punycode string of ASCII-only symbols.
- * @returns {String} The resulting string of Unicode symbols.
- */
-var decode = function decode(input) {
- // Don't use UCS-2.
- var output = [];
- var inputLength = input.length;
- var i = 0;
- var n = initialN;
- var bias = initialBias;
-
- // Handle the basic code points: let `basic` be the number of input code
- // points before the last delimiter, or `0` if there is none, then copy
- // the first basic code points to the output.
-
- var basic = input.lastIndexOf(delimiter);
- if (basic < 0) {
- basic = 0;
- }
-
- for (var j = 0; j < basic; ++j) {
- // if it's not a basic code point
- if (input.charCodeAt(j) >= 0x80) {
- error$1('not-basic');
- }
- output.push(input.charCodeAt(j));
- }
-
- // Main decoding loop: start just after the last delimiter if any basic code
- // points were copied; start at the beginning otherwise.
-
- for (var index = basic > 0 ? basic + 1 : 0; index < inputLength;) /* no final expression */{
-
- // `index` is the index of the next character to be consumed.
- // Decode a generalized variable-length integer into `delta`,
- // which gets added to `i`. The overflow checking is easier
- // if we increase `i` as we go, then subtract off its starting
- // value at the end to obtain `delta`.
- var oldi = i;
- for (var w = 1, k = base;; /* no condition */k += base) {
-
- if (index >= inputLength) {
- error$1('invalid-input');
- }
-
- var digit = basicToDigit(input.charCodeAt(index++));
-
- if (digit >= base || digit > floor((maxInt - i) / w)) {
- error$1('overflow');
- }
-
- i += digit * w;
- var t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias;
-
- if (digit < t) {
- break;
- }
-
- var baseMinusT = base - t;
- if (w > floor(maxInt / baseMinusT)) {
- error$1('overflow');
- }
-
- w *= baseMinusT;
- }
-
- var out = output.length + 1;
- bias = adapt(i - oldi, out, oldi == 0);
-
- // `i` was supposed to wrap around from `out` to `0`,
- // incrementing `n` each time, so we'll fix that now:
- if (floor(i / out) > maxInt - n) {
- error$1('overflow');
- }
-
- n += floor(i / out);
- i %= out;
-
- // Insert `n` at position `i` of the output.
- output.splice(i++, 0, n);
- }
-
- return String.fromCodePoint.apply(String, output);
-};
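// Editor-added illustrative sketch (not part of the original file): the
// canonical example from RFC 3492 / the punycode.js documentation.
decode('maana-pta'); // -> 'mañana'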
-
-/**
- * Converts a string of Unicode symbols (e.g. a domain name label) to a
- * Punycode string of ASCII-only symbols.
- * @memberOf punycode
- * @param {String} input The string of Unicode symbols.
- * @returns {String} The resulting Punycode string of ASCII-only symbols.
- */
-var encode = function encode(input) {
- var output = [];
-
- // Convert the input in UCS-2 to an array of Unicode code points.
- input = ucs2decode(input);
-
- // Cache the length.
- var inputLength = input.length;
-
- // Initialize the state.
- var n = initialN;
- var delta = 0;
- var bias = initialBias;
-
- // Handle the basic code points.
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
- var _iteratorError = undefined;
-
- try {
- for (var _iterator = input[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
- var _currentValue2 = _step.value;
-
- if (_currentValue2 < 0x80) {
- output.push(stringFromCharCode(_currentValue2));
- }
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return) {
- _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
- }
- }
-
- var basicLength = output.length;
- var handledCPCount = basicLength;
-
- // `handledCPCount` is the number of code points that have been handled;
- // `basicLength` is the number of basic code points.
-
- // Finish the basic string with a delimiter unless it's empty.
- if (basicLength) {
- output.push(delimiter);
- }
-
- // Main encoding loop:
- while (handledCPCount < inputLength) {
-
- // All non-basic code points < n have been handled already. Find the next
- // larger one:
- var m = maxInt;
- var _iteratorNormalCompletion2 = true;
- var _didIteratorError2 = false;
- var _iteratorError2 = undefined;
-
- try {
- for (var _iterator2 = input[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
- var currentValue = _step2.value;
-
- if (currentValue >= n && currentValue < m) {
- m = currentValue;
- }
- }
-
- // Increase `delta` enough to advance the decoder's <n,i> state to <m,0>,
- // but guard against overflow.
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion2 && _iterator2.return) {
- _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
- }
- }
-
- var handledCPCountPlusOne = handledCPCount + 1;
- if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) {
- error$1('overflow');
- }
-
- delta += (m - n) * handledCPCountPlusOne;
- n = m;
-
- var _iteratorNormalCompletion3 = true;
- var _didIteratorError3 = false;
- var _iteratorError3 = undefined;
-
- try {
- for (var _iterator3 = input[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
- var _currentValue = _step3.value;
-
- if (_currentValue < n && ++delta > maxInt) {
- error$1('overflow');
- }
- if (_currentValue == n) {
- // Represent delta as a generalized variable-length integer.
- var q = delta;
- for (var k = base;; /* no condition */k += base) {
- var t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias;
- if (q < t) {
- break;
- }
- var qMinusT = q - t;
- var baseMinusT = base - t;
- output.push(stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)));
- q = floor(qMinusT / baseMinusT);
- }
-
- output.push(stringFromCharCode(digitToBasic(q, 0)));
- bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength);
- delta = 0;
- ++handledCPCount;
- }
- }
- } catch (err) {
- _didIteratorError3 = true;
- _iteratorError3 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion3 && _iterator3.return) {
- _iterator3.return();
- }
- } finally {
- if (_didIteratorError3) {
- throw _iteratorError3;
- }
- }
- }
-
- ++delta;
- ++n;
- }
- return output.join('');
-};
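// Editor-added illustrative sketch (not part of the original file): the inverse
// of `decode` above.
encode('mañana'); // -> 'maana-pta'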
-
-/**
- * Converts a Punycode string representing a domain name or an email address
- * to Unicode. Only the Punycoded parts of the input will be converted, i.e.
- * it doesn't matter if you call it on a string that has already been
- * converted to Unicode.
- * @memberOf punycode
- * @param {String} input The Punycoded domain name or email address to
- * convert to Unicode.
- * @returns {String} The Unicode representation of the given Punycode
- * string.
- */
-var toUnicode = function toUnicode(input) {
- return mapDomain(input, function (string) {
- return regexPunycode.test(string) ? decode(string.slice(4).toLowerCase()) : string;
- });
-};
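// Editor-added illustrative sketch (not part of the original file): only labels
// starting with "xn--" are decoded; everything else passes through unchanged.
toUnicode('xn--maana-pta.com'); // -> 'mañana.com'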
-
-/**
- * Converts a Unicode string representing a domain name or an email address to
- * Punycode. Only the non-ASCII parts of the domain name will be converted,
- * i.e. it doesn't matter if you call it with a domain that's already in
- * ASCII.
- * @memberOf punycode
- * @param {String} input The domain name or email address to convert, as a
- * Unicode string.
- * @returns {String} The Punycode representation of the given domain name or
- * email address.
- */
-var toASCII = function toASCII(input) {
- return mapDomain(input, function (string) {
- return regexNonASCII.test(string) ? 'xn--' + encode(string) : string;
- });
-};
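// Editor-added illustrative sketch (not part of the original file): only labels
// containing non-ASCII characters are converted, so ASCII domains come back as-is.
toASCII('mañana.com'); // -> 'xn--maana-pta.com'
toASCII('example.com'); // -> 'example.com'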
-
-/*--------------------------------------------------------------------------*/
-
-/** Define the public API */
-var punycode = {
- /**
- * A string representing the current Punycode.js version number.
- * @memberOf punycode
- * @type String
- */
- 'version': '2.1.0',
- /**
- * An object of methods to convert from JavaScript's internal character
- * representation (UCS-2) to Unicode code points, and back.
- * @see <https://mathiasbynens.be/notes/javascript-encoding>
- * @memberOf punycode
- * @type Object
- */
- 'ucs2': {
- 'decode': ucs2decode,
- 'encode': ucs2encode
- },
- 'decode': decode,
- 'encode': encode,
- 'toASCII': toASCII,
- 'toUnicode': toUnicode
-};
-
-/**
- * URI.js
- *
- * @fileoverview An RFC 3986 compliant, scheme extendable URI parsing/validating/resolving library for JavaScript.
- * @author Gary Court
- * @see http://github.com/garycourt/uri-js
- */
-/**
- * Copyright 2011 Gary Court. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY GARY COURT ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
- * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GARY COURT OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
- * The views and conclusions contained in the software and documentation are those of the
- * authors and should not be interpreted as representing official policies, either expressed
- * or implied, of Gary Court.
- */
-var SCHEMES = {};
-function pctEncChar(chr) {
- var c = chr.charCodeAt(0);
- var e = void 0;
- if (c < 16) e = "%0" + c.toString(16).toUpperCase();else if (c < 128) e = "%" + c.toString(16).toUpperCase();else if (c < 2048) e = "%" + (c >> 6 | 192).toString(16).toUpperCase() + "%" + (c & 63 | 128).toString(16).toUpperCase();else e = "%" + (c >> 12 | 224).toString(16).toUpperCase() + "%" + (c >> 6 & 63 | 128).toString(16).toUpperCase() + "%" + (c & 63 | 128).toString(16).toUpperCase();
- return e;
-}
-function pctDecChars(str) {
- var newStr = "";
- var i = 0;
- var il = str.length;
- while (i < il) {
- var c = parseInt(str.substr(i + 1, 2), 16);
- if (c < 128) {
- newStr += String.fromCharCode(c);
- i += 3;
- } else if (c >= 194 && c < 224) {
- if (il - i >= 6) {
- var c2 = parseInt(str.substr(i + 4, 2), 16);
- newStr += String.fromCharCode((c & 31) << 6 | c2 & 63);
- } else {
- newStr += str.substr(i, 6);
- }
- i += 6;
- } else if (c >= 224) {
- if (il - i >= 9) {
- var _c = parseInt(str.substr(i + 4, 2), 16);
- var c3 = parseInt(str.substr(i + 7, 2), 16);
- newStr += String.fromCharCode((c & 15) << 12 | (_c & 63) << 6 | c3 & 63);
- } else {
- newStr += str.substr(i, 9);
- }
- i += 9;
- } else {
- newStr += str.substr(i, 3);
- i += 3;
- }
- }
- return newStr;
-}
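// Editor-added illustrative sketch (not part of the original file): pctEncChar
// percent-encodes a single character as UTF-8 octets and pctDecChars reverses it.
pctEncChar('ä'); // -> '%C3%A4'
pctDecChars('%C3%A4'); // -> 'ä'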
-function _normalizeComponentEncoding(components, protocol) {
- function decodeUnreserved(str) {
- var decStr = pctDecChars(str);
- return !decStr.match(protocol.UNRESERVED) ? str : decStr;
- }
- if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, "");
- if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);
- if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);
- if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);
- if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);
- if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);
- return components;
-}
-
-function _stripLeadingZeros(str) {
- return str.replace(/^0*(.*)/, "$1") || "0";
-}
-function _normalizeIPv4(host, protocol) {
- var matches = host.match(protocol.IPV4ADDRESS) || [];
-
- var _matches = slicedToArray(matches, 2),
- address = _matches[1];
-
- if (address) {
- return address.split(".").map(_stripLeadingZeros).join(".");
- } else {
- return host;
- }
-}
-function _normalizeIPv6(host, protocol) {
- var matches = host.match(protocol.IPV6ADDRESS) || [];
-
- var _matches2 = slicedToArray(matches, 3),
- address = _matches2[1],
- zone = _matches2[2];
-
- if (address) {
- var _address$toLowerCase$ = address.toLowerCase().split('::').reverse(),
- _address$toLowerCase$2 = slicedToArray(_address$toLowerCase$, 2),
- last = _address$toLowerCase$2[0],
- first = _address$toLowerCase$2[1];
-
- var firstFields = first ? first.split(":").map(_stripLeadingZeros) : [];
- var lastFields = last.split(":").map(_stripLeadingZeros);
- var isLastFieldIPv4Address = protocol.IPV4ADDRESS.test(lastFields[lastFields.length - 1]);
- var fieldCount = isLastFieldIPv4Address ? 7 : 8;
- var lastFieldsStart = lastFields.length - fieldCount;
- var fields = Array(fieldCount);
- for (var x = 0; x < fieldCount; ++x) {
- fields[x] = firstFields[x] || lastFields[lastFieldsStart + x] || '';
- }
- if (isLastFieldIPv4Address) {
- fields[fieldCount - 1] = _normalizeIPv4(fields[fieldCount - 1], protocol);
- }
- var allZeroFields = fields.reduce(function (acc, field, index) {
- if (!field || field === "0") {
- var lastLongest = acc[acc.length - 1];
- if (lastLongest && lastLongest.index + lastLongest.length === index) {
- lastLongest.length++;
- } else {
- acc.push({ index: index, length: 1 });
- }
- }
- return acc;
- }, []);
- var longestZeroFields = allZeroFields.sort(function (a, b) {
- return b.length - a.length;
- })[0];
- var newHost = void 0;
- if (longestZeroFields && longestZeroFields.length > 1) {
- var newFirst = fields.slice(0, longestZeroFields.index);
- var newLast = fields.slice(longestZeroFields.index + longestZeroFields.length);
- newHost = newFirst.join(":") + "::" + newLast.join(":");
- } else {
- newHost = fields.join(":");
- }
- if (zone) {
- newHost += "%" + zone;
- }
- return newHost;
- } else {
- return host;
- }
-}
-var URI_PARSE = /^(?:([^:\/?#]+):)?(?:\/\/((?:([^\/?#@]*)@)?(\[[^\/?#\]]+\]|[^\/?#:]*)(?:\:(\d*))?))?([^?#]*)(?:\?([^#]*))?(?:#((?:.|\n|\r)*))?/i;
-var NO_MATCH_IS_UNDEFINED = "".match(/(){0}/)[1] === undefined;
-function parse(uriString) {
- var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- var components = {};
- var protocol = options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL;
- if (options.reference === "suffix") uriString = (options.scheme ? options.scheme + ":" : "") + "//" + uriString;
- var matches = uriString.match(URI_PARSE);
- if (matches) {
- if (NO_MATCH_IS_UNDEFINED) {
- //store each component
- components.scheme = matches[1];
- components.userinfo = matches[3];
- components.host = matches[4];
- components.port = parseInt(matches[5], 10);
- components.path = matches[6] || "";
- components.query = matches[7];
- components.fragment = matches[8];
- //fix port number
- if (isNaN(components.port)) {
- components.port = matches[5];
- }
- } else {
- //IE FIX for improper RegExp matching
- //store each component
- components.scheme = matches[1] || undefined;
- components.userinfo = uriString.indexOf("@") !== -1 ? matches[3] : undefined;
- components.host = uriString.indexOf("//") !== -1 ? matches[4] : undefined;
- components.port = parseInt(matches[5], 10);
- components.path = matches[6] || "";
- components.query = uriString.indexOf("?") !== -1 ? matches[7] : undefined;
- components.fragment = uriString.indexOf("#") !== -1 ? matches[8] : undefined;
- //fix port number
- if (isNaN(components.port)) {
- components.port = uriString.match(/\/\/(?:.|\n)*\:(?:\/|\?|\#|$)/) ? matches[4] : undefined;
- }
- }
- if (components.host) {
- //normalize IP hosts
- components.host = _normalizeIPv6(_normalizeIPv4(components.host, protocol), protocol);
- }
- //determine reference type
- if (components.scheme === undefined && components.userinfo === undefined && components.host === undefined && components.port === undefined && !components.path && components.query === undefined) {
- components.reference = "same-document";
- } else if (components.scheme === undefined) {
- components.reference = "relative";
- } else if (components.fragment === undefined) {
- components.reference = "absolute";
- } else {
- components.reference = "uri";
- }
- //check for reference errors
- if (options.reference && options.reference !== "suffix" && options.reference !== components.reference) {
- components.error = components.error || "URI is not a " + options.reference + " reference.";
- }
- //find scheme handler
- var schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()];
- //check if scheme can't handle IRIs
- if (!options.unicodeSupport && (!schemeHandler || !schemeHandler.unicodeSupport)) {
- //if host component is a domain name
- if (components.host && (options.domainHost || schemeHandler && schemeHandler.domainHost)) {
- //convert Unicode IDN -> ASCII IDN
- try {
- components.host = punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase());
- } catch (e) {
- components.error = components.error || "Host's domain name can not be converted to ASCII via punycode: " + e;
- }
- }
- //convert IRI -> URI
- _normalizeComponentEncoding(components, URI_PROTOCOL);
- } else {
- //normalize encodings
- _normalizeComponentEncoding(components, protocol);
- }
- //perform scheme specific parsing
- if (schemeHandler && schemeHandler.parse) {
- schemeHandler.parse(components, options);
- }
- } else {
- components.error = components.error || "URI can not be parsed.";
- }
- return components;
-}
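// Editor-added illustrative sketch (not part of the original file), following
// the example documented for uri-js; missing components stay undefined and the
// variable name is hypothetical.
var parsedExample = parse('uri://user:pass@example.com:123/one/two.three?q1=a1&q2=a2#body');
// -> { scheme: 'uri', userinfo: 'user:pass', host: 'example.com', port: 123,
//      path: '/one/two.three', query: 'q1=a1&q2=a2', fragment: 'body', reference: 'uri' }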
-
-function _recomposeAuthority(components, options) {
- var protocol = options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL;
- var uriTokens = [];
- if (components.userinfo !== undefined) {
- uriTokens.push(components.userinfo);
- uriTokens.push("@");
- }
- if (components.host !== undefined) {
- //normalize IP hosts, add brackets and escape zone separator for IPv6
- uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, function (_, $1, $2) {
- return "[" + $1 + ($2 ? "%25" + $2 : "") + "]";
- }));
- }
- if (typeof components.port === "number") {
- uriTokens.push(":");
- uriTokens.push(components.port.toString(10));
- }
- return uriTokens.length ? uriTokens.join("") : undefined;
-}
-
-var RDS1 = /^\.\.?\//;
-var RDS2 = /^\/\.(\/|$)/;
-var RDS3 = /^\/\.\.(\/|$)/;
-var RDS5 = /^\/?(?:.|\n)*?(?=\/|$)/;
-function removeDotSegments(input) {
- var output = [];
- while (input.length) {
- if (input.match(RDS1)) {
- input = input.replace(RDS1, "");
- } else if (input.match(RDS2)) {
- input = input.replace(RDS2, "/");
- } else if (input.match(RDS3)) {
- input = input.replace(RDS3, "/");
- output.pop();
- } else if (input === "." || input === "..") {
- input = "";
- } else {
- var im = input.match(RDS5);
- if (im) {
- var s = im[0];
- input = input.slice(s.length);
- output.push(s);
- } else {
- throw new Error("Unexpected dot segment condition");
- }
- }
- }
- return output.join("");
-}
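// Editor-added illustrative sketch (not part of the original file): the
// remove_dot_segments algorithm of RFC 3986 section 5.2.4.
removeDotSegments('/a/b/c/./../../g'); // -> '/a/g'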
-
-function serialize(components) {
- var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- var protocol = options.iri ? IRI_PROTOCOL : URI_PROTOCOL;
- var uriTokens = [];
- //find scheme handler
- var schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()];
- //perform scheme specific serialization
- if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options);
- if (components.host) {
- //if host component is an IPv6 address
- if (protocol.IPV6ADDRESS.test(components.host)) {}
- //TODO: normalize IPv6 address as per RFC 5952
-
- //if host component is a domain name
- else if (options.domainHost || schemeHandler && schemeHandler.domainHost) {
- //convert IDN via punycode
- try {
- components.host = !options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host);
- } catch (e) {
- components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e;
- }
- }
- }
- //normalize encoding
- _normalizeComponentEncoding(components, protocol);
- if (options.reference !== "suffix" && components.scheme) {
- uriTokens.push(components.scheme);
- uriTokens.push(":");
- }
- var authority = _recomposeAuthority(components, options);
- if (authority !== undefined) {
- if (options.reference !== "suffix") {
- uriTokens.push("//");
- }
- uriTokens.push(authority);
- if (components.path && components.path.charAt(0) !== "/") {
- uriTokens.push("/");
- }
- }
- if (components.path !== undefined) {
- var s = components.path;
- if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) {
- s = removeDotSegments(s);
- }
- if (authority === undefined) {
- s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//"
- }
- uriTokens.push(s);
- }
- if (components.query !== undefined) {
- uriTokens.push("?");
- uriTokens.push(components.query);
- }
- if (components.fragment !== undefined) {
- uriTokens.push("#");
- uriTokens.push(components.fragment);
- }
- return uriTokens.join(""); //merge tokens into a string
-}
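// Editor-added illustrative sketch (not part of the original file), following
// the example documented for uri-js; the "http" scheme handler registered
// further down fills in the empty path.
serialize({ scheme: 'http', host: 'example.com', fragment: 'footer' });
// -> 'http://example.com/#footer'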
-
-function resolveComponents(base, relative) {
- var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
- var skipNormalization = arguments[3];
-
- var target = {};
- if (!skipNormalization) {
- base = parse(serialize(base, options), options); //normalize base components
- relative = parse(serialize(relative, options), options); //normalize relative components
- }
- options = options || {};
- if (!options.tolerant && relative.scheme) {
- target.scheme = relative.scheme;
- //target.authority = relative.authority;
- target.userinfo = relative.userinfo;
- target.host = relative.host;
- target.port = relative.port;
- target.path = removeDotSegments(relative.path || "");
- target.query = relative.query;
- } else {
- if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) {
- //target.authority = relative.authority;
- target.userinfo = relative.userinfo;
- target.host = relative.host;
- target.port = relative.port;
- target.path = removeDotSegments(relative.path || "");
- target.query = relative.query;
- } else {
- if (!relative.path) {
- target.path = base.path;
- if (relative.query !== undefined) {
- target.query = relative.query;
- } else {
- target.query = base.query;
- }
- } else {
- if (relative.path.charAt(0) === "/") {
- target.path = removeDotSegments(relative.path);
- } else {
- if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) {
- target.path = "/" + relative.path;
- } else if (!base.path) {
- target.path = relative.path;
- } else {
- target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path;
- }
- target.path = removeDotSegments(target.path);
- }
- target.query = relative.query;
- }
- //target.authority = base.authority;
- target.userinfo = base.userinfo;
- target.host = base.host;
- target.port = base.port;
- }
- target.scheme = base.scheme;
- }
- target.fragment = relative.fragment;
- return target;
-}
-
-function resolve(baseURI, relativeURI, options) {
- var schemelessOptions = assign({ scheme: 'null' }, options);
- return serialize(resolveComponents(parse(baseURI, schemelessOptions), parse(relativeURI, schemelessOptions), schemelessOptions, true), schemelessOptions);
-}
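// Editor-added illustrative sketch (not part of the original file): relative
// reference resolution as in RFC 3986 section 5.4.
resolve('uri://a/b/c/d?q', '../../g'); // -> 'uri://a/g'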
-
-function normalize(uri, options) {
- if (typeof uri === "string") {
- uri = serialize(parse(uri, options), options);
- } else if (typeOf(uri) === "object") {
- uri = parse(serialize(uri, options), options);
- }
- return uri;
-}
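// Editor-added illustrative sketch (not part of the original file), following
// the example documented for uri-js: case, unnecessary percent-encoding and
// (via the "http" handler registered further down) the default port are normalized.
normalize('HTTP://ABC.com:80/%7Esmith/home.html'); // -> 'http://abc.com/~smith/home.html'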
-
-function equal(uriA, uriB, options) {
- if (typeof uriA === "string") {
- uriA = serialize(parse(uriA, options), options);
- } else if (typeOf(uriA) === "object") {
- uriA = serialize(uriA, options);
- }
- if (typeof uriB === "string") {
- uriB = serialize(parse(uriB, options), options);
- } else if (typeOf(uriB) === "object") {
- uriB = serialize(uriB, options);
- }
- return uriA === uriB;
-}
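// Editor-added illustrative sketch (not part of the original file): both URIs
// are normalized before comparison (RFC 3986 section 6.2.2).
equal('example://a/b/c/%7Bfoo%7D', 'eXAMPLE://a/./b/../b/%63/%7bfoo%7d'); // -> true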
-
-function escapeComponent(str, options) {
- return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.ESCAPE : IRI_PROTOCOL.ESCAPE, pctEncChar);
-}
-
-function unescapeComponent(str, options) {
- return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.PCT_ENCODED : IRI_PROTOCOL.PCT_ENCODED, pctDecChars);
-}
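// Editor-added illustrative sketch (not part of the original file): escaping and
// unescaping a single URI component.
escapeComponent('a b'); // -> 'a%20b'
unescapeComponent('a%20b'); // -> 'a b'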
-
-var handler = {
- scheme: "http",
- domainHost: true,
- parse: function parse(components, options) {
- //report missing host
- if (!components.host) {
- components.error = components.error || "HTTP URIs must have a host.";
- }
- return components;
- },
- serialize: function serialize(components, options) {
- //normalize the default port
- if (components.port === (String(components.scheme).toLowerCase() !== "https" ? 80 : 443) || components.port === "") {
- components.port = undefined;
- }
- //normalize the empty path
- if (!components.path) {
- components.path = "/";
- }
- //NOTE: We do not parse query strings for HTTP URIs
- //as WWW Form Url Encoded query strings are part of the HTML4+ spec,
- //and not the HTTP spec.
- return components;
- }
-};
-
-var handler$1 = {
- scheme: "https",
- domainHost: handler.domainHost,
- parse: handler.parse,
- serialize: handler.serialize
-};
-
-var O = {};
-var isIRI = true;
-//RFC 3986
-var UNRESERVED$$ = "[A-Za-z0-9\\-\\.\\_\\~" + (isIRI ? "\\xA0-\\u200D\\u2010-\\u2029\\u202F-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF" : "") + "]";
-var HEXDIG$$ = "[0-9A-Fa-f]"; //case-insensitive
-var PCT_ENCODED$ = subexp(subexp("%[EFef]" + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$) + "|" + subexp("%[89A-Fa-f]" + HEXDIG$$ + "%" + HEXDIG$$ + HEXDIG$$) + "|" + subexp("%" + HEXDIG$$ + HEXDIG$$)); //expanded
-//RFC 5322, except these symbols as per RFC 6068: @ : / ? # [ ] & ; =
-//const ATEXT$$ = "[A-Za-z0-9\\!\\#\\$\\%\\&\\'\\*\\+\\-\\/\\=\\?\\^\\_\\`\\{\\|\\}\\~]";
-//const WSP$$ = "[\\x20\\x09]";
-//const OBS_QTEXT$$ = "[\\x01-\\x08\\x0B\\x0C\\x0E-\\x1F\\x7F]"; //(%d1-8 / %d11-12 / %d14-31 / %d127)
-//const QTEXT$$ = merge("[\\x21\\x23-\\x5B\\x5D-\\x7E]", OBS_QTEXT$$); //%d33 / %d35-91 / %d93-126 / obs-qtext
-//const VCHAR$$ = "[\\x21-\\x7E]";
-//const WSP$$ = "[\\x20\\x09]";
-//const OBS_QP$ = subexp("\\\\" + merge("[\\x00\\x0D\\x0A]", OBS_QTEXT$$)); //%d0 / CR / LF / obs-qtext
-//const FWS$ = subexp(subexp(WSP$$ + "*" + "\\x0D\\x0A") + "?" + WSP$$ + "+");
-//const QUOTED_PAIR$ = subexp(subexp("\\\\" + subexp(VCHAR$$ + "|" + WSP$$)) + "|" + OBS_QP$);
-//const QUOTED_STRING$ = subexp('\\"' + subexp(FWS$ + "?" + QCONTENT$) + "*" + FWS$ + "?" + '\\"');
-var ATEXT$$ = "[A-Za-z0-9\\!\\$\\%\\'\\*\\+\\-\\^\\_\\`\\{\\|\\}\\~]";
-var QTEXT$$ = "[\\!\\$\\%\\'\\(\\)\\*\\+\\,\\-\\.0-9\\<\\>A-Z\\x5E-\\x7E]";
-var VCHAR$$ = merge(QTEXT$$, "[\\\"\\\\]");
-var SOME_DELIMS$$ = "[\\!\\$\\'\\(\\)\\*\\+\\,\\;\\:\\@]";
-var UNRESERVED = new RegExp(UNRESERVED$$, "g");
-var PCT_ENCODED = new RegExp(PCT_ENCODED$, "g");
-var NOT_LOCAL_PART = new RegExp(merge("[^]", ATEXT$$, "[\\.]", '[\\"]', VCHAR$$), "g");
-var NOT_HFNAME = new RegExp(merge("[^]", UNRESERVED$$, SOME_DELIMS$$), "g");
-var NOT_HFVALUE = NOT_HFNAME;
-function decodeUnreserved(str) {
- var decStr = pctDecChars(str);
- return !decStr.match(UNRESERVED) ? str : decStr;
-}
-var handler$2 = {
- scheme: "mailto",
- parse: function parse$$1(components, options) {
- var mailtoComponents = components;
- var to = mailtoComponents.to = mailtoComponents.path ? mailtoComponents.path.split(",") : [];
- mailtoComponents.path = undefined;
- if (mailtoComponents.query) {
- var unknownHeaders = false;
- var headers = {};
- var hfields = mailtoComponents.query.split("&");
- for (var x = 0, xl = hfields.length; x < xl; ++x) {
- var hfield = hfields[x].split("=");
- switch (hfield[0]) {
- case "to":
- var toAddrs = hfield[1].split(",");
- for (var _x = 0, _xl = toAddrs.length; _x < _xl; ++_x) {
- to.push(toAddrs[_x]);
- }
- break;
- case "subject":
- mailtoComponents.subject = unescapeComponent(hfield[1], options);
- break;
- case "body":
- mailtoComponents.body = unescapeComponent(hfield[1], options);
- break;
- default:
- unknownHeaders = true;
- headers[unescapeComponent(hfield[0], options)] = unescapeComponent(hfield[1], options);
- break;
- }
- }
- if (unknownHeaders) mailtoComponents.headers = headers;
- }
- mailtoComponents.query = undefined;
- for (var _x2 = 0, _xl2 = to.length; _x2 < _xl2; ++_x2) {
- var addr = to[_x2].split("@");
- addr[0] = unescapeComponent(addr[0]);
- if (!options.unicodeSupport) {
- //convert Unicode IDN -> ASCII IDN
- try {
- addr[1] = punycode.toASCII(unescapeComponent(addr[1], options).toLowerCase());
- } catch (e) {
- mailtoComponents.error = mailtoComponents.error || "Email address's domain name can not be converted to ASCII via punycode: " + e;
- }
- } else {
- addr[1] = unescapeComponent(addr[1], options).toLowerCase();
- }
- to[_x2] = addr.join("@");
- }
- return mailtoComponents;
- },
- serialize: function serialize$$1(mailtoComponents, options) {
- var components = mailtoComponents;
- var to = toArray(mailtoComponents.to);
- if (to) {
- for (var x = 0, xl = to.length; x < xl; ++x) {
- var toAddr = String(to[x]);
- var atIdx = toAddr.lastIndexOf("@");
- var localPart = toAddr.slice(0, atIdx).replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_LOCAL_PART, pctEncChar);
- var domain = toAddr.slice(atIdx + 1);
- //convert IDN via punycode
- try {
- domain = !options.iri ? punycode.toASCII(unescapeComponent(domain, options).toLowerCase()) : punycode.toUnicode(domain);
- } catch (e) {
- components.error = components.error || "Email address's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e;
- }
- to[x] = localPart + "@" + domain;
- }
- components.path = to.join(",");
- }
- var headers = mailtoComponents.headers = mailtoComponents.headers || {};
- if (mailtoComponents.subject) headers["subject"] = mailtoComponents.subject;
- if (mailtoComponents.body) headers["body"] = mailtoComponents.body;
- var fields = [];
- for (var name in headers) {
- if (headers[name] !== O[name]) {
- fields.push(name.replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFNAME, pctEncChar) + "=" + headers[name].replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFVALUE, pctEncChar));
- }
- }
- if (fields.length) {
- components.query = fields.join("&");
- }
- return components;
- }
-};
-
-var URN_PARSE = /^([^\:]+)\:(.*)/;
-//RFC 2141
-var handler$3 = {
- scheme: "urn",
- parse: function parse$$1(components, options) {
- var matches = components.path && components.path.match(URN_PARSE);
- var urnComponents = components;
- if (matches) {
- var scheme = options.scheme || urnComponents.scheme || "urn";
- var nid = matches[1].toLowerCase();
- var nss = matches[2];
- var urnScheme = scheme + ":" + (options.nid || nid);
- var schemeHandler = SCHEMES[urnScheme];
- urnComponents.nid = nid;
- urnComponents.nss = nss;
- urnComponents.path = undefined;
- if (schemeHandler) {
- urnComponents = schemeHandler.parse(urnComponents, options);
- }
- } else {
- urnComponents.error = urnComponents.error || "URN can not be parsed.";
- }
- return urnComponents;
- },
- serialize: function serialize$$1(urnComponents, options) {
- var scheme = options.scheme || urnComponents.scheme || "urn";
- var nid = urnComponents.nid;
- var urnScheme = scheme + ":" + (options.nid || nid);
- var schemeHandler = SCHEMES[urnScheme];
- if (schemeHandler) {
- urnComponents = schemeHandler.serialize(urnComponents, options);
- }
- var uriComponents = urnComponents;
- var nss = urnComponents.nss;
- uriComponents.path = (nid || options.nid) + ":" + nss;
- return uriComponents;
- }
-};
-
-var UUID = /^[0-9A-Fa-f]{8}(?:\-[0-9A-Fa-f]{4}){3}\-[0-9A-Fa-f]{12}$/;
-//RFC 4122
-var handler$4 = {
- scheme: "urn:uuid",
- parse: function parse(urnComponents, options) {
- var uuidComponents = urnComponents;
- uuidComponents.uuid = uuidComponents.nss;
- uuidComponents.nss = undefined;
- if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {
- uuidComponents.error = uuidComponents.error || "UUID is not valid.";
- }
- return uuidComponents;
- },
- serialize: function serialize(uuidComponents, options) {
- var urnComponents = uuidComponents;
- //normalize UUID
- urnComponents.nss = (uuidComponents.uuid || "").toLowerCase();
- return urnComponents;
- }
-};
-
-SCHEMES[handler.scheme] = handler;
-SCHEMES[handler$1.scheme] = handler$1;
-SCHEMES[handler$2.scheme] = handler$2;
-SCHEMES[handler$3.scheme] = handler$3;
-SCHEMES[handler$4.scheme] = handler$4;
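// Editor-added illustrative sketch (not part of the original file): with the
// handlers registered above, scheme-specific parsing kicks in, e.g. the chained
// "urn" / "urn:uuid" handlers expose and validate the UUID.
parse('urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6');
// -> { scheme: 'urn', nid: 'uuid', uuid: 'f81d4fae-7dec-11d0-a765-00a0c91e6bf6', ... }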
-
-exports.SCHEMES = SCHEMES;
-exports.pctEncChar = pctEncChar;
-exports.pctDecChars = pctDecChars;
-exports.parse = parse;
-exports.removeDotSegments = removeDotSegments;
-exports.serialize = serialize;
-exports.resolveComponents = resolveComponents;
-exports.resolve = resolve;
-exports.normalize = normalize;
-exports.equal = equal;
-exports.escapeComponent = escapeComponent;
-exports.unescapeComponent = unescapeComponent;
-
-Object.defineProperty(exports, '__esModule', { value: true });
-
-})));
-
-
-},{}],"ajv":[function(require,module,exports){
-'use strict';
-
-var compileSchema = require('./compile')
- , resolve = require('./compile/resolve')
- , Cache = require('./cache')
- , SchemaObject = require('./compile/schema_obj')
- , stableStringify = require('fast-json-stable-stringify')
- , formats = require('./compile/formats')
- , rules = require('./compile/rules')
- , $dataMetaSchema = require('./data')
- , util = require('./compile/util');
-
-module.exports = Ajv;
-
-Ajv.prototype.validate = validate;
-Ajv.prototype.compile = compile;
-Ajv.prototype.addSchema = addSchema;
-Ajv.prototype.addMetaSchema = addMetaSchema;
-Ajv.prototype.validateSchema = validateSchema;
-Ajv.prototype.getSchema = getSchema;
-Ajv.prototype.removeSchema = removeSchema;
-Ajv.prototype.addFormat = addFormat;
-Ajv.prototype.errorsText = errorsText;
-
-Ajv.prototype._addSchema = _addSchema;
-Ajv.prototype._compile = _compile;
-
-Ajv.prototype.compileAsync = require('./compile/async');
-var customKeyword = require('./keyword');
-Ajv.prototype.addKeyword = customKeyword.add;
-Ajv.prototype.getKeyword = customKeyword.get;
-Ajv.prototype.removeKeyword = customKeyword.remove;
-Ajv.prototype.validateKeyword = customKeyword.validate;
-
-var errorClasses = require('./compile/error_classes');
-Ajv.ValidationError = errorClasses.Validation;
-Ajv.MissingRefError = errorClasses.MissingRef;
-Ajv.$dataMetaSchema = $dataMetaSchema;
-
-var META_SCHEMA_ID = 'http://json-schema.org/draft-07/schema';
-
-var META_IGNORE_OPTIONS = [ 'removeAdditional', 'useDefaults', 'coerceTypes', 'strictDefaults' ];
-var META_SUPPORT_DATA = ['/properties'];
-
-/**
- * Creates validator instance.
- * Usage: `Ajv(opts)`
- * @param {Object} opts optional options
- * @return {Object} ajv instance
- */
-function Ajv(opts) {
- if (!(this instanceof Ajv)) return new Ajv(opts);
- opts = this._opts = util.copy(opts) || {};
- setLogger(this);
- this._schemas = {};
- this._refs = {};
- this._fragments = {};
- this._formats = formats(opts.format);
-
- this._cache = opts.cache || new Cache;
- this._loadingSchemas = {};
- this._compilations = [];
- this.RULES = rules();
- this._getId = chooseGetId(opts);
-
- opts.loopRequired = opts.loopRequired || Infinity;
- if (opts.errorDataPath == 'property') opts._errorDataPathProperty = true;
- if (opts.serialize === undefined) opts.serialize = stableStringify;
- this._metaOpts = getMetaSchemaOptions(this);
-
- if (opts.formats) addInitialFormats(this);
- if (opts.keywords) addInitialKeywords(this);
- addDefaultMetaSchema(this);
- if (typeof opts.meta == 'object') this.addMetaSchema(opts.meta);
- if (opts.nullable) this.addKeyword('nullable', {metaSchema: {type: 'boolean'}});
- addInitialSchemas(this);
-}
-
-
-
-/**
- * Validate data using schema
- * Schema will be compiled and cached (using serialized JSON as key; [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used to serialize).
- * @this Ajv
- * @param {String|Object} schemaKeyRef key, ref or schema object
- * @param {Any} data to be validated
- * @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
- */
-function validate(schemaKeyRef, data) {
- var v;
- if (typeof schemaKeyRef == 'string') {
- v = this.getSchema(schemaKeyRef);
- if (!v) throw new Error('no schema with key or ref "' + schemaKeyRef + '"');
- } else {
- var schemaObj = this._addSchema(schemaKeyRef);
- v = schemaObj.validate || this._compile(schemaObj);
- }
-
- var valid = v(data);
- if (v.$async !== true) this.errors = v.errors;
- return valid;
-}
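// Editor-added illustrative sketch (not part of the original file): validating
// data against an inline schema; the variable name is hypothetical.
var ajvExample = new Ajv();
ajvExample.validate({ type: 'number' }, 5); // -> true
ajvExample.validate({ type: 'number' }, 'five'); // -> false, details in ajvExample.errors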
-
-
-/**
- * Create validating function for passed schema.
- * @this Ajv
- * @param {Object} schema schema object
- * @param {Boolean} _meta true if schema is a meta-schema. Used internally to compile meta schemas of custom keywords.
- * @return {Function} validating function
- */
-function compile(schema, _meta) {
- var schemaObj = this._addSchema(schema, undefined, _meta);
- return schemaObj.validate || this._compile(schemaObj);
-}
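// Editor-added illustrative sketch (not part of the original file): compile
// returns a standalone validating function; the variable name is hypothetical.
var validateStringExample = new Ajv().compile({ type: 'string' });
validateStringExample('abc'); // -> true
validateStringExample(123); // -> false, details in validateStringExample.errors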
-
-
-/**
- * Adds schema to the instance.
- * @this Ajv
- * @param {Object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
- * @param {String} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
- * @param {Boolean} _skipValidation true to skip schema validation. Used internally, option validateSchema should be used instead.
- * @param {Boolean} _meta true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.
- * @return {Ajv} this for method chaining
- */
-function addSchema(schema, key, _skipValidation, _meta) {
- if (Array.isArray(schema)){
- for (var i=0; i} errors optional array of validation errors, if not passed errors from the instance are used.
- * @param {Object} options optional options with properties `separator` and `dataVar`.
- * @return {String} human readable string with all errors descriptions
- */
-function errorsText(errors, options) {
- errors = errors || this.errors;
- if (!errors) return 'No errors';
- options = options || {};
- var separator = options.separator === undefined ? ', ' : options.separator;
- var dataVar = options.dataVar === undefined ? 'data' : options.dataVar;
-
- var text = '';
- for (var i=0; i%\\^`{|}]|%[0-9a-f]{2})|\{[+#./;?&=,!@|]?(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?(?:,(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?)*\})*$/i,u=/^(?:(?:http[s\u017F]?|ftp):\/\/)(?:(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+(?::(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])*)?@)?(?:(?!10(?:\.[0-9]{1,3}){3})(?!127(?:\.[0-9]{1,3}){3})(?!169\.254(?:\.[0-9]{1,3}){2})(?!192\.168(?:\.[0-9]{1,3}){2})(?!172\.(?:1[6-9]|2[0-9]|3[01])(?:\.[0-9]{1,3}){2})(?:[1-9][0-9]?|1[0-9][0-9]|2[01][0-9]|22[0-3])(?:\.(?:1?[0-9]{1,2}|2[0-4][0-9]|25[0-5])){2}(?:\.(?:[1-9][0-9]?|1[0-9][0-9]|2[0-4][0-9]|25[0-4]))|(?:(?:(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+-?)*(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+)(?:\.(?:(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+-?)*(?:[0-9KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])+)*(?:\.(?:(?:[KSa-z\xA1-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]){2,})))(?::[0-9]{2,5})?(?:\/(?:[\0-\x08\x0E-\x1F!-\x9F\xA1-\u167F\u1681-\u1FFF\u200B-\u2027\u202A-\u202E\u2030-\u205E\u2060-\u2FFF\u3001-\uD7FF\uE000-\uFEFE\uFF00-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF])*)?$/i,h=/^(?:urn:uuid:)?[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/i,d=/^(?:\/(?:[^~/]|~0|~1)*)*$/,f=/^#(?:\/(?:[a-z0-9_\-.!$&'()*+,;:=@]|%[0-9a-f]{2}|~0|~1)*)*$/i,p=/^(?:0|[1-9][0-9]*)(?:#|(?:\/(?:[^~/]|~0|~1)*)*)$/;function m(e){return a.copy(m[e="full"==e?"full":"fast"])}function v(e){var r=e.match(o);if(!r)return!1;var t,a=+r[2],s=+r[3];return 1<=a&&a<=12&&1<=s&&s<=(2!=a||((t=+r[1])%4!=0||t%100==0&&t%400!=0)?i[a]:29)}function y(e,r){var t=e.match(n);if(!t)return!1;var 
a=t[1],s=t[2],o=t[3];return(a<=23&&s<=59&&o<=59||23==a&&59==s&&60==o)&&(!r||t[5])}(r.exports=m).fast={date:/^\d\d\d\d-[0-1]\d-[0-3]\d$/,time:/^(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)?$/i,"date-time":/^\d\d\d\d-[0-1]\d-[0-3]\d[t\s](?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)$/i,uri:/^(?:[a-z][a-z0-9+-.]*:)(?:\/?\/)?[^\s]*$/i,"uri-reference":/^(?:(?:[a-z][a-z0-9+-.]*:)?\/?\/)?(?:[^\\\s#][^\s#]*)?(?:#[^\\\s]*)?$/i,"uri-template":c,url:u,email:/^[a-z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$/i,hostname:s,ipv4:/^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/,ipv6:/^\s*(?:(?:(?:[0-9a-f]{1,4}:){7}(?:[0-9a-f]{1,4}|:))|(?:(?:[0-9a-f]{1,4}:){6}(?::[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){5}(?:(?:(?::[0-9a-f]{1,4}){1,2})|:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){4}(?:(?:(?::[0-9a-f]{1,4}){1,3})|(?:(?::[0-9a-f]{1,4})?:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){3}(?:(?:(?::[0-9a-f]{1,4}){1,4})|(?:(?::[0-9a-f]{1,4}){0,2}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){2}(?:(?:(?::[0-9a-f]{1,4}){1,5})|(?:(?::[0-9a-f]{1,4}){0,3}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){1}(?:(?:(?::[0-9a-f]{1,4}){1,6})|(?:(?::[0-9a-f]{1,4}){0,4}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?::(?:(?:(?::[0-9a-f]{1,4}){1,7})|(?:(?::[0-9a-f]{1,4}){0,5}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(?:%.+)?\s*$/i,regex:w,uuid:h,"json-pointer":d,"json-pointer-uri-fragment":f,"relative-json-pointer":p},m.full={date:v,time:y,"date-time":function(e){var r=e.split(g);return 2==r.length&&v(r[0])&&y(r[1],!0)},uri:function(e){return 
P.test(e)&&l.test(e)},"uri-reference":/^(?:[a-z][a-z0-9+\-.]*:)?(?:\/?\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\.[a-z0-9\-._~!$&'()*+,;=:]+)\]|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)|(?:[a-z0-9\-._~!$&'"()*+,;=]|%[0-9a-f]{2})*)(?::\d*)?(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*|\/(?:(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*)?(?:\?(?:[a-z0-9\-._~!$&'"()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\-._~!$&'"()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i,"uri-template":c,url:u,email:/^[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i,hostname:s,ipv4:/^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/,ipv6:/^\s*(?:(?:(?:[0-9a-f]{1,4}:){7}(?:[0-9a-f]{1,4}|:))|(?:(?:[0-9a-f]{1,4}:){6}(?::[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){5}(?:(?:(?::[0-9a-f]{1,4}){1,2})|:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){4}(?:(?:(?::[0-9a-f]{1,4}){1,3})|(?:(?::[0-9a-f]{1,4})?:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){3}(?:(?:(?::[0-9a-f]{1,4}){1,4})|(?:(?::[0-9a-f]{1,4}){0,2}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){2}(?:(?:(?::[0-9a-f]{1,4}){1,5})|(?:(?::[0-9a-f]{1,4}){0,3}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){1}(?:(?:(?::[0-9a-f]{1,4}){1,6})|(?:(?::[0-9a-f]{1,4}){0,4}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?::(?:(?:(?::[0-9a-f]{1,4}){1,7})|(?:(?::[0-9a-f]{1,4}){0,5}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(?:%.+)?\s*$/i,regex:w,uuid:h,"json-pointer":d,"json-pointer-uri-fragment":f,"relative-json-pointer":p};var g=/t|\s/i;var P=/\/|:/;var E=/[^\\]\\Z/;function w(e){if(E.test(e))return!1;try{return new RegExp(e),!0}catch(e){return!1}}},{"./util":10}],5:[function(e,r,t){"use strict";var $=e("./resolve"),R=e("./util"),D=e("./error_classes"),j=e("fast-json-stable-stringify"),O=e("../dotjs/validate"),I=R.ucs2length,A=e("fast-deep-equal"),C=D.Validation;function k(e,c,u,r){var d=this,f=this._opts,h=[void 0],p={},l=[],t={},m=[],a={},v=[],s=function(e,r,t){var a=L.call(this,e,r,t);return 0<=a?{index:a,compiling:!0}:{index:a=this._compilations.length,compiling:!(this._compilations[a]={schema:e,root:r,baseId:t})}}.call(this,e,c=c||{schema:e,refVal:h,refs:p},r),o=this._compilations[s.index];if(s.compiling)return o.callValidate=P;var y=this._formats,g=this.RULES;try{var i=E(e,c,u,r);o.validate=i;var n=o.callValidate;return 
[… minified Ajv (JSON Schema validator) browser bundle, including its bundled keyword code generators and uri-js/punycode helpers — content of a deleted vendored JavaScript file, garbled in extraction …]
-declare namespace AjvErrors {
- class ValidationError extends Error {
- constructor(errors: Array<ajv.ErrorObject>);
-
- message: string;
- errors: Array<ajv.ErrorObject>;
- ajv: true;
- validation: true;
- }
-
- class MissingRefError extends Error {
- constructor(baseId: string, ref: string, message?: string);
- static message: (baseId: string, ref: string) => string;
-
- message: string;
- missingRef: string;
- missingSchema: string;
- }
-}
-
-declare namespace ajv {
- type ValidationError = AjvErrors.ValidationError;
-
- type MissingRefError = AjvErrors.MissingRefError;
-
- interface Ajv {
- /**
- * Validate data using schema
- * Schema will be compiled and cached (using serialized JSON as key, [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used to serialize by default).
- * @param {string|object|Boolean} schemaKeyRef key, ref or schema object
- * @param {Any} data to be validated
- * @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
- */
- validate(schemaKeyRef: object | string | boolean, data: any): boolean | PromiseLike<any>;
- /**
- * Create validating function for passed schema.
- * @param {object|Boolean} schema schema object
- * @return {Function} validating function
- */
- compile(schema: object | boolean): ValidateFunction;
- /**
- * Creates validating function for passed schema with asynchronous loading of missing schemas.
- * `loadSchema` option should be a function that accepts schema uri and node-style callback.
- * @this Ajv
- * @param {object|Boolean} schema schema object
- * @param {Boolean} meta optional true to compile meta-schema; this parameter can be skipped
- * @param {Function} callback optional node-style callback, it is always called with 2 parameters: error (or null) and validating function.
- * @return {PromiseLike<ValidateFunction>} validating function
- */
- compileAsync(schema: object | boolean, meta?: Boolean, callback?: (err: Error, validate: ValidateFunction) => any): PromiseLike<ValidateFunction>;
- /**
- * Adds schema to the instance.
- * @param {object|Array<object>} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
- * @param {string} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
- * @return {Ajv} this for method chaining
- */
- addSchema(schema: Array