Permalink
Find file Copy path
1068905 Jun 10, 2018
2 contributors

Users who have contributed to this file

@ktsaou @philwhineray
executable file 7961 lines (6686 sloc) 306 KB
#!/bin/bash
#
# update-ipsets - for FireHOL - A firewall for humans...
#
# Copyright
#
# Copyright (C) 2015-2017 Costa Tsaousis <costa@tsaousis.gr>
# Copyright (C) 2015-2017 Phil Whineray <phil@sanewall.org>
#
# License
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# See the file COPYING for details.
#
# What this program does:
#
# 1. It downloads a number of IP lists
# - respects network resource: it will download a file only if it has
# been changed on the server (IF_MODIFIED_SINCE)
# - it will not attempt to download a file too frequently
# (it has a maximum frequency per download URL embedded, so that
# even if a server does not support IF_MODIFIED_SINCE it will not
# download the IP list too frequently).
# - it will use compression when possible.
#
# 2. Once a file is downloaded, it will convert it either to
# an ip:hash or a net:hash ipset.
# It can convert:
# - text files
# - snort rules files
# - PIX rules files
# - XML files (like RSS feeds)
# - CSV files
# - compressed files (zip, gz, etc)
# - generally, anything that can be converted using shell commands
#
# 3. For all file types it can keep a history of the processed sets
# that can be merged with the new downloaded one, so that it can
# populate the generated set with all the IPs of the last X days.
#
# 4. For each set updated, it will:
# - save it to disk
# - update a kernel ipset, having the same name
#
# 5. It can commit all successfully updated files to a git repository.
# Just do 'git init' in $SYSCONFDIR/firehol/ipsets to enable it.
# If it is called with -g it will also push the committed changes
# to a remote git server (to have this done by cron, please set
# git to automatically push changes without human action).
#
# 6. It can compare ipsets and keep track of geomaping, history of size,
# age of IPs listed, retention policy, overlaps with other sets.
# To enable it, run it with -c.
#
# -----------------------------------------------------------------------------
#
# How to use it:
#
# This script depends on iprange, found also in firehol.
# It does not depend on firehol. You can use it without firehol.
#
# 1. Run this script. It will give you instructions on which
# IP lists are available and what to do to enable them.
# 2. Enable a few lists, following its instructions.
# 3. Run it again to update the lists.
# 4. Put it in a cron job to do the updates automatically.
# -----------------------------------------------------------------------------
READLINK_CMD=${READLINK_CMD:-readlink}
BASENAME_CMD=${BASENAME_CMD:-basename}
DIRNAME_CMD=${DIRNAME_CMD:-dirname}
function realdir {
	# Resolve symlinks on "$1" and print the physical directory that
	# contains the final target (similar to "dirname $(readlink -f ...)",
	# kept portable for systems where readlink lacks -f).
	local r="$1"; local t=$($READLINK_CMD "$r")
	while [ "$t" ]; do
		# FIX: quote the $(dirname ...) substitutions - unquoted they are
		# word-split and 'cd' fails for paths containing spaces
		r=$(cd "$($DIRNAME_CMD "$r")" && cd "$($DIRNAME_CMD "$t")" && pwd -P)/$($BASENAME_CMD "$t")
		t=$($READLINK_CMD "$r")
	done
	$DIRNAME_CMD "$r"
}
PROGRAM_FILE="$0"
# the real directory of this script (following symlinks), unless the
# caller overrides it via FIREHOL_OVERRIDE_PROGRAM_DIR
PROGRAM_DIR="${FIREHOL_OVERRIDE_PROGRAM_DIR:-$(realdir "$0")}"
PROGRAM_PWD="${PWD}"
# keep the original command line
declare -a PROGRAM_ORIGINAL_ARGS=("${@}")
# load the mandatory helper libraries installed next to this script
for functions_file in install.config functions.common
do
if [ -r "$PROGRAM_DIR/$functions_file" ]
then
source "$PROGRAM_DIR/$functions_file"
else
1>&2 echo "Cannot access $PROGRAM_DIR/$functions_file"
exit 1
fi
done
common_disable_localization || exit
common_public_umask || exit
# no-op defaults - firehol-defaults.conf may redefine them
marksreset() { :; }
markdef() { :; }
if [ -r "${FIREHOL_CONFIG_DIR}/firehol-defaults.conf" ]
then
source "${FIREHOL_CONFIG_DIR}/firehol-defaults.conf" || exit 1
fi
RUNNING_ON_TERMINAL=0
# '-nc' as the very first argument disables terminal color handling
if [ "z$1" = "z-nc" ]
then
shift
else
common_setup_terminal && RUNNING_ON_TERMINAL=1
fi
# lower our scheduling priority - this is a background maintenance job
$RENICE_CMD 10 $$ >/dev/null 2>/dev/null
# -----------------------------------------------------------------------------
# logging
error() {
	# Report a fatal problem: colored banner on stderr plus syslog entry.
	local tag="update-ipsets.sh[$$]"
	local banner="${COLOR_BGRED}${COLOR_WHITE}${COLOR_BOLD} ERROR ${COLOR_RESET}"
	echo >&2 -e "${banner}: ${@}"
	$LOGGER_CMD -p daemon.err -t "${tag}" "${@}"
}
warning() {
	# Report a non-fatal problem: colored banner on stderr plus syslog entry.
	local tag="update-ipsets.sh[$$]"
	local banner="${COLOR_BGYELLOW}${COLOR_BLACK}${COLOR_BOLD} WARNING ${COLOR_RESET}"
	echo >&2 -e "${banner}: ${@}"
	$LOGGER_CMD -p daemon.warning -t "${tag}" "${@}"
}
info() {
	# Log an informational message to stderr and to syslog.
	local tag="update-ipsets.sh[$$]"
	echo >&2 "${@}"
	$LOGGER_CMD -p daemon.info -t "${tag}" "${@}"
}
verbose() {
	# Print to stderr only when --verbose was given; keeps the
	# non-zero status of the test when not in verbose mode.
	test ${VERBOSE} -eq 1 && printf >&2 '%s\n' "${*}"
}
silent() {
	# Print to stderr unless --silent was given.
	test ${SILENT} -ne 1 && printf >&2 '%s\n' "${*}"
}
# column width of the aligned "<ipset>| " message prefix
print_ipset_indent=35
# a blank prefix line, used as a visual separator between ipsets
print_ipset_spacer="$(printf "%${print_ipset_indent}s| " "")"
# the ipset name a message was last printed for
print_ipset_last=
print_ipset_reset() {
# forget the last printed ipset, so the next message re-prints its name
print_ipset_last=
}
print_ipset_header() {
	# Print the aligned "<ipset>| " prefix for a per-ipset message.
	# A spacer line separates the messages of different ipsets.
	local name="${1}"
	if [ "${name}" = "${print_ipset_last}" ]
	then
		# continuation of the previous ipset - indent only
		printf >&2 "%${print_ipset_indent}s| " ""
		return
	fi
	[ ${SILENT} -ne 1 ] && echo >&2 "${print_ipset_spacer}"
	printf >&2 "${COLOR_GREEN}%${print_ipset_indent}s${COLOR_RESET}| " "${name}"
	print_ipset_last="${name}"
}
# Per-ipset message helpers. Each prints an aligned, labeled line on
# stderr (via print_ipset_header); most also log to syslog.
ipset_error() {
local ipset="${1}"
shift
print_ipset_header "${ipset}"
echo >&2 -e "${COLOR_BGRED}${COLOR_WHITE}${COLOR_BOLD} ERROR ${COLOR_RESET} ${@}"
# NOTE(review): logged at daemon.info, not daemon.err - confirm intended
$LOGGER_CMD -p daemon.info -t "update-ipsets.sh[$$]" "ERROR: ${ipset}: ${@}"
}
ipset_warning() {
local ipset="${1}"
shift
print_ipset_header "${ipset}"
echo >&2 -e "${COLOR_BGYELLOW}${COLOR_BLACK}${COLOR_BOLD} WARNING ${COLOR_RESET} ${@}"
$LOGGER_CMD -p daemon.info -t "update-ipsets.sh[$$]" "WARNING: ${ipset}: ${@}"
}
ipset_info() {
local ipset="${1}"
shift
print_ipset_header "${ipset}"
echo >&2 "${@}"
$LOGGER_CMD -p daemon.info -t "update-ipsets.sh[$$]" "INFO: ${ipset}: ${@}"
}
# the ipset file was saved to disk
ipset_saved() {
local ipset="${1}"
shift
print_ipset_header "${ipset}"
echo >&2 -e "${COLOR_BGGREEN}${COLOR_RED}${COLOR_BOLD} SAVED ${COLOR_RESET} ${@}"
$LOGGER_CMD -p daemon.info -t "update-ipsets.sh[$$]" "SAVED: ${ipset}: ${@}"
}
# the ipset was loaded into the kernel
ipset_loaded() {
local ipset="${1}"
shift
print_ipset_header "${ipset}"
echo >&2 -e "${COLOR_BGGREEN}${COLOR_BLACK}${COLOR_BOLD} LOADED ${COLOR_RESET} ${@}"
$LOGGER_CMD -p daemon.info -t "update-ipsets.sh[$$]" "LOADED: ${ipset}: ${@}"
}
# the downloaded file is identical to the previous one
ipset_same() {
local ipset="${1}"
shift
print_ipset_header "${ipset}"
echo >&2 -e "${COLOR_BGWHITE}${COLOR_BLACK}${COLOR_BOLD} SAME ${COLOR_RESET} ${@}"
$LOGGER_CMD -p daemon.info -t "update-ipsets.sh[$$]" "DOWNLOADED SAME: ${ipset}: ${@}"
}
# the server copy has not changed (not logged to syslog - too frequent)
ipset_notupdated() {
local ipset="${1}"
shift
print_ipset_header "${ipset}"
echo >&2 -e "${COLOR_BGWHITE}${COLOR_BLACK}${COLOR_BOLD} NOT UPDATED ${COLOR_RESET} ${@}"
# $LOGGER_CMD -p daemon.info -t "update-ipsets.sh[$$]" "NOT UPDATED: ${ipset}: ${@}"
}
# it is too soon to re-download (not logged to syslog - too frequent)
ipset_notyet() {
local ipset="${1}"
shift
print_ipset_header "${ipset}"
echo >&2 -e "${COLOR_BGWHITE}${COLOR_BLACK}${COLOR_BOLD} LATER ${COLOR_RESET} ${@}"
# $LOGGER_CMD -p daemon.info -t "update-ipsets.sh[$$]" "LATER: ${ipset}: ${@}"
}
# the ipset is not enabled; also show how to enable it (suppressed by --silent)
ipset_disabled() {
local ipset="${1}"
shift
if [ ${SILENT} -eq 0 ]
then
print_ipset_header "${ipset}"
echo >&2 -e "${COLOR_BGWHITE}${COLOR_BLACK}${COLOR_BOLD} DISABLED ${COLOR_RESET} ${@}"
print_ipset_header "${ipset}"
echo >&2 "To enable run: update-ipsets enable ${ipset}"
fi
}
# stderr-only message, suppressed by --silent
ipset_silent() {
local ipset="${1}"
shift
if [ ${SILENT} -eq 0 ]
then
print_ipset_header "${ipset}"
echo >&2 "${@}"
fi
}
# stderr-only message, shown only with --verbose
ipset_verbose() {
local ipset="${1}"
shift
if [ ${VERBOSE} -eq 1 ]
then
print_ipset_header "${ipset}"
echo >&2 "${@}"
fi
}
# -----------------------------------------------------------------------------
# find a working iprange command
HAVE_IPRANGE=${IPRANGE_CMD}
if [ ! -z "${IPRANGE_CMD}" ]
then
# --has-reduce probes that the installed iprange supports --reduce
${IPRANGE_CMD} --has-reduce 2>/dev/null || HAVE_IPRANGE=
fi
if [ -z "$HAVE_IPRANGE" ]
then
error "Cannot find a working iprange command. It should be part of FireHOL but it is not installed."
exit 1
fi
# -----------------------------------------------------------------------------
# CONFIGURATION
if [ "${UID}" = "0" -o -z "${UID}" ]
then
BASE_DIR="${BASE_DIR-${FIREHOL_CONFIG_DIR}/ipsets}"
CONFIG_FILE="${CONFIG_FILE-${FIREHOL_CONFIG_DIR}/update-ipsets.conf}"
RUN_PARENT_DIR="${RUN_PARENT_DIR-$LOCALSTATEDIR/run}"
CACHE_DIR="${CACHE_DIR-$LOCALSTATEDIR/cache/update-ipsets}"
LIB_DIR="${LIB_DIR-$LOCALSTATEDIR/lib/update-ipsets}"
IPSETS_APPLY=1
else
$MKDIR_CMD -p "${HOME}/.update-ipsets" || exit 1
BASE_DIR="${BASE_DIR-${HOME}/ipsets}"
CONFIG_FILE="${CONFIG_FILE-${HOME}/.update-ipsets/update-ipsets.conf}"
RUN_PARENT_DIR="${RUN_PARENT_DIR-${HOME}/.update-ipsets}"
CACHE_DIR="${CACHE_DIR-${HOME}/.update-ipsets/cache}"
LIB_DIR="${LIB_DIR-${HOME}/.update-ipsets/lib}"
IPSETS_APPLY=0
fi
# admin defined ipsets
ADMIN_SUPPLIED_IPSETS="${ADMIN_SUPPLIED_IPSETS-${FIREHOL_CONFIG_DIR}/ipsets.d}"
# distribution defined ipsets
DISTRIBUTION_SUPPLIED_IPSETS="${DISTRIBUTION_SUPPLIED_IPSETS-${FIREHOL_SHARE_DIR}/ipsets.d}"
# user defined ipsets
USER_SUPPLIED_IPSETS="${USER_SUPPLIED_IPSETS-${HOME}/.update-ipsets/ipsets.d}"
# where to keep the history files
HISTORY_DIR="${HISTORY_DIR-${BASE_DIR}/history}"
# where to keep the files we cannot process
# when empty, error files will be deleted
ERRORS_DIR="${ERRORS_DIR-${BASE_DIR}/errors}"
# where to keep the tmp files
# a subdirectory will be created as RUN_DIR
TMP_DIR="${TMP_DIR-/tmp}"
# options to be given to iprange for reducing netsets
IPSET_REDUCE_FACTOR=${IPSET_REDUCE_FACTOR-20}
IPSET_REDUCE_ENTRIES=${IPSET_REDUCE_ENTRIES-65536}
# how many entries the ipset charts should have
WEB_CHARTS_ENTRIES=${WEB_CHARTS_ENTRIES-500}
# if the .git directory is present, push it also
PUSH_TO_GIT=${PUSH_TO_GIT-0}
# when PUSH_TO_GIT is enabled, this controls if each
# ipset will get its own commit, or all files will be
# committed together
PUSH_TO_GIT_MERGED=${PUSH_TO_GIT_MERGED-1}
# additional options to add as the git commit/push lines
PUSH_TO_GIT_COMMIT_OPTIONS=""
PUSH_TO_GIT_PUSH_OPTIONS=""
# if we will also push github gh-pages
PUSH_TO_GIT_WEB=${PUSH_TO_GIT_WEB-${PUSH_TO_GIT}}
# the maximum time in seconds, to connect to the remote web server
MAX_CONNECT_TIME=${MAX_CONNECT_TIME-10}
# agent string to use when performing downloads
USER_AGENT="FireHOL-Update-Ipsets/3.0 (linux-gnu) https://iplists.firehol.org/"
# the maximum time in seconds any download may take
MAX_DOWNLOAD_TIME=${MAX_DOWNLOAD_TIME-300}
# ignore a few download failures
# if the download fails more than these consecutive times, the ipset will be
# penalized X times its failures (ie. MINUTES * ( FAILURES - the following number) )
IGNORE_REPEATING_DOWNLOAD_ERRORS=${IGNORE_REPEATING_DOWNLOAD_ERRORS-10}
# how many DNS queries to execute in parallel when resolving hostnames to IPs
# IMPORTANT: Increasing this too much and you are going to need A LOT of bandwidth!
# IMPORTANT: Giving a lot parallel requests to your name server will create a queue
# that will start filling up as time passes, possibly hitting a quota
# on the name server.
PARALLEL_DNS_QUERIES=${PARALLEL_DNS_QUERIES-10}
# where to put the CSV files for the web server
# if empty or does not exist, web files will not be generated
WEB_DIR=""
# how to chown web files
WEB_OWNER=""
# where is the web url to show info about each ipset
# the ipset name is appended to it
WEB_URL="http://iplists.firehol.org/?ipset="
# the path to copy downloaded files to, using ${WEB_OWNER} permissions
# if empty, do not copy them
WEB_DIR_FOR_IPSETS=""
# options for the web site
# the ipset name will be appended
LOCAL_COPY_URL="https://iplists.firehol.org/files/"
GITHUB_CHANGES_URL="https://github.com/firehol/blocklist-ipsets/commits/master/"
GITHUB_SETINFO="https://github.com/firehol/blocklist-ipsets/tree/master/"
# -----------------------------------------------------------------------------
# Command line parsing
# defaults for the command line switches
CLEANUP_OLD=0
ENABLE_ALL=0
IGNORE_LASTCHECKED=0
FORCE_WEB_REBUILD=0
REPROCESS_ALL=0
SILENT=0
VERBOSE=0
# ipsets named after the 'enable' / 'run' keywords
declare -a LISTS_TO_ENABLE=()
declare -A RUN_ONLY_THESE_IPSETS=()
usage() {
# print the command line help (the heredoc below is user-facing output
# and expands ${PROGRAM_FILE}, ${CONFIG_FILE}, ${BASE_DIR}, $VERSION)
$CAT_CMD <<EOFUSAGE
FireHOL update-ipsets $VERSION
(C) 2015 Costa Tsaousis
USAGE:
${PROGRAM_FILE} [options]
The above will execute an update on the configured ipsets
or
${PROGRAM_FILE} enable ipset1 ipset2 ipset3 ...
The above will only enable the given ipsets and exit
It does not validate that the ipsets exists.
options are:
-s
--silent log less than default
This will not report all the possible ipsets that
can be enabled.
-v
--verbose log more than default
This will produce more log, to see what the program
does (more like debugging info).
-f FILE
--config FILE the configuration file to use, the default is:
${CONFIG_FILE}
-i
--recheck Each ipset has a hardcoded refresh frequency.
When we check if it has been updated on the server
we may find that it has not.
update-ipsets.sh will then attempt to re-check
in half the original frequency.
When this option is given, update-ipsets.sh will
ignore that it has checked it before and attempt
to download all ipsets that have not been updated.
DO NOT ENABLE THIS OPTION WHEN RUNNING VIA CRON.
We have to respect the server resources of the
IP list maintainers' servers!
-g
--push-git In the base directory (default: ${BASE_DIR})
you can setup git (just cd to it and run 'git init').
Once update-ipsets.sh finds a git initialized, it
will automatically commit all ipset and netset files
to it.
This option enables an automatic 'git push' at the
end of all commits.
You have to set it up so that git will not ask for
credentials to do the push (normally this done by
using ssh in the git push URL and configuring the
ssh keys for automatic login - keep in mind that
if update-ipsets is running through cron, the user
that runs it has to have the ssh keys installed).
--enable-all Enable all the ipsets at once
This will also execute an update on them
-r
--rebuild Will re-process all ipsets, even the ones that have
not been updated.
This is required in cases of program updates that
need to trigger a full refresh of the generated
metadata (it only affects the web site).
--cleanup Will cleanup obsolete ipsets that are not
available anymore.
run ipset1 ipset2 ...
Will only process the given ipsets.
This parameter must be the last in command line, it
assumes all parameters after the keyword 'run' are
ipsets names.
EOFUSAGE
}
# parse the command line
while [ ! -z "${1}" ]
do
case "${1}" in
enable)
# everything after 'enable' is a list of ipsets to enable
shift
LISTS_TO_ENABLE=("${@}")
break
;;
run)
# everything after 'run' is a list of ipsets to process
shift
while [ ! -z "${1}" ]
do
RUN_ONLY_THESE_IPSETS[${1}]="${1}"
shift
done
break
;;
--cleanup) CLEANUP_OLD=1;;
--rebuild|-r) FORCE_WEB_REBUILD=1;;
--reprocess|-p) REPROCESS_ALL=1;;
--silent|-s) SILENT=1;;
--push-git|-g) PUSH_TO_GIT=1;;
--recheck|-i) IGNORE_LASTCHECKED=1;;
--compare|-c) ;; # obsolete
--verbose|-v) VERBOSE=1;;
--config|-f) CONFIG_FILE="${2}"; shift ;;
--enable-all) ENABLE_ALL=1;;
--help|-h) usage; exit 1 ;;
*) error "Unknown command line argument '${1}'".; exit 1 ;;
esac
shift
done
# the configuration file may override any of the defaults set above
if [ -f "${CONFIG_FILE}" ]
then
info "Loading configuration from ${CONFIG_FILE}"
source "${CONFIG_FILE}"
fi
# -----------------------------------------------------------------------------
# FIX DIRECTORIES
# validate all mandatory directory settings (possibly emptied by the
# user's configuration file) before doing any work
if [ -z "${BASE_DIR}" ]
then
error "BASE_DIR is unset. Set it in '${CONFIG_FILE}'."
exit 1
fi
if [ -z "${RUN_PARENT_DIR}" ]
then
error "RUN_PARENT_DIR is unset. Set it in '${CONFIG_FILE}'."
exit 1
fi
if [ ! -d "${RUN_PARENT_DIR}" ]
then
error "RUN_PARENT_DIR='${RUN_PARENT_DIR}' does not exist. Set it in '${CONFIG_FILE}'."
exit 1
fi
if [ -z "${LIB_DIR}" ]
then
error "LIB_DIR is unset. Probably you empty it in '${CONFIG_FILE}'. Please leave it set."
exit 1
fi
if [ -z "${CACHE_DIR}" ]
then
error "CACHE_DIR is unset. Probably you empty it in '${CONFIG_FILE}'. Please leave it set."
exit 1
fi
if [ -z "${TMP_DIR}" ]
then
error "TMP_DIR is unset. Set it in '${CONFIG_FILE}'."
exit 1
fi
if [ ! -d "${TMP_DIR}" ]
then
error "TMP_DIR='${TMP_DIR}' does not exist. Set it in '${CONFIG_FILE}'."
exit 1
fi
# WEB_DIR is optional - an invalid value only disables web updates
if [ -z "${WEB_DIR}" ]
then
WEB_DIR=
elif [ ! -d "${WEB_DIR}" ]
then
warning "WEB_DIR='${WEB_DIR}' is invalid. Disabling web site updates. Set WEB_DIR in '${CONFIG_FILE}' to enable it."
WEB_DIR=
fi
# create any missing working directories
for d in "${BASE_DIR}" "${HISTORY_DIR}" "${ERRORS_DIR}" "${CACHE_DIR}" "${LIB_DIR}"
do
[ -z "${d}" -o -d "${d}" ] && continue
$MKDIR_CMD -p "${d}" || exit 1
info "Created directory '${d}'."
done
cd "${BASE_DIR}" || exit 1
# -----------------------------------------------------------------------------
# if we are just enabling ipsets
if [ "${#LISTS_TO_ENABLE[@]}" -gt 0 ]
then
for x in "${LISTS_TO_ENABLE[@]}"
do
if [ -f "${BASE_DIR}/${x}.source" ]
then
warning "${x}: is already enabled"
else
info "${x}: Enabling ${x}..."
# an ipset is 'enabled' when its .source file exists; the very old
# timestamp forces a download on the next update run
$TOUCH_CMD -t 0001010000 "${BASE_DIR}/${x}.source" || exit 1
fi
done
exit 0
fi
ipset_shall_be_run() {
# Decide whether the given ipset should be processed in this run.
# Returns 0 to process it, 1 when disabled, 2 when not requested.
local ipset="${1}"
if [ ! -f "${BASE_DIR}/${ipset}.source" ]
then
if [ ${ENABLE_ALL} -eq 1 -a -z "${IPSET_TMP_DO_NOT_ENABLE_WITH_ALL[${ipset}]}" ]
then
ipset_silent "${ipset}" "Enabling due to --enable-all option."
# very old timestamp, to force an immediate download
$TOUCH_CMD -t 0001010000 "${BASE_DIR}/${ipset}.source" || return 1
else
ipset_disabled "${ipset}"
# cleanup the cache
[ ! -z "${IPSET_CHECKED_DATE[${ipset}]}" ] && cache_remove_ipset "${ipset}"
return 1
fi
fi
# when 'run ipset...' was given, process only the requested ipsets
if [ ${#RUN_ONLY_THESE_IPSETS[@]} -ne 0 -a -z "${RUN_ONLY_THESE_IPSETS[${ipset}]}" ]
then
ipset_verbose "${ipset}" "skipping - not requested"
return 2
fi
return 0
}
# -----------------------------------------------------------------------------
# Make sure we are the only process doing this job
# to ensure only one runs
UPDATE_IPSETS_LOCK_FILE="${RUN_PARENT_DIR}/update-ipsets.lock"
# NOTE(review): the function name is a typo of 'exclusive_lock' - kept
# unchanged because it may be referenced elsewhere in this script.
exlcusive_lock() {
# Take a non-blocking flock on fd 200; exit if another copy is running.
exec 200>"${UPDATE_IPSETS_LOCK_FILE}"
if [ $? -ne 0 ]; then exit; fi
${FLOCK_CMD} -n 200
if [ $? -ne 0 ]
then
echo >&2 "Already running. Try later..."
exit 1
fi
return 0
}
exlcusive_lock
# -----------------------------------------------------------------------------
# CLEANUP
# private temporary work directory for this run
RUN_DIR=$(${MKTEMP_CMD} -d "${TMP_DIR}/update-ipsets-XXXXXXXXXX")
if [ $? -ne 0 ]
then
error "ERROR: Cannot create temporary directory in ${TMP_DIR}."
exit 1
fi
cd "${RUN_DIR}"
PROGRAM_COMPLETED=0
cleanup() {
# Exit handler: persist the metadata cache, remove RUN_DIR and
# translate PROGRAM_COMPLETED into the process exit code.
# make sure the cache is saved
CACHE_SAVE_ENABLED=1
cache_save
cd "${TMP_DIR}"
if [ ! -z "${RUN_DIR}" -a -d "${RUN_DIR}" ]
then
verbose "Cleaning up temporary files in ${RUN_DIR}."
$RM_CMD -rf "${RUN_DIR}"
fi
# restore the default EXIT trap so the exits below do not recurse
trap exit EXIT
if [ ${PROGRAM_COMPLETED} -eq 1 ]
then
verbose "Completed successfully."
exit 0
fi
verbose "Completed with errors."
exit 1
}
trap cleanup EXIT
trap cleanup SIGHUP
trap cleanup INT
# -----------------------------------------------------------------------------
# other preparations
if [ ! -d "${BASE_DIR}/.git" -a ${PUSH_TO_GIT} -ne 0 ]
then
info "Git is not initialized in ${BASE_DIR}. Ignoring git support."
PUSH_TO_GIT=0
else
# git support also requires the git binary to be available
[ -z "${GIT_CMD}" ] && PUSH_TO_GIT=0
fi
# never commit .setinfo/.source files to the repository
[ -d "${BASE_DIR}/.git" -a ! -f "${BASE_DIR}/.gitignore" ] && printf "*.setinfo\n*.source\n" >"${BASE_DIR}/.gitignore"
# -----------------------------------------------------------------------------
# COMMON FUNCTIONS
# echo all the parameters, sorted
params_sort() {
	# Print each argument on its own line, sorted.
	[ "${#}" -eq 0 ] && return 0
	printf '%s\n' "${@}" | $SORT_CMD
}
# convert a number of minutes to a human readable text
mins_to_text() {
	# Convert a number of minutes to human readable text, like
	# "1 day 2 hours 3 mins " - prints "none" for empty/zero input.
	local total="${1}" days=0 hours=0 mins=0
	if [ -z "${total}" ] || [ $((total + 0)) -eq 0 ]
	then
		echo "none"
		return 0
	fi
	days=$((total / (24 * 60)))
	total=$((total - days * 24 * 60))
	hours=$((total / 60))
	mins=$((total - hours * 60))
	case ${days} in
		0) ;;
		1) printf "1 day " ;;
		*) printf "%d days " ${days} ;;
	esac
	case ${hours} in
		0) ;;
		1) printf "1 hour " ;;
		*) printf "%d hours " ${hours} ;;
	esac
	case ${mins} in
		0) ;;
		1) printf "1 min " ;;
		*) printf "%d mins " ${mins} ;;
	esac
	printf "\n"
	return 0
}
# directories and files that were updated during this run
declare -A UPDATED_DIRS=()
declare -A UPDATED_SETS=()
git_add_if_not_already_added() {
# Stage ${file} in the git repo at BASE_DIR, unless it is already tracked.
local file="${1}"
$GIT_CMD -C "${BASE_DIR}" ls-files "${file}" --error-unmatch >/dev/null 2>&1
if [ $? -ne 0 ]
then
# create it empty if needed, so git has something to add
[ ! -f "${BASE_DIR}/${file}" ] && $TOUCH_CMD "${BASE_DIR}/${file}"
verbose "Adding '${file}' to git"
$GIT_CMD -C "${BASE_DIR}" add "${file}"
return $?
fi
return 0
}
git_ignore_file() {
	# Ensure ${file} is listed in BASE_DIR/.gitignore (append if missing).
	local file="${1}"
	# FIX: match the literal, whole line (-F -x). The old regex
	# "^${file}$" let metacharacters in ${file} (e.g. '.') match
	# unrelated entries, suppressing a needed append. Also avoid the
	# error noise of reading a .gitignore that does not exist yet.
	if [ ! -f "${BASE_DIR}/.gitignore" ] || ! $GREP_CMD -F -x -- "${file}" "${BASE_DIR}/.gitignore" >/dev/null
	then
		echo "${file}" >>"${BASE_DIR}/.gitignore" || return 1
	fi
	return 0
}
# http://stackoverflow.com/questions/3046436/how-do-you-stop-tracking-a-remote-branch-in-git
# to delete a branch on git
# locally only - remote will not be affected
#
# BRANCH_TO_DELETE_LOCALY_ONLY="master"
# git branch -d -r origin/${BRANCH_TO_DELETE_LOCALY_ONLY}
# git config --unset branch.${BRANCH_TO_DELETE_LOCALY_ONLY}.remote
# git config --unset branch.${BRANCH_TO_DELETE_LOCALY_ONLY}.merge
# git gc --aggressive --prune=all --force
# per-run flags, keyed by ipset name (set by the ipset definitions)
declare -A IPSET_TMP_DO_NOT_REDISTRIBUTE=()
declare -A IPSET_TMP_ACCEPT_EMPTY=()
declare -A IPSET_TMP_NO_IF_MODIFIED_SINCE=()
declare -A IPSET_TMP_DO_NOT_ENABLE_WITH_ALL=()
commit_to_git() {
# Commit all updated ipset/netset files (plus regenerated per-directory
# README.md files and set_file_timestamps.sh) to the git repository in
# BASE_DIR, optionally pushing to the remote when PUSH_TO_GIT is set.
cd "${BASE_DIR}" || return 1
if [ -d .git -a ! -z "${!UPDATED_SETS[*]}" ]
then
local d=
# regenerate a README.md for every directory that received updates
for d in "${!UPDATED_DIRS[@]}"
do
[ ! -f ${d}/README-EDIT.md ] && $TOUCH_CMD ${d}/README-EDIT.md
(
$CAT_CMD ${d}/README-EDIT.md
echo
echo "The following list was automatically generated on `$DATE_CMD -u`."
echo
echo "The update frequency is the maximum allowed by internal configuration. A list will never be downloaded sooner than the update frequency stated. A list may also not be downloaded, after this frequency expired, if it has not been modified on the server (as reported by HTTP \`IF_MODIFIED_SINCE\` method)."
echo
echo "name|info|type|entries|update|"
echo ":--:|:--:|:--:|:-----:|:----:|"
$CAT_CMD ${d}/*.setinfo
) >${d}/README.md
UPDATED_SETS[${d}/README.md]="${d}/README.md"
git_add_if_not_already_added "${d}/README.md"
done
# collect the files to commit, honoring the no-redistribute flag
declare -a to_be_pushed=()
local ipset=
for ipset in "${!UPDATED_SETS[@]}"
do
[ ! -z "${IPSET_TMP_DO_NOT_REDISTRIBUTE[${ipset}]}" ] && continue
[ ! -f "${UPDATED_SETS[${ipset}]}" ] && continue
to_be_pushed=("${to_be_pushed[@]}" "${UPDATED_SETS[${ipset}]}")
done
# a helper script for mirrors, to restore file mtimes after a clone
info "Generating script to fix timestamps..."
(
echo "#!/bin/bash"
echo "[ ! \"\$1\" = \"YES_I_AM_SURE_DO_IT_PLEASE\" ] && echo \"READ ME NOW\" && exit 1"
for d in $(params_sort "${!IPSET_FILE[@]}")
do
echo "[ -f '${IPSET_FILE[${d}]}' ] && $TOUCH_CMD --date=@${IPSET_SOURCE_DATE[${d}]} '${IPSET_FILE[${d}]}'"
done
) | $SED_CMD "s|'${BASE_DIR}/|'|g" >set_file_timestamps.sh
git_add_if_not_already_added set_file_timestamps.sh
echo >&2
info "Committing ${to_be_pushed[@]} to git repository"
local date="$($DATE_CMD -u)"
if [ ${PUSH_TO_GIT_MERGED} -eq 0 ]
then
# we commit each file alone, to have a clear history per file in github
for d in "${to_be_pushed[@]}" set_file_timestamps.sh
do
echo "${d}..."
$GIT_CMD commit ${PUSH_TO_GIT_COMMIT_OPTIONS} "${d}" -m "${date} update"
done
else
# we commit all files together
$GIT_CMD commit ${PUSH_TO_GIT_COMMIT_OPTIONS} "${to_be_pushed[@]}" set_file_timestamps.sh -m "${date} update"
fi
if [ ${PUSH_TO_GIT} -ne 0 ]
then
echo >&2
info "Pushing git commits to remote server"
$GIT_CMD push ${PUSH_TO_GIT_PUSH_OPTIONS}
fi
fi
}
copy_ipsets_to_web() {
# Copy the updated (redistributable) set files into WEB_DIR_FOR_IPSETS,
# applying WEB_OWNER ownership; files are staged as .new then renamed,
# so web clients never see a partially copied file.
[ -z "${WEB_DIR_FOR_IPSETS}" -o ! -d "${WEB_DIR_FOR_IPSETS}" ] && return 0
local ipset= f= d=
for ipset in "${!UPDATED_SETS[@]}"
do
[ ! -z "${IPSET_TMP_DO_NOT_REDISTRIBUTE[${ipset}]}" ] && continue
[ ! -f "${UPDATED_SETS[${ipset}]}" ] && continue
# relative filename - may include a dir
f="${UPDATED_SETS[${ipset}]}"
# the leading directory component of ${f}, if any
d="${f/\/*/}"
[ "${d}" = "${f}" ] && d=
if [ ! -z "${d}" ]
then
echo >&2 "Creating directory ${WEB_DIR_FOR_IPSETS}/${d}"
${MKDIR_CMD} -p "${WEB_DIR_FOR_IPSETS}/${d}"
[ ! -z "${WEB_OWNER}" ] && ${CHOWN_CMD} "${WEB_OWNER}" "${WEB_DIR_FOR_IPSETS}/${d}"
fi
echo >&2 "Copying ${f} to ${WEB_DIR_FOR_IPSETS}/${f}"
${CP_CMD} "${f}" "${WEB_DIR_FOR_IPSETS}/${f}.new"
[ ! -z "${WEB_OWNER}" ] && ${CHOWN_CMD} "${WEB_OWNER}" "${WEB_DIR_FOR_IPSETS}/${f}.new"
${MV_CMD} "${WEB_DIR_FOR_IPSETS}/${f}.new" "${WEB_DIR_FOR_IPSETS}/${f}"
done
}
# touch a file to a relative date in the past
touch_in_the_past() {
	# Set the mtime of ${2} to ${1} minutes before now.
	local minutes="${1}" target="${2}"
	local epoch stamp
	epoch=$($DATE_CMD +%s)
	stamp=$($DATE_CMD -d @$((epoch - minutes * 60)) +"%y%m%d%H%M.%S")
	$TOUCH_CMD -t "${stamp}" "${target}"
}
touch_in_the_past $[7 * 24 * 60] "${RUN_DIR}/.warn_if_last_downloaded_before_this"
# get all the active ipsets in the system
ipset_list_names() {
# Print the names of all ipsets currently loaded in the kernel.
# No-op (success, no output) when not applying ipsets (non-root mode).
if [ ${IPSETS_APPLY} -eq 1 ]
then
# try the terse listing first; fall back to the full listing
( $IPSET_CMD --list -t || $IPSET_CMD --list ) | $GREP_CMD "^Name: " | $CUT_CMD -d ' ' -f 2
return $?
fi
return 0
}
echo
# log the start time and the command line of this run
echo "`$DATE_CMD`: ${0} ${*}"
echo
if [ ${IPSETS_APPLY} -eq 1 ]
then
# find the active ipsets
info "Getting list of active ipsets..."
declare -A sets=()
for x in $(ipset_list_names)
do
sets[$x]=1
done
silent "Found these ipsets active: ${!sets[@]}"
fi
# -----------------------------------------------------------------------------
# check if a file is too old
check_file_too_old() {
	# Warn when ${file} is older than the 7-day reference marker.
	# Returns 1 for a stale file, 0 otherwise (including a missing file).
	local ipset="${1}" file="${2}"
	if [ ! -f "${file}" ]
	then
		return 0
	fi
	if [ "${RUN_DIR}/.warn_if_last_downloaded_before_this" -nt "${file}" ]
	then
		ipset_warning "${ipset}" "DATA ARE TOO OLD!"
		return 1
	fi
	return 0
}
history_keep() {
# Store a snapshot of ${file} in the per-ipset history directory,
# named after the file's modification time (epoch seconds).
local ipset="${1}" file="${2}" slot=
slot="`$DATE_CMD -r "${file}" +%s`.set"
if [ ! -d "${HISTORY_DIR}/${ipset}" ]
then
$MKDIR_CMD "${HISTORY_DIR}/${ipset}" || return 2
$CHMOD_CMD 700 "${HISTORY_DIR}/${ipset}"
fi
# copy the new file to the history
# we use the binary format of iprange for fast operations later
$IPRANGE_CMD "${file}" --print-binary >"${HISTORY_DIR}/${ipset}/${slot}"
$TOUCH_CMD -r "${file}" "${HISTORY_DIR}/${ipset}/${slot}"
}
history_cleanup() {
	# Delete per-ipset history snapshots older than ${mins} minutes.
	# FIX: declare the loop variable local (it leaked as a global).
	local ipset="${1}" mins="${2}" x=
	# touch a reference file at now - ${mins}
	touch_in_the_past ${mins} "${RUN_DIR}/history.reference" || return 3
	for x in "${HISTORY_DIR}/${ipset}"/*.set
	do
		# FIX: when the directory is empty or missing, the glob stays
		# a literal pattern - skip it instead of running rm on it
		[ -f "${x}" ] || continue
		if [ ! "${x}" -nt "${RUN_DIR}/history.reference" ]
		then
			ipset_verbose "${ipset}" "deleting history file '${x}'"
			$RM_CMD "${x}"
		fi
	done
}
history_get() {
# Print the union of all history snapshots of ${ipset} that are
# newer than ${mins} minutes.
local ipset="${1}" mins="${2}" \
tmp= x=
# touch a reference file
touch_in_the_past ${mins} "${RUN_DIR}/history.reference" || return 3
# get all the history files, that are newer than our reference
${IPRANGE_CMD} --union-all $($FIND_CMD "${HISTORY_DIR}/${ipset}"/*.set -newer "${RUN_DIR}/history.reference")
$RM_CMD "${RUN_DIR}/history.reference"
return 0
}
# -----------------------------------------------------------------------------
# DOWNLOADERS
# RETURN
# 0 = SUCCESS
# 255 = NOT MODIFIED ON THE SERVER
# ANY OTHER = FAILED
# Fetch a url - the output file has the last modified timestamp of the server.
# On the next run, the file is downloaded only if it has changed on the server.
# last human readable status of a downloader, for logging
DOWNLOADER_MESSAGE=
DOWNLOADER_OK=0
DOWNLOADER_FAILED=1
DOWNLOADER_NOTMODIFIED=255
# the ipset currently being downloaded, for downloader_log
DOWNLOADER_IPSET=
downloader_log() {
	# Route a downloader message to the matching per-ipset logger,
	# based on the given severity.
	local severity="${1}" message="${2}"
	case "${severity}" in
		info) ipset_info "${DOWNLOADER_IPSET}" "${message}" ;;
		silent) ipset_silent "${DOWNLOADER_IPSET}" "${message}" ;;
		warning) ipset_warning "${DOWNLOADER_IPSET}" "${message}" ;;
		# FIX: 'error' was mistakenly routed to ipset_warning
		error) ipset_error "${DOWNLOADER_IPSET}" "${message}" ;;
		*) ipset_verbose "${DOWNLOADER_IPSET}" "${message}" ;;
	esac
}
copyfile() {
	# A "downloader" that copies a local file (given as the first
	# downloader option) to ${target}, preserving its timestamp.
	# Sets DOWNLOADER_MESSAGE; returns DOWNLOADER_OK / DOWNLOADER_FAILED.
	local target="${1}" reference="${2}" url="${3}"
	eval "local doptions=(${4})"
	local src="${doptions[0]}"
	if [ -n "${src}" ] && [ -f "${src}" ]
	then
		$CAT_CMD "${src}" >"${target}"
		$TOUCH_CMD -r "${src}" "${target}"
		DOWNLOADER_MESSAGE="copied file '${src}'"
		return ${DOWNLOADER_OK}
	fi
	DOWNLOADER_MESSAGE="file '${src}' is not found"
	return ${DOWNLOADER_FAILED}
}
geturl() {
# Download ${url} to ${file} with curl, using HTTP If-Modified-Since
# against the timestamp of ${reference}. Sets DOWNLOADER_MESSAGE and
# returns DOWNLOADER_OK / DOWNLOADER_NOTMODIFIED / DOWNLOADER_FAILED.
local file="${1}" reference="${2}" url="${3}" doptions=() ret= http_code= curl_opts=() message=
eval "local doptions=(${4})"
if [ -z "${reference}" -o ! -f "${reference}" ]
then
# no reference file - use a very old dummy so we download unconditionally
reference="${RUN_DIR}/geturl-reference"
$TOUCH_CMD -t 0001010000 "${reference}"
else
# copy the timestamp of the reference
# to our file - we need this to check it later
$TOUCH_CMD -r "${reference}" "${file}"
curl_opts+=("--time-cond" "${reference}")
fi
[ ${VERBOSE} -eq 0 ] && curl_opts+=("--silent")
downloader_log verbose "curl ${doptions} '${url}'"
# --write-out gives us the HTTP status; --remote-time keeps the
# server's Last-Modified timestamp on the output file
http_code=$( \
$CURL_CMD --connect-timeout ${MAX_CONNECT_TIME} --max-time ${MAX_DOWNLOAD_TIME} \
--retry 0 --fail --compressed --user-agent "${USER_AGENT}" \
"${curl_opts[@]}" \
--output "${file}" --remote-time \
--location --referer "http://iplists.firehol.org/" \
--write-out '%{http_code}' "${doptions[@]}" "${url}" \
)
ret=$?
# map the curl exit code to a downloader result
case "${ret}" in
0) if [ "${http_code}" = "304" -a ! "${file}" -nt "${reference}" ]
then
message="Not Modified"
ret=${DOWNLOADER_NOTMODIFIED}
else
message="OK"
ret=${DOWNLOADER_OK}
fi
;;
1) message="Unsupported Protocol"; ret=${DOWNLOADER_FAILED} ;;
2) message="Failed to initialize"; ret=${DOWNLOADER_FAILED} ;;
3) message="Malformed URL"; ret=${DOWNLOADER_FAILED} ;;
5) message="Can't resolve proxy"; ret=${DOWNLOADER_FAILED} ;;
6) message="Can't resolve host"; ret=${DOWNLOADER_FAILED} ;;
7) message="Failed to connect"; ret=${DOWNLOADER_FAILED} ;;
18) message="Partial Transfer"; ret=${DOWNLOADER_FAILED} ;;
22) message="HTTP Error"; ret=${DOWNLOADER_FAILED} ;;
23) message="Cannot write local file"; ret=${DOWNLOADER_FAILED} ;;
26) message="Read Error"; ret=${DOWNLOADER_FAILED} ;;
28) message="Timeout"; ret=${DOWNLOADER_FAILED} ;;
35) message="SSL Error"; ret=${DOWNLOADER_FAILED} ;;
47) message="Too many redirects"; ret=${DOWNLOADER_FAILED} ;;
52) message="Server did not reply anything"; ret=${DOWNLOADER_FAILED} ;;
55) message="Failed sending network data"; ret=${DOWNLOADER_FAILED} ;;
56) message="Failure in receiving network data"; ret=${DOWNLOADER_FAILED} ;;
61) message="Unrecognized transfer encoding"; ret=${DOWNLOADER_FAILED} ;;
*) message="Error ${ret} returned by curl"; ret=${DOWNLOADER_FAILED} ;;
esac
DOWNLOADER_MESSAGE="HTTP/${http_code} ${message}"
return ${ret}
}
# download a file if it has not been downloaded in the last $mins
DOWNLOAD_OK=0
DOWNLOAD_FAILED=1
DOWNLOAD_NOT_UPDATED=2
download_manager() {
# Decide whether ${ipset} is due for a download (frequency ${mins},
# adjusted by past failures), run its downloader and update
# ${BASE_DIR}/${ipset}.source only when the content actually changed.
# Returns DOWNLOAD_OK / DOWNLOAD_NOT_UPDATED / DOWNLOAD_FAILED.
local ipset="${1}" mins="${2}" url="${3}" \
st= ret= \
tmp= now="$($DATE_CMD +%s)" base= omins= detail= inc= fails= dt=
# make sure it is numeric
[ "$[mins + 0]" -lt 1 ] && mins=1
omins=${mins}
# add some time (1/100th), to make sure the source is updated
inc=$[ (mins + 50) / 100 ]
# if the download period is less than 30min, do not add anything
[ ${mins} -le 30 ] && inc=0
# if the added time is above 10min, make it 10min
[ ${inc} -gt 10 ] && inc=10
mins=$[mins + inc]
# make sure we have a proper time for last-checked
st=0
[ -f "${BASE_DIR}/${ipset}.source" ] && st="$($DATE_CMD -r "${BASE_DIR}/${ipset}.source" +%s)"
[ -z "${IPSET_CHECKED_DATE[${ipset}]}" ] && IPSET_CHECKED_DATE[${ipset}]=${st}
[ -z "${IPSET_CHECKED_DATE[${ipset}]}" ] && IPSET_CHECKED_DATE[${ipset}]=0
[ -z "${IPSET_DOWNLOAD_FAILURES[${ipset}]}" ] && IPSET_DOWNLOAD_FAILURES[${ipset}]=0
# number of consecutive failures so far
fails=${IPSET_DOWNLOAD_FAILURES[${ipset}]}
base=${IPSET_CHECKED_DATE[${ipset}]}
if [ ${IGNORE_LASTCHECKED} -eq 1 ]
then
# --recheck: measure from the source file time, forget past failures
base=${st}
fails=0
fi
dt=$[ now - base ]
detail="$[dt/60]/${mins} mins passed, will fetch in $[mins - (dt/60)] mins"
# repeated failures push the next attempt further away;
# a few failures (below the threshold) pull it closer
if [ ${fails} -gt ${IGNORE_REPEATING_DOWNLOAD_ERRORS} ]
then
mins=$[ mins * (fails - IGNORE_REPEATING_DOWNLOAD_ERRORS) ]
dt=$[ now - base ]
detail="$[dt/60]/${mins} mins passed, will fetch in $[mins - (dt/60)] mins"
ipset_silent "${ipset}" "${fails} fails so far, time increased from ${omins} to ${mins} mins"
elif [ ${fails} -gt 0 ]
then
mins=$[ (mins + 1) / 2 ]
dt=$[ now - base ]
detail="$[dt/60]/${mins} mins passed, will fetch in $[mins - (dt/60)] mins"
ipset_silent "${ipset}" "${fails} fails so far, time decreased from ${omins} to ${mins} mins"
fi
# echo >&2 "${ipset}: source:${st} processed:${IPSET_PROCESSED_DATE[${ipset}]} checked:${IPSET_CHECKED_DATE[${ipset}]}, fails:${IPSET_DOWNLOAD_FAILURES[${ipset}]}, mins:${omins}, dt:$[dt / 60]"
# if it is too soon, do nothing
if [ ${dt} -lt $[ mins * 60 ] ]
then
ipset_notyet "${ipset}" "${detail}"
return ${DOWNLOAD_NOT_UPDATED}
fi
# return ${DOWNLOAD_NOT_UPDATED}
IPSET_CHECKED_DATE[${ipset}]="${now}"
ipset_info "${ipset}" "$[dt/60]/${mins} mins passed, downloading..."
# download it
local reference="${BASE_DIR}/${ipset}.source"
# without a reference, geturl skips the If-Modified-Since condition
[ ! -z "${IPSET_TMP_NO_IF_MODIFIED_SINCE[${ipset}]}" ] && reference=""
if [ ${#url} -gt 55 ]
then
ipset_silent "${ipset}" "fetch: '$(printf '%-50.50s ... ' "${url}")'"
else
ipset_silent "${ipset}" "fetch: '${url}'"
fi
tmp=`$MKTEMP_CMD "${RUN_DIR}/download-${ipset}-XXXXXXXXXX"` || return ${DOWNLOAD_FAILED}
[ -z "${IPSET_DOWNLOADER[${ipset}]}" ] && IPSET_DOWNLOADER[${ipset}]="geturl"
DOWNLOADER_IPSET="${ipset}"
ipset_verbose "${ipset}" "running downloader '${IPSET_DOWNLOADER[${ipset}]}'"
"${IPSET_DOWNLOADER[${ipset}]}" "${tmp}" "${reference}" "${url}" "${IPSET_DOWNLOADER_OPTIONS[${ipset}]}"
ret=$?
ipset_info "${ipset}" "${DOWNLOADER_MESSAGE}"
# if the downloaded file is empty, but we don't accept empty files
if [ $ret -eq 0 -a ! -s "${tmp}" -a -z "${IPSET_TMP_ACCEPT_EMPTY[${ipset}]}" ]
then
ret=9999
ipset_silent "${ipset}" "downloaded file is empty"
fi
case $ret in
# DOWNLOADER_OK
0)
ipset_silent "${ipset}" "downloaded successfully"
IPSET_CHECKED_DATE[${ipset}]="$($DATE_CMD -r "${tmp}" +%s)"
IPSET_DOWNLOAD_FAILURES[${ipset}]=0
cache_save
;;
# DOWNLOADER_NOTMODIFIED
255)
IPSET_DOWNLOAD_FAILURES[${ipset}]=0
cache_save
ipset_notupdated "${ipset}" "file on server has not been updated yet"
$RM_CMD "${tmp}"
return ${DOWNLOAD_NOT_UPDATED}
;;
# DOWNLOADER_FAILED
*)
$RM_CMD "${tmp}"
IPSET_DOWNLOAD_FAILURES[${ipset}]=$(( fails + 1 ))
ipset_error "${ipset}" "failed - ${IPSET_DOWNLOAD_FAILURES[${ipset}]} consecutive failures so far."
cache_save
return ${DOWNLOAD_FAILED}
;;
esac
# without If-Modified-Since, use 'now' as the file's timestamp
[ ! -z "${IPSET_TMP_NO_IF_MODIFIED_SINCE[${ipset}]}" ] && $TOUCH_CMD "${tmp}"
# check if the downloaded file is the same with the last one
$DIFF_CMD -q "${BASE_DIR}/${ipset}.source" "${tmp}" >/dev/null 2>&1
if [ $? -eq 0 ]
then
# they are the same
ipset_same "${ipset}" "downloaded file is the same to the old one."
# copy the timestamp of the downloaded to our file
$TOUCH_CMD -r "${tmp}" "${BASE_DIR}/${ipset}.source"
$RM_CMD "${tmp}"
return ${DOWNLOAD_NOT_UPDATED}
fi
# move it to its place
ipset_silent "${ipset}" "saving downloaded file"
$MV_CMD "${tmp}" "${BASE_DIR}/${ipset}.source" || return ${DOWNLOAD_FAILED}
return ${DOWNLOAD_OK}
}
# -----------------------------------------------------------------------------
# keep a cache of the data about all completed ipsets
# source and processing state, keyed by ipset name
declare -A IPSET_INFO=()
declare -A IPSET_SOURCE=()
declare -A IPSET_URL=()
declare -A IPSET_FILE=()
declare -A IPSET_IPV=()
declare -A IPSET_HASH=()
declare -A IPSET_MINS=()
declare -A IPSET_HISTORY_MINS=()
declare -A IPSET_ENTRIES=()
declare -A IPSET_IPS=()
declare -A IPSET_SOURCE_DATE=()
declare -A IPSET_PROCESSED_DATE=()
declare -A IPSET_CHECKED_DATE=()
# descriptive metadata, used for the generated web site
declare -A IPSET_CATEGORY=()
declare -A IPSET_MAINTAINER=()
declare -A IPSET_MAINTAINER_URL=()
declare -A IPSET_LICENSE=()
declare -A IPSET_GRADE=()
declare -A IPSET_PROTECTION=()
declare -A IPSET_INTENDED_USE=()
declare -A IPSET_FALSE_POSITIVES=()
declare -A IPSET_POISONING=()
declare -A IPSET_SERVICES=()
# statistics collected across runs
declare -A IPSET_ENTRIES_MIN=()
declare -A IPSET_ENTRIES_MAX=()
declare -A IPSET_IPS_MIN=()
declare -A IPSET_IPS_MAX=()
declare -A IPSET_STARTED_DATE=()
declare -A IPSET_CLOCK_SKEW=()
declare -A IPSET_DOWNLOAD_FAILURES=()
declare -A IPSET_VERSION=()
declare -A IPSET_AVERAGE_UPDATE_TIME=()
declare -A IPSET_MIN_UPDATE_TIME=()
declare -A IPSET_MAX_UPDATE_TIME=()
# per-ipset downloader function and its options
declare -A IPSET_DOWNLOADER=()
declare -A IPSET_DOWNLOADER_OPTIONS=()
# TODO - FIXME
#declare -A IPSET_PREFIXES=()
CACHE_SAVE_ENABLED=1
cache_save() {
	# Persist all IPSET_* metadata arrays to "${BASE_DIR}/.cache" as
	# re-sourceable bash (declare -p output), keeping the previous
	# cache as .cache.old and replacing the live file atomically.
	# Honors the CACHE_SAVE_ENABLED kill switch.
	[ ${CACHE_SAVE_ENABLED} -eq 0 ] && return 0

	local tmpfile="${BASE_DIR}/.cache.new.$$"
	local -a cached_arrays=(
		IPSET_INFO IPSET_SOURCE IPSET_URL IPSET_FILE IPSET_IPV IPSET_HASH
		IPSET_MINS IPSET_HISTORY_MINS IPSET_ENTRIES IPSET_IPS
		IPSET_SOURCE_DATE IPSET_CHECKED_DATE IPSET_PROCESSED_DATE
		IPSET_CATEGORY IPSET_MAINTAINER IPSET_MAINTAINER_URL
		IPSET_LICENSE IPSET_GRADE IPSET_PROTECTION IPSET_INTENDED_USE
		IPSET_FALSE_POSITIVES IPSET_POISONING IPSET_SERVICES
		IPSET_ENTRIES_MIN IPSET_ENTRIES_MAX IPSET_IPS_MIN IPSET_IPS_MAX
		IPSET_STARTED_DATE IPSET_CLOCK_SKEW IPSET_DOWNLOAD_FAILURES
		IPSET_VERSION IPSET_AVERAGE_UPDATE_TIME IPSET_MIN_UPDATE_TIME
		IPSET_MAX_UPDATE_TIME IPSET_DOWNLOADER IPSET_DOWNLOADER_OPTIONS
	)

	# dump every array as re-sourceable bash into a temporary file
	declare -p "${cached_arrays[@]}" >"${tmpfile}"

	# keep the previous cache around, then atomically replace it
	[ -f "${BASE_DIR}/.cache" ] && $CP_CMD "${BASE_DIR}/.cache" "${BASE_DIR}/.cache.old"
	$MV_CMD "${tmpfile}" "${BASE_DIR}/.cache" || exit 1
}
# if a cache from a previous run exists, load it so that all the
# IPSET_* metadata arrays declared above are restored
if [ -f "${BASE_DIR}/.cache" ]
then
verbose "Loading cache file: ${BASE_DIR}/.cache"
source "${BASE_DIR}/.cache"
fi
cache_save_metadata_backup() {
	# Write a metadata snapshot of one ipset to ${LIB_DIR}/${ipset}/metadata,
	# as re-sourceable "ARRAY[name]=value" bash assignments, one per line.
	#
	# ${1} - the ipset name (the key into all IPSET_* arrays)
	local ipset="${1}" array= ref=

	# field order matches cache_save(), so the snapshot stays
	# comparable with the global cache file
	local -a arrays=(
		IPSET_INFO
		IPSET_SOURCE
		IPSET_URL
		IPSET_FILE
		IPSET_IPV
		IPSET_HASH
		IPSET_MINS
		IPSET_HISTORY_MINS
		IPSET_ENTRIES
		IPSET_IPS
		IPSET_SOURCE_DATE
		IPSET_CHECKED_DATE
		IPSET_PROCESSED_DATE
		IPSET_CATEGORY
		IPSET_MAINTAINER
		IPSET_MAINTAINER_URL
		IPSET_LICENSE
		IPSET_GRADE
		IPSET_PROTECTION
		IPSET_INTENDED_USE
		IPSET_FALSE_POSITIVES
		IPSET_POISONING
		IPSET_SERVICES
		IPSET_ENTRIES_MIN
		IPSET_ENTRIES_MAX
		IPSET_IPS_MIN
		IPSET_IPS_MAX
		IPSET_STARTED_DATE
		IPSET_CLOCK_SKEW
		IPSET_DOWNLOAD_FAILURES
		IPSET_VERSION
		IPSET_AVERAGE_UPDATE_TIME
		IPSET_MIN_UPDATE_TIME
		IPSET_MAX_UPDATE_TIME
		IPSET_DOWNLOADER
		IPSET_DOWNLOADER_OPTIONS
	)

	ipset_verbose "${ipset}" "saving metadata backup"

	# FIX: the previous implementation expanded ${ipset} inside the printf
	# FORMAT string, so a name containing printf specials (% or \) would
	# corrupt the snapshot (SC2059). The name and each value are now
	# passed as printf *arguments*; ${!ref} indirectly expands the
	# "ARRAY[key]" element. Output is unchanged for ordinary names.
	for array in "${arrays[@]}"
	do
		ref="${array}[${ipset}]"
		printf "%s[%s]=%q\n" "${array}" "${ipset}" "${!ref}"
	done >"${LIB_DIR}/${ipset}/metadata"
}
cache_remove_ipset() {
	# Remove every cached attribute of the given ipset from the IPSET_*
	# arrays and persist the cache again via cache_save().
	#
	# ${1} - the ipset name to remove
	local ipset="${1}" array=

	# same set of arrays that cache_save() persists
	local -a arrays=(
		IPSET_INFO
		IPSET_SOURCE
		IPSET_URL
		IPSET_FILE
		IPSET_IPV
		IPSET_HASH
		IPSET_MINS
		IPSET_HISTORY_MINS
		IPSET_ENTRIES
		IPSET_IPS
		IPSET_SOURCE_DATE
		IPSET_CHECKED_DATE
		IPSET_PROCESSED_DATE
		IPSET_CATEGORY
		IPSET_MAINTAINER
		IPSET_MAINTAINER_URL
		IPSET_LICENSE
		IPSET_GRADE
		IPSET_PROTECTION
		IPSET_INTENDED_USE
		IPSET_FALSE_POSITIVES
		IPSET_POISONING
		IPSET_SERVICES
		IPSET_ENTRIES_MIN
		IPSET_ENTRIES_MAX
		IPSET_IPS_MIN
		IPSET_IPS_MAX
		IPSET_STARTED_DATE
		IPSET_CLOCK_SKEW
		IPSET_DOWNLOAD_FAILURES
		IPSET_VERSION
		IPSET_AVERAGE_UPDATE_TIME
		IPSET_MIN_UPDATE_TIME
		IPSET_MAX_UPDATE_TIME
		IPSET_DOWNLOADER
		IPSET_DOWNLOADER_OPTIONS
	)

	ipset_verbose "${ipset}" "removing from cache"

	for array in "${arrays[@]}"
	do
		# FIX: the subscript is quoted so the "ARRAY[name]" argument is
		# not subject to pathname expansion (SC2184) - unquoted, a file
		# matching the pattern in the current directory would make
		# unset operate on the wrong name
		unset "${array}[${ipset}]"
	done

	cache_save
}
ipset_services_to_json_array() {
	# Print the arguments as the elements of a JSON string array:
	#   ipset_services_to_json_array http smtp  ->  "http", "smtp"
	# (no output at all when called without arguments)
	local item= separator=
	for item in "${@}"
	do
		printf "%s\"%s\"" "${separator}" "${item}"
		separator=", "
	done
}
ipset_normalize_for_json() {
	# Fill in any missing per-ipset fields with sensible defaults, so the
	# JSON generators can rely on every numeric field being present.
	#
	# ${1} - the ipset name
	local ipset="${1}"

	ipset_verbose "${ipset}" "normalizing data..."

	# ${element:=default} assigns when the element is unset OR empty -
	# exactly the same condition as a "[ -z ... ] &&" chain.
	: "${IPSET_ENTRIES_MIN[${ipset}]:=${IPSET_ENTRIES[${ipset}]}}"
	: "${IPSET_ENTRIES_MAX[${ipset}]:=${IPSET_ENTRIES[${ipset}]}}"
	: "${IPSET_IPS_MIN[${ipset}]:=${IPSET_IPS[${ipset}]}}"
	: "${IPSET_IPS_MAX[${ipset}]:=${IPSET_IPS[${ipset}]}}"
	: "${IPSET_STARTED_DATE[${ipset}]:=${IPSET_SOURCE_DATE[${ipset}]}}"
	: "${IPSET_PROCESSED_DATE[${ipset}]:=${IPSET_SOURCE_DATE[${ipset}]}}"
	: "${IPSET_CHECKED_DATE[${ipset}]:=${IPSET_PROCESSED_DATE[${ipset}]}}"
	: "${IPSET_CLOCK_SKEW[${ipset}]:=0}"
	: "${IPSET_DOWNLOAD_FAILURES[${ipset}]:=0}"
	: "${IPSET_VERSION[${ipset}]:=0}"
	: "${IPSET_AVERAGE_UPDATE_TIME[${ipset}]:=${IPSET_MINS[${ipset}]}}"
	: "${IPSET_MIN_UPDATE_TIME[${ipset}]:=${IPSET_AVERAGE_UPDATE_TIME[${ipset}]}}"
	: "${IPSET_MAX_UPDATE_TIME[${ipset}]:=${IPSET_AVERAGE_UPDATE_TIME[${ipset}]}}"
}
# Print a complete JSON object describing one ipset on stdout.
# Used to build the per-ipset JSON file of the generated web site.
# Missing numeric fields are first defaulted by ipset_normalize_for_json().
ipset_json() {
local ipset="${1}" geolite2= ipdeny= ip2location= ipip= comparison= info=
# pick up the auxiliary JSON files generated earlier in this run, if any
[ -f "${RUN_DIR}/${ipset}_geolite2_country.json" ] && geolite2="${ipset}_geolite2_country.json"
[ -f "${RUN_DIR}/${ipset}_ipdeny_country.json" ] && ipdeny="${ipset}_ipdeny_country.json"
[ -f "${RUN_DIR}/${ipset}_ip2location_country.json" ] && ip2location="${ipset}_ip2location_country.json"
[ -f "${RUN_DIR}/${ipset}_ipip_country.json" ] && ipip="${ipset}_ipip_country.json"
[ -f "${RUN_DIR}/${ipset}_comparison.json" ] && comparison="${ipset}_comparison.json"
info="${IPSET_INFO[${ipset}]}"
# convert markdown-style [text](url) links into HTML anchors, then
# flatten newlines/tabs to spaces
# NOTE(review): the first sed splits at every ")" - assumes at most one
# link per resulting segment; verify with multi-link descriptions
info="$(echo "${info}" | $SED_CMD "s/)/)\n/g" | $SED_CMD "s|\[\(.*\)\](\(.*\))|<a href=\"\2\">\1</a>|g" | $TR_CMD "\n\t" " ")"
# escape double quotes so the text can be embedded in a JSON string
info="${info//\"/\\\"}"
local file_local= commit_history= url=
# only redistributable sets get a source URL, a local copy link
# and a github commit-history link
if [ -z "${IPSET_TMP_DO_NOT_REDISTRIBUTE[${ipset}]}" ]
then
url="${IPSET_URL[${ipset}]}"
file_local="${LOCAL_COPY_URL}${IPSET_FILE[${ipset}]}"
commit_history="${GITHUB_CHANGES_URL}${IPSET_FILE[${ipset}]}"
fi
ipset_normalize_for_json "${ipset}"
ipset_verbose "${ipset}" "generating JSON info..."
# timestamps are emitted in milliseconds (epoch seconds with "000" appended)
$CAT_CMD <<EOFJSON
{
"name": "${ipset}",
"entries": ${IPSET_ENTRIES[${ipset}]},
"entries_min": ${IPSET_ENTRIES_MIN[${ipset}]},
"entries_max": ${IPSET_ENTRIES_MAX[${ipset}]},
"ips": ${IPSET_IPS[${ipset}]},
"ips_min": ${IPSET_IPS_MIN[${ipset}]},
"ips_max": ${IPSET_IPS_MAX[${ipset}]},
"ipv": "${IPSET_IPV[${ipset}]}",
"hash": "${IPSET_HASH[${ipset}]}",
"frequency": ${IPSET_MINS[${ipset}]},
"aggregation": ${IPSET_HISTORY_MINS[${ipset}]},
"started": ${IPSET_STARTED_DATE[${ipset}]}000,
"updated": ${IPSET_SOURCE_DATE[${ipset}]}000,
"processed": ${IPSET_PROCESSED_DATE[${ipset}]}000,
"checked": ${IPSET_CHECKED_DATE[${ipset}]}000,
"clock_skew": $[ IPSET_CLOCK_SKEW[${ipset}] * 1000 ],
"category": "${IPSET_CATEGORY[${ipset}]}",
"maintainer": "${IPSET_MAINTAINER[${ipset}]}",
"maintainer_url": "${IPSET_MAINTAINER_URL[${ipset}]}",
"info": "${info}",
"source": "${url}",
"file": "${IPSET_FILE[${ipset}]}",
"history": "${ipset}_history.csv",
"geolite2": "${geolite2}",
"ipdeny": "${ipdeny}",
"ip2location": "${ip2location}",
"ipip": "${ipip}",
"comparison": "${comparison}",
"file_local": "${file_local}",
"commit_history": "${commit_history}",
"license": "${IPSET_LICENSE[${ipset}]}",
"grade": "${IPSET_GRADE[${ipset}]}",
"protection": "${IPSET_PROTECTION[${ipset}]}",
"intended_use": "${IPSET_INTENDED_USE[${ipset}]}",
"false_positives": "${IPSET_FALSE_POSITIVES[${ipset}]}",
"poisoning": "${IPSET_POISONING[${ipset}]}",
"services": [ $(ipset_services_to_json_array ${IPSET_SERVICES[${ipset}]}) ],
"errors": ${IPSET_DOWNLOAD_FAILURES[${ipset}]},
"version": ${IPSET_VERSION[${ipset}]},
"average_update": ${IPSET_AVERAGE_UPDATE_TIME[${ipset}]},
"min_update": ${IPSET_MIN_UPDATE_TIME[${ipset}]},
"max_update": ${IPSET_MAX_UPDATE_TIME[${ipset}]},
"downloader": "${IPSET_DOWNLOADER[${ipset}]}"
}
EOFJSON
}
# Print a compact JSON index record for one ipset on stdout.
# These records are concatenated into all-ipsets.json by update_web().
ipset_json_index() {
local ipset="${1}" checked=
ipset_normalize_for_json "${ipset}"
# report the most recent of the checked/processed timestamps
checked=${IPSET_CHECKED_DATE[${ipset}]}
[ ${IPSET_CHECKED_DATE[${ipset}]} -lt ${IPSET_PROCESSED_DATE[${ipset}]} ] && checked=${IPSET_PROCESSED_DATE[${ipset}]}
ipset_verbose "${ipset}" "generating JSON index..."
$CAT_CMD <<EOFALL
{
"ipset": "${ipset}",
"category": "${IPSET_CATEGORY[${ipset}]}",
"maintainer": "${IPSET_MAINTAINER[${ipset}]}",
"started": ${IPSET_STARTED_DATE[${ipset}]}000,
"updated": ${IPSET_SOURCE_DATE[${ipset}]}000,
"checked": ${checked}000,
"clock_skew": $[ IPSET_CLOCK_SKEW[${ipset}] * 1000 ],
"ips": ${IPSET_IPS[${ipset}]},
"errors": ${IPSET_DOWNLOAD_FAILURES[${ipset}]}
EOFALL
# no trailing newline - the caller appends either "," or the closing "]"
printf " }"
}
# array to store hourly retention of past IPs
# (index: age in hours, value: number of unique IPs removed at that age)
declare -a RETENTION_HISTOGRAM=()
# array to store hourly age of currently listed IPs
# (index: age in hours, value: number of unique IPs still listed at that age)
declare -a RETENTION_HISTOGRAM_REST=()
# the timestamp we started monitoring this ipset
declare RETENTION_HISTOGRAM_STARTED=
# if set to 0, the ipset has been completely refreshed
# i.e. all IPs have been removed / recycled at least once
declare RETENTION_HISTOGRAM_INCOMPLETE=1
# should only be called from retention_detect()
# because it needs the RETENTION_HISTOGRAM array loaded
retention_print() {
	# Emit the retention histogram of an ipset as JSON on stdout:
	# a "past" section (ages of removed IPs) and a "current" section
	# (ages of the IPs still listed).
	#
	# ${1} - the ipset name
	local ipset="${1}"

	# FIX: all values (including the ipset name) are passed to printf as
	# arguments instead of being embedded in the format string, so names
	# containing printf specials (%, \) cannot corrupt the JSON (SC2059).
	printf '{\n "ipset": "%s",\n "started": %s000,\n "updated": %s000,\n "incomplete": %s,\n' \
		"${ipset}" \
		"${RETENTION_HISTOGRAM_STARTED}" \
		"${IPSET_SOURCE_DATE[${ipset}]}" \
		"${RETENTION_HISTOGRAM_INCOMPLETE}"

	ipset_verbose "${ipset}" "calculating retention hours..."
	local x= hours= ips= sum=0 pad="\n\t\t\t"
	for x in "${!RETENTION_HISTOGRAM[@]}"
	do
		(( sum += ${RETENTION_HISTOGRAM[${x}]} ))
		hours="${hours}${pad}${x}"
		ips="${ips}${pad}${RETENTION_HISTOGRAM[${x}]}"
		pad=",\n\t\t\t"
	done
	# %b expands the literal \n / \t sequences accumulated in hours/ips,
	# exactly as the old format-string interpolation did
	printf ' "past": {\n "hours": [ %b ],\n "ips": [ %b ],\n "total": %s\n },\n' "${hours}" "${ips}" "${sum}"

	ipset_verbose "${ipset}" "calculating current hours..."
	local x= hours= ips= sum=0 pad="\n\t\t\t"
	for x in "${!RETENTION_HISTOGRAM_REST[@]}"
	do
		(( sum += ${RETENTION_HISTOGRAM_REST[${x}]} ))
		hours="${hours}${pad}${x}"
		ips="${ips}${pad}${RETENTION_HISTOGRAM_REST[${x}]}"
		pad=",\n\t\t\t"
	done
	printf ' "current": {\n "hours": [ %b ],\n "ips": [ %b ],\n "total": %s\n }\n}\n' "${hours}" "${ips}" "${sum}"
}
# Track how long IPs stay listed in an ipset.
# For every update it: stores the newly added IPs as a binary iprange file in
# ${LIB_DIR}/${ipset}/new/<timestamp>, compares the new version against all
# stored past updates to find removals, appends to changesets.csv and
# retention.csv, maintains the RETENTION_HISTOGRAM* state (persisted in
# ${LIB_DIR}/${ipset}/histogram), and finally prints the histogram as JSON
# via retention_print().
# Returns 1 when the ipset cannot be tracked, 2 on mkdir failure.
# NOTE(review): ndate is not declared local - it leaks to the global scope.
retention_detect() {
cd "${BASE_DIR}" || return 1
local ipset="${1}"
# can we do it?
[ -z "${IPSET_FILE[${ipset}]}" -o -z "${LIB_DIR}" -o ! -d "${LIB_DIR}" ] && return 1
# load the ipset retention histogram
RETENTION_HISTOGRAM=()
RETENTION_HISTOGRAM_REST=()
RETENTION_HISTOGRAM_STARTED="${IPSET_SOURCE_DATE[${ipset}]}"
RETENTION_HISTOGRAM_INCOMPLETE=1
if [ -f "${LIB_DIR}/${ipset}/histogram" ]
then
ipset_verbose "${ipset}" "loading old data"
source "${LIB_DIR}/${ipset}/histogram"
fi
# ndate: the modification timestamp (epoch seconds) of the current ipset file
ndate=$($DATE_CMD -r "${IPSET_FILE[${ipset}]}" +%s)
ipset_silent "${ipset}" "generating histogram for ${ndate} update..."
# create the cache directory for this ipset
if [ ! -d "${LIB_DIR}/${ipset}" ]
then
$MKDIR_CMD -p "${LIB_DIR}/${ipset}" || return 2
fi
if [ ! -d "${LIB_DIR}/${ipset}/new" ]
then
$MKDIR_CMD -p "${LIB_DIR}/${ipset}/new" || return 2
fi
if [ ! -f "${LIB_DIR}/${ipset}/latest" ]
then
# we don't have an older version
ipset_verbose "${ipset}" "this is a new ipset - initializing"
$TOUCH_CMD -r "${IPSET_FILE[${ipset}]}" "${LIB_DIR}/${ipset}/latest"
RETENTION_HISTOGRAM_STARTED="${IPSET_SOURCE_DATE[${ipset}]}"
elif [ ! "${IPSET_FILE[${ipset}]}" -nt "${LIB_DIR}/${ipset}/latest" ]
# the new file is older than the latest, return
then
ipset_verbose "${ipset}" "new ipset file is not newer than latest"
retention_print "${ipset}"
return 0
fi
if [ -f "${LIB_DIR}/${ipset}/new/${ndate}" ]
then
# we already have a file for this date, return
ipset_warning "${ipset}" "we already have a file for date ${ndate}"
retention_print "${ipset}"
return 0
fi
# find the new ips in this set
ipset_verbose "${ipset}" "finding the new IPs in this update..."
${IPRANGE_CMD} "${IPSET_FILE[${ipset}]}" --exclude-next "${LIB_DIR}/${ipset}/latest" --print-binary >"${LIB_DIR}/${ipset}/new/${ndate}" || ipset_error "${ipset}" "cannot find the new IPs in this update."
$TOUCH_CMD -r "${IPSET_FILE[${ipset}]}" "${LIB_DIR}/${ipset}/new/${ndate}"
local ips_added=0
if [ ! -s "${LIB_DIR}/${ipset}/new/${ndate}" ]
then
# there are no new IPs included
ipset_verbose "${ipset}" "no new IPs in this update"
$RM_CMD "${LIB_DIR}/${ipset}/new/${ndate}"
else
# iprange -C prints "entries,unique_ips" - keep the part after the comma
ips_added=$(${IPRANGE_CMD} -C "${LIB_DIR}/${ipset}/new/${ndate}")
ips_added=${ips_added/*,/}
ipset_verbose "${ipset}" "added ${ips_added} new IPs"
fi
ipset_verbose "${ipset}" "finding the removed IPs in this update..."
local ips_removed=$(${IPRANGE_CMD} "${LIB_DIR}/${ipset}/latest" --exclude-next "${IPSET_FILE[${ipset}]}" | ${IPRANGE_CMD} -C)
ips_removed=${ips_removed/*,/}
ipset_verbose "${ipset}" "removed ${ips_removed} IPs"
ipset_silent "${ipset}" "added ${ips_added}, removed ${ips_removed} unique IPs"
ipset_verbose "${ipset}" "saving in changesets (${ndate})"
[ ! -f "${LIB_DIR}/${ipset}/changesets.csv" ] && echo >"${LIB_DIR}/${ipset}/changesets.csv" "DateTime,IPsAdded,IPsRemoved"
echo >>"${LIB_DIR}/${ipset}/changesets.csv" "${ndate},${ips_added},${ips_removed}"
# ok keep it
ipset_silent "${ipset}" "keeping this update as the latest..."
${IPRANGE_CMD} "${IPSET_FILE[${ipset}]}" --print-binary >"${LIB_DIR}/${ipset}/latest" || ipset_error "${ipset}" "failed to keep the ${ndate} update as the latest"
$TOUCH_CMD -r "${IPSET_FILE[${ipset}]}" "${LIB_DIR}/${ipset}/latest"
if [ ! -f "${LIB_DIR}/${ipset}/retention.csv" ]
then
ipset_verbose "${ipset}" "generating the retention file"
echo "date_removed,date_added,hours,ips" >"${LIB_DIR}/${ipset}/retention.csv"
fi
# -------------------------------------------------------------------------
ipset_silent "${ipset}" "comparing this update against all past"
# find the new/* files that are affected
# NOTE(review): with an unmatched glob the array holds the literal pattern,
# so the -gt 0 test below is always true; this relies on new/ never being
# empty at this point (nullglob is not set) - verify
local -a new_files=("${LIB_DIR}/${ipset}/new"/*)
local name1= name2= entries1= entries2= ips1= ips2= combined= common= odate= hours= removed=
if [ ${#new_files[@]} -gt 0 ]
then
# we are searching for the affected files
# to find them we compare:
#
# > ips1 (the number of IPs in the latest)
# > combined (the number of IPs in both the latest and the history file in question)
# when ips1 = combined, all IPs in the history file in question are still in the latest
#
# > ips2 (the number of IPs in the history file in question)
# > common (the IPs common in latest and the history file in question)
# when ips2 = common, all IPs in the history file in question are still in the latest
#
${IPRANGE_CMD} "${LIB_DIR}/${ipset}/latest" --compare-next "${new_files[@]}" |\
while IFS="," read name1 name2 entries1 entries2 ips1 ips2 combined common
do
[ $[ combined - ips1 ] -ne 0 -o $[ ips2 - common ] -ne 0 ] && echo "${name2}"
done | $SORT_CMD -u >"${RUN_DIR}/retention_affected_updates"
# NOTE(review): $? here is the exit status of the last pipe stage (sort)
[ $? -ne 0 ] && ipset_error "${ipset}" "cannot find its affected updates"
else
[ -f "${RUN_DIR}/retention_affected_updates" ] && ${RM_CMD} "${RUN_DIR}/retention_affected_updates"
${TOUCH_CMD} "${RUN_DIR}/retention_affected_updates"
fi
local x=
for x in $($CAT_CMD "${RUN_DIR}/retention_affected_updates")
do
# find how many hours have passed
# (the filename is the timestamp the IPs were added; +1800 rounds to the nearest hour)
odate="${x/*\//}"
hours=$[ (ndate + 1800 - odate) / 3600 ]
# are all the IPs of this file still the latest?
${IPRANGE_CMD} --common "${x}" "${LIB_DIR}/${ipset}/latest" --print-binary >"${x}.stillthere" || ipset_error "${ipset}" "cannot find IPs still present in ${x}"
${IPRANGE_CMD} "${x}" --exclude-next "${x}.stillthere" --print-binary >"${x}.removed" || ipset_error "${ipset}" "cannot find IPs removed from ${x}"
if [ -s "${x}.removed" ]
then
# no, something removed, find it
removed=$(${IPRANGE_CMD} -C "${x}.removed")
$RM_CMD "${x}.removed"
# these are the unique IPs removed
removed="${removed/*,/}"
ipset_verbose "${ipset}" "${x}: ${removed} IPs removed"
echo "${ndate},${odate},${hours},${removed}" >>"${LIB_DIR}/${ipset}/retention.csv"
# update the histogram
# only if the date added is after the date we started
[ ${odate} -gt ${RETENTION_HISTOGRAM_STARTED} ] && RETENTION_HISTOGRAM[${hours}]=$[ ${RETENTION_HISTOGRAM[${hours}]} + removed ]
else
removed=0
# yes, nothing removed from this run
ipset_verbose "${ipset}" "${x}: nothing removed"
$RM_CMD "${x}.removed"
fi
# check if there is something still left
if [ ! -s "${x}.stillthere" ]
then
# nothing left for this timestamp, remove files
ipset_verbose "${ipset}" "${x}: nothing left in this"
$RM_CMD "${x}" "${x}.stillthere"
else
ipset_verbose "${ipset}" "${x}: there is still something in it"
$TOUCH_CMD -r "${x}" "${x}.stillthere"
$MV_CMD "${x}.stillthere" "${x}" || ipset_error "${ipset}" "cannot replace ${x} with updated data"
fi
done
ipset_verbose "${ipset}" "cleaning up retention cache..."
# cleanup empty slots in our arrays
for x in "${!RETENTION_HISTOGRAM[@]}"
do
if [ $[ RETENTION_HISTOGRAM[${x}] ] -eq 0 ]
then
unset RETENTION_HISTOGRAM[${x}]
fi
done
# -------------------------------------------------------------------------
ipset_verbose "${ipset}" "determining the age of currently listed IPs..."
if [ "${#RETENTION_HISTOGRAM[@]}" -eq 0 ]
then
RETENTION_HISTOGRAM=()
fi
# empty the remaining IPs counters
# they will be re-calculated below
RETENTION_HISTOGRAM_REST=()
RETENTION_HISTOGRAM_INCOMPLETE=0
# find the IPs in all new/*
# NOTE(review): same unmatched-glob caveat as above
local -a new_files=("${LIB_DIR}/${ipset}/new"/*)
if [ "${#new_files[@]}" -gt 0 ]
then
${IPRANGE_CMD} --count-unique-all "${new_files[@]}" >"${RUN_DIR}/retention_rest" 2>/dev/null
else
[ -f "${RUN_DIR}/retention_rest" ] && ${RM_CMD} "${RUN_DIR}/retention_rest"
${TOUCH_CMD} "${RUN_DIR}/retention_rest"
fi
local entries= ips=
while IFS="," read x entries ips
do
odate="${x/*\//}"
hours=$[ (ndate + 1800 - odate) / 3600 ]
ipset_verbose "${ipset}" "${x}: ${hours} hours have passed"
# anything at least as old as the monitoring start means we have not
# yet seen a complete refresh of the ipset
[ ${odate} -le ${RETENTION_HISTOGRAM_STARTED} ] && RETENTION_HISTOGRAM_INCOMPLETE=1
RETENTION_HISTOGRAM_REST[${hours}]=$[ ${RETENTION_HISTOGRAM_REST[${hours}]} + ips ]
done <"${RUN_DIR}/retention_rest"
# -------------------------------------------------------------------------
# save the histogram
ipset_verbose "${ipset}" "saving retention cache..."
declare -p RETENTION_HISTOGRAM_STARTED RETENTION_HISTOGRAM_INCOMPLETE RETENTION_HISTOGRAM RETENTION_HISTOGRAM_REST >"${LIB_DIR}/${ipset}/histogram"
ipset_verbose "${ipset}" "printing retention..."
retention_print "${ipset}"
ipset_verbose "${ipset}" "printed retention histogram"
return 0
}
sitemap_init() {
	# Start a fresh sitemap.xml in RUN_DIR, containing only the site root.
	# Per-ipset entries are appended later by sitemap_ipset().
	#
	# ${1} - the date (ISO-8601) to report as <lastmod>
	local sitemap_date="${1}"

	# FIX: the redirection target is now quoted - a RUN_DIR containing
	# spaces caused an "ambiguous redirect" error before (sitemap_ipset
	# below already quotes it)
	# ${WEB_URL/\?*/} strips any query string from the site URL
	$CAT_CMD >"${RUN_DIR}/sitemap.xml" <<EOFSITEMAPA
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url>
<loc>${WEB_URL/\?*/}</loc>
<lastmod>${sitemap_date}</lastmod>
<changefreq>always</changefreq>
</url>
EOFSITEMAPA
}
sitemap_ipset() {
	# Append one <url> entry for the given ipset to RUN_DIR/sitemap.xml
	# (which must have been initialized by sitemap_init()).
	#
	# ${1} - the ipset name, ${2} - ISO-8601 date used as <lastmod>
	local entry_ipset="${1}" entry_date="${2}"

	$CAT_CMD >>"${RUN_DIR}/sitemap.xml" <<EOFSITEMAP1
<url>
<loc>${WEB_URL}${entry_ipset}</loc>
<lastmod>${entry_date}</lastmod>
<changefreq>always</changefreq>
</url>
EOFSITEMAP1
}
# Parse an ipset history CSV and compute update statistics:
# - average / min / max time between updates (minutes), stored into
#   IPSET_AVERAGE_UPDATE_TIME / IPSET_MIN_UPDATE_TIME / IPSET_MAX_UPDATE_TIME
# - min / max entries and unique IPs, stored into
#   IPSET_ENTRIES_MIN/MAX and IPSET_IPS_MIN/MAX
# It also warns when the configured update frequency looks too high/low.
# ${1} - the ipset name, ${2} - the history CSV file to parse
history_statistics() {
# the file should be in this format:
# DateTime,Entries,IPs
local ipset="${1}" file="${2}" \
xdate xentries xips \
xlast xdt \
xavg xlo xhi xelo xehi xilo xihi \
xtotal=0 count=0
# calculate the average update time of the list
# and the min/max entries and IPs
while IFS="," read xdate xentries xips
do
# skip the header
[ "${xdate}" = "DateTime" ] && continue
# skip invalids
[ $[xdate] -le 0 ] && continue
# the first valid entry
# set xlast and the lo/hi entries and IPs
if [ ${count} -eq 0 ]
then
xlast=${xdate}
xelo=${xentries}
xehi=${xentries}
xilo=${xips}
xihi=${xips}
count=$[count + 1]
continue
fi
# skip entries that are not in the valid order
# in this case, the new date is older than the last
[ $[xdate] -le $[xlast] ] && continue
# calculate the time diff
xdt=$[ xdate - xlast ]
[ ${xdt} -le 0 ] && continue
# the second line
# set the lo/hi dt
if [ ${count} -eq 1 ]
then
xlo=${xdt}
xhi=${xdt}
fi
# for all the rest of the lines
xtotal=$[ xtotal + xdt ]
xlast=${xdate}
count=$[ count + 1 ]
# dt
[ ${xdt} -lt ${xlo} ] && xlo=${xdt}
[ ${xdt} -gt ${xhi} ] && xhi=${xdt}
# entries
[ ${xentries} -lt ${xelo} ] && xelo=${xentries}
[ ${xentries} -gt ${xehi} ] && xehi=${xentries}
# IPs
[ ${xips} -lt ${xilo} ] && xilo=${xips}
[ ${xips} -gt ${xihi} ] && xihi=${xips}
done <"${file}"
local update_mins=$[ IPSET_MINS[${ipset}] ]
# the average update time, in minutes
# (seconds between updates, rounded up to the next whole minute)
if [ ${count} -eq 1 ]
then
xavg=${update_mins}
else
xavg=$[(xtotal / (count - 1) + 60) / 60]
fi
# the lo/hi update time, in minutes
# NOTE(review): when count < 2, xlo/xhi are unset and evaluate to 0 in
# the arithmetic below, yielding 1 minute - verify this fallback is intended
xlo=$[(xlo + 60) / 60]
xhi=$[(xhi + 60) / 60]
IPSET_AVERAGE_UPDATE_TIME[${ipset}]=$[xavg]
IPSET_MIN_UPDATE_TIME[${ipset}]=$[xlo]
IPSET_MAX_UPDATE_TIME[${ipset}]=$[xhi]
ipset_silent "${ipset}" "last ${count} updates: avg: ${xavg} mins (${xlo} - ${xhi}) - config: ${update_mins} mins"
IPSET_ENTRIES_MIN[${ipset}]=$[xelo]
IPSET_ENTRIES_MAX[${ipset}]=$[xehi]
IPSET_IPS_MIN[${ipset}]=$[xilo]
IPSET_IPS_MAX[${ipset}]=$[xihi]
ipset_silent "${ipset}" "entries: ${xelo} to ${xehi}, IPs: ${xilo} to ${xihi}"
# if the list is downloaded, try to figure out
# if we download it too frequently or too late
# (only when we have enough history samples to trust the average)
if [ ! -z "${IPSET_URL[${ipset}]}" -a ${count} -gt $[WEB_CHARTS_ENTRIES / 10] ]
then
if [ $[xavg] -lt $[ update_mins * 5 / 4 ] ]
then
ipset_warning "${ipset}" "may need to lower update freq from ${update_mins} to $[ xavg * 2 / 3 ] mins"
elif [ $[xavg] -gt $[ update_mins * 3 ] ]
then
ipset_warning "${ipset}" "may need to increase update freq from ${update_mins} to $[ update_mins * 3 / 2 ] mins"
fi
fi
return 0
}
update_web() {
cd "${BASE_DIR}" || return 1
print_ipset_reset
echo >&2
if [ -z "${WEB_DIR}" -o ! -d "${WEB_DIR}" -o -z "${LIB_DIR}" -o ! -d "${LIB_DIR}" ]
then
return 1
fi
if [ "${#UPDATED_SETS[@]}" -eq 0 -a ! ${FORCE_WEB_REBUILD} -eq 1 ]
then
echo >&2 "Not updating web site - nothing updated in this run."
return 1
fi
local all=() updated=() geolite2_country=() ipdeny_country=() ip2location_country=() ipip_country=() \
x= i= to_all= all_count=0 \
sitemap_date="$($DATE_CMD -I)"
# the sitemap is re-generated on each run
sitemap_init "${sitemap_date}"
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Updating History..."
for x in $(params_sort "${!IPSET_FILE[@]}")
do
if [ -z "${IPSET_FILE[$x]}" ]
then
ipset_warning "${x}" "empty filename - skipping it"
continue
fi
# remove deleted files
if [ ! -f "${IPSET_FILE[$x]}" ]
then
ipset_warning "${x}" "file ${IPSET_FILE[$x]} not found - removing it from cache"
cache_remove_ipset "${x}"
continue
fi
## check if it has been updated in a previous run
## and has not been copied to our cache
#if [ -z "${UPDATED_SETS[${x}]}" -a -f "${LIB_DIR}/${x}/latest" -a "${IPSET_FILE[$x]}" -nt "${LIB_DIR}/${x}/latest" ]
# then
# silent "${x}: found unupdated cache $($DATE_CMD -r "${IPSET_FILE[$x]}" +%s) vs $($DATE_CMD -r "${LIB_DIR}/${x}/latest" +%s)"
# # UPDATED_SETS[${x}]="${IPSET_FILE[$x]}"
#fi
if [ ! -d "${LIB_DIR}/${x}" ]
then
ipset_silent "${x}" "creating lib directory for tracking it"
$MKDIR_CMD -p "${LIB_DIR}/${x}" || continue
fi
# update the history CSV files
if [ ! -z "${UPDATED_SETS[${x}]}" -o ! -f "${LIB_DIR}/${x}/history.csv" ]
then
if [ ! -f "${LIB_DIR}/${x}/history.csv" ]
then
ipset_verbose "${x}" "creating history file header"
echo "DateTime,Entries,UniqueIPs" >"${LIB_DIR}/${x}/history.csv"
# $TOUCH_CMD "${LIB_DIR}/${x}/history.csv"
$CHMOD_CMD 0644 "${LIB_DIR}/${x}/history.csv"
fi
ipset_silent "${x}" "entries: ${IPSET_ENTRIES[${x}]}, unique IPs: ${IPSET_IPS[${x}]}"
echo >>"${LIB_DIR}/${x}/history.csv" "$($DATE_CMD -r "${IPSET_SOURCE[${x}]}" +%s),${IPSET_ENTRIES[${x}]},${IPSET_IPS[${x}]}"
ipset_verbose "${x}" "preparing web history file (last ${WEB_CHARTS_ENTRIES} entries)"
echo >"${RUN_DIR}/${x}_history.csv" "DateTime,Entries,UniqueIPs"
$TAIL_CMD -n ${WEB_CHARTS_ENTRIES} "${LIB_DIR}/${x}/history.csv" | $GREP_CMD -v "^DateTime" | $SORT_CMD -n >>"${RUN_DIR}/${x}_history.csv"
history_statistics "${x}" "${RUN_DIR}/${x}_history.csv"
fi
to_all=1
# prepare the parameters for iprange to compare the sets
if [[ "${IPSET_FILE[$x]}" =~ ^geolite2.* ]]
then
ipset_verbose "${x}" "is a GeoLite2 file"
to_all=0
case "${x}" in
country_*) i=${x/country_/} ;;
continent_*) i= ;;
anonymous) to_all=1; i= ;;
satellite) to_all=1; i= ;;
*) i= ;;
esac
[ ! -z "${i}" ] && geolite2_country=("${geolite2_country[@]}" "${IPSET_FILE[$x]}" "as" "${i^^}")
elif [[ "${IPSET_FILE[$x]}" =~ ^ipdeny_country.* ]]
then
ipset_verbose "${x}" "is an IPDeny file"
to_all=0
case "${x}" in
id_country_*) i=${x/id_country_/} ;;
id_continent_*) i= ;;
*) i= ;;
esac
[ ! -z "${i}" ] && ipdeny_country=("${ipdeny_country[@]}" "${IPSET_FILE[$x]}" "as" "${i^^}")
elif [[ "${IPSET_FILE[$x]}" =~ ^ip2location_country.* ]]
then
ipset_verbose "${x}" "is an IP2Location file"
to_all=0
case "${x}" in
ip2location_country_*) i=${x/ip2location_country_/} ;;
ip2location_continent_*) i= ;;
*) i= ;;
esac
[ ! -z "${i}" ] && ip2location_country=("${ip2location_country[@]}" "${IPSET_FILE[$x]}" "as" "${i^^}")
elif [[ "${IPSET_FILE[$x]}" =~ ^ipip_country.* ]]
then
ipset_verbose "${x}" "is an IPIP file"
to_all=0
case "${x}" in
ipip_country_*) i=${x/ipip_country_/} ;;
ipip_continent_*) i= ;;
*) i= ;;
esac
[ ! -z "${i}" ] && ipip_country=("${ipip_country[@]}" "${IPSET_FILE[$x]}" "as" "${i^^}")
fi
if [ ${to_all} -eq 1 ]
then
cache_save_metadata_backup "${x}"
ipset_verbose "${x}" "ipset will be compared with all others"
all=("${all[@]}" "${IPSET_FILE[$x]}" "as" "${x}")
all_count=$[ all_count + 1 ]
# if we need a full rebuild, pretend all are updated
[ ${FORCE_WEB_REBUILD} -eq 1 ] && UPDATED_SETS[${x}]="${IPSET_FILE[${x}]}"
if [ ! -z "${UPDATED_SETS[${x}]}" ]
then
ipset_verbose "${x}" "ipset has been updated in this run"
updated=("${updated[@]}" "${IPSET_FILE[$x]}" "as" "${x}")
fi
ipset_verbose "${x}" "adding ipset to web all-ipsets.json"
if [ ! -f "${RUN_DIR}/all-ipsets.json" ]
then
printf >"${RUN_DIR}/all-ipsets.json" "[\n"
else
printf >>"${RUN_DIR}/all-ipsets.json" ",\n"
fi
ipset_json_index "${x}" >>"${RUN_DIR}/all-ipsets.json"
sitemap_ipset "${x}" "${sitemap_date}"
fi
done
printf >>"${RUN_DIR}/all-ipsets.json" "\n]\n"
echo '</urlset>' >>"${RUN_DIR}/sitemap.xml"
echo >&2
# to save the calculated IPSET_*_UPDATE_TIME
cache_save
#info "ALL: ${all[@]}"
#info "UPDATED: ${updated[@]}"
print_ipset_reset
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Comparing all ipsets (${all_count} x ${all_count} = $[all_count * (all_count - 1) / 2 - 1] unique comparisons)..."
local before=$($DATE_CMD +%s)
${IPRANGE_CMD} --compare "${all[@]}" |\
${GREP_CMD} -v ",0$" |\
$SORT_CMD |\
while IFS="," read name1 name2 entries1 entries2 ips1 ips2 combined common
do
if [ ! -f "${RUN_DIR}/${name1}_comparison.json" ]
then
printf >"${RUN_DIR}/${name1}_comparison.json" "[\n"
else
printf >>"${RUN_DIR}/${name1}_comparison.json" ",\n"
fi
printf >>"${RUN_DIR}/${name1}_comparison.json" " {\n \"name\": \"${name2}\",\n \"category\": \"${IPSET_CATEGORY[${name2}]}\",\n \"ips\": ${ips2},\n \"common\": ${common}\n }"
if [ ! -f "${RUN_DIR}/${name2}_comparison.json" ]
then
printf >"${RUN_DIR}/${name2}_comparison.json" "[\n"
else
printf >>"${RUN_DIR}/${name2}_comparison.json" ",\n"
fi
printf >>"${RUN_DIR}/${name2}_comparison.json" " {\n \"name\": \"${name1}\",\n \"category\": \"${IPSET_CATEGORY[${name1}]}\",\n \"ips\": ${ips1},\n \"common\": ${common}\n }"
done
for x in $($FIND_CMD "${RUN_DIR}" -name \*_comparison.json)
do
printf "\n]\n" >>${x}
done
local after=$($DATE_CMD +%s)
[ ${after} -eq ${before} ] && after=$[before + 1]
echo >&2 "$[all_count * (all_count - 1) / 2 - 1] ipset comparisons made in $[after - before] seconds ($[(all_count * (all_count - 1) / 2 - 1) / (after - before)] ipset comparisons/s)"
echo >&2
if [ "${#updated[*]}" -ne 0 -a "${#geolite2_country[*]}" -ne 0 ]
then
print_ipset_reset
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Comparing updated ipsets with GeoLite2 country..."
${IPRANGE_CMD} "${updated[@]}" --compare-next "${geolite2_country[@]}" |\
${GREP_CMD} -v ",0$" |\
$SORT_CMD |\
while IFS="," read name1 name2 entries1 entries2 ips1 ips2 combined common
do
if [ ! -f "${RUN_DIR}/${name1}_geolite2_country.json" ]
then
printf "[\n" >"${RUN_DIR}/${name1}_geolite2_country.json"
else
printf ",\n" >>"${RUN_DIR}/${name1}_geolite2_country.json"
fi
printf " {\n \"code\": \"${name2}\",\n \"value\": ${common}\n }" >>"${RUN_DIR}/${name1}_geolite2_country.json"
done
echo >&2
for x in $($FIND_CMD "${RUN_DIR}" -name \*_geolite2_country.json)
do
printf "\n]\n" >>${x}
done
fi
if [ "${#updated[*]}" -ne 0 -a "${#ipdeny_country[*]}" -ne 0 ]
then
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Comparing updated ipsets with IPDeny country..."
${IPRANGE_CMD} "${updated[@]}" --compare-next "${ipdeny_country[@]}" |\
${GREP_CMD} -v ",0$" |\
$SORT_CMD |\
while IFS="," read name1 name2 entries1 entries2 ips1 ips2 combined common
do
if [ ! -f "${RUN_DIR}/${name1}_ipdeny_country.json" ]
then
printf "[\n" >"${RUN_DIR}/${name1}_ipdeny_country.json"
else
printf ",\n" >>"${RUN_DIR}/${name1}_ipdeny_country.json"
fi
printf " {\n \"code\": \"${name2}\",\n \"value\": ${common}\n }" >>"${RUN_DIR}/${name1}_ipdeny_country.json"
done
echo >&2
for x in $($FIND_CMD "${RUN_DIR}" -name \*_ipdeny_country.json)
do
printf "\n]\n" >>${x}
done
fi
if [ "${#updated[*]}" -ne 0 -a "${#ip2location_country[*]}" -ne 0 ]
then
print_ipset_reset
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Comparing updated ipsets with IP2Location country..."
${IPRANGE_CMD} "${updated[@]}" --compare-next "${ip2location_country[@]}" |\
${GREP_CMD} -v ",0$" |\
$SORT_CMD |\
while IFS="," read name1 name2 entries1 entries2 ips1 ips2 combined common
do
if [ ! -f "${RUN_DIR}/${name1}_ip2location_country.json" ]
then
printf "[\n" >"${RUN_DIR}/${name1}_ip2location_country.json"
else
printf ",\n" >>"${RUN_DIR}/${name1}_ip2location_country.json"
fi
printf " {\n \"code\": \"${name2}\",\n \"value\": ${common}\n }" >>"${RUN_DIR}/${name1}_ip2location_country.json"
done
echo >&2
for x in $($FIND_CMD "${RUN_DIR}" -name \*_ip2location_country.json)
do
printf "\n]\n" >>${x}
done
fi
if [ "${#updated[*]}" -ne 0 -a "${#ipip_country[*]}" -ne 0 ]
then
print_ipset_reset
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Comparing updated ipsets with IPIP country..."
${IPRANGE_CMD} "${updated[@]}" --compare-next "${ipip_country[@]}" |\
${GREP_CMD} -v ",0$" |\
$SORT_CMD |\
while IFS="," read name1 name2 entries1 entries2 ips1 ips2 combined common
do
if [ ! -f "${RUN_DIR}/${name1}_ipip_country.json" ]
then
printf "[\n" >"${RUN_DIR}/${name1}_ipip_country.json"
else
printf ",\n" >>"${RUN_DIR}/${name1}_ipip_country.json"
fi
printf " {\n \"code\": \"${name2}\",\n \"value\": ${common}\n }" >>"${RUN_DIR}/${name1}_ipip_country.json"
done
echo >&2
for x in $($FIND_CMD "${RUN_DIR}" -name \*_ipip_country.json)
do
printf "\n]\n" >>${x}
done
fi
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Generating updated ipsets JSON files..."
print_ipset_reset
for x in $(params_sort "${!UPDATED_SETS[@]}")
do
ipset_json "${x}" >"${RUN_DIR}/${x}.json"
done
echo >&2
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Generating retention histograms for updated ipsets..."
print_ipset_reset
for x in $(params_sort "${!UPDATED_SETS[@]}")
do
[[ "${IPSET_FILE[$x]}" =~ ^geolite2.* ]] && continue
[[ "${IPSET_FILE[$x]}" =~ ^ipdeny.* ]] && continue
[[ "${IPSET_FILE[$x]}" =~ ^ip2location.* ]] && continue
[[ "${IPSET_FILE[$x]}" =~ ^ipip.* ]] && continue
retention_detect "${x}" >"${RUN_DIR}/${x}_retention.json" || $RM_CMD "${RUN_DIR}/${x}_retention.json"
# this has to be done after retention_detect()
echo >"${RUN_DIR}"/${x}_changesets.csv "DateTime,AddedIPs,RemovedIPs"
$TAIL_CMD -n $[ WEB_CHARTS_ENTRIES + 1] "${LIB_DIR}/${x}/changesets.csv" | $GREP_CMD -v "^DateTime" | $TAIL_CMD -n +2 >>"${RUN_DIR}/${x}_changesets.csv"
done
echo >&2
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Saving generated web files..."
print_ipset_reset
$CHMOD_CMD 0644 "${RUN_DIR}"/*.{json,csv,xml}
$MV_CMD -f "${RUN_DIR}"/*.{json,csv,xml} "${WEB_DIR}/"
[ ! -z "${WEB_OWNER}" ] && $CHOWN_CMD ${WEB_OWNER} "${WEB_DIR}"/*.{json,csv,xml}
if [ -d "${WEB_DIR}/.git" ]
then
print_ipset_reset
echo >&2 "-------------------------------------------------------------------------------"
echo >&2 "Adding generated web files to git..."
cd "${WEB_DIR}" || return 1
$GIT_CMD add *.json *.csv *.xml
$GIT_CMD commit ${PUSH_TO_GIT_COMMIT_OPTIONS} -a -m "$($DATE_CMD -u) update"
if [ ${PUSH_TO_GIT_WEB} -eq 1 ]
then
echo >&2 "Pushing generated web files to git..."
$GIT_CMD push ${PUSH_TO_GIT_PUSH_OPTIONS} origin gh-pages
fi
cd "${BASE_DIR}" || exit 1
echo >&2
fi
}
# Monotonic counter used by ipset_apply() to build a unique temporary
# ipset name for every load attempt within this run.
ipset_apply_counter=0
ipset_apply() {
# Load a converted ipset file into the kernel.
# A temporary set is created and populated via 'ipset restore', then
# atomically swapped with the production set, so the live set is never
# seen half-filled.
#
# Arguments:
#   1. ipset - name of the production ipset
#   2. ipv   - "ipv4" ("ipv6" is rejected - not supported yet)
#   3. hash  - "ip" or "net" (selects hash:ip / hash:net)
#   4. file  - file with the entries to load
# Returns 0 on success (or when kernel loading is disabled), 1 on failure.
local ipset="${1}" ipv="${2}" hash="${3}" file="${4}" entries= tmpname= opts= ret= ips=
if [ ${IPSETS_APPLY} -eq 0 ]
then
ipset_saved "${ipset}" "I am not allowed to talk to the kernel."
return 0
fi
# build a unique temporary ipset name for this load
ipset_apply_counter=$[ipset_apply_counter + 1]
tmpname="tmp-$$-${RANDOM}-${ipset_apply_counter}"
if [ "${ipv}" = "ipv4" ]
then
if [ -z "${sets[$ipset]}" ]
then
ipset_saved "${ipset}" "no need to load ipset in kernel"
# $IPSET_CMD --create ${ipset} "${hash}hash" || return 1
return 0
fi
# convert the file to 'ipset restore' syntax ("-A <tmpset> <entry>")
if [ "${hash}" = "net" ]
then
${IPRANGE_CMD} "${file}" \
--ipset-reduce ${IPSET_REDUCE_FACTOR} \
--ipset-reduce-entries ${IPSET_REDUCE_ENTRIES} \
--print-prefix "-A ${tmpname} " >"${RUN_DIR}/${tmpname}"
ret=$?
elif [ "${hash}" = "ip" ]
then
${IPRANGE_CMD} -1 "${file}" --print-prefix "-A ${tmpname} " >"${RUN_DIR}/${tmpname}"
ret=$?
else
# FIX: an unknown hash type used to leave ${ret} empty, making the
# numeric test below fail with a shell syntax error and letting
# execution continue without a restore file - fail explicitly instead
ipset_error "${ipset}" "unknown hash type '${hash}'"
return 1
fi
if [ "${ret}" -ne 0 ]
then
ipset_error "${ipset}" "iprange failed"
$RM_CMD "${RUN_DIR}/${tmpname}"
return 1
fi
entries=$($WC_CMD -l <"${RUN_DIR}/${tmpname}")
# iprange -C prints "entries,unique_ips" - keep only the unique IP count
ips=$($IPRANGE_CMD -C "${file}")
ips=${ips/*,/}
# this is needed for older versions of ipset
echo "COMMIT" >>"${RUN_DIR}/${tmpname}"
ipset_info "${ipset}" "loading to kernel (to temporary ipset)..."
# the kernel default maxelem is 65536 - raise it when the set is larger
opts=
if [ "${entries}" -gt 65536 ]
then
opts="maxelem ${entries}"
fi
$IPSET_CMD create "${tmpname}" ${hash}hash ${opts}
if [ $? -ne 0 ]
then
ipset_error "${ipset}" "failed to create temporary ipset ${tmpname}"
$RM_CMD "${RUN_DIR}/${tmpname}"
return 1
fi
$IPSET_CMD --flush "${tmpname}"
$IPSET_CMD --restore <"${RUN_DIR}/${tmpname}"
ret=$?
$RM_CMD "${RUN_DIR}/${tmpname}"
if [ "${ret}" -ne 0 ]
then
ipset_error "${ipset}" "failed to restore ipset from ${tmpname}"
$IPSET_CMD --destroy "${tmpname}"
return 1
fi
# atomically replace the production set with the freshly loaded one
ipset_info "${ipset}" "swapping temporary ipset to production"
$IPSET_CMD --swap "${tmpname}" "${ipset}"
ret=$?
$IPSET_CMD --destroy "${tmpname}"
if [ $? -ne 0 ]
then
ipset_error "${ipset}" "failed to destroy temporary ipset"
return 1
fi
if [ "${ret}" -ne 0 ]
then
ipset_error "${ipset}" "failed to swap temporary ipset ${tmpname}"
return 1
fi
ipset_loaded "${ipset}" "${entries} entries, ${ips} unique IPs"
else
ipset_error "${ipset}" "CANNOT HANDLE IPv6 IPSETS YET"
return 1
fi
return 0
}
# Cleared before parsing each ipset's attributes; set by the "public_url"
# attribute in ipset_attributes() so callers can detect a replacement URL.
IPSET_PUBLIC_URL=
ipset_attributes() {
# Parse "attribute [value]" pairs for an ipset into the global IPSET_*
# arrays; descriptive fields that remain unset default to "unknown".
local setname="${1}"
shift
ipset_verbose "${setname}" "parsing attributes: ${*}"
while [ -n "${1}" ]
do
case "${1}" in
# flags take no value: consume one argument and restart the loop
redistribute) unset "IPSET_TMP_DO_NOT_REDISTRIBUTE[${setname}]"; shift; continue ;;
dont_redistribute) IPSET_TMP_DO_NOT_REDISTRIBUTE[${setname}]="1"; shift; continue ;;
can_be_empty|empty) IPSET_TMP_ACCEPT_EMPTY[${setname}]="1"; shift; continue ;;
never_empty|no_empty) unset "IPSET_TMP_ACCEPT_EMPTY[${setname}]"; shift; continue ;;
no_if_modified_since) IPSET_TMP_NO_IF_MODIFIED_SINCE[${setname}]="1"; shift; continue ;;
dont_enable_with_all) IPSET_TMP_DO_NOT_ENABLE_WITH_ALL[${setname}]="1"; shift; continue ;;
inbound|outbound) IPSET_PROTECTION[${setname}]="${1}"; shift; continue ;;
# keyword/value pairs: the value is "${2}", both words are consumed
# by the "shift 2" after the case statement
downloader) IPSET_DOWNLOADER[${setname}]="${2}" ;;
downloader_options) IPSET_DOWNLOADER_OPTIONS[${setname}]="${2}" ;;
category) IPSET_CATEGORY[${setname}]="${2}" ;;
maintainer) IPSET_MAINTAINER[${setname}]="${2}" ;;
maintainer_url) IPSET_MAINTAINER_URL[${setname}]="${2}" ;;
license) IPSET_LICENSE[${setname}]="${2}" ;;
grade) IPSET_GRADE[${setname}]="${2}" ;;
protection) IPSET_PROTECTION[${setname}]="${2}" ;;
intended_use) IPSET_INTENDED_USE[${setname}]="${2}" ;;
false_positives) IPSET_FALSE_POSITIVES[${setname}]="${2}" ;;
poisoning) IPSET_POISONING[${setname}]="${2}" ;;
service|services) IPSET_SERVICES[${setname}]="${2}" ;;
# we use IPSET_PUBLIC_URL to replace / hide the actual URL we use
public_url) IPSET_URL[${setname}]="${2}"; IPSET_PUBLIC_URL="${2}" ;;
*) ipset_warning "${setname}" "unknown ipset option '${1}' with value '${2}'." ;;
esac
shift 2
done
# fill in defaults for descriptive fields that were not supplied
: "${IPSET_LICENSE[${setname}]:=unknown}"
: "${IPSET_GRADE[${setname}]:=unknown}"
: "${IPSET_PROTECTION[${setname}]:=unknown}"
: "${IPSET_INTENDED_USE[${setname}]:=unknown}"
: "${IPSET_FALSE_POSITIVES[${setname}]:=unknown}"
: "${IPSET_POISONING[${setname}]:=unknown}"
: "${IPSET_SERVICES[${setname}]:=unknown}"
return 0
}
# -----------------------------------------------------------------------------
# finalize() is called when a successful download and convertion completes
# to update the ipset in the kernel and possibly commit it to git
finalize() {
# Post-process a successfully downloaded-and-converted ipset:
# compare it with the previous version (skip everything if identical),
# load it into the kernel via ipset_apply(), update the cached IPSET_*
# metadata, write the final annotated file to ${dst} and, when the base
# directory is a git repo, create/refresh the .setinfo and stage the file.
# Takes 14 positional parameters (listed in the 'local' below); any extra
# arguments are "attribute value" pairs handed to ipset_attributes().
# Returns 0 on success (including the "unchanged" case), 1 on failure.
local ipset="${1}" tmp="${2}" \
src="${3}" dst="${4}" \
mins="${5}" history_mins="${6}" \
ipv="${7}" limit="${8}" hash="${9}" \
url="${10}" category="${11}" info="${12}" \
maintainer="${13}" maintainer_url="${14}"
shift 14
# ipset the ipset name
# tmp the processed source, ready to be used
# src the source, as downloaded (we need the date)
# dst the destination to save the final ipset
if [ ! -f "${BASE_DIR}/${src}" ]
then
ipset_error "${ipset}" "source file '${BASE_DIR}/${src}' does not exist"
return 1
fi
if [ ! -f "${tmp}" ]
then
ipset_error "${ipset}" "tmp file '${tmp}' does not exist"
return 1
fi
# parse any extra "attribute value" pairs into the IPSET_* arrays
ipset_attributes "${ipset}" "${@}"
# check
if [ -z "${info}" ]
then
ipset_warning "${ipset}" "INTERNAL ERROR (finalize): no info supplied"
info="${category}"
fi
# if a previous version exists and we are not reprocessing everything,
# short-circuit when the new content is identical to the previous one
if [ -f "${BASE_DIR}/${dst}" -a ! -z "${IPSET_FILE[${ipset}]}" -a ${REPROCESS_ALL} -eq 0 ]
then
${IPRANGE_CMD} "${BASE_DIR}/${dst}" --diff "${tmp}" --quiet
if [ $? -eq 0 ]
then
# they are the same
$RM_CMD "${tmp}"
ipset_same "${ipset}" "processed set is the same with the previous one."
# keep the old set, but make it think it was from this source
ipset_verbose "${ipset}" "touching ${dst} from ${src}."
$TOUCH_CMD -r "${BASE_DIR}/${src}" "${BASE_DIR}/${dst}"
check_file_too_old "${ipset}" "${BASE_DIR}/${dst}"
return 0
# else
# ipset_info "${ipset}" "processed file differs from the last."
fi
#else
# ipset_info "${ipset}" "not comparing file with the last."
fi
# calculate how many entries/IPs are in it
local ipset_opts=
# iprange -C prints "entries,unique_ips"; split into the two counts
local entries=$(${IPRANGE_CMD} -C "${tmp}")
local ips=${entries/*,/}
local entries=${entries/,*/}
if [ $[ ips ] -eq 0 ]
then
# an empty result is fatal unless the ipset is flagged can_be_empty
if [ -z "${IPSET_TMP_ACCEPT_EMPTY[${ipset}]}" ]
then
$RM_CMD "${tmp}"
ipset_error "${ipset}" "processed file has no valid entries (zero unique IPs)"
check_file_too_old "${ipset}" "${BASE_DIR}/${dst}"
return 1
else
ipset_warning "${ipset}" "processed file has no valid entries (zero unique IPs)"
fi
fi
# load the new content into the kernel
ipset_apply ${ipset} ${ipv} ${hash} ${tmp}
if [ $? -ne 0 ]
then
# on failure keep the offending file for debugging when ERRORS_DIR is set
if [ ! -z "${ERRORS_DIR}" -a -d "${ERRORS_DIR}" ]
then
$MV_CMD "${tmp}" "${ERRORS_DIR}/${dst}"
ipset_error "${ipset}" "failed to update ipset (error file left as '${ERRORS_DIR}/${dst}')."
else
$RM_CMD "${tmp}"
ipset_error "${ipset}" "failed to update ipset."
fi
check_file_too_old "${ipset}" "${BASE_DIR}/${dst}"
return 1
fi
local quantity="${ips} unique IPs"
[ "${hash}" = "net" ] && quantity="${entries} subnets, ${ips} unique IPs"
# refresh the cached metadata for this ipset
IPSET_FILE[${ipset}]="${dst}"
IPSET_IPV[${ipset}]="${ipv}"
IPSET_HASH[${ipset}]="${hash}"
IPSET_MINS[${ipset}]="${mins}"
IPSET_HISTORY_MINS[${ipset}]="${history_mins}"
IPSET_INFO[${ipset}]="${info}"
IPSET_ENTRIES[${ipset}]="${entries}"
IPSET_IPS[${ipset}]="${ips}"
IPSET_URL[${ipset}]="${url}"
IPSET_SOURCE[${ipset}]="${src}"
IPSET_SOURCE_DATE[${ipset}]=$($DATE_CMD -r "${BASE_DIR}/${src}" +%s)
IPSET_PROCESSED_DATE[${ipset}]=$($DATE_CMD +%s)
IPSET_CATEGORY[${ipset}]="${category}"
IPSET_MAINTAINER[${ipset}]="${maintainer}"
IPSET_MAINTAINER_URL[${ipset}]="${maintainer_url}"
# track all-time min/max of entries and unique IPs
[ -z "${IPSET_ENTRIES_MIN[${ipset}]}" ] && IPSET_ENTRIES_MIN[${ipset}]="${IPSET_ENTRIES[${ipset}]}"
[ "${IPSET_ENTRIES_MIN[${ipset}]}" -gt "${IPSET_ENTRIES[${ipset}]}" ] && IPSET_ENTRIES_MIN[${ipset}]="${IPSET_ENTRIES[${ipset}]}"
[ -z "${IPSET_ENTRIES_MAX[${ipset}]}" ] && IPSET_ENTRIES_MAX[${ipset}]="${IPSET_ENTRIES[${ipset}]}"
[ "${IPSET_ENTRIES_MAX[${ipset}]}" -lt "${IPSET_ENTRIES[${ipset}]}" ] && IPSET_ENTRIES_MAX[${ipset}]="${IPSET_ENTRIES[${ipset}]}"
[ -z "${IPSET_IPS_MIN[${ipset}]}" ] && IPSET_IPS_MIN[${ipset}]="${IPSET_IPS[${ipset}]}"
[ "${IPSET_IPS_MIN[${ipset}]}" -gt "${IPSET_IPS[${ipset}]}" ] && IPSET_IPS_MIN[${ipset}]="${IPSET_IPS[${ipset}]}"
[ -z "${IPSET_IPS_MAX[${ipset}]}" ] && IPSET_IPS_MAX[${ipset}]="${IPSET_IPS[${ipset}]}"
[ "${IPSET_IPS_MAX[${ipset}]}" -lt "${IPSET_IPS[${ipset}]}" ] && IPSET_IPS_MAX[${ipset}]="${IPSET_IPS[${ipset}]}"
[ -z "${IPSET_STARTED_DATE[${ipset}]}" ] && IPSET_STARTED_DATE[${ipset}]="${IPSET_SOURCE_DATE[${ipset}]}"
# bump the per-ipset version counter
local version=${IPSET_VERSION[${ipset}]}
[ -z "${version}" ] && version=0
version=$[ version + 1 ]
IPSET_VERSION[${ipset}]=${version}
ipset_silent "${ipset}" "version ${version}, ${quantity}"
# record clock skew when the source file's mtime is in the future
local now="$($DATE_CMD +%s)"
if [ "${now}" -lt "${IPSET_SOURCE_DATE[${ipset}]}" ]
then
IPSET_CLOCK_SKEW[${ipset}]=$[ IPSET_SOURCE_DATE[${ipset}] - now ]
ipset_warning "${ipset}" "updated time is future (${IPSET_CLOCK_SKEW[${ipset}]} seconds)"
else
IPSET_CLOCK_SKEW[${ipset}]=0
fi
# generate the final file
# we do this on another tmp file
$CAT_CMD >"${tmp}.wh" <<EOFHEADER
#
# ${ipset}
#
# ${ipv} hash:${hash} ipset
#
`echo "${info}" | $SED_CMD "s|](|] (|g" | $FOLD_CMD -w 60 -s | $SED_CMD "s/^/# /g"`
#
# Maintainer : ${maintainer}
# Maintainer URL : ${maintainer_url}
# List source URL : ${url}
# Source File Date: `$DATE_CMD -r "${BASE_DIR}/${src}" -u`
#
# Category : ${category}
# Version : ${version}
#
# This File Date : `$DATE_CMD -u`
# Update Frequency: `mins_to_text ${mins}`
# Aggregation : `mins_to_text ${history_mins}`
# Entries : ${quantity}
#
# Full list analysis, including geolocation map, history,
# retention policy, overlaps with other lists, etc.
# available at:
#
# ${WEB_URL}${ipset}
#
# Generated by FireHOL's update-ipsets.sh
# Processed with FireHOL's iprange
#
EOFHEADER
# NOTE(review): the header fields below are commented-out shell lines,
# not part of the generated heredoc - they are never written to ${dst}
# Intended Use : ${IPSET_INTENDED_USE[${ipset}]}
# Services : ${IPSET_SERVICES[${ipset}]}
# Protection : ${IPSET_PROTECTION[${ipset}]}
# Grade : ${IPSET_GRADE[${ipset}]}
# License : ${IPSET_LICENSE[${ipset}]}
# False Positives : ${IPSET_FALSE_POSITIVES[${ipset}]}
# Poisoning : ${IPSET_POISONING[${ipset}]}
# append the data after the header and move into place, keeping the
# source file's timestamp on the destination
$CAT_CMD "${tmp}" >>"${tmp}.wh"
$RM_CMD "${tmp}"
$TOUCH_CMD -r "${BASE_DIR}/${src}" "${tmp}.wh"
$MV_CMD "${tmp}.wh" "${BASE_DIR}/${dst}" || return 1
UPDATED_SETS[${ipset}]="${dst}"
local dir="`$DIRNAME_CMD "${dst}"`"
UPDATED_DIRS[${dir}]="${dir}"
if [ -d "${BASE_DIR}/.git" ]
then
# --- BEGIN: Fix bug with duplicate setinfo files ---
# fix a bug where the .setinfo file was created
# using the ${dst} name, instead of the ${ipset} name
if [ "${dst}.setinfo" != "${ipset}.setinfo" -a -f "${BASE_DIR}/${dst}.setinfo" ]
then
if [ ! -f "${BASE_DIR}/${ipset}.setinfo" -o "${BASE_DIR}/${dst}.setinfo" -nt "${BASE_DIR}/${ipset}.setinfo" ]
then
${MV_CMD} "${BASE_DIR}/${dst}.setinfo" "${BASE_DIR}/${ipset}.setinfo"
else
${RM_CMD} "${BASE_DIR}/${dst}.setinfo"
fi
fi
# --- END: Fix bug with duplicate setinfo files ---
# write the markdown .setinfo line; redistributable sets are staged
# in git, non-redistributable ones are added to .gitignore
if [ -z "${IPSET_TMP_DO_NOT_REDISTRIBUTE[${ipset}]}" ]
then
echo >"${BASE_DIR}/${ipset}.setinfo" "[${ipset}](${WEB_URL}${ipset})|${info}|${ipv} hash:${hash}|${quantity}|updated every $(mins_to_text ${mins})`if [ ! -z "${url}" ]; then echo " from [this link](${url})"; fi`"
git_add_if_not_already_added "${dst}"
else
echo >"${BASE_DIR}/${ipset}.setinfo" "[${ipset}](${WEB_URL}${ipset})|${info}|${ipv} hash:${hash}|${quantity}|updated every $(mins_to_text ${mins})"
git_ignore_file "${dst}"
fi
fi
# persist the updated IPSET_* metadata
cache_save
return 0
}
# -----------------------------------------------------------------------------
update() {
# Main entry point for refreshing one ipset: decide hash type and filter
# chain from ${ipv}/${limit}, download the source (respecting update
# frequency), optionally split into _ip/_net sub-sets, convert the source
# through the processor/filter pipeline, and finalize() the result - once
# for the plain set and once per configured history aggregation period.
# Parameters (11): ipset, mins, history_mins, ipv, limit, url, processor,
# category, info, maintainer, maintainer_url; extra args are attribute
# pairs for ipset_attributes(). Returns 0 on success/no-op, non-zero on
# failure (for history runs: the count of failed finalize() calls).
cd "${RUN_DIR}" || return 1
local ipset="${1}" mins="${2}" history_mins="${3}" ipv="${4}" limit="${5}" \
url="${6}" \
processor="${7-$CAT_CMD}" \
category="${8}" \
info="${9}" \
maintainer="${10}" maintainer_url="${11}" force=${REPROCESS_ALL}
shift 11
# read it attributes
IPSET_PUBLIC_URL=
ipset_attributes "${ipset}" "${@}"
local tmp= error=0 now= date= ret= \
pre_filter="$CAT_CMD" post_filter="$CAT_CMD" post_filter2="$CAT_CMD" filter="$CAT_CMD" \
src="${ipset}.source" dst=
# check
if [ -z "${info}" ]
then
ipset_warning "${ipset}" "INTERNAL ERROR (update): no info supplied"
info="${category}"
fi
# choose hash type and the filter chain based on ipv + limit
case "${ipv}" in
ipv4)
post_filter2="filter_invalid4"
case "${limit}" in
ip|ips) # output is single ipv4 IPs without /
hash="ip"
limit="ip"
pre_filter="$CAT_CMD"
filter="filter_ip4" # without this, '${IPRANGE_CMD} -1' may output huge number of IPs
post_filter="${IPRANGE_CMD} -1"
;;
net|nets) # output is full CIDRs without any single IPs (/32)
hash="net"
limit="net"
pre_filter="filter_all4"
filter="${IPRANGE_CMD}"
post_filter="filter_net4"
;;
both|all) # output is full CIDRs with single IPs in CIDR notation (with /32)
hash="net"
limit=""
pre_filter="filter_all4"
filter="${IPRANGE_CMD}"
post_filter="$CAT_CMD"
;;
split) ;;
*) ipset_error "${ipset}" "unknown limit '${limit}'."
return 1
;;
esac
;;
ipv6)
ipset_error "${ipset}" "IPv6 is not yet supported."
return 1
;;
*) ipset_error "${ipset}" "unknown IP version '${ipv}'."
return 1
;;
esac
# the destination file
# it must be a relative file (no path)
dst="${ipset}.${hash}set"
# check if it is enabled
ipset_shall_be_run "${ipset}"
case "$?" in
0) ;;
1) [ -d "${BASE_DIR}/.git" ] && echo >"${BASE_DIR}/${ipset}.setinfo" "${ipset}|${info}|${ipv} hash:${hash}|disabled|`if [ ! -z "${url}" ]; then echo "updated every $(mins_to_text ${mins}) from [this link](${url})"; fi`"
return 1
;;
*) return 1
;;
esac
# do we have something to download?
if [ ! -z "${url}" ]
then
# download it
download_manager "${ipset}" "${mins}" "${url}"
ret=$?
# force reprocessing when we have a source but no processed output yet
if [ \( -z "${IPSET_FILE[${ipset}]}" -o ! -f "${BASE_DIR}/${dst}" \) -a -s "${BASE_DIR}/${src}" ]
then
force=1
ipset_silent "${ipset}" "forced reprocessing (ignoring download status)"
elif [ ${ret} -eq ${DOWNLOAD_FAILED} ]
then
ipset_silent "${ipset}" "download manager reports failure"
check_file_too_old "${ipset}" "${BASE_DIR}/${dst}"
return 1
elif [ ${ret} -eq ${DOWNLOAD_NOT_UPDATED} -a ! -f "${BASE_DIR}/${dst}" ]
then
force=1
ipset_silent "${ipset}" "download is the same, but we need to re-process it"
elif [ ${ret} -eq ${DOWNLOAD_NOT_UPDATED} -a ${force} -eq 0 ]
then
ipset_silent "${ipset}" "download manager reports not updated source"
check_file_too_old "${ipset}" "${BASE_DIR}/${dst}"
return 1
fi
# expose the public URL instead of the real download URL, if set
[ ! -z "${IPSET_PUBLIC_URL}" ] && url="${IPSET_PUBLIC_URL}"
fi
if [ -f "${BASE_DIR}/${dst}" ]
then
# check if the source file has been updated
if [ ${force} -eq 0 -a ! "${BASE_DIR}/${src}" -nt "${BASE_DIR}/${dst}" ]
then
ipset_notupdated "${ipset}" "source file has not been updated"
check_file_too_old "${ipset}" "${BASE_DIR}/${dst}"
return 0
fi
if [ "${BASE_DIR}/${src}" -nt "${BASE_DIR}/${dst}" ]
then
ipset_silent "${ipset}" "source file has been updated"
fi
fi
# support for older systems where hash:net cannot get hash:ip entries
# if the .split file exists, create 2 ipsets, one for IPs and one for subnets
if [ "${limit}" = "split" -o \( -z "${limit}" -a -f "${BASE_DIR}/${ipset}.split" \) ]
then
ipset_info "${ipset}" "spliting IPs and subnets..."
# link the same source file to both sub-sets and recurse once per set
test -f "${BASE_DIR}/${ipset}_ip.source" && $RM_CMD "${BASE_DIR}/${ipset}_ip.source"
test -f "${BASE_DIR}/${ipset}_net.source" && $RM_CMD "${BASE_DIR}/${ipset}_net.source"
(
cd "${BASE_DIR}"
$LN_CMD -s "${src}" "${ipset}_ip.source"
$LN_CMD -s "${src}" "${ipset}_net.source"
)
update "${ipset}_ip" "${mins}" "${history_mins}" "${ipv}" ip \
"" \
"${processor}" \
"${category}" \
"${info}" \
"${maintainer}" "${maintainer_url}" \
"${@}"
update "${ipset}_net" "${mins}" "${history_mins}" "${ipv}" net \
"" \
"${processor}" \
"${category}" \
"${info}" \
"${maintainer}" "${maintainer_url}" \
"${@}"
# NOTE(review): only the status of the "_net" update is propagated
# here; a failure of the "_ip" update is silently lost - confirm
# whether this is intentional
return $?
fi
# convert it
ipset_silent "${ipset}" "converting with '${processor}'"
tmp=`$MKTEMP_CMD "${RUN_DIR}/${ipset}.tmp-XXXXXXXXXX"` || return 1
# processor output -> trim -> pre_filter -> filter -> post_filter(2)
${processor} <"${BASE_DIR}/${src}" |\
trim |\
${pre_filter} |\
${filter} |\
${post_filter} |\
${post_filter2} >"${tmp}"
if [ $? -ne 0 ]
then
ipset_error "${ipset}" "failed to convert file."
$RM_CMD "${tmp}"
check_file_too_old "${ipset}" "${BASE_DIR}/${dst}"
return 1
fi
# if the downloaded file is empty, but we don't accept empty files
if [ ! -s "${tmp}" -a -z "${IPSET_TMP_ACCEPT_EMPTY[${ipset}]}" ]
then
ipset_error "${ipset}" "converted file is empty."
$RM_CMD "${tmp}"
check_file_too_old "${ipset}" "${BASE_DIR}/${dst}"
return 1
fi
local h= hmax=-1
[ "${history_mins}" = "0" ] && history_mins=
# store the fresh result in the history before merging past entries
if [ ! -z "${history_mins}" ]
then
history_keep "${ipset}" "${tmp}"
fi
ret=0
# run finalize() once plainly (h=0) and once per history period,
# merging past history files into "${ipset}${htag}" aggregated sets
for h in 0 ${history_mins/,/ }
do
local hmins=${h/\/*/}
hmins=$[ hmins + 0 ]
local htag=
if [ ${hmins} -gt 0 ]
then
# remember the longest period, for history_cleanup() below
if [ ${hmins} -gt ${hmax} ]
then
hmax=${hmins}
fi
# build the set name suffix, e.g. "_1d", "_30d", "_2d12h", "_6h"
if [ ${hmins} -ge $[24 * 60] ]
then
local hd=$[ hmins / (24 * 60) ]
htag="_${hd}d"
if [ $[ hd * (24 * 60) ] -ne ${hmins} ]
then
htag="${htag}$[hmins - (hd * 1440)]h"
fi
else
htag="_$[hmins/60]h"
fi
ipset_silent "${ipset}${htag}" "merging history files (${hmins} mins)"
history_get "${ipset}" "${hmins}" >"${tmp}${htag}"
$CP_CMD "${tmp}${htag}" "${BASE_DIR}/${ipset}${htag}.source"
$TOUCH_CMD -r "${BASE_DIR}/${src}" "${BASE_DIR}/${ipset}${htag}.source"
fi
finalize "${ipset}${htag}" "${tmp}${htag}" \
"${ipset}${htag}.source" "${ipset}${htag}.${hash}set" \
"${mins}" "${hmins}" "${ipv}" "${limit}" "${hash}" \
"${url}" \
"${category}" \
"${info}" \
"${maintainer}" "${maintainer_url}" \
"${@}"
[ $? -ne 0 ] && ret=$[ ret + 1 ]
done
# drop history entries older than the longest aggregation period
if [ ! -z "${history_mins}" ]
then
history_cleanup "${ipset}" "${hmax}"
fi
return $ret
}
# -----------------------------------------------------------------------------
# IPSETS RENAMING
# FIXME
# Cannot rename ipsets in subdirectories
rename_ipset() {
# Rename an ipset and everything associated with it: data files
# (.ipset/.netset with a compatibility symlink for the firewall),
# .source/.split/.setinfo files, history and lib directories, web files,
# and the cached IPSET_* metadata - migrating git tracking where needed.
# Arguments: 1=old name, 2=new name
# Returns 0 on success, 1 when the rename cannot be performed.
local old="${1}" new="${2}"
# refuse when there is nothing to rename, or the target already exists
[ ! -f "${BASE_DIR}/${old}.source" -o -f "${BASE_DIR}/${new}.source" ] && return 1
cd "${BASE_DIR}" || return 1
local x=
for x in ipset netset
do
if [ -f "${BASE_DIR}/${old}.${x}" -a ! -f "${BASE_DIR}/${new}.${x}" ]
then
if [ -d "${BASE_DIR}/.git" -a ! -z "$($GIT_CMD -C "${BASE_DIR}" ls-files "${old}.${x}")" ]
then
ipset_info "${old}" "git renaming ${old}.${x} to ${new}.${x}..."
$GIT_CMD -C "${BASE_DIR}" mv "${old}.${x}" "${new}.${x}" || exit 1
# FIX: the commit message was single-quoted, committing the
# literal text '${old}.${x}' instead of the expanded names
$GIT_CMD -C "${BASE_DIR}" commit "${old}.${x}" "${new}.${x}" -m "renamed from ${old}.${x} to ${new}.${x}"
fi
if [ -f "${BASE_DIR}/${old}.${x}" -a ! -f "${BASE_DIR}/${new}.${x}" ]
then
ipset_info "${old}" "renaming ${old}.${x} to ${new}.${x}..."
$MV_CMD "${BASE_DIR}/${old}.${x}" "${BASE_DIR}/${new}.${x}" || exit 1
fi
# keep a link for the firewall
ipset_info "${old}" "Linking ${new}.${x} to ${old}.${x}..."
( cd "${BASE_DIR}" ; $LN_CMD -s "${new}.${x}" "${old}.${x}" )
# now delete it, in order to be re-created this run
$RM_CMD "${BASE_DIR}/${new}.${x}"
# FIXME:
# the ipset in kernel is wrong and will not be updated.
# Probably the solution is to create an list:set ipset
# which will link the old name with the new
fi
done
# plain renames for the remaining per-ipset files
for x in source split setinfo
do
if [ -f "${BASE_DIR}/${old}.${x}" -a ! -f "${BASE_DIR}/${new}.${x}" ]
then
$MV_CMD "${BASE_DIR}/${old}.${x}" "${BASE_DIR}/${new}.${x}" || exit 1
fi
done
if [ -d "${HISTORY_DIR}/${old}" -a ! -d "${HISTORY_DIR}/${new}" ]
then
ipset_info "${old}" "renaming ${HISTORY_DIR}/${old} ${HISTORY_DIR}/${new}"
$MV_CMD "${HISTORY_DIR}/${old}" "${HISTORY_DIR}/${new}"
fi
if [ ! -z "${LIB_DIR}" -a -d "${LIB_DIR}" -a -d "${LIB_DIR}/${old}" -a ! -d "${LIB_DIR}/${new}" ]
then
ipset_info "${old}" "renaming ${LIB_DIR}/${old} ${LIB_DIR}/${new}"
$MV_CMD -f "${LIB_DIR}/${old}" "${LIB_DIR}/${new}" || exit 1
fi
if [ -d "${WEB_DIR}" ]
then
for x in _comparison.json _geolite2_country.json _ipdeny_country.json _ip2location_country.json _ipip_country.json _history.csv retention.json .json .html
do
if [ -f "${WEB_DIR}/${old}${x}" -a ! -f "${WEB_DIR}/${new}${x}" ]
then
if [ -d "${WEB_DIR}/.git" ]
then
ipset_info "${old}" "git renaming ${WEB_DIR}/${old}${x} ${WEB_DIR}/${new}${x}"
$GIT_CMD -C "${WEB_DIR}" mv -f "${old}${x}" "${new}${x}"
$GIT_CMD -C "${WEB_DIR}" commit "${old}${x}" "${new}${x}" -m "renamed from ${old}${x} to ${new}${x}"
else
ipset_info "${old}" "renaming ${WEB_DIR}/${old}${x} ${WEB_DIR}/${new}${x}"
# FIX: use full paths - the current directory is ${BASE_DIR},
# not ${WEB_DIR}, so the previous relative names could not match
$MV_CMD -f "${WEB_DIR}/${old}${x}" "${WEB_DIR}/${new}${x}"
fi
fi
done
fi
# rename the cache
[ -z "${IPSET_INFO[${new}]}" ] && IPSET_INFO[${new}]="${IPSET_INFO[${old}]}"
[ -z "${IPSET_SOURCE[${new}]}" ] && IPSET_SOURCE[${new}]="${IPSET_SOURCE[${old}]}"
[ -z "${IPSET_URL[${new}]}" ] && IPSET_URL[${new}]="${IPSET_URL[${old}]}"
[ -z "${IPSET_FILE[${new}]}" ] && IPSET_FILE[${new}]="${IPSET_FILE[${old}]}"
[ -z "${IPSET_IPV[${new}]}" ] && IPSET_IPV[${new}]="${IPSET_IPV[${old}]}"
[ -z "${IPSET_HASH[${new}]}" ] && IPSET_HASH[${new}]="${IPSET_HASH[${old}]}"
[ -z "${IPSET_MINS[${new}]}" ] && IPSET_MINS[${new}]="${IPSET_MINS[${old}]}"
[ -z "${IPSET_HISTORY_MINS[${new}]}" ] && IPSET_HISTORY_MINS[${new}]="${IPSET_HISTORY_MINS[${old}]}"
[ -z "${IPSET_ENTRIES[${new}]}" ] && IPSET_ENTRIES[${new}]="${IPSET_ENTRIES[${old}]}"
[ -z "${IPSET_IPS[${new}]}" ] && IPSET_IPS[${new}]="${IPSET_IPS[${old}]}"
[ -z "${IPSET_SOURCE_DATE[${new}]}" ] && IPSET_SOURCE_DATE[${new}]="${IPSET_SOURCE_DATE[${old}]}"
[ -z "${IPSET_CHECKED_DATE[${new}]}" ] && IPSET_CHECKED_DATE[${new}]="${IPSET_CHECKED_DATE[${old}]}"
[ -z "${IPSET_PROCESSED_DATE[${new}]}" ] && IPSET_PROCESSED_DATE[${new}]="${IPSET_PROCESSED_DATE[${old}]}"
[ -z "${IPSET_CATEGORY[${new}]}" ] && IPSET_CATEGORY[${new}]="${IPSET_CATEGORY[${old}]}"
[ -z "${IPSET_MAINTAINER[${new}]}" ] && IPSET_MAINTAINER[${new}]="${IPSET_MAINTAINER[${old}]}"
[ -z "${IPSET_MAINTAINER_URL[${new}]}" ] && IPSET_MAINTAINER_URL[${new}]="${IPSET_MAINTAINER_URL[${old}]}"
[ -z "${IPSET_LICENSE[${new}]}" ] && IPSET_LICENSE[${new}]="${IPSET_LICENSE[${old}]}"
[ -z "${IPSET_GRADE[${new}]}" ] && IPSET_GRADE[${new}]="${IPSET_GRADE[${old}]}"
[ -z "${IPSET_PROTECTION[${new}]}" ] && IPSET_PROTECTION[${new}]="${IPSET_PROTECTION[${old}]}"
[ -z "${IPSET_INTENDED_USE[${new}]}" ] && IPSET_INTENDED_USE[${new}]="${IPSET_INTENDED_USE[${old}]}"
[ -z "${IPSET_FALSE_POSITIVES[${new}]}" ] && IPSET_FALSE_POSITIVES[${new}]="${IPSET_FALSE_POSITIVES[${old}]}"
[ -z "${IPSET_POISONING[${new}]}" ] && IPSET_POISONING[${new}]="${IPSET_POISONING[${old}]}"
[ -z "${IPSET_SERVICES[${new}]}" ] && IPSET_SERVICES[${new}]="${IPSET_SERVICES[${old}]}"
[ -z "${IPSET_ENTRIES_MIN[${new}]}" ] && IPSET_ENTRIES_MIN[${new}]="${IPSET_ENTRIES_MIN[${old}]}"
[ -z "${IPSET_ENTRIES_MAX[${new}]}" ] && IPSET_ENTRIES_MAX[${new}]="${IPSET_ENTRIES_MAX[${old}]}"
[ -z "${IPSET_IPS_MIN[${new}]}" ] && IPSET_IPS_MIN[${new}]="${IPSET_IPS_MIN[${old}]}"
[ -z "${IPSET_IPS_MAX[${new}]}" ] && IPSET_IPS_MAX[${new}]="${IPSET_IPS_MAX[${old}]}"
[ -z "${IPSET_STARTED_DATE[${new}]}" ] && IPSET_STARTED_DATE[${new}]="${IPSET_STARTED_DATE[${old}]}"
[ -z "${IPSET_CLOCK_SKEW[${new}]}" ] && IPSET_CLOCK_SKEW[${new}]="${IPSET_CLOCK_SKEW[${old}]}"
[ -z "${IPSET_DOWNLOAD_FAILURES[${new}]}" ] && IPSET_DOWNLOAD_FAILURES[${new}]="${IPSET_DOWNLOAD_FAILURES[${old}]}"
[ -z "${IPSET_VERSION[${new}]}" ] && IPSET_VERSION[${new}]="${IPSET_VERSION[${old}]}"
[ -z "${IPSET_AVERAGE_UPDATE_TIME[${new}]}" ] && IPSET_AVERAGE_UPDATE_TIME[${new}]="${IPSET_AVERAGE_UPDATE_TIME[${old}]}"
[ -z "${IPSET_MIN_UPDATE_TIME[${new}]}" ] && IPSET_MIN_UPDATE_TIME[${new}]="${IPSET_MIN_UPDATE_TIME[${old}]}"
[ -z "${IPSET_MAX_UPDATE_TIME[${new}]}" ] && IPSET_MAX_UPDATE_TIME[${new}]="${IPSET_MAX_UPDATE_TIME[${old}]}"
[ -z "${IPSET_DOWNLOADER[${new}]}" ] && IPSET_DOWNLOADER[${new}]="${IPSET_DOWNLOADER[${old}]}"
[ -z "${IPSET_DOWNLOADER_OPTIONS[${new}]}" ] && IPSET_DOWNLOADER_OPTIONS[${new}]="${IPSET_DOWNLOADER_OPTIONS[${old}]}"
cache_remove_ipset "${old}" # this also saves the cache
cd "${RUN_DIR}"
return 0
}
delete_ipset() {
local ipset="${1}"
[ -z "${ipset}" ] && return 1
[ "${CLEANUP_OLD}" != "1" ] && return 0
cd "${BASE_DIR}" || return 1
for x in ipset netset source split setinfo
do
if [ -f "${BASE_DIR}/${ipset}.${x}" ]
then
if [ -d "${BASE_DIR}/.git" ]
then
ipset_info "${ipset}" "git deleting ${BASE_DIR}/${ipset}.${x}"
$GIT_CMD -C "${BASE_DIR}" rm "${ipset}.${x}"
$GIT_CMD -C "${BASE_DIR}" commit "${ipset}.${x}" -m "deleted ${ipset}.${x}"
fi
if [ -f "${BASE_DIR}/${ipset}.${x}" ]
then
ipset_info "${ipset}" "deleting ${BASE_DIR}/${ipset}.${x}"
$RM_CMD "${BASE_DIR}/${ipset}.${x}" || exit 1
fi
fi
done
if [ -d "${HISTORY_DIR}/${ipset}" ]
then
ipset_info "${ipset}" "deleting ${HISTORY_DIR}/${ipset}"
cd "${HISTORY_DIR}" && $RM_CMD -rf "${ipset}"
cd "${BASE_DIR}" || return 1
fi
if [ ! -z "${LIB_DIR}" -a -d "${LIB_DIR}" -a -d "${LIB_DIR}/${ipset}" ]
then
ipset_info "${ipset}" "deleting ${LIB_DIR}/${ipset}"
cd "${LIB_DIR}" && $RM_CMD -rf "${ipset}"
cd "${BASE_DIR}" || return 1
fi
if [ -d "${WEB_DIR}" ]
then
for x in _comparison.json _geolite2_country.json _ipdeny_country.json _ip2location_country.json _ipip_country.json _history.csv retention.json .json .html
do
if [ -f "${WEB_DIR}/${ipset}${x}" ]
then
if [ -d "${WEB_DIR}/.git" ]
then
ipset_info "${ipset}&quo