#!/bin/bash |
|
# $Id: alcasar-bl.sh 2688 2019-01-18 23:15:49Z lucas.echard $ |
|
# alcasar-autoupdate.sh |
# by Sven RATH and Rexy |
# This script is distributed under the Gnu General Public License (GPL) |
|
# Gestion de la BL pour le filtrage de domaine (via unbound) et d'URL (via E2guardian) |
# Manage the BL for DnsBlackHole (unbound) and URL filtering (E2guardian) |
|
# Working files and directory layout shared by the update actions below.
FILE_tmp="/tmp/filesfilter.txt"		# scratch file: "domains" part of a list
FILE_ip_tmp="/tmp/filesipfilter.txt"	# scratch file: "IP" part of a list
DIR_DG="/etc/e2guardian/lists"
DIR_DG_BL="$DIR_DG/blacklists"
DIR_SHARE="/usr/local/share"
DIR_DNS_BL="$DIR_SHARE/unbound-bl"	# all the BL in the Unbound format
DIR_DNS_WL="$DIR_SHARE/unbound-wl"	# all the WL in the Unbound format
DIR_IP_BL="$DIR_SHARE/iptables-bl"	# all the IP addresses of the BL
DIR_IP_WL="$DIR_SHARE/iptables-wl"	# IP addresses of the WL (original note was garbled: "IP ossi disabled WL")
# C&C-server feed (Bambenek OSINT): one list of domains, one list of IPs.
CNC_BL_NAME="ossi-bl-candc"
CNC_URL="https://osint.bambenekconsulting.com/feeds/"
CNC_DNS_BL_URL="${CNC_URL}c2-dommasterlist-high.txt"
CNC_IP_BL_URL="${CNC_URL}c2-ipmasterlist-high.txt"
SED="/bin/sed -i"	# kept unquoted at call sites: word-splits into command + flag
CURL="/usr/bin/curl"
|
# cleaning file and split it ("domains" in $FILE_tmp & "IP" in $FILE_ip_tmp) |
function clean_split (){ |
$SED '/^#.*/d' $FILE_tmp # remove commented lines |
$SED '/^\s*$/d' $FILE_tmp # remove empty lines |
$SED '/[äâëêïîöôüû@,]/d' $FILE_tmp # remove line with "chelou" characters |
# extract ip addresses for iptables. |
awk '/^([0-9]{1,3}\.){3}[0-9]{1,3}$/{print "add bl_ip_blocked " $0}' $FILE_tmp > $FILE_ip_tmp |
# extract domain names for unbound. |
$SED -n '/^\([0-9]\{1,3\}\.\)\{3\}[0-9]\{1,3\}/!p' $FILE_tmp |
# Retrieve max Top Level Domain for domain name synthax |
#MAX_TLD=$(curl http://data.iana.org/TLD/tlds-alpha-by-domain.txt | grep -v '-' | grep -v '#' | wc -L) |
#if [ $(echo $MAX_TLD | wc -c) -eq 0 ];then |
# MAX_TLD=18 |
#fi |
# search for correction egrep "([a-zA-Z0-9_-.]+\.){1,2}[a-zA-Z]{2,$MAX_TLD}" $ossi_custom_dir/domains > $FILE_tmp |
} |
|
# Argument handling: with no argument, fall back to "-h" so the case
# statement below prints the usage text. Variable names (usage, nb_args,
# args) are read further down and must keep their names.
usage="Usage: alcasar-bl-autoupdate.sh { -update_cat or --update_cat | -update_ossi-bl-candc or --update_ossi-bl-candc }"
nb_args=$#
args=$1
if [ "$nb_args" -eq 0 ]; then
	args="-h"
fi
# Dispatch on the requested action.
case "$args" in
	-\? | -h* | --h*)
		echo "$usage"
		exit 0
		;;
	# Update the categories of Toulouse BL listed in "/usr/local/etc/update_cat.conf" (via rsync). Cron runs this function every 12h
	-update_cat | --update_cat)
		if [ "$(wc -l < /usr/local/etc/update_cat.conf)" -ne 0 ]
		then
			echo -n "Updating categories in /usr/local/etc/update_cat.conf ..."
			# each line of the file is "<category> <rsync_url>"
			while read -r LIGNE_RSYNC
			do
				CATEGORIE=$(echo "$LIGNE_RSYNC" | cut -d' ' -f1)
				URL=$(echo "$LIGNE_RSYNC" | cut -d' ' -f2)
				PATH_FILE=$(find "$DIR_DG_BL/" -type d -name "$CATEGORIE")	# retrieve directory name of the category
				rsync -rv "$URL" "$(dirname "$PATH_FILE")"	# rsync inside of the blacklist directory
				# Creation of unbound and Iptables BL and WL
				DOMAIN=$(basename "$PATH_FILE")
				cp "$PATH_FILE/domains" "$FILE_tmp"
				clean_split	# clean ossi custom files & split them for unbound and for iptables
				# a category is a blacklist iff its "usage" file mentions "black" on exactly one line
				black=$(grep -c black "$PATH_FILE/usage")
				if [ "$black" = "1" ]
				then
					# adapt to the unbound syntax for the blacklist
					$SED "s?.*?local-zone: & typetransparent\nlocal-zone-tag: & blacklist?g" "$FILE_tmp"
					mv "$FILE_tmp" "$DIR_DNS_BL/$DOMAIN.conf"
					mv "$FILE_ip_tmp" "$DIR_IP_BL/$DOMAIN"
				else
					# adapt to the unbound syntax for the whitelist
					$SED "s?.*?local-zone: & transparent?g" "$FILE_tmp"
					mv "$FILE_tmp" "$DIR_DNS_WL/$DOMAIN.conf"
					mv "$FILE_ip_tmp" "$DIR_IP_WL/$DOMAIN"
				fi
				rm -f "$FILE_tmp" "$FILE_ip_tmp"
			done < /usr/local/etc/update_cat.conf
			/usr/local/bin/alcasar-bl.sh --reload
		else
			echo -n "/usr/local/etc/update_cat.conf is empty ..."
		fi
		echo
		;;
	# Update C&C-Server Blacklist (TODO : check that there is a difference between two downloads)
	-update_ossi-bl-candc | --update_ossi-bl-candc)
		# check availability of the lists (HTTP status only, body discarded)
		echo "Downloading blacklists from ${CNC_URL}..."
		STATUS_URL_BL=$(${CURL} --connect-timeout 5 --write-out '%{http_code}' --silent --output /dev/null "${CNC_DNS_BL_URL}")
		STATUS_IP_BL=$(${CURL} --connect-timeout 5 --write-out '%{http_code}' --silent --output /dev/null "${CNC_IP_BL_URL}")
		# if downloaded successfully
		if [ "$STATUS_URL_BL" = 200 ] && [ "$STATUS_IP_BL" = 200 ]; then
			## parse domain names and ips from feed (skip the 18 comment lines, keep the first CSV column)
			CNC_URLS=$(${CURL} --silent "$CNC_DNS_BL_URL" | tail -n +19 | awk -F, '{print $1}')
			CNC_IPS=$(${CURL} --silent "$CNC_IP_BL_URL" | tail -n +19 | awk -F, '{print $1}')
			## create files and adapt downloaded data to alcasar structure (one ip/domain per line)
			BL_DIR=${DIR_DG_BL}/${CNC_BL_NAME}
			rm -rf "${BL_DIR:?}"	# :? guards against an empty path expanding to "/"
			mkdir "$BL_DIR"
			# NOTE(review): the domain feed goes to "urls" and the IP feed to
			# "domains" — looks swapped, but kept as-is; confirm against the
			# layout expected by alcasar-bl.sh --reload before changing it.
			echo "$CNC_URLS" | tr " " "\n" > "${BL_DIR}/urls"
			echo "$CNC_IPS" | tr " " "\n" > "${BL_DIR}/domains"
			## reload ossi-blacklists to add the created blacklist to ALCASAR
			echo "Download successful."
			/usr/local/bin/alcasar-bl.sh --reload
			exit 0
		# if server responded with a code different than 200
		else
			## 000 means that curl itself failed (timeout, DNS, no network...)
			if [ "$STATUS_URL_BL" = 000 ] || [ "$STATUS_IP_BL" = 000 ]; then
				echo "ERROR: curl could not access the internet to download blacklists."
				echo "This appears to be an error on your side: please check the connection to the internet."
			else
				echo "ERROR: could not download blacklists: Server returned non-200 codes:"
				echo "${CNC_DNS_BL_URL} returned ${STATUS_URL_BL}"
				echo "${CNC_IP_BL_URL} returned ${STATUS_IP_BL}"
				echo "Check the availability of the sites. Maybe the server removed its content or changed its address."
			fi
			exit 1
		fi
		;;
esac
# (SVN patch residue below, commented out so the script stays valid shell)
# Property changes:
# Added: svn:eol-style
#	+native
# Added: svn:executable
#	+*