#!/bin/bash
# Author: Michael Ramsey
# Objective: Find a CyberPanel user's domlog stats for the last 5 days for all of their domains. v2
# https://gitlab.com/cyberpaneltoolsnscripts/snapshotbycyberpaneluser
#
# How to use:
#   ./CyberpanelSnapshotByCyberpanelUser.sh username
#   ./CyberpanelSnapshotByCyberpanelUser.sh exampleuserbob
#
# bash <(curl -s https://gitlab.com/cyberpaneltoolsnscripts/snapshotbycyberpaneluser/-/raw/master/CyberpanelSnapshotByCyberpanelUser.sh || wget -qO - https://gitlab.com/cyberpaneltoolsnscripts/snapshotbycyberpaneluser/-/raw/master/CyberpanelSnapshotByCyberpanelUser.sh) exampleuserbob;
11+ Username=$1
12+
13+ # CURRENTDATE=$(date +"%Y-%m-%d %T") # 2019-02-09 06:47:56
14+ # PreviousDay1=$(date --date='1 day ago' +"%Y-%m-%d") # 2019-02-08
15+ # PreviousDay2=$(date --date='2 days ago' +"%Y-%m-%d") # 2019-02-07
16+ # PreviousDay3=$(date --date='3 days ago' +"%Y-%m-%d") # 2019-02-06
17+ # PreviousDay4=$(date --date='4 days ago' +"%Y-%m-%d") # 2019-02-05
18+
19+ # datetimeDom=$(date +"%d/%b/%Y") # 09/Feb/2019
20+ # datetimeDom1DaysAgo=$(date --date='1 day ago' +"%d/%b/%Y") # 08/Feb/2019
21+ # datetimeDom2DaysAgo=$(date --date='2 days ago' +"%d/%b/%Y") # 07/Feb/2019
22+ # datetimeDom3DaysAgo=$(date --date='3 days ago' +"%d/%b/%Y") # 06/Feb/2019
23+ # datetimeDom4DaysAgo=$(date --date='4 days ago' +"%d/%b/%Y") # 05/Feb/2019
24+
25+ # Domlog Date array for past 5 days
26+ declare -a datetimeDomLast5_array=($( date +" %d/%b/%Y" ) $( date --date=' 1 day ago' +" %d/%b/%Y" ) $( date --date=' 2 days ago' +" %d/%b/%Y" ) $( date --date=' 3 days ago' +" %d/%b/%Y" ) $( date --date=' 4 days ago' +" %d/%b/%Y" ) ); # for DATE in "${datetimeDomLast5_array[@]}"; do echo $DATE; done;
27+
28+ # Get users homedir path
29+ user_homedir=$( sudo egrep " ^${Username} :" /etc/passwd | cut -d: -f6)
30+
31+ # setup Domlogs/Accesslog path based off user_homedir/logs
32+ domlogs_path=" ${user_homedir} /logs/"
33+
34+ Now=$( date +" %Y-%m-%d_%T" )
35+
36+ user_CyberpanelSnapshot=" ${Username} -CyberpanelSnapshot_${Now} .txt" ;
37+
38+ # create logfile in user's homedirectory.
39+ # sudo touch "$user_CyberpanelSnapshot"
40+
41+ # chown logfile to user
42+ # sudo chown ${Username}:${Username} "$user_CyberpanelSnapshot";
43+
44+
45+ main_function () {
46+
47+
48+ echo " "
49+ echo " Web Traffic Stats Check" ;
50+
51+ echo " " ;
52+ for DATE in " ${datetimeDomLast5_array[@]} " ; do
53+ echo " =============================================================" ;
54+ echo " Apache Dom Logs POST Requests for ${DATE} for $Username " ;
55+
56+ sudo grep -r " $DATE " ${domlogs_path} | grep POST | awk ' {print $1}' | cut -d: -f1| sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | sort | uniq -c | sort -rn | head
57+ echo " "
58+ echo " HTTP Dom Logs GET Requests for ${DATE} for $Username "
59+ sudo grep -r " $DATE " ${domlogs_path} | grep GET | awk ' {print $1}' | cut -d: -f1 | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | sort | uniq -c | sort -rn | head
60+ echo " "
61+ echo " HTTP Dom Logs Top 10 bot/crawler requests per domain name for ${DATE} "
62+ sudo grep -r " $DATE " ${domlogs_path} | grep -Ei ' crawl|bot|spider|yahoo|bing|google' | awk ' {print $1}' | cut -d: -f1| sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | sort | uniq -c | sort -rn | head
63+ echo " "
64+ echo " HTTP Dom Logs top ten IPs for ${DATE} for $Username "
65+
66+ command=$(sudo grep -r "$DATE" ${domlogs_path} | grep POST | awk '{print $1}'|sed -e 's/^[^=:]*[=:]//' -e 's|"||g' | sort | uniq -c | sort -rn | head| column -t);readarray -t iparray < <( echo "${command}" | tr '/' '\n'); echo ""; for IP in "${iparray[@]}"; do echo "$IP"; done; echo ""; echo "Show unique IP's with whois IP, Country,and ISP"; echo ""; for IP in "${iparray[@]}"; do IP=$(echo "$IP" |grep -Eo '([0-9]{1,3}[.]){3}[0-9]{1,3}|(*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])){3}))|:)))(%.+)?\s*)'); whois -h whois.cymru.com " -c -p $IP"|cut -d"|" -f 2,4,5|grep -Ev 'IP|whois.cymru.com'; done
67+
68+ echo " "
69+ echo " Checking the IPs that Have Hit the Server Most and What Site they were hitting:"
70+ sudo grep -rs " $DATE " ${domlogs_path} | awk {' print $1' } | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log:/ /g' | sort | uniq -c | sort -n | tail -10| sort -rn| column -t
71+ echo " "
72+ echo " Checking the Top Hits Per Site Per IP:"
73+ sudo grep -rs " $DATE " ${domlogs_path} | awk {' print $1,$6,$7' } | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log:/ /g' | sort | uniq -c | sort -n | tail -15| sort -rn| column -t
74+ echo " "
75+ echo " Apache Dom Logs find the top number of uri's being requested for ${DATE} "
76+ sudo grep -r " $DATE " ${domlogs_path} | grep POST | awk ' {print $7}' | cut -d: -f2 | sed " s|$domlogs_path ||g" | sort | uniq -c | sort -rn | head| column -t
77+ echo " "
78+ echo " " ;
79+ echo " View Apache requests per hour for $Username " ;
80+ sudo grep -r " $DATE " ${domlogs_path} | cut -d[ -f 2 | cut -d ] -f1 | awk -F: ' {print $2":00"}' | sort -n | uniq -c| column -t
81+ echo " "
82+ echo " CMS Checks"
83+ echo " "
84+ echo " Wordpress Checks"
85+ echo " Wordpress Login Bruteforcing checks for wp-login.php for ${DATE} for $Username "
86+ sudo grep -r " $DATE " ${domlogs_path} | grep -E " wp-login.php|wp-admin.php" | cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
87+ echo " "
88+ echo " Wordpress Cron wp-cron.php(virtual cron) checks for ${DATE} for $Username "
89+ sudo grep -r " $DATE " ${domlogs_path} | grep wp-cron.php| cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
90+ echo " "
91+ echo " Wordpress XMLRPC Attacks checks for xmlrpc.php for ${DATE} for $Username "
92+ sudo grep -r " $DATE " ${domlogs_path} | grep xmlrpc.php| cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
93+ echo " "
94+ echo " Wordpress Heartbeat API checks for admin-ajax.php for ${DATE} for $Username "
95+ sudo grep -r " $DATE " ${domlogs_path} | grep admin-ajax.php| cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn;
96+ echo " "
97+ echo " CMS Bruteforce Checks"
98+ echo " Drupal Login Bruteforcing checks for user/login/ for ${DATE} for $Username "
99+ sudo grep -r " $DATE " ${domlogs_path} | grep -E " user/login/" | cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
100+ echo " "
101+ echo " Magento Login Bruteforcing checks for admin pages /admin_xxxxx/admin/index/index for ${DATE} for $Username "
102+ sudo grep -r " $DATE " ${domlogs_path} | grep -E " admin_[a-zA-Z0-9_]*[/admin/index/index]" | cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
103+ echo " "
104+ echo " Joomla Login Bruteforcing checks for admin pages /administrator/index.php for ${DATE} for $Username "
105+ sudo grep -r " $DATE " ${domlogs_path} | grep -E " admin_[a-zA-Z0-9_]*[/admin/index/index]" | cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
106+ echo " "
107+ echo " vBulletin Login Bruteforcing checks for admin pages admincp for ${DATE} for $Username "
108+ sudo grep -r " $DATE " ${domlogs_path} | grep -E " admincp" | cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
109+ echo " "
110+ echo " Opencart Login Bruteforcing checks for admin pages /admin/index.php for ${DATE} for $Username "
111+ sudo grep -r " $DATE " ${domlogs_path} | grep -E " /admin/index.php" | cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
112+ echo " "
113+ echo " Prestashop Login Bruteforcing checks for admin pages /adminxxxx for ${DATE} for $Username "
114+ sudo grep -r " $DATE " ${domlogs_path} | grep -E " /admin[a-zA-Z0-9_]*$" | cut -f 1 -d " :" | sed -e " s|$domlogs_path ||g" -e ' s|"||g' -e ' s/.access_log//g' | awk {' print $1,$6,$7' } | sort | uniq -c | sort -n| tail| sort -rn
115+ echo " "
116+
117+
118+ done ;
119+ echo " ============================================================="
120+
121+
122+ echo " Contents have been saved to ${user_CyberpanelSnapshot} "
123+ }
124+
125+ # log everything, but also output to stdout
126+ main_function 2>&1 | tee -a " ${user_CyberpanelSnapshot} "