1.71b: better duplicate node detection, new report diff tool and child signatures in report

  - Child signatures now exposed in the report,
  - Improvements to duplicate node detection,
  - sfscandiff tool added to compare reports.
spinkham committed Nov 18, 2010
1 parent e5f6c3e commit 2e4f8fa
Showing 9 changed files with 175 additions and 14 deletions.
9 changes: 9 additions & 0 deletions ChangeLog
@@ -1,3 +1,12 @@
Version 1.71b:
--------------

- Child signatures now exposed in the report,

- Improvements to duplicate node detection,

- sfscandiff tool added to compare reports.

Version 1.70b:
--------------

2 changes: 1 addition & 1 deletion Makefile
@@ -20,7 +20,7 @@
#

PROGNAME = skipfish
-VERSION = 1.70b
+VERSION = 1.71b

OBJFILES = http_client.c database.c crawler.c analysis.c report.c
INCFILES = alloc-inl.h string-inl.h debug.h types.h http_client.h \
16 changes: 11 additions & 5 deletions README
@@ -251,6 +251,12 @@ results over HTTP). The index.html file is static; actual results are stored
as a hierarchy of JSON files, suitable for machine processing or different
presentation frontends if needs be.

A simple companion script, sfscandiff, can be used to compute a delta for
two scans executed against the same target with the same flags. The newer
report will be non-destructively annotated by adding a red background to all
new or changed nodes, and a blue background to all new or changed issues
found.
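
A minimal example invocation, assuming two completed scans of the same target
(the directory names here are hypothetical):

  $ ./sfscandiff /var/scans/example-old/ /var/scans/example-new/

The newer report is then viewed by opening its index.html as usual; the
annotations appear in the existing viewer.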

Some sites may require authentication; for simple HTTP credentials, you can
try:

@@ -500,15 +506,15 @@ know:
currently employed by skipfish; but in the long run, should be provided
as a last-resort option.

  * Scan resume option.

  * Option to limit document sampling or save samples directly to disk.

  * Standalone installation (make install) support.

  * Config file support.

  * A database for banner / version checks?

-------------------------------------
9. Oy! Something went horribly wrong!
41 changes: 37 additions & 4 deletions assets/index.html
@@ -93,6 +93,13 @@
font-weight: bold;
}

.name_diff {
font-weight: bold;
color: white;
background-color: red;
padding: 0.2em 0.5em 0.2em 0.5em;
}

span.sum_name {
font-weight: bold;
border: 1px solid white;
@@ -121,6 +128,13 @@
font-weight: bold;
}

.issue_desc_diff {
font-weight: bold;
color: white;
background-color: blue;
padding: 0.2em 0.5em 0.2em 0.5em;
}

.comment {
color: crimson;
font-size: 70%;
@@ -227,9 +241,10 @@

<script>

var c_count = 0;
var ignore_click = false;
var max_samples = 100;
var diff_mode = false;

/* Descriptions for issues reported by the scanner. */

@@ -484,6 +499,12 @@
x.send(null);
eval(x.responseText);

if (diff_mode) {
x.open('GET', dir + 'diff_data.js', false);
x.send(null);
eval(x.responseText);
}

delete x;

next_opacity('c_' + tid, 0);
@@ -504,7 +525,11 @@
case 4: add_html += '<img src="i_high.png" title="High risk: system compromise">'; break;
}

-    add_html += '</td>\n<td><div style="issue_desc">' + issue_desc[i.type] + '</div>\n<ol>\n';
+    if (!diff_mode || diff_data[i.dir] == undefined) {
+      add_html += '</td>\n<td><div class="issue_desc">' + issue_desc[i.type] + '</div>\n<ol>\n';
+    } else {
+      add_html += '</td>\n<td><div class="issue_desc_diff">' + issue_desc[i.type] + '</div>\n<ol>\n';
+    }

for (var cno2 = cno; cno2 < issue.length; cno2++) {
var i2 = issue[cno2];
@@ -588,7 +613,15 @@

if (c.dupe) add_html += '<img src="n_clone.png" title="Suspected duplicate">' +
'<span class="dupe_name" title="' + H(c.url) + '">' + H(TRUNC(c.name)) + '</span>\n';
-    else add_html += '<span class="name" title="' + H(c.url) + '">' + H(TRUNC(c.name)) + '</span>\n';
+    else {
+      if (!diff_mode || diff_data[c.dir] == 0) {
+        add_html += '<span class="name" title="' + H(c.url) + '">' + H(TRUNC(c.name)) + '</span>\n';
+      } else if (diff_data[c.dir] == 1) {
+        add_html += '<span class="name_diff" title="' + H(c.url) + '">' + H(TRUNC(c.name)) + '</span>\n';
+      } else {
+        add_html += '<span class="name_diff" title="' + H(c.url) + '">' + H(TRUNC(c.name)) + ' (' + diff_data[c.dir] + ' more)</span>\n';
+      }
+    }

if (c.linked == 0)
add_html += '<img src="n_unlinked.png" title="Not linked (brute-forced)" class="i2">';
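
For reference, diff_data is populated from a per-directory diff_data.js file
written by the sfscandiff script (added later in this change). A hypothetical
file for one node directory might look as follows; directory names and counts
are made up, 0 means no differences in that subtree, and a positive value is
the number of new or changed nodes found at or below that child:

  $ cat /var/scans/example-new/_m0/diff_data.js
  var diff_data = {
   '_i2': 0,
   '_i4': 1,
   '_i7': 3,
   '_eof': 0
  };
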
4 changes: 4 additions & 0 deletions database.c
@@ -587,6 +587,10 @@ void problem(u32 type, struct http_request* req, struct http_response* res,

pv->issue_cnt++;

/* Propagate parent issue counts. */

do { pv->desc_issue_cnt++; } while ((pv = pv->parent));

}


1 change: 1 addition & 0 deletions database.h
@@ -100,6 +100,7 @@ struct pivot_desc {

struct issue_desc* issue; /* List of issues found */
u32 issue_cnt; /* Number of issues */
u32 desc_issue_cnt; /* Number of child issues */

struct http_response* res; /* HTTP response seen */

7 changes: 4 additions & 3 deletions report.c
@@ -181,8 +181,8 @@ static inline u32 hash_extra(u8* str) {
/* Registers a new pivot signature, or updates an existing one. */

static void maybe_add_sig(struct pivot_desc* pv) {
-  u32 i, issue_sig = ~pv->issue_cnt,
-         child_sig = ~pv->child_cnt;
+  u32 i, issue_sig = ~(pv->issue_cnt | (pv->desc_issue_cnt << 16)),
+         child_sig = ~(pv->desc_cnt | (pv->child_cnt << 16));

if (!pv->res) return;

@@ -531,12 +531,13 @@ static void output_crawl_tree(struct pivot_desc* pv) {
describe_res(f, pv->child[i]->res);

    fprintf(f,", 'missing': %s, 'csens': %s, 'child_cnt': %u, "
-             "'issue_cnt': [ %u, %u, %u, %u, %u ] }%s\n",
+             "'issue_cnt': [ %u, %u, %u, %u, %u ], 'sig': 0x%x }%s\n",
pv->child[i]->missing ? "true" : "false",
pv->child[i]->csens ? "true" : "false",
pv->child[i]->total_child_cnt, pv->child[i]->total_issues[1],
pv->child[i]->total_issues[2], pv->child[i]->total_issues[3],
pv->child[i]->total_issues[4], pv->child[i]->total_issues[5],
pv->child[i]->pv_sig,
(i == pv->child_cnt - 1) ? "" : ",");
}
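
The new 'sig' field is what the sfscandiff script (added below) keys on when
matching nodes between two scans. A rough illustration of its dir/sig
extraction pipeline, run on a single made-up and abridged child_index.js
entry (the initial grep that selects such lines is omitted):

  $ echo "{ 'name': 'login', 'dir': '_i4', 'child_cnt': 2, 'sig': 0xfffdfffc }," | \
      awk -F "'dir': " '{print $2}' | sed "s/,.*'sig'://" | sed "s/[,}]*$//" | sed "s/'//g"
  _i4 0xfffdfffc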

107 changes: 107 additions & 0 deletions sfscandiff
@@ -0,0 +1,107 @@
#!/bin/bash

echo "sfscandiff - skipfish scan result comparator (lcamtuf@google.com)" 1>&2

if [ ! "$#" = "2" ]; then
echo "Usage: $0 /path/to/old/scan/ /path/to/new/scan/" 1>&2
exit 1
fi

if [ ! -s "$1/summary.js" ]; then
echo "ERROR: First parameter does not point to a valid skipfish scan directory." 1>&2
exit 1
fi

if [ ! -s "$2/summary.js" ]; then
echo "ERROR: Second parameter does not point to a valid skipfish scan directory." 1>&2
exit 1
fi

OLD_SCAN="$1"
NEW_SCAN="$2"

# Takes two parameters: old scan subdir and new scan subdir

function check_dir {

# echo "Comparing: old=[$1] new=[$2]..."

echo "0" >"$2/.diff_cnt"

echo "var diff_data = {" >"$2/diff_data.js"

grep "'dir':" "$2/child_index.js" | awk -F "'dir': " '{print $2}' | \
sed "s/,.*'sig'://" | sed "s/[,}]*$//" |sed "s/'//g" | \
while read -r dir sig; do

# echo " Checking dir=[$dir] sig=[$sig]"

# Find matching child node first.

MATCH_DIR=`grep -E "'sig': $sig[, ]" "$1/child_index.js" 2>/dev/null | \
awk -F "'dir': " '{print $2}' | cut -d"'" -f2 | head -1`

test "$MATCH_DIR" = "" && MATCH_DIR="not_found"

# Recurse into children first, to get an accurate count of differences
# for all descendants.

check_dir "$1/$MATCH_DIR" "$2/$dir"

  # Read the difference count from descendants. If the node does not appear
  # in the old scan, add 1 to the count. Store the count.

DIFF_CNT=`cat "$2/$dir/.diff_cnt" 2>/dev/null`
test "$DIFF_CNT" = "" && DIFF_CNT=0

test "$MATCH_DIR" = "not_found" && DIFF_CNT=$[DIFF_CNT+1]

echo " '$dir': $DIFF_CNT," >>"$2/diff_data.js"

# Update total count for parent node ($2)

TOTAL_DIFF_CNT=`cat "$2/.diff_cnt" 2>/dev/null`
TOTAL_DIFF_CNT=$[TOTAL_DIFF_CNT+DIFF_CNT]
echo "$TOTAL_DIFF_CNT" >"$2/.diff_cnt"

done

# Now, for every issue, see if a matching issue appears in old scan.
# If not, add it to diff_data.

grep "'severity':" "$2/issue_index.js" | while read -r line; do

LOOK_FOR=`echo "$line" | awk -F"'fetched':" '{print $1}'`
ISSUE_DIR=`echo "$line" | awk -F"'dir':" '{print $2}'|cut -d"'" -f2`

# echo " Checking issue=[$ISSUE_DIR]"

if ! grep -qF "$LOOK_FOR" "$1/issue_index.js" 2>/dev/null; then
echo " '$ISSUE_DIR': 1," >>"$2/diff_data.js"
fi

done

echo " '_eof': 0" >>"$2/diff_data.js"
echo "};" >>"$2/diff_data.js"

}

echo -n "Finding new results in $NEW_SCAN... "

check_dir "$OLD_SCAN" "$NEW_SCAN"

TOTAL=`cat "$NEW_SCAN/.diff_cnt"`

if [ "$TOTAL" = "0" ]; then
echo "no new findings."
elif [ "$TOTAL" = "1" ]; then
echo "one new or modified node found."
else
echo "$TOTAL new or modified nodes found."
fi

grep -qF "var diff_mode" "$NEW_SCAN/summary.js" ||
echo "var diff_mode = true;" >>"$NEW_SCAN/summary.js"

exit 0
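
A hypothetical end-to-end run (directory names and the reported count are
made up). Afterwards every node directory under the newer report contains a
diff_data.js file, and summary.js gains the diff_mode flag that index.html
checks before loading the annotations:

  $ ./sfscandiff /var/scans/example-old/ /var/scans/example-new/
  sfscandiff - skipfish scan result comparator (lcamtuf@google.com)
  Finding new results in /var/scans/example-new/... 3 new or modified nodes found.
  $ grep diff_mode /var/scans/example-new/summary.js
  var diff_mode = true;
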
2 changes: 1 addition & 1 deletion skipfish.c
@@ -537,7 +537,7 @@ int main(int argc, char** argv) {
}

gettimeofday(&tv, NULL);
-  en_time = tv.tv_sec * 1000L + tv.tv_usec / 1000L;
+  en_time = tv.tv_sec * 1000LL + tv.tv_usec / 1000;

SAY("\n");

