Reworked parsing a bit; it is now more resource-efficient when parsing XML files.

Fixed Nikto parser
Reworked parsers to use the new method
vdbaan committed Jun 23, 2017
1 parent bdef3b5 commit 216196d
Showing 15 changed files with 321 additions and 129 deletions.
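
The core resource saving in this commit shows up in the ArachniParser and IssuesLoader hunks below: identify() and the parser constructors now receive an already-parsed node, so the (not fully shown) Parser.getParser presumably runs the file text through XmlSlurper once and hands the same node to every candidate parser, instead of each parser re-parsing the raw string. The following is a hypothetical, simplified sketch of that dispatch pattern; the class and method names, the IDENTIFIER value, the XML structure, and the input file are illustrative, not the project's real Parser API.

    // Hypothetical, simplified rendering of the dispatch pattern after this commit
    // (names and XML structure are illustrative, not the project's API).
    abstract class SketchParser {
        def content                      // an already-parsed node, not the raw file text
        abstract List parse()
    }

    class SketchArachniParser extends SketchParser {
        static final String IDENTIFIER = 'report'   // assumed root element name

        SketchArachniParser(content) { this.content = content }

        // identify() now inspects the pre-parsed node instead of re-parsing the string
        static boolean identify(xml) {
            return IDENTIFIER.equalsIgnoreCase(xml.name())
        }

        List parse() {
            // illustrative only: collect issue names from the parsed report
            return content.issues.issue.collect { it.name.text() }
        }
    }

    def getParserFor(String rawText) {
        def xml
        try {
            xml = new XmlSlurper().parseText(rawText)   // parse the file text exactly once
        } catch (Exception ignored) {
            return null                                 // not well-formed XML
        }
        if (SketchArachniParser.identify(xml)) return new SketchArachniParser(xml)
        // ...the other scanner parsers would be checked against the same node here...
        return null
    }

    def parser = getParserFor(new File('arachni-report.xml').text)   // hypothetical input file
    println parser?.parse()
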
91 changes: 91 additions & 0 deletions .gitignore
@@ -0,0 +1,91 @@
# Created by .ignore support plugin (hsz.mobi)
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff:
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/dictionaries

# Sensitive or high-churn files:
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.xml
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml

# Gradle:
.idea/**/gradle.xml
.idea/**/libraries

# Mongo Explorer plugin:
.idea/**/mongoSettings.xml

## File-based project format:
*.iws

## Plugin-specific files:

# IntelliJ
/out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
### Example user template template
### Example user template

# IntelliJ project files
.idea
*.iml
out
gen
### Gradle template
.gradle
/build/

# Ignore Gradle GUI config
gradle-app.setting

# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
!gradle-wrapper.jar

# Cache of project
.gradletasknamecache

# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898
# gradle/wrapper/gradle-wrapper.properties
### Java template
# Compiled class file
*.class

# Log file
*.log

# BlueJ files
*.ctxt

# Mobile Tools for Java (J2ME)
.mtj.tmp/

# Package Files #
*.jar
*.war
*.ear
*.zip
*.tar.gz
*.rar

# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*

5 changes: 4 additions & 1 deletion build.gradle
@@ -29,7 +29,7 @@ def getVersionName = { ->
return stdout.toString().trim()
}
catch (ignored) {
return null;
return null
}
}

@@ -47,6 +47,9 @@ dependencies {
compile 'net.java.dev.glazedlists:glazedlists_java15:1.9.1'
compile 'com.miglayout:miglayout-swing:5.0'
compile 'org.slf4j:slf4j-api:1.7.22'
// https://mvnrepository.com/artifact/org.apache.commons/commons-pool2
compile group: 'org.apache.commons', name: 'commons-pool2', version: '2.4.2'


testCompile 'junit:junit:4.12'
testCompile 'org.assertj:assertj-swing-junit:3.5.0'
108 changes: 75 additions & 33 deletions src/main/groovy/net/vdbaan/issuefinder/controller/MC.groovy
@@ -39,30 +39,35 @@ import java.awt.datatransfer.Clipboard
import java.awt.datatransfer.StringSelection
import java.awt.event.MouseListener
import java.util.List
import java.util.concurrent.Callable
import java.util.concurrent.Executors
import java.util.concurrent.Future

class MC implements ListEventListener<Finding> {

int WARNING_LEVEL = 100000
SwingBuilder swing = new SwingBuilder()
MView main
IssuesLoader loader = new IssuesLoader()
private EventList<Finding> findingEventList = new BasicEventList<Finding>()
CompositeMatcherEditor<Finding> compositeFilter
FilterList<Finding> filteredFindings
EventList<Finding> threadProxyList



MC() {
findingEventList.getReadWriteLock().readLock().lock()
try {
compositeFilter = new CompositeMatcherEditor()
EventList<Finding> threadProxyList = GlazedListsSwing.swingThreadProxyList(findingEventList)
SortedList<Finding> sortedFindings = new SortedList<Finding>(threadProxyList, new FindingComparator())
filteredFindings = new FilterList<>(sortedFindings, compositeFilter)

FilterList<Finding> filteredFindings = new FilterList<>(findingEventList, compositeFilter)
SortedList<Finding> sortedFindings = new SortedList<Finding>(filteredFindings,null)

threadProxyList = GlazedListsSwing.swingThreadProxyList(sortedFindings)
AdvancedTableModel<Finding> findingTableModel =
GlazedListsSwing.eventTableModelWithThreadProxyList(filteredFindings, new FindingTableFormat())
GlazedListsSwing.eventTableModelWithThreadProxyList(threadProxyList, new FindingTableFormat())
main = new MView(swing, sortedFindings, findingTableModel)
filteredFindings.addListEventListener(this)
threadProxyList.addListEventListener(this)

} finally {
findingEventList.getReadWriteLock().readLock().unlock()
}
@@ -90,15 +95,15 @@ class MC implements ListEventListener<Finding> {
if (e.isAdjusting) return
int min = e.source.minSelectionIndex
int max = e.source.maxSelectionIndex
int size = filteredFindings.size()
int size = threadProxyList.size()
List<Finding> result = new ArrayList<>()
filteredFindings.getReadWriteLock().readLock().lock()
threadProxyList.getReadWriteLock().readLock().lock()
for (int i = min; i <= max; i++) {
if (i >= size) break
if (e.source.isSelectedIndex(i))
result << filteredFindings.get(i)
result << threadProxyList.get(i)
}
filteredFindings.getReadWriteLock().readLock().unlock()
threadProxyList.getReadWriteLock().readLock().unlock()
display(result)
})

@@ -121,7 +126,7 @@ class MC implements ListEventListener<Finding> {
copyIps.closure = {
swing.doLater {
Set<String> ips = new TreeSet<>()
filteredFindings.each { ips << it.ip }
threadProxyList.each { ips << it.ip }
ips.remove('none') // FIXME due to NetSparkerParser
def sorted = ips.sort {a,b ->
def ip1 = a.split("\\.")
@@ -137,7 +142,7 @@ class MC implements ListEventListener<Finding> {
copyIpPorts.closure = {
swing.doLater {
Set<String> ips = new TreeSet<>()
filteredFindings.each { f ->
threadProxyList.each { f ->
String port = f.port.split('/')[0]
if (port.isNumber() && port != '0') {
if (!f.ip.equalsIgnoreCase('none')) // FIXME due to NetSparkerParser
@@ -165,15 +170,15 @@ class MC implements ListEventListener<Finding> {
void setupAutoComplete() {
swing.doLater {
SortedSet<String> risks = new TreeSet<>()
filteredFindings.each { risks << it.severity.name() }
threadProxyList.each { risks << it.severity.name() }
setupAutoComplete(riskFilter, risks.asList())

SortedSet<String> plugins = new TreeSet<>()
filteredFindings.each { plugins << it.plugin }
threadProxyList.each { plugins << it.plugin }
setupAutoComplete(pluginFilter, plugins.asList())

SortedSet<String> services = new TreeSet<>()
filteredFindings.each { services << it.service }
threadProxyList.each { services << it.service }
setupAutoComplete(serviceFilter, services.asList())
}
}
@@ -210,12 +215,12 @@ class MC implements ListEventListener<Finding> {
def max = mainTable.selectionModel.maxSelectionIndex
(min..max).each {pos ->
if (mainTable.selectionModel.isSelectedIndex(pos)) {
filteredFindings.get(pos).scanner = scannerEdit.text?: filteredFindings.get(pos).scanner
filteredFindings.get(pos).hostName = hostnameEdit.text?:filteredFindings.get(pos).hostName
filteredFindings.get(pos).ip = ipEdit.text?: filteredFindings.get(pos).ip
filteredFindings.get(pos).port = portEdit.text?: filteredFindings.get(pos).port
filteredFindings.get(pos).service = serviceEdit.text?: filteredFindings.get(pos).service
filteredFindings.get(pos).severity = getSeverity(severityEdit.text)?: filteredFindings.get(pos).severity
threadProxyList.get(pos).scanner = scannerEdit.text?: filteredFindings.get(pos).scanner
threadProxyList.get(pos).hostName = hostnameEdit.text?:filteredFindings.get(pos).hostName
threadProxyList.get(pos).ip = ipEdit.text?: filteredFindings.get(pos).ip
threadProxyList.get(pos).port = portEdit.text?: filteredFindings.get(pos).port
threadProxyList.get(pos).service = serviceEdit.text?: filteredFindings.get(pos).service
threadProxyList.get(pos).severity = getSeverity(severityEdit.text)?: filteredFindings.get(pos).severity
}
}
mainTable.model.fireTableDataChanged()
@@ -278,7 +283,7 @@ class MC implements ListEventListener<Finding> {
fileName.withWriter { out ->
def xml = new MarkupBuilder(out)
xml.findings {
filteredFindings.each { f ->
threadProxyList.each { f ->
finding(scanner: f.scanner, ip: f.ip, port: f.port, service: f.service) {
plugin("" + f.plugin)
severity("" + f.severity)
@@ -292,35 +297,54 @@ class MC implements ListEventListener<Finding> {
void exportAsCSV(File fileName) {
fileName.withWriter { out ->
out.writeLine('"Scanner","ip","port","service","plugin","severity"')
filteredFindings.each { f ->
threadProxyList.each { f ->
out.writeLine("\"${f.scanner}\",\"${f.ip}\",\"${f.port}\",\"${f.service}\",\"${f.plugin}\",\"${f.severity}\"")
}
}
}

static int filesDone
static int filesTotal
void openFiles(List<String> files) {
swing.doLater {
if (files.size() == 0) return
filesTotal = files.size()
filesDone = 0
main.showLoading()
statusLabel.text = "Importing files"
statusLabel.text = String.format("Importing files %d/%d",filesDone,filesTotal)
loader.load(files, findingEventList,this)
}
}

void fileDone() {
swing.doLater {
filesDone += 1
statusLabel.text = String.format("Importing files %d/%d",filesDone,filesTotal)
}

}
void doneLoading() {
swing.doLater {
statusLabel.text = "Done"
main.hideLoading()
setupAutoComplete()
}
}

void warnToManyRows() {
swing.doLater {
main.showWarning()
}
}

@Override
void listChanged(ListEvent<Finding> listChanges) {
swing.doLater {
HashSet<String> ips = new HashSet<>()
filteredFindings.each { ips << it.ip }
threadProxyList.each { ips << it.ip }
ips.remove('none') // FIXME due to NetSparkerParser
ipLabel.text = ips.size()
ipLabel.text = String.format(' %6d unique IPs',ips.size())
rowLabel.text = String.format('%d findings',threadProxyList.size())
}
}
}
@@ -349,24 +373,42 @@ class IssuesLoader implements Runnable {
}

private parseFile(File file) {

if (file == null) return
def parser = Parser.getParser(file.text)
if (parser == null) {
println "No parser found for: "+file.getName()
return
}
List<Finding> result = parser.parse()
loadList.getReadWriteLock().writeLock().lock()
try {
List<Finding> result = Parser.getParser(file.text).parse()
loadList.addAll(result)
// println String.format("Added: %8d, new size: %8d rows",result.size(),loadList.size())
if (loadList.size() > mc.WARNING_LEVEL) {
mc.warnToManyRows()
}
} catch(Exception e) {
// pass (for now)
} finally {
loadList.getReadWriteLock().writeLock().unlock()
}
mc.fileDone()
}
void run() {
files.each { file ->
parseFile(file)

int numCores = Runtime.getRuntime().availableProcessors()
def threadPool = Executors.newFixedThreadPool(numCores)
try {
List<Future> futures = files.collect(){ file ->
threadPool.submit({->
parseFile file } as Callable)
}
futures.each{it.get()}
} finally {
threadPool.shutdown()
}
mc.doneLoading()
}

}

class FindingTableFormat implements TableFormat<Finding> {
@@ -386,7 +428,7 @@ class FindingTableFormat implements TableFormat<Finding> {
Object getColumnValue(Finding finding, int column) {
switch (column) {
case 0: return finding.scanner
case 1: return finding.ip
case 1: return String.format('%s (%s)',finding.ip,finding.hostName)
case 2: return finding.port
case 3: return finding.service
case 4: return finding.plugin
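
IssuesLoader.run() above now farms the files out to a fixed-size thread pool (one worker per available core) and blocks on the Futures before reporting doneLoading(), instead of parsing each file sequentially; additions to loadList are still serialised behind its write lock. Below is a self-contained sketch of the same pattern under stated assumptions: the file names and the results sink are placeholders, where the real code feeds parsed Findings into an EventList and calls back into MC for progress updates.

    import java.util.concurrent.Callable
    import java.util.concurrent.Executors
    import java.util.concurrent.Future

    // Self-contained sketch of the pool-based loading pattern above
    // (placeholder inputs and sink; the real code adds Findings to an EventList under its write lock).
    List<String> files = ['scan1.xml', 'scan2.xml', 'scan3.xml']     // hypothetical inputs

    int numCores = Runtime.getRuntime().availableProcessors()
    def threadPool = Executors.newFixedThreadPool(numCores)          // one worker per core
    def results = Collections.synchronizedList([])                   // guarded shared sink
    try {
        List<Future> futures = files.collect { file ->
            threadPool.submit({ ->
                results << "parsed ${file}".toString()               // stand-in for parser.parse()
            } as Callable)
        }
        futures.each { it.get() }                                    // block until every file is done
    } finally {
        threadPool.shutdown()
    }
    println "loaded ${results.size()} of ${files.size()} files"      // then doneLoading() would fire
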
@@ -86,7 +86,7 @@ class FindingMatcher implements Matcher<Finding> {

switch (filter) {
case 'scanner': return test(item.scanner, text)
case 'ip': return test(item.ip, text)
case 'ip': return test(String.format('%s (%s)',item.ip,item.hostName), text)
case 'port': return test(item.port, text)
case 'service': return test(item.service, text)
case 'plugin': return test(item.plugin, text)
@@ -25,16 +25,11 @@ class ArachniParser extends Parser {
static String scanner = "Arachni"

ArachniParser(content) {
this.content = xmlslurper.parseText(content)
this.content = content
}

static boolean identify(contents) {
try {
def xml = xmlslurper.parseText(contents)
return IDENTIFIER.equalsIgnoreCase(xml.name())
} catch(Exception e) {
return false
}
return IDENTIFIER.equalsIgnoreCase(contents.name())
}

List<Finding> parse() {
