combine - a proper python package #138

Merged
merged 4 commits on May 5, 2015
+259 −112
Build combine as a package with tox and setup.py

Ivan Leichtling committed May 3, 2015
commit e7f1346306c730250233fcaafe16b42ff7a1813a
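The loaded diff below shows the supporting files but not the new setup.py or tox.ini themselves. As a rough, hypothetical sketch of the packaging the commit title describes (the name matches the project, but the version, description, and dependency list are assumptions, not taken from this PR):

# setup.py: hypothetical sketch only; the actual file is not visible in this diff.
from setuptools import find_packages
from setuptools import setup

setup(
    name='combine',
    version='0.0.1',
    description='Gather and normalize threat intelligence feeds',
    packages=find_packages(exclude=('tests*',)),
    install_requires=[
        'requests',     # used by baler.py below
        'unicodecsv',   # used by baler.py below
    ],
)

With a setup.py along these lines in place, the Makefile's `tox -evenv` target (added below) can build a virtualenv with the package installed into it.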
@@ -37,7 +37,7 @@ nosetests.xml
.pydevproject
# virtualenv
venv
venv*/
# IDE config
.idea
@@ -0,0 +1,17 @@
-   repo: git://github.com/pre-commit/pre-commit-hooks
    sha: cedcea550c495d536247ca23115035b17074cac7
    hooks:
    -   id: autopep8-wrapper
        args: ['-i', '--ignore=E501']
    -   id: check-json
    -   id: check-yaml
    -   id: end-of-file-fixer
    -   id: trailing-whitespace
    -   id: name-tests-test
    -   id: requirements-txt-fixer
    -   id: flake8
-   repo: git://github.com/asottile/reorder_python_imports
    sha: 3d86483455ab5bd06cc1069fdd5ac57be5463f10
    hooks:
    -   id: reorder-python-imports
@@ -0,0 +1,18 @@
.DELETE_ON_ERROR:

all:
	echo >&2 "Must specify target."

venv:
	tox -evenv

test:
	tox

clean:
	rm -rf build/ dist/ *.egg-info/ .tox/ venv-*/
	rm -f .coverage
	find . -name '*.pyc' -delete
	find . -name '__pycache__' -delete

.PHONY: all test clean
@@ -42,7 +42,7 @@
}
}
}
},
"source": {

This file was deleted.

@@ -0,0 +1 @@
# -*- coding: utf-8 -*-
@@ -1,17 +1,18 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import ConfigParser
import datetime as dt
import gzip
import json
import logging
import os
import re
import requests
import sys
import threading
import time
from Queue import Queue
import requests
import unicodecsv
import threading
from logger import get_logger
from Queue import Queue
logger = get_logger('baler')
@@ -120,7 +121,7 @@ def bale_CRITs_indicator(base_url, data, indicator_que):
if source:
data['source'] = source[0]
res = requests.post(url, data=data, verify=False)
if not res.status_code in [201, 200, 400]:
if res.status_code not in [201, 200, 400]:
logger.info("Issues with adding: %s" % data['ip'])
elif indicator[1] == "FQDN":
# using the Domain API
@@ -133,7 +134,7 @@ def bale_CRITs_indicator(base_url, data, indicator_que):
if source:
data['source'] = source[0]
res = requests.post(url, data=data, verify=False)
if not res.status_code in [201, 200, 400]:
if res.status_code not in [201, 200, 400]:
logger.info("Issues with adding: %s" % data['domain'])
else:
logger.info("don't yet know what to do with: %s[%s]" % (indicator[1], indicator[0]))
@@ -217,5 +218,9 @@ def bale(input_file, output_file, output_format, is_regular):
format_funcs = {'csv': bale_enr_csv, 'crits': bale_CRITs}
format_funcs[output_format](harvest, output_file)
if __name__ == "__main__":
def main():
bale('crop.json', 'harvest.csv', 'csv', True)
if __name__ == "__main__":
main()
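The hunk above moves the module-level bale() call under a main() function, so importing baler no longer kicks off a bale run as a side effect. A minimal usage sketch, assuming baler.py is importable (same directory or installed as part of the package); the argument values mirror what main() above passes:

# Sketch only: call baler as a library instead of running it as a script.
from baler import bale

bale('crop.json', 'harvest.csv', 'csv', True)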
@@ -0,0 +1,56 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import sys

from baler import bale
from baler import tiq_output
from reaper import reap
from thresher import thresh
from winnower import winnow

# Combine components


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--type', help="Specify output type. Currently supported: CSV and exporting to CRITs")
    parser.add_argument('-f', '--file', help="Specify output file. Defaults to harvest.FILETYPE")
    parser.add_argument('-d', '--delete', help="Delete intermediate files", action="store_true")
    parser.add_argument('-e', '--enrich', help="Enrich data", action="store_true")
    parser.add_argument('--tiq-test', help="Output in tiq-test format", action="store_true")
    args = parser.parse_args()

    possible_types = ['csv', 'json', 'crits']
    if not args.type:
        out_type = 'csv'
    elif args.type.lower() not in possible_types:
        sys.exit('Invalid file type specified. Possible types are: %s' % possible_types)
    else:
        out_type = args.type.lower()

    if args.file:
        out_file = args.file
    else:
        out_file = 'harvest.' + out_type

    reap('harvest.json')
    thresh('harvest.json', 'crop.json')
    bale('crop.json', out_file, out_type, True)
    if args.enrich or args.tiq_test:
        winnow('crop.json', 'crop.json', 'enrich.json')
        bale('enrich.json', 'enriched.' + out_type, out_type, False)
    if args.tiq_test:
        tiq_output('crop.json', 'enrich.json')

    if args.delete:
        # be careful with this when we support a JSON output type
        os.remove('harvest.json')
        os.remove('crop.json')
        os.remove('enrich.json')


if __name__ == "__main__":
    main()
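combine.py wires the stages together behind argparse; the same pipeline can also be driven directly from Python. A hedged sketch, using only the functions and file names that appear in this diff:

# Sketch of the enrichment pipeline that main() runs when --enrich is passed.
from baler import bale
from reaper import reap
from thresher import thresh
from winnower import winnow

reap('harvest.json')                               # collect raw feed data
thresh('harvest.json', 'crop.json')                # normalize into crop.json
winnow('crop.json', 'crop.json', 'enrich.json')    # enrich the indicators
bale('enrich.json', 'enriched.csv', 'csv', False)  # write the enriched CSV

Note that main() leaves harvest.json, crop.json, and enrich.json on disk unless -d/--delete is passed.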
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 by Farsight Security, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import calendar
import errno
import locale
@@ -37,7 +36,9 @@
locale.setlocale(locale.LC_ALL, '')
class DnsdbClient(object):
def __init__(self, server, apikey, limit=None):
self.server = server
self.apikey = apikey
@@ -84,9 +85,11 @@ def _query(self, path):
sys.stderr.write(str(e) + '\n')
return res
def sec_to_text(ts):
return time.strftime('%Y-%m-%d %H:%M:%S -0000', time.gmtime(ts))
def rrset_to_text(m):
s = StringIO()
@@ -113,13 +116,15 @@ def rrset_to_text(m):
s.seek(0)
return s.read()
def rdata_to_text(m):
return '%s IN %s %s' % (m['rrname'], m['rrtype'], m['rdata'])
def parse_config(cfg_fname):
config = {}
cfg_files = filter(os.path.isfile,
(cfg_fname, os.path.expanduser('~/.dnsdb-query.conf')))
(cfg_fname, os.path.expanduser('~/.dnsdb-query.conf')))
if not cfg_files:
raise IOError(errno.ENOENT, 'dnsdb_query: No config files found')
@@ -132,6 +137,7 @@ def parse_config(cfg_fname):
return config
def time_parse(s):
try:
epoch = int(s)
@@ -153,6 +159,7 @@ def time_parse(s):
raise ValueError('Invalid time: "%s"' % s)
def filter_before(res_list, before_time):
before_time = time_parse(before_time)
new_res_list = []
@@ -169,6 +176,7 @@ def filter_before(res_list, before_time):
return new_res_list
def filter_after(res_list, after_time):
after_time = time_parse(after_time)
new_res_list = []
@@ -185,26 +193,27 @@ def filter_after(res_list, after_time):
return new_res_list
def main():
global cfg
global options
parser = optparse.OptionParser()
parser.add_option('-c', '--config', dest='config', type='string',
help='config file', default=DEFAULT_CONFIG_FILE)
help='config file', default=DEFAULT_CONFIG_FILE)
parser.add_option('-r', '--rrset', dest='rrset', type='string',
help='rrset <ONAME>[/<RRTYPE>[/BAILIWICK]]')
help='rrset <ONAME>[/<RRTYPE>[/BAILIWICK]]')
parser.add_option('-n', '--rdataname', dest='rdata_name', type='string',
help='rdata name <NAME>[/<RRTYPE>]')
help='rdata name <NAME>[/<RRTYPE>]')
parser.add_option('-i', '--rdataip', dest='rdata_ip', type='string',
help='rdata ip <IPADDRESS|IPRANGE|IPNETWORK>')
help='rdata ip <IPADDRESS|IPRANGE|IPNETWORK>')
parser.add_option('-s', '--sort', dest='sort', type='string', help='sort key')
parser.add_option('-R', '--reverse', dest='reverse', action='store_true', default=False,
help='reverse sort')
help='reverse sort')
parser.add_option('-j', '--json', dest='json', action='store_true', default=False,
help='output in JSON format')
help='output in JSON format')
parser.add_option('-l', '--limit', dest='limit', type='int', default=0,
help='limit number of results')
help='limit number of results')
parser.add_option('', '--before', dest='before', type='string', help='only output results seen before this time')
parser.add_option('', '--after', dest='after', type='string', help='only output results seen after this time')
@@ -220,10 +229,9 @@ def main():
sys.stderr.write(e.message)
sys.exit(1)
if not 'DNSDB_SERVER' in cfg:
if 'DNSDB_SERVER' not in cfg:
cfg['DNSDB_SERVER'] = DEFAULT_DNSDB_SERVER
if not 'APIKEY' in cfg:
if 'APIKEY' not in cfg:
sys.stderr.write('dnsdb_query: APIKEY not defined in config file\n')
sys.exit(1)
@@ -246,7 +254,7 @@ def main():
if len(res_list) > 0:
if options.sort:
if not options.sort in res_list[0]:
if options.sort not in res_list[0]:
sort_keys = res_list[0].keys()
sort_keys.sort()
sys.stderr.write('dnsdb_query: invalid sort key "%s". valid sort keys are %s\n' % (options.sort, ', '.join(sort_keys)))
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
import logging
def get_logger(name=None):
root_logger_name = 'combine'
@@ -9,17 +11,17 @@ def get_logger(name=None):
else:
name = root_logger_name
root_logger = logging.getLogger(root_logger_name)
root_logger = logging.getLogger(root_logger_name)
# If the root logger has no handlers, add them
# in any case return the sub-logger
if root_logger.handlers:
return logging.getLogger(name)
else:
root_logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler() # default to sys.stderr
ch.setLevel(logging.DEBUG) # todo: make it configurable
ch = logging.StreamHandler() # default to sys.stderr
ch.setLevel(logging.DEBUG) # todo: make it configurable
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root_logger.addHandler(ch)
return logging.getLogger(name)
return logging.getLogger(name)
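get_logger() parents every component's logger under the single 'combine' root logger, so the StreamHandler is attached once and shared by all modules. A small usage sketch, following the same pattern baler.py uses with get_logger('baler'); the 'winnower' name here is illustrative:

# Illustrative only: obtain a sub-logger for a component module.
from logger import get_logger

logger = get_logger('winnower')            # a sub-logger under the 'combine' root
logger.info('winnowing %s', 'crop.json')   # emitted via the shared handler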