Skip to content

Commit

Permalink
update for new console_logger support without splunk by default
Browse files Browse the repository at this point in the history
  • Loading branch information
jay-johnson committed Jun 26, 2018
1 parent 0259e9a commit 817efcd
Show file tree
Hide file tree
Showing 27 changed files with 123 additions and 119 deletions.
6 changes: 3 additions & 3 deletions network_pipeline/build_training_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,16 @@
import json
import numpy as np
import pandas as pd
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from spylunking.log.setup_logging import build_colorized_logger
from network_pipeline.consts import VALID
from network_pipeline.consts import INVALID
from network_pipeline.consts import ERROR
from sklearn.model_selection import train_test_split


name = "training-utils"
log = build_colorized_logger(name=name)
log = console_logger(
name='build_training_request')


def build_training_request(
Expand Down
9 changes: 4 additions & 5 deletions network_pipeline/connect_forwarder.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
import logging
import socket
import time
from network_pipeline.log.setup_logging import setup_logging
from spylunking.log.setup_logging import console_logger

setup_logging()
name = "connect-forwarder"
log = logging.getLogger(name)

log = console_logger(
name='connect_forwarder')


def connect_forwarder(forward_host=None,
Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/convert_pkt_to_json.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,12 @@
import logging
import json
from collections import defaultdict
from network_pipeline.log.setup_logging import setup_logging
from spylunking.log.setup_logging import console_logger
from network_pipeline.utils import ppj
from network_pipeline.consts import DEBUG_PACKETS


setup_logging()
name = "ptoj"
log = logging.getLogger(name)
log = console_logger(
name='ptoj')


def convert_pkt_to_json(pkg):
Expand Down
9 changes: 4 additions & 5 deletions network_pipeline/create_layer_2_socket.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import logging
import socket
from network_pipeline.log.setup_logging import setup_logging
from spylunking.log.setup_logging import console_logger

setup_logging()
name = "create-layer2"
log = logging.getLogger(name)

log = console_logger(
name='create_l2_socket')


def create_layer_2_socket():
Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/handle_packets.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,16 @@
import logging
from network_pipeline.consts import SOURCE
from network_pipeline.consts import FORWARD_EXCHANGE
from network_pipeline.consts import FORWARD_ROUTING_KEY
from network_pipeline.consts import FORWARD_QUEUE
from network_pipeline.log.setup_logging import setup_logging
from spylunking.log.setup_logging import console_logger
from network_pipeline.utils import rnow
from network_pipeline.convert_pkt_to_json import convert_pkt_to_json
from network_pipeline.publisher import pub
import scapy.all as scapy


setup_logging()
name = "proc"
log = logging.getLogger(name)
log = console_logger(
name='proc')


def handle_packets(pk):
Expand Down
8 changes: 8 additions & 0 deletions network_pipeline/network_packet_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,29 @@


class NetworkPacketTask(object):
    """Task wrapper for a captured network packet.

    Bundles the packet payload with the host it was captured on and a
    human-readable creation timestamp so downstream consumers can trace
    where and when the packet was recorded.
    """

    def __init__(self,
                 source="localhost",
                 payload=None):
        """__init__

        :param source: hostname the packet was captured on
                       (fixes original typo default ``"locahost"``)
        :param payload: captured packet data
        """
        self.source = source
        self.payload = payload
        # timestamp of task creation, formatted for log readability
        self.created = datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")

    def __call__(self):
        """Return ``"<source>-<payload>"`` identifier string."""
        return "{}-{}".format(self.source,
                              self.payload)

    def __str__(self):
        """Return the same ``"<source>-<payload>"`` identifier as __call__."""
        return "{}-{}".format(self.source,
                              self.payload)
Expand Down
9 changes: 4 additions & 5 deletions network_pipeline/parse_network_data.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import logging
import uuid
import socket
import json
Expand Down Expand Up @@ -31,11 +30,11 @@
from network_pipeline.consts import TCP_PROTO_IP
from network_pipeline.consts import UDP_PROTO_IP
from network_pipeline.consts import ICMP_PROTO_IP
from network_pipeline.log.setup_logging import setup_logging
from spylunking.log.setup_logging import console_logger

setup_logging()
name = "parser"
log = logging.getLogger(name)

log = console_logger(
name='parse_network_data')


# Convert a 6-byte ethernet (MAC) address into a dash-separated hex string
Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/publisher.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
import logging
from network_pipeline.consts import SOURCE
from network_pipeline.consts import FORWARD_BROKER_URL
from network_pipeline.consts import FORWARD_SSL_OPTIONS
from network_pipeline.consts import FORWARD_ENDPOINT_TYPE
from network_pipeline.log.setup_logging import setup_logging
from spylunking.log.setup_logging import console_logger
from celery_connectors.publisher import Publisher


setup_logging()
name = "get_publisher"
log = logging.getLogger(name)
log = console_logger(
name='get_publisher')


def get_publisher():
Expand Down
7 changes: 3 additions & 4 deletions network_pipeline/record_packets_to_csv.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import pandas as pd
from pandas.io.json import json_normalize
from celery_connectors.utils import ev
from spylunking.log.setup_logging import build_colorized_logger
from spylunking.log.setup_logging import console_logger
from network_pipeline.utils import ppj
from network_pipeline.utils import rnow
from network_pipeline.build_packet_key import build_packet_key
Expand All @@ -22,9 +22,8 @@
from antinex_client.generate_ai_request import generate_ai_request


name = "csv"
log = build_colorized_logger(
name=name)
log = console_logger(
name='csv')


class RecordPacketsToCSV:
Expand Down
9 changes: 3 additions & 6 deletions network_pipeline/scripts/base_capture.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,13 @@
#!/usr/bin/env python

import logging
import scapy.all as scapy
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from network_pipeline.log.setup_logging import setup_logging
from network_pipeline.handle_packets import handle_packets


setup_logging()
# scapy capture agent
name = "cap"
log = logging.getLogger(name)
log = console_logger(
name='base_capture')


def example_capture():
Expand Down
9 changes: 5 additions & 4 deletions network_pipeline/scripts/builders/prepare_dataset.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,22 @@
#!/usr/bin/env python

import logging
import pandas as pd
import glob
import copy
import random
from spylunking.log.setup_logging import console_logger
from network_pipeline.consts import VALID
from network_pipeline.consts import INVALID
from network_pipeline.utils import ppj
from network_pipeline.utils import rnow
from network_pipeline.log.setup_logging import setup_logging
from celery_connectors.utils import ev


setup_logging(config_name="builders.json")
name = "builder"
log = logging.getLogger(name)
log = console_logger(
name=name)


log.info("start - {}".format(name))


Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/scripts/capture_arp.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
#!/usr/bin/env python

import scapy.all as scapy
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from spylunking.log.setup_logging import build_colorized_logger
from network_pipeline.handle_packets import handle_packets


name = "cap-arp"
log = build_colorized_logger(
name=name,
config="capture-arp-logging.json")
log = console_logger(
name='cap_arp')


def capture_arp_packets():
Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/scripts/capture_icmp.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
#!/usr/bin/env python

import scapy.all as scapy
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from spylunking.log.setup_logging import build_colorized_logger
from network_pipeline.handle_packets import handle_packets


name = "cap-icmp"
log = build_colorized_logger(
name=name,
config="capture-icmp-logging.json")
log = console_logger(
name='cap_icmp')


def capture_icmp_packets():
Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/scripts/capture_ssh.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
#!/usr/bin/env python

import scapy.all as scapy
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from spylunking.log.setup_logging import build_colorized_logger
from network_pipeline.handle_packets import handle_packets


name = "cap-ssh"
log = build_colorized_logger(
name=name,
config="capture-ssh-logging.json")
log = console_logger(
name='cap_ssh')


def capture_tcp_packets_over_ssh():
Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/scripts/capture_tcp.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
#!/usr/bin/env python

import scapy.all as scapy
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from spylunking.log.setup_logging import build_colorized_logger
from network_pipeline.handle_packets import handle_packets


name = "cap-tcp"
log = build_colorized_logger(
name=name,
config="capture-tcp-logging.json")
log = console_logger(
name='cap_tcp')


def capture_tcp_packets():
Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/scripts/capture_telnet.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
#!/usr/bin/env python

import scapy.all as scapy
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from spylunking.log.setup_logging import build_colorized_logger
from network_pipeline.handle_packets import handle_packets


name = "cap-telnet"
log = build_colorized_logger(
name=name,
config="capture-telnet-logging.json")
log = console_logger(
name='cap_telnet')


def capture_tcp_packets_over_telnet():
Expand Down
8 changes: 3 additions & 5 deletions network_pipeline/scripts/capture_udp.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
#!/usr/bin/env python

import scapy.all as scapy
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from spylunking.log.setup_logging import build_colorized_logger
from network_pipeline.handle_packets import handle_packets


name = "cap-udp"
log = build_colorized_logger(
name=name,
config="capture-udp-logging.json")
log = console_logger(
name='cap_udp')


def capture_udp_packets():
Expand Down
10 changes: 3 additions & 7 deletions network_pipeline/scripts/consolidate_packets.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,27 +2,23 @@

import os
import sys
import logging
import socket
import datetime
import time
import json
from spylunking.log.setup_logging import console_logger
from network_pipeline.consts import INCLUDED_IGNORE_KEY
from network_pipeline.consts import VALID
from network_pipeline.consts import TCP
from network_pipeline.consts import UDP
from network_pipeline.consts import ICMP
from network_pipeline.consts import ARP
from network_pipeline.log.setup_logging import setup_logging
from network_pipeline.utils import ppj

# from celery_connectors.publisher import Publisher

setup_logging()

# consolidator - receives packets from network agents
name = "cdr"
log = logging.getLogger(name)
log = console_logger(
name='consolidate_packets')


def handle_processing_packets():
Expand Down
11 changes: 6 additions & 5 deletions network_pipeline/scripts/modelers/keras_dnn.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,19 @@

import os
import sys
import logging
from spylunking.log.setup_logging import console_logger
from celery_connectors.utils import ev
from network_pipeline.log.setup_logging import setup_logging
from network_pipeline.consts import VALID
from network_pipeline.build_training_request import build_training_request
from keras.models import Sequential
from keras.layers import Dense


setup_logging(config_name="modelers.json")
name = "keras-dnn"
log = logging.getLogger(name)
name = 'keras-dnn'
log = console_logger(
name=name)


log.info("start - {}".format(name))


Expand Down

0 comments on commit 817efcd

Please sign in to comment.