diff --git a/README.md b/README.md index 58039bb5e8..4079caef1f 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@

- + # Splunk Security Content ![security_content](docs/static/logo.png) ===== @@ -43,12 +43,16 @@ curl -s https://content.splunkresearch.com | jq ``` # Usage 🧰 -### contentctl.py -The Content Control tool allows you to manipulate Splunk Security Content via the following actions: +### contentctl.py +The Content Control tool allows you to manipulate Splunk Security Content via the following actions: +0. **init** - Initilialize a new repo from scratch so you can easily add your own content to a custom application. Note that this requires a large number of command line arguments, so use python _contentctl.py init --help_ for documentation around those arguments. 1. **new_content** - Creates new content (detection, story, baseline) 2. **validate** - Validates written content 3. **generate** - Generates a deployment package for different platforms (splunk_app) +4. **build** - Builds an application suitable for deployment on a search head using Slim, the Splunk Packaging Toolkit +5. **inspect** - Uses a local version of appinspect to ensure that the app you built meets basic quality standards. +6. **cloud_deploy** - Using ACS, deploy your custom app to a running Splunk Cloud Instance. ### pre-requisites Make sure you use python version 3.9. @@ -64,16 +68,16 @@ pip install -r requirements.txt ### Architecture details for the tooling - [WIKI](https://github.com/splunk/security_content/wiki/Security-Content-Code) -### create a new detection -`python contentctl.py -p . new_content -t detection` +### create a new detection +`python contentctl.py -p . new_content -t detection` for a more indepth write up on how to write content see our [guide](https://github.com/splunk/security_content/wiki/Developing-Content). -### validate security content -`python contentctl.py -p . validate -pr ESCU` +### validate security content +`python contentctl.py -p . validate -pr ESCU` ### generate a splunk app from current content -`python contentctl.py -p . 
import os
import shutil
import subprocess
import sys


class Build:
    """Package a Splunk app from a generated ./dist source tree.

    Copies the app source into the output directory, validates it with
    Slim (the Splunk Packaging Toolkit), then builds the final package.
    Every step raises on failure so the caller can report a clean error.
    """

    def __init__(self, args):
        base_path = args.path
        if args.product == "ESCU":
            self.source = os.path.join(base_path, "dist", "escu")
            self.app_name = "DA-ESS-ContentUpdate"
        elif args.product == "SSA":
            raise Exception(f"{args.product} build not supported")
        else:
            # Custom apps live in dist/<product> and keep the product name.
            self.source = os.path.join(base_path, "dist", args.product)
            self.app_name = args.product

        if not os.path.exists(self.source):
            raise Exception(f"Attempting to build app from {self.source}, but it does not exist.")

        print(f"Building Splunk App from source {self.source}")

        # All build artifacts land under output_dir_base; the app source is
        # staged in a subdirectory named after the app.
        self.output_dir_base = args.output_dir
        self.output_dir_source = os.path.join(self.output_dir_base, self.app_name)

        self.copy_app_source()
        self.validate_splunk_app()
        self.build_splunk_app()

    def copy_app_source(self):
        """Stage the app source into the build directory, replacing any stale copy."""
        try:
            if os.path.exists(self.output_dir_source):
                print(f"The directory {self.output_dir_source} exists. Deleting it in preparation to build the app... ", end='', flush=True)
                try:
                    shutil.rmtree(self.output_dir_source)
                    print("Done!")
                except Exception as e:
                    raise Exception(f"Unable to delete {self.output_dir_source}") from e

            print(f"Copying Splunk App Source to {self.output_dir_source} in preparation for building...", end='', flush=True)
            shutil.copytree(self.source, self.output_dir_source, dirs_exist_ok=True)
            print("done")
        except Exception as e:
            raise Exception(f"Failed to copy Splunk app source from {self.source} -> {self.output_dir_source} : {str(e)}") from e

    def validate_splunk_app(self):
        """Run `slim validate` against the staged app source."""
        try:
            print("Validating Splunk App...")
            sys.stdout.flush()
            subprocess.check_output(["slim", "validate", self.output_dir_source])
            print("Package Validation Complete")
        except Exception as e:
            raise Exception(f"Error validating Splunk App: {str(e)}") from e

    def build_splunk_app(self):
        """Run `slim package` to produce the deployable app archive."""
        try:
            print("Building Splunk App...")
            sys.stdout.flush()
            subprocess.check_output(["slim", "package", "-o", self.output_dir_base, self.output_dir_source])
            print("Package Generation Complete")
        except Exception as e:
            raise Exception(f"Error building Splunk App: {str(e)}") from e
import os
import subprocess


class Deploy:
    """Install a packaged app on a Splunk Cloud instance via the ACS CLI.

    Requires an explicit legal acknowledgement (``acs-legal-ack=Y``) and an
    existing package file; the deployment runs immediately on construction.
    """

    def __init__(self, args):
        # The ACS legal acknowledgement is mandatory; refuse to proceed without it.
        if args.acs_legal_ack != "Y":
            raise Exception(f"Error - must supply 'acs-legal-ack=Y', not 'acs-legal-ack={args.acs_legal_ack}'")

        self.acs_legal_ack = args.acs_legal_ack
        self.app_package = args.app_package
        if not os.path.exists(self.app_package):
            raise Exception(f"Error - app_package file {self.app_package} does not exist")
        self.username = args.username
        self.password = args.password
        self.server = args.server

        self.deploy_to_splunk_cloud()

    def deploy_to_splunk_cloud(self):
        """Shell out to `acs apps install private`.

        Raises if the command cannot be launched or exits non-zero.
        """
        # Build the command as an argv list rather than splitting a formatted
        # string on spaces, so credentials or paths containing spaces are
        # passed through intact (and no shell is involved).
        commandline = ["acs", "apps", "install", "private",
                       f"--acs-legal-ack={self.acs_legal_ack}",
                       "--app-package", self.app_package,
                       "--server", self.server,
                       "--username", self.username,
                       "--password", self.password]
        try:
            res = subprocess.run(args=commandline)
        except Exception as e:
            raise Exception(f"Error deploying to Splunk Cloud Instance: {str(e)}") from e
        print(res.returncode)
        if res.returncode != 0:
            raise Exception("Error deploying to Splunk Cloud Instance. Review output to diagnose error.")
import re
import glob
import os
import copy
import json
import shutil

# Stub content-version.conf seeded into a brand-new custom app.
CONTENT_VERSION_FILE = '''
[content-version]
version = {version}
'''

# Stub app.conf seeded into a brand-new custom app.
APP_CONFIGURATION_FILE = '''
## Splunk app configuration file

[install]
is_configured = false
state = enabled
state_change_requires_restart = false
build = 7313

[triggers]
reload.analytic_stories = simple
reload.usage_searches = simple
reload.use_case_library = simple
reload.correlationsearches = simple
reload.analyticstories = simple
reload.governance = simple
reload.managed_configurations = simple
reload.postprocess = simple
reload.content-version = simple
reload.es_investigations = simple

[launcher]
author = {author}
version = {version}
description = {description}

[ui]
is_visible = true
label = {label}

[package]
id = {id}
'''

# Template for app.manifest; TEMPLATE_* placeholders are filled per-app in
# generate_custom_manifest.  Treat this as read-only: always deep-copy it.
APP_MANIFEST_TEMPLATE = {
    "schemaVersion": "1.0.0",
    "info": {
        "title": "TEMPLATE_TITLE",
        "id": {
            "group": None,
            "name": "TEMPLATE_NAME",
            "version": "TEMPLATE_VERSION"
        },
        "author": [
            {
                "name": "TEMPLATE_AUTHOR_NAME",
                "email": "TEMPLATE_AUTHOR_EMAIL",
                "company": "TEMPLATE_AUTHOR_COMPANY"
            }
        ],
        "releaseDate": None,
        "description": "TEMPLATE_DESCRIPTION",
        "classification": {
            "intendedAudience": None,
            "categories": [],
            "developmentStatus": None
        },
        "commonInformationModels": None,
        "license": {
            "name": None,
            "text": None,
            "uri": None
        },
        "privacyPolicy": {
            "name": None,
            "text": None,
            "uri": None
        },
        "releaseNotes": {
            "name": None,
            "text": "./README.md",
            "uri": None
        }
    },
    "dependencies": None,
    "tasks": None,
    "inputGroups": None,
    "incompatibleApps": None,
    "platformRequirements": None
}

# f-strings cannot include a backslash, so we include this as a constant
NEWLINE_INDENT = "\n\t"


class Initialize:
    """Initialize the repo for a custom app.

    Copies the ESCU dist tree as scaffolding, removes all shipped content
    (detections, stories, lookups, ...), and generates the custom app's
    manifest, app.conf, README, and content-version.conf.
    """

    def __init__(self, args):
        # Bookkeeping for the content-removal pass; reported in the summary.
        self.items_scanned = []
        self.items_deleted = []
        self.items_kept = []
        self.items_deleted_failed = []

        # Metadata used to generate the custom app's manifest and app.conf.
        self.app_title = args.title
        self.app_name = args.name
        self.app_version = args.version
        self.app_description = args.description
        self.app_author_name = args.author_name
        self.app_author_email = args.author_email
        self.app_author_company = args.author_company
        self.path = args.path
        self.dist_app_path = os.path.join(args.path, "dist", self.app_name)
        self.escu_path = os.path.join(args.path, "dist", "escu")

        self.copy_dist_escu_to_dist_app()
        self.success = self.remove_all_content()
        self.generate_files_and_directories()
        self.print_results_summary()

    def copy_dist_escu_to_dist_app(self):
        """Copy the ESCU dist tree to the custom app's dist folder and empty its lookups."""
        print("Copying ESCU Template output dir to retain static app files...", end='')
        shutil.copytree(self.escu_path, self.dist_app_path, dirs_exist_ok=True)
        # Delete all the contents in the lookups folder.
        # NOTE(review): assumes lookups/ contains only files, not
        # subdirectories (os.remove fails on a directory) - confirm.
        lookups_path = os.path.join(self.dist_app_path, "lookups")
        for filename in glob.glob(os.path.join(lookups_path, "*")):
            os.remove(filename)
        print("done")

    def simple_replace_line(self, filename: str, original: str, updated: str):
        """Replace every occurrence of `original` with `updated` in `filename`, in place."""
        print(f"Performing update on file {filename}")
        with open(filename, 'r') as data:
            contents = data.read()

        updated_contents = contents.replace(original, updated)
        with open(filename, 'w') as data:
            data.write(updated_contents)

    def generate_files_and_directories(self):
        """Generate the custom app's static files and rebrand ESCU references."""
        self.generate_custom_manifest()
        self.generate_app_configuration_file()
        self.generate_readme()
        self.generate_content_version_file()

        # Rebrand the summary dashboard: first the full ESCU app id, then the
        # short "ESCU" product name.  NOTE(review): if the custom app name
        # itself contains "ESCU", the second pass rewrites it again - confirm
        # allowed app names make this a non-issue.
        raw = '''{app_name}'''
        original = raw.format(app_name="DA-ESS-ContentUpdate")
        updated = raw.format(app_name=self.app_name)
        filename = os.path.join(self.dist_app_path, "default", "data", "ui", "views", "escu_summary.xml")
        self.simple_replace_line(filename, original, updated)

        raw = '''{app_name}'''
        original = raw.format(app_name="ESCU")
        updated = raw.format(app_name=self.app_name)
        filename = os.path.join(self.dist_app_path, "default", "data", "ui", "views", "escu_summary.xml")
        self.simple_replace_line(filename, original, updated)

        # Rebrand the "[ESCU - " stanza prefix used by the generation templates.
        raw = '''[{app_name} - '''
        original = raw.format(app_name="ESCU")
        updated = raw.format(app_name=self.app_name)
        filename_root = os.path.join(self.path, "bin/contentctl_project/contentctl_infrastructure/adapter/templates/")
        for fname in ["savedsearches_investigations.j2", "savedsearches_detections.j2", "analyticstories_investigations.j2", "analyticstories_detections.j2", "savedsearches_baselines.j2"]:
            full_path = os.path.join(filename_root, fname)
            self.simple_replace_line(full_path, original, updated)

    def generate_content_version_file(self):
        """Write default/content-version.conf with the app's version."""
        new_content_version = CONTENT_VERSION_FILE.format(version=self.app_version)
        content_version_path = os.path.join(self.dist_app_path, "default", "content-version.conf")

        try:
            os.makedirs(os.path.dirname(content_version_path), exist_ok=True)
            with open(content_version_path, "w") as version_file:
                version_file.write(new_content_version)
        except Exception as e:
            raise Exception(f"Error writing config to {content_version_path}: {str(e)}") from e
        print(f"Created Custom Content Version File at: {content_version_path}")

    def generate_readme(self):
        """Write a stub README.md at the root of the custom app."""
        readme_file_path = os.path.join(self.dist_app_path, "README.md")
        readme_stub_text = "Empty Readme file"
        try:
            os.makedirs(os.path.dirname(readme_file_path), exist_ok=True)
            with open(readme_file_path, "w") as readme_file:
                readme_file.write(readme_stub_text)
        except Exception as e:
            raise Exception(f"Error writing config to {readme_file_path}: {str(e)}") from e
        print(f"Created Custom App README at: {readme_file_path}")

    def generate_app_configuration_file(self):
        """Write default/app.conf populated with the app's metadata."""
        new_configuration = APP_CONFIGURATION_FILE.format(author=self.app_author_company,
                                                          version=self.app_version,
                                                          description=self.app_description,
                                                          label=self.app_title,
                                                          id=self.app_name)
        app_configuration_file_path = os.path.join(self.dist_app_path, "default", "app.conf")
        try:
            os.makedirs(os.path.dirname(app_configuration_file_path), exist_ok=True)
            with open(app_configuration_file_path, "w") as app_config:
                app_config.write(new_configuration)
        except Exception as e:
            raise Exception(f"Error writing config to {app_configuration_file_path}: {str(e)}") from e
        print(f"Created Custom App Configuration at: {app_configuration_file_path}")

    def generate_custom_manifest(self):
        """Write app.manifest from APP_MANIFEST_TEMPLATE with the app's metadata filled in."""
        # deepcopy, not copy: the template contains nested dicts/lists, and a
        # shallow copy would mutate the shared module-level template in place.
        new_manifest = copy.deepcopy(APP_MANIFEST_TEMPLATE)
        try:
            new_manifest['info']['title'] = self.app_title
            new_manifest['info']['id']['name'] = self.app_name
            new_manifest['info']['id']['version'] = self.app_version
            new_manifest['info']['author'][0]['name'] = self.app_author_name
            new_manifest['info']['author'][0]['email'] = self.app_author_email
            new_manifest['info']['author'][0]['company'] = self.app_author_company
            new_manifest['info']['description'] = self.app_description
        except Exception as e:
            raise Exception(f"Failure setting field to generate custom manifest: {str(e)}") from e

        manifest_path = os.path.join(self.dist_app_path, "app.manifest")
        try:
            os.makedirs(os.path.dirname(manifest_path), exist_ok=True)
            with open(manifest_path, 'w') as manifest_file:
                json.dump(new_manifest, manifest_file, indent=3)
        except Exception as e:
            raise Exception(f"Failure writing manifest file {manifest_path}: {str(e)}") from e

        print(f"Created Custom App Manifest at : {manifest_path}")

    def print_results_summary(self):
        """Print a one-line success/failure summary for the init run."""
        if self.success is True:
            print(f"Repo has been initialized successfully for app [{self.app_name}] at path [{self.dist_app_path}]!\n"
                  "Ready for your custom content!")
        else:
            print("**Failure(s) initializing repo - check log for details**")

    def remove_all_content(self) -> bool:
        """Run every content-removal step; return True only if all succeeded."""
        errors = []

        # (function, user-facing label) for each removal step.
        steps = [(self.remove_detections, "Removing Detections"),
                 (self.remove_baselines, "Removing Baselines"),
                 (self.remove_investigations, "Removing Investigations"),
                 (self.remove_lookups, "Removing Lookups"),
                 (self.remove_macros, "Removing Macros"),
                 (self.remove_notebooks, "Removing Notebooks"),
                 (self.remove_playbooks, "Removing Playbooks"),
                 (self.remove_stories, "Removing Stories"),
                 (self.remove_tests, "Removing Tests"),
                 (self.remove_dist_lookups, "Removing Dist Lookups")]
        # Sort the steps so they are performed alphabetically by label.
        steps.sort(key=lambda step: step[1])

        for func, text in steps:
            print(f"{text}...", end='')
            if func() is True:
                print("done")
            else:
                print("**ERROR!**")
                errors.append(f"Error(s) in {func.__name__}")

        if len(errors) == 0:
            return True
        print(f"Clean failed on the following steps:{NEWLINE_INDENT}{NEWLINE_INDENT.join(errors)}")
        return False

    # NOTE: the list defaults below are never mutated, so sharing them across
    # calls is safe.
    def remove_baselines(self, glob_patterns: list[str] = ["baselines/**/*.yml"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_dist_lookups(self, glob_patterns: list[str] = ["dist/escu/lookups/**/*.yml", "dist/escu/lookups/**/*.csv", "dist/escu/lookups/**/*.*"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_detections(self, glob_patterns: list[str] = ["detections/**/*.yml"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_investigations(self, glob_patterns: list[str] = ["investigations/**/*.yml"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_lookups(self, glob_patterns: list[str] = ["lookups/**/*.yml", "lookups/**/*.csv", "lookups/**/*.*"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_macros(self, glob_patterns: list[str] = ["macros/**/*.yml"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_notebooks(self, glob_patterns: list[str] = ["notebooks/**/*.ipynb"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_playbooks(self, glob_patterns: list[str] = ["playbooks/**/*.*"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_stories(self, glob_patterns: list[str] = ["stories/**/*.yml"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_tests(self, glob_patterns: list[str] = ["tests/**/*.yml"], keep: list[str] = []) -> bool:
        return self.remove_by_glob_patterns(glob_patterns, keep)

    def remove_by_glob_patterns(self, glob_patterns: list[str], keep: list[str] = []) -> bool:
        """Remove files matching every pattern; True only if all patterns succeeded."""
        success = True
        for pattern in glob_patterns:
            # &= (not |=) so a single failure makes the whole call report
            # failure while still attempting the remaining patterns.
            success &= self.remove_by_glob_pattern(pattern, keep)
        return success

    def remove_by_glob_pattern(self, glob_pattern: str, keep: list[str]) -> bool:
        """Remove every file matching `glob_pattern` except those matching a `keep` regex."""
        success = True
        try:
            matched_filenames = glob.glob(glob_pattern, recursive=True)
            for filename in matched_filenames:
                self.items_scanned.append(filename)
                success &= self.remove_file(filename, keep)
            return success
        except Exception as e:
            print(f"Error running glob on the pattern {glob_pattern}: {str(e)}")
            return False

    def remove_file(self, filename: str, keep: list[str]) -> bool:
        """Delete `filename` unless it matches one of the `keep` regexes."""
        for keep_pattern in keep:
            if re.search(keep_pattern, filename) is not None:
                print(f"Preserving file {filename} which conforms to the keep regex {keep_pattern}")
                self.items_kept.append(filename)
                return True

        # File will be deleted - it was not identified as a file to keep.
        # Note that, by design, we only delete files with os.remove; the
        # folder hierarchy is preserved.  If we want to delete folders, we
        # will need to update this library.
        try:
            os.remove(filename)
            self.items_deleted.append(filename)
            return True
        except Exception as e:
            print(f"Error deleting file {filename}: {str(e)}")
            self.items_deleted_failed.append(filename)
            return False
import subprocess


class Inspect:
    """Run splunk-appinspect against a built app package.

    Splunk appinspect does not have a documented Python API, so after
    confirming the library imports we invoke its command line interface.
    """

    def __init__(self, args):
        try:
            import splunk_appinspect
        except Exception as e:
            # splunk_appinspect depends on python-magic, whose native library
            # is often the missing piece on macOS.
            print("Failed to import splunk_appinspect. If you're on macOS, you probably need to run 'brew install libmagic'")
            # Interpolate the real error (the original f-string was missing
            # the braces around str(e), so the cause was never shown).
            raise Exception(f"AppInspect failed to import: {str(e)}") from e

        self.package_path = args.package_path

        try:
            proc = subprocess.run(["splunk-appinspect", "inspect", self.package_path])
            if proc.returncode != 0:
                raise Exception(f"splunk-appinspect failed with return code {proc.returncode}")
        except Exception as e:
            raise Exception(f"Error running appinspect on {self.package_path}: {str(e)}") from e

        print(f"Appinspect on {self.package_path} was successful!")
Reporting +from bin.contentctl_project.contentctl_core.application.use_cases.initialize import Initialize +from bin.contentctl_project.contentctl_core.application.use_cases.deploy import Deploy +from bin.contentctl_project.contentctl_core.application.use_cases.build import Build +from bin.contentctl_project.contentctl_core.application.use_cases.inspect import Inspect from bin.contentctl_project.contentctl_core.application.factory.factory import FactoryInputDto from bin.contentctl_project.contentctl_core.application.factory.ba_factory import BAFactoryInputDto from bin.contentctl_project.contentctl_core.application.factory.new_content_factory import NewContentFactoryInputDto @@ -38,7 +42,7 @@ def init(): print(""" -Running Splunk Security Content Control Tool (contentctl) +Running Splunk Security Content Control Tool (contentctl) starting program loaded for TIE Fighter... _ _ T T T T @@ -88,10 +92,14 @@ def content_changer(args) -> None: def generate(args) -> None: if not args.product: print("ERROR: missing parameter -p/--product .") - sys.exit(1) + sys.exit(1) + + #For now, the custom product is treated just like ESCU + if args.product == 'CUSTOM': + args.product = 'ESCU' if args.product not in ['ESCU', 'SSA', 'API']: - print("ERROR: invalid product. valid products are ESCU, SSA or API.") + print("ERROR: invalid product. valid products are ESCU, SSA or API. 
If you are building a custom app, use CUSTOM.") sys.exit(1) @@ -146,7 +154,7 @@ def generate(args) -> None: ba_factory_input_dto, ObjToYmlAdapter(args.path), SecurityContentProduct.SSA - ) + ) generate = Generate() generate.execute(generate_input_dto) @@ -156,12 +164,17 @@ def generate(args) -> None: def validate(args) -> None: if not args.product: print("ERROR: missing parameter -p/--product .") - sys.exit(1) + sys.exit(1) + + #For now, the custom product is treated just like ESCU + if args.product == 'CUSTOM': + args.product = 'ESCU' if args.product not in ['ESCU', 'SSA', 'all']: - print("ERROR: invalid product. valid products are all, ESCU or SSA.") + print("ERROR: invalid product. valid products are all, ESCU or SSA. If you are building a custom app, use CUSTOM.") sys.exit(1) + if args.cached_and_offline: LinkValidator.initialize_cache(args.cached_and_offline) @@ -187,7 +200,7 @@ def validate(args) -> None: SecurityContentDetectionBuilder(force_cached_or_offline = args.cached_and_offline, check_references=args.check_references, skip_enrichment=args.skip_enrichment), SecurityContentDirector() ) - + if args.product == "ESCU" or args.product == "all": validate_input_dto = ValidateInputDto( factory_input_dto, @@ -244,6 +257,7 @@ def new_content(args) -> None: print("ERROR: type " + args.type + " not supported") sys.exit(1) + new_content_factory_input_dto = NewContentFactoryInputDto(contentType) if args.type == 'attack_data': new_content_input_dto = NewContentInputDto(new_content_factory_input_dto, ObjToAttackDataYmlAdapter()) @@ -277,6 +291,19 @@ def reporting(args) -> None: reporting.execute(reporting_input_dto) +def initialize(args) -> None: + Initialize(args) + + +def build(args) -> None: + Build(args) + +def inspect(args) -> None: + Inspect(args) + +def cloud_deploy(args) -> None: + Deploy(args) + def main(args): init() @@ -284,7 +311,7 @@ def main(args): # grab arguments parser = argparse.ArgumentParser( description="Use `contentctl.py action -h` to get help 
with any Splunk Security Content action") - parser.add_argument("-p", "--path", required=True, + parser.add_argument("-p", "--path", required=True, help="path to the Splunk Security Content folder",) parser.add_argument("--cached_and_offline", action=argparse.BooleanOptionalAction, help="Force cached/offline resources. While this makes execution much faster, it may result in enrichment which is out of date. This is suitable for use only in development or disconnected environments.") @@ -295,15 +322,22 @@ def main(args): actions_parser = parser.add_subparsers(title="Splunk Security Content actions", dest="action") #new_parser = actions_parser.add_parser("new", help="Create new content (detection, story, baseline)") + init_parser = actions_parser.add_parser("init", help="Initialize a repo with scaffolding in place to build a custom app." + "This allows a user to easily add their own content and, eventually, " + "build a custom application consisting of their custom content.") + new_content_parser = actions_parser.add_parser("new_content", help="Create new security content object") + content_changer_parser = actions_parser.add_parser("content_changer", help="Change Security Content based on defined rules") validate_parser = actions_parser.add_parser("validate", help="Validates written content") generate_parser = actions_parser.add_parser("generate", help="Generates a deployment package for different platforms (splunk_app)") - content_changer_parser = actions_parser.add_parser("content_changer", help="Change Security Content based on defined rules") docgen_parser = actions_parser.add_parser("docgen", help="Generates documentation") - new_content_parser = actions_parser.add_parser("new_content", help="Create new security content object") + reporting_parser = actions_parser.add_parser("reporting", help="Create security content reporting") - + build_parser = actions_parser.add_parser("build", help="Build an application suitable for deployment to a search head") + 
inspect_parser = actions_parser.add_parser("inspect", help="Run appinspect to ensure that an app meets minimum requirements for deployment.") + cloud_deploy_parser = actions_parser.add_parser("cloud_deploy", help="Install an application on a target Splunk Cloud Instance.") + # # new arguments # new_parser.add_argument("-t", "--type", required=False, type=str, default="detection", @@ -312,13 +346,13 @@ def main(args): # help="Generates an example content UPDATE on the fields that need updating. Use `git status` to see what specific files are added. Skips new content wizard prompts.") # new_parser.set_defaults(func=new) - validate_parser.add_argument("-pr", "--product", required=True, type=str, default='all', + validate_parser.add_argument("-pr", "--product", required=True, type=str, default='all', help="Type of package to create, choose between all, `ESCU` or `SSA`.") validate_parser.add_argument('--check_references', action=argparse.BooleanOptionalAction, help="The number of threads to use to resolve references. " "Larger numbers will result in faster resolution, but will be more likely to hit rate limits or use a large amount of " "bandwidth. 
A larger number of threads is particularly useful on high-bandwidth connections, but does not improve " "performance on slow connections.") - + validate_parser.set_defaults(func=validate, check_references=False, epilog=""" Validates security manifest for correctness, adhering to spec and other common items.""") @@ -327,11 +361,11 @@ def main(args): generate_parser.add_argument("-pr", "--product", required=True, type=str, help="Type of package to create, choose between `ESCU`, `SSA` or `API`.") generate_parser.set_defaults(func=generate) - + content_changer_choices = ContentChanger.enumerate_content_changer_functions() - content_changer_parser.add_argument("-cf", "--change_function", required=True, metavar='{ ' + ', '.join(content_changer_choices) +' }' , type=str, choices=content_changer_choices, + content_changer_parser.add_argument("-cf", "--change_function", required=True, metavar='{ ' + ', '.join(content_changer_choices) +' }' , type=str, choices=content_changer_choices, help= "Choose from the functions above defined in \nbin/contentctl_core/contentctl/application/use_cases/content_changer.py") - + content_changer_parser.set_defaults(func=content_changer) docgen_parser.add_argument("-o", "--output", required=True, type=str, @@ -344,14 +378,38 @@ def main(args): reporting_parser.set_defaults(func=reporting) - - - + + init_parser.add_argument("-t", "--title", type=str, required=True, help="The title of the application to be built.") + init_parser.add_argument("-n", "--name", type=str, required=True, help="The name of the application to be built.") + init_parser.add_argument("-v", "--version", type=str, required=True, help="The version of the application to be built. 
It should be in MAJOR.MINOR.PATCH format.") + init_parser.add_argument("-a", "--author_name", type=str, required=True, help="The name of the application author.") + init_parser.add_argument("-e", "--author_email", type=str, required=True, help="The email of the application author.") + init_parser.add_argument("-c", "--author_company", type=str, required=True, help="The company of the application author.") + init_parser.add_argument("-d", "--description", type=str, required=True, help="A brief description of the app.") + init_parser.set_defaults(func=initialize) + + build_parser.add_argument("-o", "--output_dir", required=False, default="build", type=str, help="Directory to output the built package to (default is 'build')") + build_parser.add_argument("-pr", "--product", required=True, type=str, help="Name of the product to build. This is the name you created during init. To find the name of your app, look for the name of the folder created in the ./dist folder.") + build_parser.set_defaults(func=build) + + + inspect_parser.add_argument("-p", "--package_path", required=True, type=str, help="Path to the package to be inspected") + inspect_parser.set_defaults(func=inspect) + + + cloud_deploy_parser.add_argument("--app-package", required=True, type=str, help="Path to the package you wish to deploy") + cloud_deploy_parser.add_argument("--acs-legal-ack", required=True, type=str, help="specify '--acs-legal-ack=Y' to acknowledge your acceptance of any risks (required)") + cloud_deploy_parser.add_argument("--username", required=True, type=str, help="splunk.com username") + cloud_deploy_parser.add_argument("--password", required=True, type=str, help="splunk.com password") + cloud_deploy_parser.add_argument("--server", required=False, default="https://admin.splunk.com", type=str, help="Override server URL (default 'https://admin.splunk.com')") + cloud_deploy_parser.set_defaults(func=cloud_deploy) # # parse them args = parser.parse_args() - return args.func(args) - + try: + 
return args.func(args) + except Exception as e: + print(f"Error for function [{args.func.__name__}]: {str(e)}") if __name__ == "__main__": - main(sys.argv[1:]) \ No newline at end of file + main(sys.argv[1:]) diff --git a/lookups/attacker_tools.csv b/lookups/attacker_tools.csv index 2f95dfb054..38d33513d5 100644 --- a/lookups/attacker_tools.csv +++ b/lookups/attacker_tools.csv @@ -24,4 +24,4 @@ KPortScan3.exe,This executable was delivered in the XMRig Crypto Miner and is co NLAChecker.exe,A scanner tool that checks for Windows hosts for Network Level Authentication. This tool allows attackers to detect Windows Servers with RDP without NLA enabled which facilitates the use of brute force non microsoft rdp tools or exploits ns.exe,A commonly used tool used by attackers to scan and map file shares SilverBullet.exe,Malware was discovered in our monitoring of honey pots that abuses this open source software for scanning and connecting to hosts. -kportscan3.exe, KPortScan 3.0 is a widely used port scanning tool on Hacking Forums, to perform network scanning on the internal networks. \ No newline at end of file +kportscan3.exe, KPortScan 3.0 is a widely used port scanning tool on Hacking Forums to perform network scanning on the internal networks. diff --git a/requirements.txt b/requirements.txt index db54162106..dae96ec7ae 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,3 +8,6 @@ PyYAML questionary requests xmltodict +splunk-sdk +https://download.splunk.com/misc/packaging-toolkit/splunk-packaging-toolkit-1.0.1.tar.gz +splunk-appinspect