contentctl/actions/release_notes.py (35 changes: 25 additions & 10 deletions)

@@ -51,16 +51,27 @@ def create_notes(self,repo_path:pathlib.Path, file_paths:List[pathlib.Path], hea

if data['status'] == "validation":
updates.append("- "+f"{data['name']}"+" (Validation Mode)")


# Check and create detection link
if 'name' in data and 'id' in data and 'detections' in file_path.parts and not 'ssa_detections' in file_path.parts:
temp_link = "https://research.splunk.com" + str(file_path).replace(str(repo_path),"")
pattern = r'(?<=/)[^/]*$'
detection_link = re.sub(pattern, data['id'], temp_link)
detection_link = detection_link.replace("detections","" )
detection_link = detection_link.replace(".com//",".com/" )
updates.append("- "+"["+f"{data['name']}"+"]"+"("+detection_link+")")

            if 'name' in data and 'id' in data and 'detections' in file_path.parts and not 'ssa_detections' in file_path.parts and 'detections/deprecated' not in file_path.parts:

                if data['status'] == "production":
                    temp_link = "https://research.splunk.com" + str(file_path).replace(str(repo_path),"")
                    pattern = r'(?<=/)[^/]*$'
                    detection_link = re.sub(pattern, data['id'], temp_link)
                    detection_link = detection_link.replace("detections","" )
                    detection_link = detection_link.replace(".com//",".com/" )
                    updates.append("- "+"["+f"{data['name']}"+"]"+"("+detection_link+")")

                if data['status'] == "deprecated":
                    temp_link = "https://research.splunk.com" + str(file_path).replace(str(repo_path),"")
                    pattern = r'(?<=/)[^/]*$'
                    detection_link = re.sub(pattern, data['id'], temp_link)
                    detection_link = detection_link.replace("detections","" )
                    detection_link = detection_link.replace(".com//",".com/" )
                    updates.append("- "+"["+f"{data['name']}"+"]"+"("+detection_link+")")

        except yaml.YAMLError as exc:
            raise Exception(f"Error parsing YAML file for release_notes {file_path}: {str(exc)}")
else:
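For review context, here is a minimal standalone sketch of the link rewriting that both new branches perform; the repo path, file path, and detection id below are hypothetical:

import re
import pathlib

# Hypothetical values for illustration only; in create_notes they come from
# the repo checkout and the detection YAML.
repo_path = pathlib.PurePosixPath("/work/security_content")
file_path = repo_path / "detections/endpoint/suspicious_process.yml"
detection_id = "12345678-aaaa-bbbb-cccc-1234567890ab"

# Drop the repo prefix, swap the file name for the id, remove the "detections"
# path segment, then collapse the double slash that removal leaves behind.
temp_link = "https://research.splunk.com" + str(file_path).replace(str(repo_path), "")
detection_link = re.sub(r'(?<=/)[^/]*$', detection_id, temp_link)
detection_link = detection_link.replace("detections", "")
detection_link = detection_link.replace(".com//", ".com/")

print(detection_link)  # https://research.splunk.com/endpoint/12345678-aaaa-bbbb-cccc-1234567890ab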
@@ -136,11 +147,14 @@ def release_notes(self, config:release_notes) -> None:
        macros_modified:List[pathlib.Path] = []
        lookups_modified:List[pathlib.Path] = []
        playbooks_modified:List[pathlib.Path] = []
        detections_deprecated:List[pathlib.Path] = []

        for file in modified_files:
            file= config.path / file
            if 'detections' in file.parts and 'ssa_detections' not in file.parts:
            if 'detections' in file.parts and 'ssa_detections' not in file.parts and 'deprecated' not in file.parts:
                detections_modified.append(file)
            if 'detections' in file.parts and 'ssa_detections' not in file.parts and 'deprecated' in file.parts:
                detections_deprecated.append(file)
            if 'stories' in file.parts:
                stories_modified.append(file)
            if 'macros' in file.parts:
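A quick sketch of the new bucketing, assuming the detections/deprecated/ layout used above; the paths are made up:

import pathlib

# Hypothetical repo-relative paths, for illustration only.
modified_files = [
    pathlib.PurePosixPath("detections/endpoint/suspicious_process.yml"),
    pathlib.PurePosixPath("detections/deprecated/old_detection.yml"),
    pathlib.PurePosixPath("ssa_detections/endpoint/ssa_example.yml"),
]

detections_modified = []
detections_deprecated = []

for file in modified_files:
    # SSA content is still skipped; anything under detections/deprecated/ now
    # lands in its own bucket instead of the regular "Updated Analytics" list.
    if 'detections' in file.parts and 'ssa_detections' not in file.parts and 'deprecated' not in file.parts:
        detections_modified.append(file)
    if 'detections' in file.parts and 'ssa_detections' not in file.parts and 'deprecated' in file.parts:
        detections_deprecated.append(file)

print(detections_modified)    # [PurePosixPath('detections/endpoint/suspicious_process.yml')]
print(detections_deprecated)  # [PurePosixPath('detections/deprecated/old_detection.yml')]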
@@ -187,7 +201,8 @@ def release_notes(self, config:release_notes) -> None:
self.create_notes(config.path,lookups_added, header="Lookups Added"),
self.create_notes(config.path,lookups_modified, header="Lookups Updated"),
self.create_notes(config.path,playbooks_added, header="Playbooks Added"),
self.create_notes(config.path,playbooks_modified, header="Playbooks Updated")]
self.create_notes(config.path,playbooks_modified, header="Playbooks Updated"),
self.create_notes(config.path,detections_deprecated, header="Deprecated Analytics")]

#generate and show ba_notes in a different section
ba_notes = [self.create_notes(config.path,ba_detections_added, header="New BA Analytics"),
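Putting the pieces together: a deprecated detection now flows through the new detections_deprecated bucket into its own "Deprecated Analytics" section, and the same link rewriting yields a /deprecated/ URL. A hedged sketch with a hypothetical file and id:

import re
import pathlib

repo_path = pathlib.PurePosixPath("/work/security_content")
file_path = repo_path / "detections/deprecated/old_detection.yml"   # hypothetical
detection_id = "87654321-dddd-eeee-ffff-0987654321ba"               # hypothetical

temp_link = "https://research.splunk.com" + str(file_path).replace(str(repo_path), "")
detection_link = re.sub(r'(?<=/)[^/]*$', detection_id, temp_link)
detection_link = detection_link.replace("detections", "").replace(".com//", ".com/")

print(detection_link)  # https://research.splunk.com/deprecated/87654321-dddd-eeee-ffff-0987654321ba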