Skip to content
This repository has been archived by the owner on Feb 3, 2023. It is now read-only.

Commit

Permalink
Merge pull request #341 from guilhemmarchand/testing
Browse files Browse the repository at this point in the history
Version 1.2.48
  • Loading branch information
guilhemmarchand committed Jun 20, 2021
2 parents 78d0ec6 + da09e08 commit c1b0a6a
Show file tree
Hide file tree
Showing 8 changed files with 177 additions and 54 deletions.
23 changes: 23 additions & 0 deletions docs/releasenotes.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,29 @@
Release notes
#############

Version 1.2.48
==============

**CAUTION:**

This is a new main release branch, TrackMe 1.2.x requires the deployment of the following dependencies:

- Semicircle Donut Chart Viz, Splunk Base: https://splunkbase.splunk.com/app/4378
- Splunk Machine Learning Toolkit, Splunk Base: https://splunkbase.splunk.com/app/2890
- Splunk Timeline - Custom Visualization, Splunk Base: https://splunkbase.splunk.com/app/3120
- Splunk SA CIM - Splunk Common Information Model, Splunk Base: https://splunkbase.splunk.com/app/1621

TrackMe requires a summary index (defaults to trackme_summary) and a metric index (defaults to trackme_metrics):
https://trackme.readthedocs.io/en/latest/configuration.html

- Enhancement - Issue #335 - addresses memory overhead of the metric trackers using span=1s by default
- Fix - Issue #336 - SmartStatus - future tolerance macro is not taken into account by the endpoint
- Fix - Issue #333 - Nav - Wrong search for metric hosts allow list collection
- Fix - Issue #337 - Data sources - Short term tracker run via the UI should use latest=+4h, long term tracker should match savedsearch earliest=-24h latest=-4h
- Fix - Issue #338 - Splunk 8.2 regression in rootUri for UI TrackMe manage drilldowns to macro due to a root URL change in manager
- Fix - Issue #339 - Data sources - Data source overview chart tab should honor the trackme_tstats_main_filter macro
- Change - Nav - remaining whitelist and blocklists terms

Version 1.2.47
==============

Expand Down
4 changes: 2 additions & 2 deletions trackme/app.manifest
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@
"id": {
"group": null,
"name": "trackme",
"version": "1.2.47"
"version": "1.2.48"
},
"author": [
{
"name": "Guilhem Marchand",
"email": "guilhem.marchand@gmail.com",
"company": "Octamis"
"company": null
}
],
"releaseDate": null,
Expand Down
79 changes: 70 additions & 9 deletions trackme/bin/trackme_rest_handler_smart_status.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import json
import re
import datetime, time
import requests

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -69,6 +70,11 @@ def get_ds_smart_status(self, request_info, **kwargs):
namespace='trackme', sessionKey=request_info.session_key, owner='-')
splunkd_port = entity['mgmtHostPort']

# Define a header for authenticated requests communications with splunkd
header = {
'Authorization': 'Splunk %s' % request_info.session_key,
'Content-Type': 'application/json'}

try:

collection_name = "kv_trackme_data_source_monitoring"
Expand All @@ -93,6 +99,21 @@ def get_ds_smart_status(self, request_info, **kwargs):
# Render result
if key is not None and len(key)>2:

# Get the definition for the tolerance of data in the future
record_url = 'https://localhost:' + str(splunkd_port) \
+ '/servicesNS/-/-/admin/macros/trackme_future_indexing_tolerance'

response = requests.get(record_url, headers=header, verify=False)
if response.status_code == 200:
splunk_response = response.text
future_tolerance_match = re.search('name=\"definition\"\>\"{0,1}(\-{0,1}\d*)\"{0,1}\<', splunk_response, re.IGNORECASE)
if future_tolerance_match:
future_tolerance = future_tolerance_match.group(1)
else:
future_tolerance = "-600"
else:
future_tolerance = "-600"

# initiate the smart_code status, we start at 0 then increment using the following rules:
# - TBD

Expand Down Expand Up @@ -301,7 +322,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
searchquery = "| tstats max(_time) as data_last_time_seen "\
+ "where " + str(where_constraint) + " by host"\
+ " | eval event_lag=now()-data_last_time_seen"\
+ " | where (event_lag<-600)"\
+ " | where (event_lag<" + str(future_tolerance) + ")"\
+ " | sort - limit=10 event_lag"\
+ " | foreach event_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\"duration\"), round('<<FIELD>>', 0)) ]"\
+ ' | eval summary = host . " (event_lag: " . event_lag . ")"'\
Expand All @@ -322,7 +343,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
searchquery = "| tstats max(_time) as data_last_time_seen "\
+ "where (" + str(elastic_source_search_constraint) + ") by host"\
+ " | eval event_lag=now()-data_last_time_seen"\
+ " | where (event_lag<-600)"\
+ " | where (event_lag<" + str(future_tolerance) + ")"\
+ " | sort - limit=10 event_lag"\
+ " | foreach event_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\"duration\"), round('<<FIELD>>', 0)) ]"\
+ ' | eval summary = host . " (event_lag: " . event_lag . ")"'\
Expand All @@ -343,7 +364,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
searchquery = "search " + str(elastic_source_search_constraint)\
+ " | stats max(_time) as data_last_time_seen by host"\
+ " | eval event_lag=now()-data_last_time_seen"\
+ " | where (event_lag<-600)"\
+ " | where (event_lag<" + str(future_tolerance) + ")"\
+ " | sort - limit=10 event_lag"\
+ " | foreach event_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\"duration\"), round('<<FIELD>>', 0)) ]"\
+ ' | eval summary = host . " (event_lag: " . event_lag . ")"'\
Expand All @@ -364,7 +385,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
searchquery = "| mstats latest(_value) as value where " + str(elastic_source_search_constraint) + " by host, metric_name span=1s"\
+ " | stats max(_time) as lastTime by metric_name"\
+ " | eval metric_lag=now()-lastTime"\
+ " | where (metric_lag><-600)"\
+ " | where (metric_lag><" + str(future_tolerance) + ")"\
+ " | sort - limit=10 metric_lag"\
+ " | foreach metric_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\"duration\"), round('<<FIELD>>', 0)) ]"\
+ " | fields metric_name, metric_lag"\
Expand All @@ -390,7 +411,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
searchquery = "| from " + str(elastic_source_search_constraint)\
+ " | stats max(_time) as data_last_time_seen by host"\
+ " | eval event_lag=now()-data_last_time_seen"\
+ " | where (event_lag<-600)"\
+ " | where (event_lag<" + str(future_tolerance) + ")"\
+ " | sort - limit=10 event_lag"\
+ " | foreach event_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\"duration\"), round('<<FIELD>>', 0)) ]"\
+ ' | eval summary = host . " (event_lag: " . event_lag . ")"'\
Expand Down Expand Up @@ -424,7 +445,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
+ "| tstats max(_time) as data_last_time_seen "\
+ "where (" + str(elastic_source_from_part2) + ") by host"\
+ " | eval event_lag=now()-data_last_time_seen"\
+ " | where (event_lag<-600)"\
+ " | where (event_lag<" + str(future_tolerance) + ")"\
+ " | sort - limit=10 event_lag"\
+ " | foreach event_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\\\"duration\\\"), round('<<FIELD>>', 0)) ]"\
+ ' | eval summary = host . \\" (event_lag: \\" . event_lag . \\")\\"'\
Expand All @@ -447,7 +468,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
+ " search " + str(elastic_source_from_part2)\
+ " | stats max(_time) as data_last_time_seen by host"\
+ " | eval event_lag=now()-data_last_time_seen"\
+ " | where (event_lag<-600)"\
+ " | where (event_lag<" + str(future_tolerance) + ")"\
+ " | sort - limit=10 event_lag"\
+ " | foreach event_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\\\"duration\\\"), round('<<FIELD>>', 0)) ]"\
+ ' | eval summary = host . \\" (event_lag: \\" . event_lag . \\")\\"'\
Expand All @@ -474,7 +495,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
+ " | from " + str(elastic_source_search_constraint)\
+ " | stats max(_time) as data_last_time_seen by host"\
+ " | eval event_lag=now()-data_last_time_seen"\
+ " | where (event_lag<-600)"\
+ " | where (event_lag<" + str(future_tolerance) + ")"\
+ " | sort - limit=10 event_lag"\
+ " | foreach event_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\\\"duration\\\"), round('<<FIELD>>', 0)) ]"\
+ ' | eval summary = host . \\" (event_lag: \\" . event_lag . \\")"'\
Expand Down Expand Up @@ -510,7 +531,7 @@ def get_ds_smart_status(self, request_info, **kwargs):
+ " | mstats latest(_value) as value where " + str(elastic_source_from_part2) + " by host, metric_name span=1s"\
+ " | stats max(_time) as lastTime by metric_name"\
+ " | eval metric_lag=now()-lastTime"\
+ " | where (metric_lag<-600)"\
+ " | where (metric_lag<" + str(future_tolerance) + ")"\
+ " | sort - limit=10 metric_lag"\
+ " | foreach metric_lag [ eval <<FIELD>> = if('<<FIELD>>'>60, tostring(round('<<FIELD>>',0),\\\"duration\\\"), round('<<FIELD>>', 0)) ]"\
+ " | fields metric_name, metric_lag"\
Expand Down Expand Up @@ -1843,6 +1864,11 @@ def get_dh_smart_status(self, request_info, **kwargs):
namespace='trackme', sessionKey=request_info.session_key, owner='-')
splunkd_port = entity['mgmtHostPort']

# Define a header for authenticated requests communications with splunkd
header = {
'Authorization': 'Splunk %s' % request_info.session_key,
'Content-Type': 'application/json'}

try:

collection_name = "kv_trackme_host_monitoring"
Expand All @@ -1867,6 +1893,21 @@ def get_dh_smart_status(self, request_info, **kwargs):
# Render result
if key is not None and len(key)>2:

# Get the definition for the tolerance of data in the future
record_url = 'https://localhost:' + str(splunkd_port) \
+ '/servicesNS/-/-/admin/macros/trackme_future_indexing_tolerance'

response = requests.get(record_url, headers=header, verify=False)
if response.status_code == 200:
splunk_response = response.text
future_tolerance_match = re.search('name=\"definition\"\>\"{0,1}(\-{0,1}\d*)\"{0,1}\<', splunk_response, re.IGNORECASE)
if future_tolerance_match:
future_tolerance = future_tolerance_match.group(1)
else:
future_tolerance = "-600"
else:
future_tolerance = "-600"

# initiate the smart_code status, we start at 0 then increment using the following rules:
# - TBD

Expand Down Expand Up @@ -2343,6 +2384,11 @@ def get_mh_smart_status(self, request_info, **kwargs):
namespace='trackme', sessionKey=request_info.session_key, owner='-')
splunkd_port = entity['mgmtHostPort']

# Define a header for authenticated requests communications with splunkd
header = {
'Authorization': 'Splunk %s' % request_info.session_key,
'Content-Type': 'application/json'}

try:

collection_name = "kv_trackme_metric_host_monitoring"
Expand All @@ -2367,6 +2413,21 @@ def get_mh_smart_status(self, request_info, **kwargs):
# Render result
if key is not None and len(key)>2:

# Get the definition for the tolerance of data in the future
record_url = 'https://localhost:' + str(splunkd_port) \
+ '/servicesNS/-/-/admin/macros/trackme_future_indexing_tolerance'

response = requests.get(record_url, headers=header, verify=False)
if response.status_code == 200:
splunk_response = response.text
future_tolerance_match = re.search('name=\"definition\"\>\"{0,1}(\-{0,1}\d*)\"{0,1}\<', splunk_response, re.IGNORECASE)
if future_tolerance_match:
future_tolerance = future_tolerance_match.group(1)
else:
future_tolerance = "-600"
else:
future_tolerance = "-600"

# initiate the smart_code status, we start at 0 then increment using the following rules:
# - TBD

Expand Down
2 changes: 1 addition & 1 deletion trackme/default/app.conf
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,4 @@ label = TrackMe
[launcher]
author = Guilhem Marchand
description = Data tracking system for Splunk
version = 1.2.47
version = 1.2.48
14 changes: 7 additions & 7 deletions trackme/default/data/ui/html/TrackMe.html
Original file line number Diff line number Diff line change
Expand Up @@ -19661,13 +19661,13 @@ <h2 class="panel-title">Click on a table row to access object contextual actions
setToken("tk_cribl_pipe", TokenUtils.replaceTokenNames(cribl_pipe, _.extend(submittedTokenModel.toJSON(), e.data)));

// define the root search
tk_data_source_overview_root_search = "| `trackme_tstats` dc(host) as dcount_host count latest(_indextime) as indextime max(_time) as maxtime where index=\"" + tk_data_index + "\" sourcetype=\"" + tk_data_sourcetype + "\" cribl_pipe::" + cribl_pipe + " by _time, index, sourcetype span=1s | eval delta=(indextime-_time), event_lag=(now() - maxtime)";
tk_data_source_overview_root_search = "| `trackme_tstats` dc(host) as dcount_host count latest(_indextime) as indextime max(_time) as maxtime where index=\"" + tk_data_index + "\" sourcetype=\"" + tk_data_sourcetype + "\" cribl_pipe::" + cribl_pipe + " `trackme_tstats_main_filter` by _time, index, sourcetype span=1s | eval delta=(indextime-_time), event_lag=(now() - maxtime)";
tk_data_source_raw_search = "null";
setToken("tk_data_source_timechart_count_aggreg", TokenUtils.replaceTokenNames("sum", _.extend(submittedTokenModel.toJSON(), e.data)));

}
else {
tk_data_source_overview_root_search = "| `trackme_tstats` dc(host) as dcount_host count latest(_indextime) as indextime max(_time) as maxtime where index=\"" + tk_data_index + "\" sourcetype=\"" + tk_data_sourcetype + "\" by _time, index, sourcetype span=1s | eval delta=(indextime-_time), event_lag=(now() - maxtime)";
tk_data_source_overview_root_search = "| `trackme_tstats` dc(host) as dcount_host count latest(_indextime) as indextime max(_time) as maxtime where index=\"" + tk_data_index + "\" sourcetype=\"" + tk_data_sourcetype + "\" `trackme_tstats_main_filter` by _time, index, sourcetype span=1s | eval delta=(indextime-_time), event_lag=(now() - maxtime)";
tk_data_source_raw_search = "null";
setToken("tk_data_source_timechart_count_aggreg", TokenUtils.replaceTokenNames("sum", _.extend(submittedTokenModel.toJSON(), e.data)));
}
Expand Down Expand Up @@ -19766,7 +19766,7 @@ <h2 class="panel-title">Click on a table row to access object contextual actions

else
{
tk_data_source_overview_root_search = "| `trackme_tstats` dc(host) as dcount_host count latest(_indextime) as indextime max(_time) as maxtime where index=\"" + tk_data_index + "\" sourcetype=\"" + tk_data_sourcetype + "\" by _time, index, sourcetype span=1s | eval delta=(indextime-_time), event_lag=(now() - maxtime)";
tk_data_source_overview_root_search = "| `trackme_tstats` dc(host) as dcount_host count latest(_indextime) as indextime max(_time) as maxtime where index=\"" + tk_data_index + "\" sourcetype=\"" + tk_data_sourcetype + "\" `trackme_tstats_main_filter` by _time, index, sourcetype span=1s | eval delta=(indextime-_time), event_lag=(now() - maxtime)";
setToken("tk_data_source_timechart_count_aggreg", TokenUtils.replaceTokenNames("sum", _.extend(submittedTokenModel.toJSON(), e.data)));
}

Expand Down Expand Up @@ -32408,7 +32408,7 @@ <h2 class="panel-title">Click on a table row to access object contextual actions
// Set the search parameters--specify a time range
var searchParams = {
earliest_time: "-4h",
latest_time: "now"
latest_time: "+4h"
};

// Run a normal search that immediately returns the job's SID
Expand Down Expand Up @@ -32496,7 +32496,7 @@ <h2 class="panel-title">Click on a table row to access object contextual actions
// Set the search parameters--specify a time range
var searchParams = {
earliest_time: "-4h",
latest_time: "now"
latest_time: "+4h"
};

// Run a normal search that immediately returns the job's SID
Expand Down Expand Up @@ -32585,8 +32585,8 @@ <h2 class="panel-title">Click on a table row to access object contextual actions

// Set the search parameters--specify a time range
var searchParams = {
earliest_time: "-7d",
latest_time: "+4h"
earliest_time: "-24h",
latest_time: "-4h"
};

// Run a normal search that immediately returns the job's SID
Expand Down

0 comments on commit c1b0a6a

Please sign in to comment.