Commit
Merge pull request #668 from prancer-io/development
Development
vatsalgit5118 committed Apr 6, 2023
2 parents 17bb3ac + 4cceaf3 commit 08fe87f
Showing 7 changed files with 32 additions and 6 deletions.
8 changes: 6 additions & 2 deletions docs/docs/extra.css
@@ -52,7 +52,7 @@ a:active, a:hover, a:visited {
}

.toctree-l1:hover {
-background: #e7f2fa;
+background: #08408e;
color: #9d0406;
}

@@ -104,4 +104,8 @@ a:active, a:hover, a:visited {

.wy-nav-content {
max-width: 100% !important;
-}
+}
+
+code, .rst-content tt, .rst-content code {
+white-space: pre;
+}
2 changes: 1 addition & 1 deletion setup.py
@@ -18,7 +18,7 @@
setup(
name='prancer-basic',
# also update the version in processor.__init__.py file
-version='3.0.3',
+version='3.0.4',
description='Prancer Basic, http://prancer.io/',
long_description=LONG_DESCRIPTION,
license = "BSD",
2 changes: 1 addition & 1 deletion src/processor/__init__.py
@@ -1,3 +1,3 @@
# Prancer Basic

-__version__ = '3.0.3'
+__version__ = '3.0.4'
9 changes: 8 additions & 1 deletion src/processor/helper/config/config_utils.py
@@ -4,6 +4,7 @@
import os
import random
import string
+import datetime
import threading
from processor.helper.file.file_utils import exists_file, exists_dir

@@ -92,7 +93,13 @@ def framework_currentdata():
global CURRENTDATA
if CURRENTDATA:
return CURRENTDATA
-CURRENTDATA = '%s/rundata_%d_%s' % (framework_dir(), int(time.time() * 100000), generateid(None))
+timestamp_now = int(time.time())
+dt_object = datetime.datetime.fromtimestamp(timestamp_now)
+path_add = '/%s/%s/%s' %(dt_object.year, dt_object.month, dt_object.day)
+full_path = "".join([framework_dir(), "/rundata", path_add])
+if not os.path.exists(full_path):
+    os.makedirs(full_path)
+CURRENTDATA = '%s/rundata_%d_%s' % (full_path, (timestamp_now * 100000), generateid(None))
return CURRENTDATA


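Note on the config_utils.py change above: run-data files are no longer written flat into the framework directory; they are grouped into year/month/day subfolders that are created on demand. A minimal sketch of the resulting path construction, assuming a hypothetical base directory and a placeholder id (neither is part of the commit):

import datetime
import os
import time

# Sketch only: mirrors the new date-partitioned rundata path with assumed values.
framework_dir = "/opt/prancer"                     # hypothetical stand-in for framework_dir()
timestamp_now = int(time.time())
dt_object = datetime.datetime.fromtimestamp(timestamp_now)
path_add = '/%s/%s/%s' % (dt_object.year, dt_object.month, dt_object.day)
full_path = "".join([framework_dir, "/rundata", path_add])
if not os.path.exists(full_path):
    os.makedirs(full_path)                         # e.g. /opt/prancer/rundata/2023/4/6
current_data = '%s/rundata_%d_%s' % (full_path, timestamp_now * 100000, "abcd1234")  # "abcd1234" stands in for generateid(None)
print(current_data)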
10 changes: 9 additions & 1 deletion src/processor/logging/log_handler.py
@@ -379,7 +379,15 @@ def add_file_logging(fwconfigfile, dbargs):
return
dblogname = os.getenv('DBLOG_NAME', None)
logname = dblogname if dblogname else datetime.datetime.today().strftime('%Y%m%d-%H%M%S')
-FWLOGFILENAME = '%s/%s.log' % (log_config['logpath'], logname)
+timestamp_now = int(time.time())
+dt_object = datetime.datetime.fromtimestamp(timestamp_now)
+path_add = '/%s/%s/%s' %(dt_object.year, dt_object.month, dt_object.day)
+full_path = "".join([log_config['logpath'], path_add])
+if not os.path.exists(full_path):
+    os.makedirs(full_path)
+FWLOGFILENAME = '%s/%s.log' % (full_path, logname)
+FWLOGFILENAME = FWLOGFILENAME.replace("//", "/")

if not FWLOGGER:
FWLOGGER = default_logging()
handler = DefaultRoutingFileHandler(
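Note on the log_handler.py change above: the same year/month/day layout is applied to the log directory, and any doubled slashes produced by joining the configured logpath are collapsed. A minimal sketch, assuming a hypothetical log_config value (not part of the commit):

import datetime
import os
import time

# Sketch only: assumed logpath; DBLOG_NAME falls back to a timestamped log name.
log_config = {"logpath": "/opt/prancer/log/"}      # hypothetical configuration
logname = os.getenv('DBLOG_NAME') or datetime.datetime.today().strftime('%Y%m%d-%H%M%S')
dt_object = datetime.datetime.fromtimestamp(int(time.time()))
path_add = '/%s/%s/%s' % (dt_object.year, dt_object.month, dt_object.day)
full_path = "".join([log_config['logpath'], path_add])
if not os.path.exists(full_path):
    os.makedirs(full_path)
fwlogfilename = ('%s/%s.log' % (full_path, logname)).replace("//", "/")
print(fwlogfilename)                               # e.g. /opt/prancer/log/2023/4/6/20230406-101500.log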
6 changes: 6 additions & 0 deletions src/processor/reporting/json_output.py
@@ -45,6 +45,7 @@ def create_output_entry(container, test_file="", filesystem=False):
od["remote_run"] = isremote
dblog = get_dblogger()
od["log"] = dblog if dblog else ""
od["cloud_type"] = ""
if not filesystem:
od["test"] = test_file
od["results"] = []
@@ -103,7 +104,12 @@ def dump_output_results(results, container, test_file, snapshot, filesystem=True
else:
update_value = {}
if results:
+result = results[0]
+cloud_tags = result.get("tags", [])
+cloud_type = cloud_tags[0].get("cloud", "") if cloud_tags else ""
update_value["$push"] = { "json.results": { "$each" : results }}
+if cloud_type:
+    update_value["$set"]= {"json.cloud_type": cloud_type}

if status:
update_value["$set"] = { "json.status": status }
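Note on the json_output.py change above: each output document now carries a top-level cloud_type, taken from the "cloud" tag of the first result when one is present. A minimal sketch of how the MongoDB update document is built, using an assumed sample result list (not part of the commit):

# Sketch only: assumed sample data illustrating the cloud_type extraction.
results = [{"result": "passed", "tags": [{"cloud": "azure"}]}]   # hypothetical result list
update_value = {}
if results:
    result = results[0]
    cloud_tags = result.get("tags", [])
    cloud_type = cloud_tags[0].get("cloud", "") if cloud_tags else ""
    update_value["$push"] = {"json.results": {"$each": results}}
    if cloud_type:
        update_value["$set"] = {"json.cloud_type": cloud_type}
print(update_value)
# {'$push': {'json.results': {'$each': [...]}}, '$set': {'json.cloud_type': 'azure'}}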
@@ -114,6 +114,7 @@ def create_database_record(self):
"node": self.node,
"snapshotId": self.node['snapshotId'],
"collection": collection.replace('.', '').lower(),
"container" : self.container,
"json": self.processed_template,
"session_id": session_id
# "resourceTypes" : self.node.get("resourceTypes", [])
