Skip to content

Commit

Permalink
Merge pull request #1297 from pierotofy/log
Browse files Browse the repository at this point in the history
JSON logging
  • Loading branch information
pierotofy committed Jun 10, 2021
2 parents 035d4cb + a993329 commit c20ac4c
Show file tree
Hide file tree
Showing 16 changed files with 202 additions and 52 deletions.
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2.5.4
2.5.5
9 changes: 7 additions & 2 deletions opendm/dem/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,12 @@ def process_tile(q):

# Create virtual raster
tiles_vrt_path = os.path.abspath(os.path.join(outdir, "tiles.vrt"))
run('gdalbuildvrt "%s" "%s"' % (tiles_vrt_path, '" "'.join(map(lambda t: t['filename'], tiles))))
tiles_file_list = os.path.abspath(os.path.join(outdir, "tiles_list.txt"))
with open(tiles_file_list, 'w') as f:
for t in tiles:
f.write(t['filename'] + '\n')

run('gdalbuildvrt -input_file_list "%s" "%s" ' % (tiles_file_list, tiles_vrt_path))

merged_vrt_path = os.path.abspath(os.path.join(outdir, "merged.vrt"))
geotiff_tmp_path = os.path.abspath(os.path.join(outdir, 'tiles.tmp.tif'))
Expand Down Expand Up @@ -266,7 +271,7 @@ def process_tile(q):
else:
os.replace(geotiff_tmp_path, io.related_file_path(output_path, postfix=".unfilled"))

for cleanup_file in [tiles_vrt_path, merged_vrt_path, geotiff_small_path, geotiff_small_filled_path]:
for cleanup_file in [tiles_vrt_path, tiles_file_list, merged_vrt_path, geotiff_small_path, geotiff_small_filled_path]:
if os.path.exists(cleanup_file): os.remove(cleanup_file)
for t in tiles:
if os.path.exists(t['filename']): os.remove(t['filename'])
Expand Down
110 changes: 107 additions & 3 deletions opendm/log.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,14 @@
import sys
import threading
import os
import json
import datetime
import dateutil.parser
import shutil
import multiprocessing

from opendm.loghelpers import double_quote, args_to_dict
from vmem import virtual_memory

if sys.platform == 'win32':
# No colors on Windows, sorry!
Expand All @@ -18,16 +28,100 @@
FAIL = '\033[91m'
ENDC = '\033[0m'

# logging has too many quirks...
lock = threading.Lock()

def odm_version():
    """Return the ODM version string from the VERSION file at the repo root."""
    version_path = os.path.join(os.path.dirname(__file__), "..", "VERSION")
    with open(version_path) as version_file:
        # VERSION holds the version on its first line (e.g. "2.5.5")
        return version_file.read().split("\n")[0].strip()

def memory():
    """Return total and available system memory, in whole megabytes."""
    vm = virtual_memory()
    mb = 1024 * 1024
    return {
        'total': round(vm.total / mb),
        'available': round(vm.available / mb),
    }

class ODMLogger:
def __init__(self):
    # Whether debug-level messages are printed (see debug())
    self.show_debug = False
    # In-memory JSON log document; stays None until init_json_output() enables it
    self.json = None
    # Primary path for the written log.json; None when JSON logging is off
    self.json_output_file = None
    # Captured at construction so end-of-run total time can be computed
    self.start_time = datetime.datetime.now()

def log(self, startc, msg, level_name):
    """Print a colored, level-tagged message and mirror it into the JSON log.

    :param startc: ANSI color-start escape code for the line
    :param msg: message text
    :param level_name: level tag, e.g. "INFO"/"WARNING" (stored lowercased
        in the JSON log)

    The scraped diff contained both the old unguarded print and the new
    lock-guarded version; only the locked version is kept. The lock keeps
    console output and JSON-log mutation atomic across threads.
    """
    level = ("[" + level_name + "]").ljust(9)
    with lock:
        print("%s%s %s%s" % (startc, level, msg, ENDC))
        sys.stdout.flush()
        if self.json is not None:
            # Messages are attached to the most recently started stage
            self.json['stages'][-1]['messages'].append({
                'message': msg,
                'type': level_name.lower()
            })

def init_json_output(self, output_files, args):
    """Enable JSON logging and seed the log document with run metadata.

    :param output_files: list of destination paths; the first is the
        primary log.json, the rest receive copies at close()
    :param args: parsed argparse namespace of the run's options
    """
    self.json_output_files = output_files
    self.json_output_file = output_files[0]
    self.json = {
        'odmVersion': odm_version(),
        'memory': memory(),
        'cpus': multiprocessing.cpu_count(),
        'images': -1,  # updated later via log_json_images()
        'options': args_to_dict(args),
        'startTime': self.start_time.isoformat(),
        'stages': [],
        'processes': [],
        'success': False,  # flipped by log_json_success()
    }

def log_json_stage_run(self, name, start_time):
    """Record the start of a pipeline stage in the JSON log.

    No-op when JSON logging is disabled (self.json is None).
    """
    if self.json is None:
        return
    stage_entry = {
        'name': name,
        'startTime': start_time.isoformat(),
        'messages': [],
    }
    self.json['stages'].append(stage_entry)

def log_json_images(self, count):
    """Store the number of usable images in the JSON log, if enabled."""
    if self.json is None:
        return
    self.json['images'] = count

def log_json_stage_error(self, error, exit_code, stack_trace = ""):
    """Record a fatal error in the JSON log and stamp the end time.

    :param error: human-readable error message
    :param exit_code: process exit code associated with the failure
    :param stack_trace: optional multi-line traceback; stored as a list
        of stripped lines
    """
    if self.json is None:
        return
    self.json['error'] = {
        'code': exit_code,
        'message': error
    }
    self.json['stackTrace'] = [frame.strip() for frame in stack_trace.split("\n")]
    self._log_json_end_time()

def log_json_success(self):
    """Mark the run as successful in the JSON log and stamp the end time."""
    if self.json is None:
        return
    self.json['success'] = True
    self._log_json_end_time()

def log_json_process(self, cmd, exit_code, output = None):
    """Record an external command invocation in the JSON log.

    :param cmd: command line that was executed
    :param exit_code: the process's return code
    :param output: optional list of captured output lines; the key is
        omitted from the entry when empty. The default was a mutable
        list literal (shared across calls — the classic Python pitfall);
        None is used instead. Since the value was only truthiness-tested,
        behavior for all callers is unchanged.
    """
    if self.json is None:
        return
    entry = {
        'command': cmd,
        'exitCode': exit_code,
    }
    if output:
        entry['output'] = output
    self.json['processes'].append(entry)

def _log_json_end_time(self):
    """Stamp end time and total duration on the run and on the last stage.

    Stage start times are produced by datetime.isoformat() (see
    log_json_stage_run), so they can be parsed back with the stdlib
    datetime.datetime.fromisoformat — no need for the third-party
    dateutil.parser.isoparse this previously depended on.
    """
    if self.json is None:
        return
    end_time = datetime.datetime.now()
    self.json['endTime'] = end_time.isoformat()
    self.json['totalTime'] = round((end_time - self.start_time).total_seconds(), 2)

    if self.json['stages']:
        last_stage = self.json['stages'][-1]
        last_stage['endTime'] = end_time.isoformat()
        # Round-trips exactly: the string was written with isoformat()
        stage_start = datetime.datetime.fromisoformat(last_stage['startTime'])
        last_stage['totalTime'] = round((end_time - stage_start).total_seconds(), 2)

def info(self, msg):
    # Log msg at INFO level using the DEFAULT console color code
    self.log(DEFAULT, msg, "INFO")

Expand All @@ -44,6 +138,16 @@ def debug(self, msg):
if self.show_debug:
self.log(OKGREEN, msg, "DEBUG")

def close(self):
    """Write the accumulated JSON log to disk, if JSON logging is enabled.

    The primary output file is written first, then copied to any extra
    requested destinations. Failures are printed but never raised, so a
    logging problem cannot abort an otherwise finished run.
    """
    if self.json is None or self.json_output_file is None:
        return
    try:
        with open(self.json_output_file, 'w') as out:
            out.write(json.dumps(self.json, indent=4))
        # Secondary destinations get byte-for-byte copies of the primary
        for extra_destination in self.json_output_files[1:]:
            shutil.copy(self.json_output_file, extra_destination)
    except Exception as e:
        print("Cannot write log.json: %s" % str(e))

logger = ODMLogger()

ODM_INFO = logger.info
Expand Down
28 changes: 28 additions & 0 deletions opendm/loghelpers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from shlex import _find_unsafe

def double_quote(s):
    """Return a shell-escaped version of the string *s* using double quotes.

    Unlike shlex.quote (which wraps in single quotes), this emits
    double-quoted strings, which also work on Windows shells. Strings
    containing only shell-safe characters are returned unchanged.
    """
    # Same character class shlex uses internally to decide whether quoting
    # is needed; spelled out here instead of importing the private
    # shlex._find_unsafe helper, which is not a stable public API.
    import re
    if not s:
        return '""'
    if re.search(r'[^\w@%+=:,./-]', s, re.ASCII) is None:
        return s

    # Use double quotes, and prefix embedded double quotes with a backslash:
    # the string $"b is then quoted as "$\"b"
    return '"' + s.replace('"', '\\"') + '"'

def args_to_dict(args):
    """Convert an argparse namespace into a plain dict for display/logging.

    Internal "<name>_is_set" bookkeeping keys are dropped, and a non-None
    sm_cluster value (which contains a node token) is masked as True so the
    token is never leaked into logs. Keys are inserted in sorted order.
    """
    raw = vars(args)
    result = {}
    for key in sorted(raw):
        if key.endswith("_is_set"):
            continue  # internal bookkeeping, not a user-facing option
        value = raw[key]
        if key == 'sm_cluster' and value is not None:
            result[key] = True  # don't leak the cluster token
        else:
            result[key] = value
    return result
3 changes: 2 additions & 1 deletion opendm/mesh.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,7 @@ def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples =
'--pointWeight {pointWeight} '
'--samplesPerNode {samples} '
'--threads {threads} '
'--bType 2 '
'--linearFit '
'{verbose}'.format(**poissonReconArgs))

Expand All @@ -182,7 +183,7 @@ def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples =

system.run('"{reconstructmesh}" -i "{infile}" '
'-o "{outfile}" '
'--remove-spikes 0 --remove-spurious 0 --smooth 0 '
'--remove-spikes 0 --remove-spurious 20 --smooth 0 '
'--target-face-num {max_faces} '.format(**cleanupArgs))

# Delete intermediate results
Expand Down
3 changes: 1 addition & 2 deletions opendm/osfm.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,13 +50,12 @@ def reconstruct(self, rerun=False):

# Check that a reconstruction file has been created
if not self.reconstructed():
log.ODM_ERROR("The program could not process this dataset using the current settings. "
raise system.ExitException("The program could not process this dataset using the current settings. "
"Check that the images have enough overlap, "
"that there are enough recognizable features "
"and that the images are in focus. "
"You could also try to increase the --min-num-features parameter."
"The program will now exit.")
exit(1)


def setup(self, args, images_path, reconstruction, append_config = [], rerun=False):
Expand Down
3 changes: 1 addition & 2 deletions opendm/remote.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,7 @@ def __init__(self, nodeUrl, rerun = False):
log.ODM_WARNING("LRE: The node seems to be offline! We'll still process the dataset, but it's going to run entirely locally.")
self.node_online = False
except Exception as e:
log.ODM_ERROR("LRE: An unexpected problem happened while opening the node connection: %s" % str(e))
exit(1)
raise system.ExitException("LRE: An unexpected problem happened while opening the node connection: %s" % str(e))

def set_projects(self, paths):
self.project_paths = paths
Expand Down
18 changes: 17 additions & 1 deletion opendm/system.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
import subprocess
import string
import signal
import io
from collections import deque

from opendm import context
from opendm import log
Expand All @@ -15,6 +17,9 @@ def __init__(self, msg, errorCode):
super().__init__(msg)
self.errorCode = errorCode

class ExitException(Exception):
    """Raised when the pipeline cannot continue.

    Used in place of log.ODM_ERROR(...) followed by exit(1) (see osfm.py,
    remote.py, dataset.py in this change), so the failure propagates to a
    single top-level handler instead of terminating the process in place.
    """
    pass

def get_ccd_widths():
"""Return the CCD Width of the camera listed in the JSON defs file."""
with open(context.ccd_widths_path) as f:
Expand Down Expand Up @@ -80,9 +85,20 @@ def run(cmd, env_paths=[context.superbuild_bin_path], env_vars={}, packages_path
for k in env_vars:
env[k] = str(env_vars[k])

p = subprocess.Popen(cmd, shell=True, env=env, start_new_session=True)
p = subprocess.Popen(cmd, shell=True, env=env, start_new_session=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
running_subprocesses.append(p)
lines = deque()
for line in io.TextIOWrapper(p.stdout):
print(line, end="")

lines.append(line.strip())
if len(lines) == 11:
lines.popleft()

retcode = p.wait()

log.logger.log_json_process(cmd, retcode, list(lines))

running_subprocesses.remove(p)
if retcode < 0:
raise SubprocessException("Child was terminated by signal {}".format(-retcode), -retcode)
Expand Down
4 changes: 3 additions & 1 deletion opendm/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -314,8 +314,10 @@ def rerun(self):

def run(self, outputs = {}):
start_time = system.now_raw()
log.ODM_INFO('Running %s stage' % self.name)
log.logger.log_json_stage_run(self.name, start_time)

log.ODM_INFO('Running %s stage' % self.name)

self.process(self.args, outputs)

# The tree variable should always be populated at this point
Expand Down
14 changes: 2 additions & 12 deletions opendm/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from opendm import log
from opendm.photo import find_largest_photo_dim
from osgeo import gdal
from shlex import _find_unsafe
from opendm.loghelpers import double_quote

def get_depthmap_resolution(args, photos):
if 'depthmap_resolution_is_set' in args:
Expand Down Expand Up @@ -43,17 +43,6 @@ def get_raster_stats(geotiff):

return stats

def double_quote(s):
"""Return a shell-escaped version of the string *s*."""
if not s:
return '""'
if _find_unsafe(s) is None:
return s

# use double quotes, and prefix double quotes with a \
# the string $"b is then quoted as "$\"b"
return '"' + s.replace('"', '\\\"') + '"'

def get_processing_results_paths():
return [
"odm_georeferencing",
Expand All @@ -67,6 +56,7 @@ def get_processing_results_paths():
"orthophoto_tiles",
"images.json",
"cameras.json",
"log.json",
]

def copy_paths(paths, destination, rerun):
Expand Down
15 changes: 4 additions & 11 deletions run.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from opendm import io
from opendm.progress import progressbc
from opendm.utils import double_quote, get_processing_results_paths
from opendm.loghelpers import args_to_dict

import os

Expand All @@ -23,18 +24,10 @@
log.ODM_INFO('Initializing ODM - %s' % system.now())

# Print args
args_dict = vars(args)
args_dict = args_to_dict(args)
log.ODM_INFO('==============')
for k in sorted(args_dict.keys()):
# Skip _is_set keys
if k.endswith("_is_set"):
continue

# Don't leak token
if k == 'sm_cluster' and args_dict[k] is not None:
log.ODM_INFO('%s: True' % k)
else:
log.ODM_INFO('%s: %s' % (k, args_dict[k]))
for k in args_dict.keys():
log.ODM_INFO('%s: %s' % (k, args_dict[k]))
log.ODM_INFO('==============')

progressbc.set_project_name(args.name)
Expand Down
7 changes: 3 additions & 4 deletions stages/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,8 +91,7 @@ def find_mask(photo_path, masks):
images_database_file = os.path.join(tree.root_path, 'images.json')
if not io.file_exists(images_database_file) or self.rerun():
if not os.path.exists(images_dir):
log.ODM_ERROR("There are no images in %s! Make sure that your project path and dataset name is correct. The current is set to: %s" % (images_dir, args.project_path))
exit(1)
raise system.ExitException("There are no images in %s! Make sure that your project path and dataset name is correct. The current is set to: %s" % (images_dir, args.project_path))

files, rejects = get_images(images_dir)
if files:
Expand Down Expand Up @@ -130,13 +129,13 @@ def find_mask(photo_path, masks):
# Save image database for faster restart
save_images_database(photos, images_database_file)
else:
log.ODM_ERROR('Not enough supported images in %s' % images_dir)
exit(1)
raise system.ExitException('Not enough supported images in %s' % images_dir)
else:
# We have an images database, just load it
photos = load_images_database(images_database_file)

log.ODM_INFO('Found %s usable images' % len(photos))
log.logger.log_json_images(len(photos))

# Create reconstruction object
reconstruction = types.ODM_Reconstruction(photos)
Expand Down
Loading

0 comments on commit c20ac4c

Please sign in to comment.