
Commit

Merge pull request #585 from happycube/chad-12.20.20
Logging changes, mostly
happycube authored Dec 26, 2020
2 parents ecb2b78 + 4dd6dfa commit 89e206f
Showing 8 changed files with 241 additions and 69 deletions.
56 changes: 20 additions & 36 deletions ld-decode
@@ -13,8 +13,13 @@ import argparse
import json
import traceback

from multiprocessing import Process, Pool, Queue, JoinableQueue, Pipe
import threading
import queue

from lddecode.core import *
from lddecode.utils import *
from lddecode.utils_logging import init_logging

options_epilog = """FREQ can be a bare number in MHz, or a number with one of the case-insensitive suffixes Hz, kHz, MHz, GHz, fSC (meaning NTSC) or fSCPAL."""
parser = argparse.ArgumentParser(description='Extracts audio and video from raw RF laserdisc captures', epilog=options_epilog)
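A note on the epilog above: the FREQ suffix handling it describes can be sketched roughly as below. This is an illustrative parser only; the function name and edge-case behaviour are assumptions, not the parser ld-decode actually uses.

# Illustrative sketch of FREQ suffix parsing as described in the epilog.
# Not ld-decode's own parser; returns a value in MHz.
def parse_freq(value):
    suffixes = {
        'hz': 1e-6, 'khz': 1e-3, 'mhz': 1.0, 'ghz': 1e3,
        'fsc': 315.0 / 88.0,        # NTSC colour subcarrier, ~3.5795 MHz
        'fscpal': 4.43361875,       # PAL colour subcarrier in MHz
    }
    v = value.strip().lower()
    # Check longer suffixes first so 'fscpal' wins over 'fsc', 'mhz' over 'hz'
    for suffix, mult in sorted(suffixes.items(), key=lambda s: -len(s[0])):
        if v.endswith(suffix):
            return float(v[:-len(suffix)]) * mult
    return float(v)  # a bare number is taken as MHz

parse_freq('40')      # 40.0 MHz
parse_freq('4fsc')    # ~14.318 MHz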
@@ -83,34 +88,13 @@ except ValueError as e:

system = 'PAL' if args.pal else 'NTSC'

# Configure logger to write to a file
# Modified from https://docs.python.org/3.8/howto/logging-cookbook.html#logging-cookbook

logger = logging.getLogger('lddecode')
logger.setLevel(logging.DEBUG)

# Delete old logfile if it exists
try:
os.unlink(outname + '.log')
except:
pass

logger_file = logging.FileHandler(outname + '.log')
logger_file.setLevel(logging.DEBUG)

logger_fileformatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger_file.setFormatter(logger_fileformatter)

logger_stderr = logging.StreamHandler()
logger_stderr.setLevel(logging.INFO)

logger.addHandler(logger_stderr)
logger.addHandler(logger_file)

# Wrap the LDdecode creation so that the signal handler is not taken by sub-threads,
# allowing SIGINT/control-C's to be handled cleanly
original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)

logger = init_logging(outname + '.log')
ldd = LDdecode(filename, outname, loader, logger, est_frames=req_frames, analog_audio = 0 if args.daa else 44.100, digital_audio = not args.noefm, system=system, doDOD = not args.nodod, threads=args.threads, extra_options=extra_options)

signal.signal(signal.SIGINT, original_sigint_handler)

if args.start_fileloc != -1:
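The inline logger configuration removed above is replaced by a single init_logging() call. lddecode/utils_logging.py is not among the files shown in this diff, so the following is only a rough reconstruction of what such a helper could look like, based on the removed code: a DEBUG-level file handler plus an INFO-level stderr handler. The status() method used later in core.py implies the real module also registers a custom level; that detail is omitted here.

# Minimal sketch of an init_logging() helper, reconstructed from the removed inline
# setup above. The shipped lddecode/utils_logging.py may differ in detail.
import logging
import os

def init_logging(logfile):
    logger = logging.getLogger('lddecode')
    logger.setLevel(logging.DEBUG)

    # Start each run with a fresh logfile, as the removed inline code did
    try:
        os.unlink(logfile)
    except FileNotFoundError:
        pass

    filehandler = logging.FileHandler(logfile)
    filehandler.setLevel(logging.DEBUG)
    filehandler.setFormatter(
        logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))

    stderrhandler = logging.StreamHandler()
    stderrhandler.setLevel(logging.INFO)

    logger.addHandler(stderrhandler)
    logger.addHandler(filehandler)
    return logger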
@@ -149,35 +133,35 @@ if args.verboseVITS:

done = False

jsondumper = jsondump_thread(ldd, outname)

def cleanup():
jsondumper.put(ldd.build_json(ldd.curfield))
#logger.flush()
ldd.close()
jsondumper.put(None)

while not done and ldd.fields_written < (req_frames * 2):
try:
f = ldd.readfield()
except KeyboardInterrupt as kbd:
print("\nTerminated, saving JSON and exiting", file=sys.stderr)
write_json(ldd, outname)
ldd.close()
cleanup()
exit(1)
except Exception as err:
print("\nERROR - please paste the following into a bug report:", file=sys.stderr)
print("current sample:", ldd.fdoffset, file=sys.stderr)
print("arguments:", args, file=sys.stderr)
print("Exception:", err, " Traceback:", file=sys.stderr)
traceback.print_tb(err.__traceback__)
write_json(ldd, outname)
ldd.close()
logger_file.flush()
cleanup()
exit(1)

if f is None or (args.ignoreleadout == False and ldd.leadOut == True):
done = True

# print(ldd.fields_written)

if ldd.fields_written < 100 or ((ldd.fields_written % 500) == 0):
#print('write json')
write_json(ldd, outname)
jsondumper.put(ldd.build_json(ldd.curfield))

print("\nCompleted: saving JSON and exiting", file=sys.stderr)
write_json(ldd, outname)
logger_file.flush()
ldd.close()
cleanup()
2 changes: 2 additions & 0 deletions ld-ldf-reader.c
@@ -186,6 +186,8 @@ int main (int argc, char **argv)
}

fprintf(stderr, "RATE:%d\n", audio_dec_ctx->sample_rate);
// From fmt_ctx: the duration in AV_TIME_BASE units (microseconds); divide by 1,000,000 for seconds
fprintf(stderr, "DURATION:%ld\n", fmt_ctx->duration);

frame = av_frame_alloc();
if (!frame) {
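ld-ldf-reader now reports the container duration alongside the sample rate on its stderr. ffmpeg's fmt_ctx->duration is in AV_TIME_BASE units, i.e. microseconds. A Python-side consumer of these two header lines might look roughly like the sketch below; the actual parsing in lddecode/utils.py may differ.

# Hypothetical sketch of parsing ld-ldf-reader's stderr header lines.
def parse_reader_header(line, info):
    line = line.strip()
    if line.startswith('RATE:'):
        info['sample_rate'] = int(line.split(':', 1)[1])
    elif line.startswith('DURATION:'):
        # fmt_ctx->duration is in AV_TIME_BASE units (microseconds)
        info['duration_sec'] = int(line.split(':', 1)[1]) / 1_000_000

info = {}
parse_reader_header('RATE:40000000', info)
parse_reader_header('DURATION:123456789', info)   # ~123.46 seconds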
3 changes: 2 additions & 1 deletion lddecode/__init__.py
@@ -7,6 +7,7 @@
'efm_pll',
'fdls',
'fft8',
'plot_utils',
'utils',
'utils_logging',
'utils_plotting',
]
27 changes: 16 additions & 11 deletions lddecode/core.py
@@ -529,6 +529,9 @@ def computeaudiofilters(self):
SP = self.SysParams
DP = self.DecoderParams

# Low pass filter for 'new' audio code


# first stage audio filters
if self.freq >= 32:
audio_fdiv1 = 32 # this is good for 40mhz - 16 should be ideal for 28mhz
@@ -922,7 +925,8 @@ def end(self):
def __del__(self):
self.end()

def flush(self):
def prune_cache(self):
''' Prune the LRU cache. Typically run when a new field is loaded '''
if len(self.lru) < self.lrusize:
return

@@ -934,7 +938,8 @@

self.lru = self.lru[:self.lrusize]

def flushvideo(self):
def flush_demod(self):
''' Flush all demodulation data. This is called by the field class after calibration (i.e. MTF) is determined to be off '''
for k in self.blocks.keys():
if self.blocks[k] is None:
pass
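The renames above separate two cache operations: prune_cache() (formerly flush()) trims the LRU list down to lrusize and drops the blocks that fall off its end, while flush_demod() (formerly flushvideo()) discards demodulated data so it can be recomputed once calibration such as MTF is known to be off. A small self-contained sketch of the pruning idea, illustrative only and not the DemodCache internals:

# Illustrative LRU pruning, not DemodCache's actual implementation.
class BlockCache:
    def __init__(self, lrusize):
        self.lrusize = lrusize
        self.blocks = {}   # block number -> decoded data
        self.lru = []      # most recently used block numbers, newest first

    def touch(self, blocknum, data):
        self.blocks[blocknum] = data
        if blocknum in self.lru:
            self.lru.remove(blocknum)
        self.lru.insert(0, blocknum)
        self.prune_cache()

    def prune_cache(self):
        # Keep only the most recently used blocks, dropping the rest
        if len(self.lru) < self.lrusize:
            return
        for blocknum in self.lru[self.lrusize:]:
            del self.blocks[blocknum]
        self.lru = self.lru[:self.lrusize]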
@@ -1089,7 +1094,7 @@ def read(self, begin, length, MTF=0, dodemod=True):
raw.append(self.blocks[-1]['rawinput'][:end % self.blocksize])

rv = np.concatenate(raw)
self.flush()
self.prune_cache()
return rv

while need_blocks is not None and len(need_blocks):
@@ -1108,7 +1113,7 @@
elif k in self.blocks[b]:
t[k].append(self.blocks[b][k])

self.flush()
self.prune_cache()

rv = {}
for k in t.keys():
@@ -1125,12 +1130,14 @@
return rv

def setparams(self, params):
# XXX: This should flush out the data, but right now this isn't used at all
for p in self.threadpipes:
p[0].send(('NEWPARAMS', params))

# Apply params to the core thread, so they match up with the decoders
self.apply_newparams(params)


# Downscales to 16bit/44.1khz. It might be nice when analog audio is better to support 24/96,
# but if we only support one output type, matching CD audio/digital sound is greatly preferable.
def downscale_audio(audio, lineinfo, rf, linecount, timeoffset = 0, freq = 48000.0, scale=64):
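The comment states the design choice: analog audio is written as 16-bit/44.1 kHz to match CD/digital audio. As a generic illustration of that conversion, and not the downscale_audio() implementation above (which works from per-line timing data), a 48 kHz float signal can be brought to CD format with a polyphase resampler:

# Generic 48 kHz -> 44.1 kHz, float -> int16 illustration; not ld-decode's downscale_audio().
# Assumes the input is a float array scaled to roughly [-1, 1].
import numpy as np
from scipy.signal import resample_poly

def to_cd_rate(audio_48k_float):
    # 44100 / 48000 reduces to 147 / 160
    resampled = resample_poly(audio_48k_float, 147, 160)
    return np.clip(resampled * 32767, -32768, 32767).astype(np.int16)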
@@ -2695,7 +2702,8 @@ class LDdecode:

def __init__(self, fname_in, fname_out, freader, _logger, est_frames = None, analog_audio = 0, digital_audio = False, system = 'NTSC', doDOD = True, threads=4, extra_options = {}):
global logger
logger = _logger
self.logger = _logger
logger = self.logger
self.demodcache = None

self.branch, self.commit = get_git_info()
@@ -2780,8 +2788,7 @@ def __init__(self, fname_in, fname_out, freader, _logger, est_frames = None, ana
self.frameNumber = None

self.autoMTF = True
# Python 3.6 doesn't support .get with default=
self.useAGC = extra_options['useAGC'] if 'useAGC' in extra_options else True
self.useAGC = extra_options.get('useAGC', True)

self.verboseVITS = False

@@ -2951,7 +2958,7 @@ def readfield(self, initphase = False):
self.rf.SysParams['hz_ire'] = (sync_hz - ire0_hz) / self.rf.SysParams['vsync_ire']

if adjusted == False and redo == True:
self.demodcache.flushvideo()
self.demodcache.flush_demod()
adjusted = True
self.fdoffset -= offset
else:
@@ -3278,9 +3285,7 @@ def buildmetadata(self, f):
if special is not None:
outstr += special

print(outstr, file=sys.stderr, end='\r')
logger.debug(outstr)
sys.stderr.flush()
self.logger.status(outstr)

# Prepare JSON fields
if self.frameNumber is not None:
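buildmetadata() now routes its per-frame status line through self.logger.status() instead of writing to stderr directly. A stock logging.Logger has no status() method, so utils_logging presumably registers a custom level; that module is not in this diff, but the usual pattern looks like the sketch below. The level number and wiring are assumptions.

# Sketch of adding a custom STATUS level so logger.status(...) works; the real
# lddecode/utils_logging.py may implement this differently.
import logging

STATUS = 25  # between INFO (20) and WARNING (30); the actual value is an assumption
logging.addLevelName(STATUS, 'STATUS')

def _status(self, message, *args, **kwargs):
    if self.isEnabledFor(STATUS):
        self._log(STATUS, message, args, **kwargs)

logging.Logger.status = _status

logger = logging.getLogger('lddecode')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
logger.status('Frame 1000: decoding')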
56 changes: 53 additions & 3 deletions lddecode/utils.py
@@ -1,5 +1,6 @@
# A collection of helper functions used in dev notebooks and lddecode_core.py

import atexit
from base64 import b64encode
from collections import namedtuple
import copy
@@ -12,6 +13,10 @@
import sys
import subprocess

from multiprocessing import Process, Pool, Queue, JoinableQueue, Pipe
import threading
import queue

from numba import jit, njit

# standard numeric/scientific libraries
@@ -274,7 +279,7 @@ def _read_data(self, count):

return data

def __call__(self, infile, sample, readlen):
def read(self, infile, sample, readlen):
sample_bytes = sample * 2
readlen_bytes = readlen * 2

@@ -317,8 +322,11 @@ def __call__(self, infile, sample, readlen):
assert len(data) == readlen * 2
return np.fromstring(data, '<i2')

def __call__(self, infile, sample, readlen):
return self.read(infile, sample, readlen)

class LoadLDF:
"""Load samples from a wide variety of formats using ffmpeg."""
"""Load samples from an .ldf file, using ld-ldf-reader which itself uses ffmpeg."""

def __init__(self, filename, input_args=[], output_args=[]):
self.input_args = input_args
@@ -377,7 +385,7 @@ def _open(self, sample):

return ldfreader

def __call__(self, infile, sample, readlen):
def read(self, infile, sample, readlen):
sample_bytes = sample * 2
readlen_bytes = readlen * 2

@@ -420,6 +428,9 @@ def __call__(self, infile, sample, readlen):
assert len(data) == readlen * 2
return np.frombuffer(data, '<i2')

def __call__(self, infile, sample, readlen):
return self.read(infile, sample, readlen)

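Both loaders now expose an explicit read() method, with __call__ kept as a thin wrapper so existing call sites keep working. Roughly, a loader is used as below; the file name is a placeholder.

# Minimal usage sketch of the loader interface; 'capture.ldf' is a placeholder path.
from lddecode.utils import LoadLDF

loader = LoadLDF('capture.ldf')
samples = loader('capture.ldf', 1000000, 32768)   # same as loader.read('capture.ldf', 1000000, 32768)
# samples is a numpy array of 32768 little-endian 16-bit samples, starting at sample 1,000,000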
# Git helpers

def get_git_info():
@@ -781,5 +792,44 @@ def write_json(ldd, outname):

os.rename(outname + '.tbc.json.tmp', outname + '.tbc.json')

# Write the .tbc.json file (used by lddecode and notebooks)
def write_json(ldd, jsondict, outname):

fp = open(outname + '.tbc.json.tmp', 'w')
json.dump(jsondict, fp, indent=4 if ldd.verboseVITS else None)
fp.write('\n')
fp.close()

os.rename(outname + '.tbc.json.tmp', outname + '.tbc.json')

def jsondump_thread(ldd, outname):
'''
This creates a background thread to write a json dict to a file.
Probably had a bit too much fun here - this returns a queue that is
fed into a thread created by the function itself. Feed it json
dictionaries during runtime and None when done.
'''

def consume(q):
while True:
jsondict = q.get()

if jsondict is None:
q.task_done()
return

write_json(ldd, jsondict, outname)

q.task_done()

q = JoinableQueue()

# Start the self-contained thread
t = threading.Thread(target=consume, args=(q, ))
t.start()

return q
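Usage follows the pattern wired up in ld-decode above: create the queue once, feed it JSON dicts whenever a snapshot should be written, and push the None sentinel when done. A small self-contained demonstration, using a stand-in object since write_json() only reads verboseVITS from it; the output path is made up.

# Self-contained demonstration of the queue/sentinel pattern jsondump_thread() uses.
from lddecode.utils import jsondump_thread

class FakeDecoder:
    verboseVITS = False   # stands in for LDdecode; only verboseVITS is consulted

q = jsondump_thread(FakeDecoder(), '/tmp/example')
q.put({'fields': 1})    # written to /tmp/example.tbc.json by the background thread
q.put(None)             # sentinel: the writer thread exits
q.join()                # wait until all queued writes are finished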

if __name__ == "__main__":
print("Nothing to see here, move along ;)")