# forked from Unidata/MetPy -- nexrad.py (2424 lines / 2121 loc / 114 KB)
# Copyright (c) 2009,2015,2016,2017 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Support reading information from various NEXRAD formats."""
import bz2
from collections import defaultdict, namedtuple, OrderedDict
import contextlib
import datetime
import logging
import pathlib
import re
import struct
from struct import Struct
from xdrlib import Unpacker
import numpy as np
from scipy.constants import day, milli
from ._tools import (Array, BitField, Bits, DictStruct, Enum, IOBuffer, NamedStruct,
open_as_needed, zlib_decompress_all_frames)
from ..package_tools import Exporter
exporter = Exporter(globals())
log = logging.getLogger(__name__)
def version(val):
    """Calculate a string version from an integer value."""
    # Dividing by 100 is preferred when it yields a plausible (>2) version;
    # otherwise the value is in tenths.
    scaled = val / 100.
    if scaled <= 2.:
        scaled = val / 10.
    return f'{scaled:.1f}'
def scaler(scale):
    """Create a function that scales by a specific value."""
    return lambda val: val * scale
def angle(val):
    """Convert an integer value to a floating point angle."""
    # The full 16-bit range of counts spans a 360 degree circle
    full_circle = 360.
    return val * full_circle / 2**16
def az_rate(val):
    """Convert an integer value to a floating point angular rate."""
    # The full 16-bit range of counts spans 90 degrees/second
    full_scale = 90.
    return val * full_scale / 2**16
def bzip_blocks_decompress_all(data):
    """Decompress all of the bzip2-ed blocks.

    Parameters
    ----------
    data : bytes-like
        Stream of size-prefixed bz2-compressed blocks. Each block is preceded by a
        4-byte big-endian signed length; the absolute value is used, so a negative
        length is treated as its magnitude.

    Returns
    -------
    bytearray
        The decompressed contents of all blocks, concatenated.

    Raises
    ------
    ValueError
        If the first block fails to decompress (i.e. this is not a bz2 block stream).

    """
    # Hoisted out of the loop; the format never changes
    size_struct = Struct('>l')
    frames = bytearray()
    offset = 0
    while offset < len(data):
        size_bytes = data[offset:offset + 4]
        offset += 4
        block_cmp_bytes = abs(size_struct.unpack(size_bytes)[0])
        try:
            frames.extend(bz2.decompress(data[offset:offset + block_cmp_bytes]))
            offset += block_cmp_bytes
        except OSError:
            # If we've decompressed any frames, this is an error mid-stream, so warn, stop
            # trying to decompress and let processing proceed
            if frames:
                # Use the module logger (was the root logger) for consistency with
                # the rest of this module
                log.warning('Error decompressing bz2 block stream at offset: %d',
                            offset - 4)
                break
            else:  # Otherwise, this isn't a bzip2 stream, so bail
                raise ValueError('Not a bz2 stream.')
    return frames
def nexrad_to_datetime(julian_date, ms_midnight):
    """Convert NEXRAD date time format to python `datetime.datetime`."""
    # NEXRAD dates are 1-based relative to the epoch, hence the subtraction
    seconds = (julian_date - 1) * day + ms_midnight * milli
    return datetime.datetime.utcfromtimestamp(seconds)
def remap_status(val):
    """Convert status integer value to appropriate bitmask."""
    # Any bit in the high nibble flags the radial as bad data
    bad = BAD_DATA if val & 0xF0 else 0
    # The low nibble encodes where this radial sits within the scan;
    # codes 1 and 6-15 carry no start/end flags.
    lookup = {0: START_ELEVATION,
              2: END_ELEVATION,
              3: START_ELEVATION | START_VOLUME,
              4: END_ELEVATION | END_VOLUME,
              5: START_ELEVATION | LAST_ELEVATION}
    return lookup.get(val & 0x0F, 0) | bad


START_ELEVATION = 0x1
END_ELEVATION = 0x2
START_VOLUME = 0x4
END_VOLUME = 0x8
LAST_ELEVATION = 0x10
BAD_DATA = 0x20
@exporter.export
class Level2File:
    r"""Handle reading the NEXRAD Level 2 data and its various messages.

    This class attempts to decode every byte that is in a given data file.
    It supports both external compression, as well as the internal BZ2
    compression that is used.

    Attributes
    ----------
    stid : str
        The ID of the radar station
    dt : Datetime instance
        The date and time of the data
    vol_hdr : namedtuple
        The unpacked volume header
    sweeps : list of tuples
        Data for each of the sweeps found in the file
    rda_status : namedtuple, optional
        Unpacked RDA status information, if found
    maintenance_data : namedtuple, optional
        Unpacked maintenance data information, if found
    maintenance_data_desc : dict, optional
        Descriptions of maintenance data fields, if maintenance data present
    vcp_info : namedtuple, optional
        Unpacked VCP information, if found
    clutter_filter_bypass_map : dict, optional
        Unpacked clutter filter bypass map, if present
    rda : dict, optional
        Unpacked RDA adaptation data, if present
    rda_adaptation_desc : dict, optional
        Descriptions of RDA adaptation data, if adaptation data present

    Notes
    -----
    The internal data structure that things are decoded into is still to be
    determined.

    """

    # Number of bytes
    AR2_BLOCKSIZE = 2432  # 12 (CTM) + 2416 (Msg hdr + data) + 4 (FCS)
    CTM_HEADER_SIZE = 12

    MISSING = float('nan')
    RANGE_FOLD = float('nan')  # TODO: Need to separate from missing

    def __init__(self, filename, *, has_volume_header=True):
        r"""Create instance of `Level2File`.

        Parameters
        ----------
        filename : str or file-like object
            If str, the name of the file to be opened. Gzip-ed files are
            recognized with the extension '.gz', as are bzip2-ed files with
            the extension `.bz2` If `filename` is a file-like object,
            this will be read from directly.

        """
        fobj = open_as_needed(filename)

        with contextlib.closing(fobj):
            self._buffer = IOBuffer.fromfile(fobj)

        # Try to read the volume header. If this fails, or we're told we don't have one
        # then we fall back and try to just read messages, assuming we have e.g. one of
        # the real-time chunks.
        try:
            if has_volume_header:
                self._read_volume_header()
        except ValueError:
            log.warning('Unable to read volume header. Attempting to read messages.')
            self._buffer.reset()

        # See if we need to apply bz2 decompression
        start = self._buffer.set_mark()
        try:
            self._buffer = IOBuffer(self._buffer.read_func(bzip_blocks_decompress_all))
        except ValueError:
            self._buffer.jump_to(start)

        # Now we're all initialized, we can proceed with reading in data
        self._read_data()

    vol_hdr_fmt = NamedStruct([('version', '9s'), ('vol_num', '3s'),
                               ('date', 'L'), ('time_ms', 'L'), ('stid', '4s')], '>', 'VolHdr')

    def _read_volume_header(self):
        """Unpack the volume header, setting the datetime and station id."""
        self.vol_hdr = self._buffer.read_struct(self.vol_hdr_fmt)
        self.dt = nexrad_to_datetime(self.vol_hdr.date, self.vol_hdr.time_ms)
        self.stid = self.vol_hdr.stid

    msg_hdr_fmt = NamedStruct([('size_hw', 'H'),
                               ('rda_channel', 'B', BitField('Redundant Channel 1',
                                                             'Redundant Channel 2',
                                                             None, 'ORDA')),
                               ('msg_type', 'B'), ('seq_num', 'H'), ('date', 'H'),
                               ('time_ms', 'I'), ('num_segments', 'H'), ('segment_num', 'H')],
                              '>', 'MsgHdr')

    def _read_data(self):
        """Iterate over all messages in the buffer, dispatching to decoders by type."""
        self._msg_buf = {}
        self.sweeps = []
        self.rda_status = []
        while not self._buffer.at_end():
            # Clear old file book marks and set the start of message for
            # easy jumping to the end
            self._buffer.clear_marks()
            msg_start = self._buffer.set_mark()

            # Skip CTM
            self._buffer.skip(self.CTM_HEADER_SIZE)

            # Read the message header
            msg_hdr = self._buffer.read_struct(self.msg_hdr_fmt)
            log.debug('Got message: %s (at offset %d)', str(msg_hdr), self._buffer._offset)

            # The AR2_BLOCKSIZE accounts for the CTM header before the
            # data, as well as the Frame Check Sequence (4 bytes) after
            # the end of the data.
            msg_bytes = self.AR2_BLOCKSIZE

            # If the size is 0, this is just padding, which is for certain
            # done in the metadata messages. Let the default block size handle rather
            # than any specific heuristic to skip.
            if msg_hdr.size_hw:
                # For new packets, the message size isn't on the fixed size boundaries,
                # so we use header to figure out. For these, we need to include the
                # CTM header but not FCS, in addition to the size.
                # As of 2620002P, this is a special value used to indicate that the segment
                # number/count bytes are used to indicate total size in bytes.
                if msg_hdr.size_hw == 65535:
                    # Parenthesized so the CTM header size is added to the full 32-bit
                    # size; '+' binds tighter than '|' and would otherwise be applied
                    # only to the low halfword.
                    msg_bytes = ((msg_hdr.num_segments << 16 | msg_hdr.segment_num)
                                 + self.CTM_HEADER_SIZE)
                elif msg_hdr.msg_type in (29, 31):
                    msg_bytes = self.CTM_HEADER_SIZE + 2 * msg_hdr.size_hw

                log.debug('Total message size: %d', msg_bytes)

                # Try to handle the message. If we don't handle it, skipping
                # past it is handled at the end anyway.
                decoder = f'_decode_msg{msg_hdr.msg_type:d}'
                if hasattr(self, decoder):
                    getattr(self, decoder)(msg_hdr)
                else:
                    log.warning('Unknown message: %d', msg_hdr.msg_type)

            # Jump to the start of the next message. This depends on whether
            # the message was legacy with fixed block size or not.
            self._buffer.jump_to(msg_start, msg_bytes)

        # Check if we have any message segments still in the buffer
        if self._msg_buf:
            log.warning('Remaining buffered messages segments for message type(s): %s',
                        ' '.join(map(str, self._msg_buf)))

        del self._msg_buf

    msg1_fmt = NamedStruct([('time_ms', 'L'), ('date', 'H'),
                            ('unamb_range', 'H', scaler(0.1)), ('az_angle', 'H', angle),
                            ('az_num', 'H'), ('rad_status', 'H', remap_status),
                            ('el_angle', 'H', angle), ('el_num', 'H'),
                            ('surv_first_gate', 'h', scaler(0.001)),
                            ('doppler_first_gate', 'h', scaler(0.001)),
                            ('surv_gate_width', 'H', scaler(0.001)),
                            ('doppler_gate_width', 'H', scaler(0.001)),
                            ('surv_num_gates', 'H'), ('doppler_num_gates', 'H'),
                            ('cut_sector_num', 'H'), ('calib_dbz0', 'f'),
                            ('ref_offset', 'H'), ('vel_offset', 'H'), ('sw_offset', 'H'),
                            ('dop_res', 'H', BitField(None, 0.5, 1.0)), ('vcp', 'H'),
                            (None, '14x'), ('nyq_vel', 'H', scaler(0.01)),
                            ('atmos_atten', 'H', scaler(0.001)), ('tover', 'H', scaler(0.1)),
                            ('spot_blanking', 'B', BitField('Radial', 'Elevation', 'Volume')),
                            (None, '32x')], '>', 'Msg1Fmt')

    msg1_data_hdr = namedtuple('Msg1DataHdr',
                               'name first_gate gate_width num_gates scale offset')

    def _decode_msg1(self, msg_hdr):
        """Decode a legacy digital radar data (message type 1) radial."""
        msg_start = self._buffer.set_mark()
        hdr = self._buffer.read_struct(self.msg1_fmt)
        data_dict = {}

        # Process all data pointers. Each one gets a (pointer, moment header) pair.
        read_info = []
        if hdr.surv_num_gates and hdr.ref_offset:
            read_info.append((hdr.ref_offset,
                              self.msg1_data_hdr('REF', hdr.surv_first_gate,
                                                 hdr.surv_gate_width,
                                                 hdr.surv_num_gates, 2.0, 66.0)))

        if hdr.vel_offset:
            read_info.append((hdr.vel_offset,
                              self.msg1_data_hdr('VEL', hdr.doppler_first_gate,
                                                 hdr.doppler_gate_width,
                                                 hdr.doppler_num_gates,
                                                 1. / hdr.dop_res, 129.0)))

        if hdr.sw_offset:
            read_info.append((hdr.sw_offset,
                              self.msg1_data_hdr('SW', hdr.doppler_first_gate,
                                                 hdr.doppler_gate_width,
                                                 hdr.doppler_num_gates, 2.0, 129.0)))

        for ptr, data_hdr in read_info:
            # Jump and read
            self._buffer.jump_to(msg_start, ptr)
            vals = self._buffer.read_array(data_hdr.num_gates, 'B')

            # Scale and flag data -- 0 is below threshold, 1 is range folded
            scaled_vals = (vals - data_hdr.offset) / data_hdr.scale
            scaled_vals[vals == 0] = self.MISSING
            scaled_vals[vals == 1] = self.RANGE_FOLD

            # Store
            data_dict[data_hdr.name] = (data_hdr, scaled_vals)

        self._add_sweep(hdr)
        self.sweeps[-1].append((hdr, data_dict))

    msg2_fmt = NamedStruct([
        ('rda_status', 'H', BitField('None', 'Start-Up', 'Standby', 'Restart',
                                     'Operate', 'Spare', 'Off-line Operate')),
        ('op_status', 'H', BitField('Disabled', 'On-Line',
                                    'Maintenance Action Required',
                                    'Maintenance Action Mandatory',
                                    'Commanded Shut Down', 'Inoperable',
                                    'Automatic Calibration')),
        ('control_status', 'H', BitField('None', 'Local Only',
                                         'RPG (Remote) Only', 'Either')),
        ('aux_power_gen_state', 'H', BitField('Switch to Aux Power',
                                              'Utility PWR Available',
                                              'Generator On',
                                              'Transfer Switch Manual',
                                              'Commanded Switchover')),
        ('avg_tx_pwr', 'H'), ('ref_calib_cor', 'h', scaler(0.01)),
        ('data_transmission_enabled', 'H', BitField('None', 'None',
                                                    'Reflectivity', 'Velocity', 'Width')),
        ('vcp_num', 'h'), ('rda_control_auth', 'H', BitField('No Action',
                                                             'Local Control Requested',
                                                             'Remote Control Enabled')),
        ('rda_build', 'H', version), ('op_mode', 'H', BitField('None', 'Test',
                                                               'Operational', 'Maintenance')),
        ('super_res_status', 'H', BitField('None', 'Enabled', 'Disabled')),
        ('cmd_status', 'H', Bits(6)),
        ('avset_status', 'H', BitField('None', 'Enabled', 'Disabled')),
        ('rda_alarm_status', 'H', BitField('No Alarms', 'Tower/Utilities',
                                           'Pedestal', 'Transmitter', 'Receiver',
                                           'RDA Control', 'Communication',
                                           'Signal Processor')),
        ('command_acknowledge', 'H', BitField('Remote VCP Received',
                                              'Clutter Bypass map received',
                                              'Redundant Chan Ctrl Cmd received')),
        ('channel_control_status', 'H'),
        ('spot_blanking', 'H', BitField('Enabled', 'Disabled')),
        ('bypass_map_gen_date', 'H'), ('bypass_map_gen_time', 'H'),
        ('clutter_filter_map_gen_date', 'H'), ('clutter_filter_map_gen_time', 'H'),
        ('refv_calib_cor', 'h', scaler(0.01)),
        ('transition_pwr_src_state', 'H', BitField('Off', 'OK')),
        ('RMS_control_status', 'H', BitField('RMS in control', 'RDA in control')),
        # See Table IV-A for definition of alarms
        (None, '2x'), ('alarms', '28s', Array('>14H'))], '>', 'Msg2Fmt')

    msg2_additional_fmt = NamedStruct([
        ('sig_proc_options', 'H', BitField('CMD RhoHV Test')),
        (None, '36x'), ('status_version', 'H')], '>', 'Msg2AdditionalFmt')

    def _decode_msg2(self, msg_hdr):
        """Decode an RDA status (message type 2) message."""
        msg_start = self._buffer.set_mark()
        self.rda_status.append(self._buffer.read_struct(self.msg2_fmt))

        remaining = (msg_hdr.size_hw * 2 - self.msg_hdr_fmt.size
                     - self._buffer.offset_from(msg_start))

        # RDA Build 18.0 expanded the size
        if remaining >= self.msg2_additional_fmt.size:
            self.rda_status.append(self._buffer.read_struct(self.msg2_additional_fmt))
            remaining -= self.msg2_additional_fmt.size

        if remaining:
            log.info('Padding detected in message 2. Length encoded as %d but offset when '
                     'done is %d', 2 * msg_hdr.size_hw, self._buffer.offset_from(msg_start))

    def _decode_msg3(self, msg_hdr):
        """Decode a performance/maintenance data (message type 3) message."""
        from ._nexrad_msgs.msg3 import descriptions, fields
        self.maintenance_data_desc = descriptions
        msg_fmt = DictStruct(fields, '>')
        self.maintenance_data = self._buffer.read_struct(msg_fmt)
        self._check_size(msg_hdr, msg_fmt.size)

    vcp_fmt = NamedStruct([('size_hw', 'H'), ('pattern_type', 'H'),
                           ('num', 'H'), ('num_el_cuts', 'H'),
                           ('vcp_version', 'B'), ('clutter_map_group', 'B'),
                           ('dop_res', 'B', BitField(None, 0.5, 1.0)),
                           ('pulse_width', 'B', BitField('None', 'Short', 'Long')),
                           (None, '4x'), ('vcp_sequencing', 'H'),
                           ('vcp_supplemental_info', 'H'), (None, '2x'),
                           ('els', None)], '>', 'VCPFmt')

    vcp_el_fmt = NamedStruct([('el_angle', 'H', angle),
                              ('channel_config', 'B', Enum('Constant Phase', 'Random Phase',
                                                           'SZ2 Phase')),
                              ('waveform', 'B', Enum('None', 'Contiguous Surveillance',
                                                     'Contig. Doppler with Ambiguity Res.',
                                                     'Contig. Doppler without Ambiguity Res.',
                                                     'Batch', 'Staggered Pulse Pair')),
                              ('super_res', 'B', BitField('0.5 azimuth and 0.25km range res.',
                                                          'Doppler to 300km',
                                                          'Dual Polarization Control',
                                                          'Dual Polarization to 300km')),
                              ('surv_prf_num', 'B'), ('surv_pulse_count', 'H'),
                              ('az_rate', 'h', az_rate),
                              ('ref_thresh', 'h', scaler(0.125)),
                              ('vel_thresh', 'h', scaler(0.125)),
                              ('sw_thresh', 'h', scaler(0.125)),
                              ('zdr_thresh', 'h', scaler(0.125)),
                              ('phidp_thresh', 'h', scaler(0.125)),
                              ('rhohv_thresh', 'h', scaler(0.125)),
                              ('sector1_edge', 'H', angle),
                              ('sector1_doppler_prf_num', 'H'),
                              ('sector1_pulse_count', 'H'), ('supplemental_data', 'H'),
                              ('sector2_edge', 'H', angle),
                              ('sector2_doppler_prf_num', 'H'),
                              ('sector2_pulse_count', 'H'), ('ebc_angle', 'H', angle),
                              ('sector3_edge', 'H', angle),
                              ('sector3_doppler_prf_num', 'H'),
                              ('sector3_pulse_count', 'H'), (None, '2x')], '>', 'VCPEl')

    def _decode_msg5(self, msg_hdr):
        """Decode a volume coverage pattern (message type 5) message."""
        vcp_info = self._buffer.read_struct(self.vcp_fmt)
        els = [self._buffer.read_struct(self.vcp_el_fmt) for _ in range(vcp_info.num_el_cuts)]
        self.vcp_info = vcp_info._replace(els=els)
        self._check_size(msg_hdr,
                         self.vcp_fmt.size + vcp_info.num_el_cuts * self.vcp_el_fmt.size)

    def _decode_msg13(self, msg_hdr):
        """Decode a clutter filter bypass map (message type 13) message."""
        data = self._buffer_segment(msg_hdr)
        if data:
            data = list(Struct('>{:d}h'.format(len(data) // 2)).unpack(data))
            bmap = {}
            date, time, num_el = data[:3]
            bmap['datetime'] = nexrad_to_datetime(date, time)

            offset = 3
            bmap['data'] = []
            bit_conv = Bits(16)
            for e in range(num_el):
                seg_num = data[offset]
                offset += 1
                if seg_num != (e + 1):
                    log.warning('Message 13 segments out of sync -- read {} but on {}'.format(
                        seg_num, e + 1))

                az_data = []
                for _ in range(360):
                    gates = []
                    for _ in range(32):
                        gates.extend(bit_conv(data[offset]))
                        offset += 1
                    az_data.append(gates)
                bmap['data'].append(az_data)

            self.clutter_filter_bypass_map = bmap

            if offset != len(data):
                log.warning('Message 13 left data -- Used: %d Avail: %d', offset, len(data))

    msg15_code_map = {0: 'Bypass Filter', 1: 'Bypass map in Control',
                      2: 'Force Filter'}

    def _decode_msg15(self, msg_hdr):
        """Decode a clutter filter map (message type 15) message."""
        # buffer the segments until we have the whole thing. The data
        # will be returned concatenated when this is the case
        data = self._buffer_segment(msg_hdr)
        if data:
            data = list(Struct('>{:d}h'.format(len(data) // 2)).unpack(data))
            cmap = {}
            date, time, num_el = data[:3]
            cmap['datetime'] = nexrad_to_datetime(date, time)

            offset = 3
            cmap['data'] = []
            for _ in range(num_el):
                az_data = []
                for _ in range(360):
                    num_rng = data[offset]
                    offset += 1

                    codes = data[offset:2 * num_rng + offset:2]
                    offset += 1

                    ends = data[offset:2 * num_rng + offset:2]
                    offset += 2 * num_rng - 1
                    az_data.append(list(zip(ends, codes)))
                cmap['data'].append(az_data)

            self.clutter_filter_map = cmap
            if offset != len(data):
                log.warning('Message 15 left data -- Used: %d Avail: %d', offset, len(data))

    def _decode_msg18(self, msg_hdr):
        """Decode an RDA adaptation data (message type 18) message."""
        # buffer the segments until we have the whole thing. The data
        # will be returned concatenated when this is the case
        data = self._buffer_segment(msg_hdr)
        if data:
            from ._nexrad_msgs.msg18 import descriptions, fields
            self.rda_adaptation_desc = descriptions

            # Can't use NamedStruct because we have more than 255 items--this
            # is a CPython limit for arguments.
            msg_fmt = DictStruct(fields, '>')
            self.rda = msg_fmt.unpack(data)

            # Expand the embedded VCP pattern tables into header + elevation cuts
            for num in (11, 21, 31, 32, 300, 301):
                attr = 'VCPAT' + str(num)
                dat = self.rda[attr]
                vcp_hdr = self.vcp_fmt.unpack_from(dat, 0)
                off = self.vcp_fmt.size
                els = []
                for _ in range(vcp_hdr.num_el_cuts):
                    els.append(self.vcp_el_fmt.unpack_from(dat, off))
                    off += self.vcp_el_fmt.size
                self.rda[attr] = vcp_hdr._replace(els=els)

    msg31_data_hdr_fmt = NamedStruct([('stid', '4s'), ('time_ms', 'L'),
                                      ('date', 'H'), ('az_num', 'H'),
                                      ('az_angle', 'f'), ('compression', 'B'),
                                      (None, 'x'), ('rad_length', 'H'),
                                      ('az_spacing', 'B', Enum(0, 0.5, 1.0)),
                                      ('rad_status', 'B', remap_status),
                                      ('el_num', 'B'), ('sector_num', 'B'),
                                      ('el_angle', 'f'),
                                      ('spot_blanking', 'B', BitField('Radial', 'Elevation',
                                                                      'Volume')),
                                      ('az_index_mode', 'B', scaler(0.01)),
                                      ('num_data_blks', 'H')], '>', 'Msg31DataHdr')

    msg31_vol_const_fmt = NamedStruct([('type', 's'), ('name', '3s'),
                                       ('size', 'H'), ('major', 'B'),
                                       ('minor', 'B'), ('lat', 'f'), ('lon', 'f'),
                                       ('site_amsl', 'h'), ('feedhorn_agl', 'H'),
                                       ('calib_dbz', 'f'), ('txpower_h', 'f'),
                                       ('txpower_v', 'f'), ('sys_zdr', 'f'),
                                       ('phidp0', 'f'), ('vcp', 'H'),
                                       ('processing_status', 'H', BitField('RxR Noise',
                                                                           'CBT'))],
                                      '>', 'VolConsts')

    msg31_el_const_fmt = NamedStruct([('type', 's'), ('name', '3s'),
                                      ('size', 'H'), ('atmos_atten', 'h', scaler(0.001)),
                                      ('calib_dbz0', 'f')], '>', 'ElConsts')

    rad_const_fmt_v1 = NamedStruct([('type', 's'), ('name', '3s'), ('size', 'H'),
                                    ('unamb_range', 'H', scaler(0.1)),
                                    ('noise_h', 'f'), ('noise_v', 'f'),
                                    ('nyq_vel', 'H', scaler(0.01)),
                                    (None, '2x')], '>', 'RadConstsV1')
    rad_const_fmt_v2 = NamedStruct([('type', 's'), ('name', '3s'), ('size', 'H'),
                                    ('unamb_range', 'H', scaler(0.1)),
                                    ('noise_h', 'f'), ('noise_v', 'f'),
                                    ('nyq_vel', 'H', scaler(0.01)),
                                    (None, '2x'), ('calib_dbz0_h', 'f'),
                                    ('calib_dbz0_v', 'f')], '>', 'RadConstsV2')

    data_block_fmt = NamedStruct([('type', 's'), ('name', '3s'),
                                  ('reserved', 'L'), ('num_gates', 'H'),
                                  ('first_gate', 'H', scaler(0.001)),
                                  ('gate_width', 'H', scaler(0.001)),
                                  ('tover', 'H', scaler(0.1)),
                                  ('snr_thresh', 'h', scaler(0.1)),
                                  ('recombined', 'B', BitField('Azimuths', 'Gates')),
                                  ('data_size', 'B'),
                                  ('scale', 'f'), ('offset', 'f')], '>', 'DataBlockHdr')

    Sweep = namedtuple('Sweep', 'header vol_consts elev_consts radial_consts moments')

    def _decode_msg31(self, msg_hdr):
        """Decode a digital radar data generic format (message type 31) radial."""
        msg_start = self._buffer.set_mark()
        data_hdr = self._buffer.read_struct(self.msg31_data_hdr_fmt)
        if data_hdr.compression:
            log.warning('Compressed message 31 not supported!')

        # Read all the block pointers. While the ICD specifies that at least the vol, el, rad
        # constant blocks as well as REF moment block are present, it says "the pointers are
        # not order or location dependent."
        sweep = self.Sweep(data_hdr, None, None, None, {})
        block_count = 0
        for ptr in self._buffer.read_binary(data_hdr.num_data_blks, '>L'):
            if ptr:
                block_count += 1
                self._buffer.jump_to(msg_start, ptr)
                info = self._buffer.get_next(6)
                if info.startswith(b'RVOL'):
                    sweep = sweep._replace(
                        vol_consts=self._buffer.read_struct(self.msg31_vol_const_fmt))
                elif info.startswith(b'RELV'):
                    sweep = sweep._replace(
                        elev_consts=self._buffer.read_struct(self.msg31_el_const_fmt))
                elif info.startswith(b'RRAD'):
                    # Relies on the fact that the struct is small enough for its size
                    # to fit in a single byte
                    if int(info[-1]) == self.rad_const_fmt_v2.size:
                        rad_consts = self._buffer.read_struct(self.rad_const_fmt_v2)
                    else:
                        rad_consts = self._buffer.read_struct(self.rad_const_fmt_v1)
                    sweep = sweep._replace(radial_consts=rad_consts)
                elif info.startswith(b'D'):
                    hdr = self._buffer.read_struct(self.data_block_fmt)
                    # TODO: The correctness of this code is not tested
                    vals = self._buffer.read_array(count=hdr.num_gates,
                                                   dtype=f'>u{hdr.data_size // 8}')
                    scaled_vals = (vals - hdr.offset) / hdr.scale
                    scaled_vals[vals == 0] = self.MISSING
                    scaled_vals[vals == 1] = self.RANGE_FOLD
                    sweep.moments[hdr.name.strip()] = (hdr, scaled_vals)
                else:
                    log.warning('Unknown Message 31 block type: %s', str(info[:4]))

        self._add_sweep(data_hdr)
        self.sweeps[-1].append(sweep)

        if data_hdr.num_data_blks != block_count:
            log.warning('Incorrect number of blocks detected -- Got %d'
                        ' instead of %d', block_count, data_hdr.num_data_blks)

        if data_hdr.rad_length != self._buffer.offset_from(msg_start):
            log.info('Padding detected in message. Length encoded as %d but offset when '
                     'done is %d', data_hdr.rad_length, self._buffer.offset_from(msg_start))

    def _buffer_segment(self, msg_hdr):
        """Buffer a message segment; return the concatenated data when complete."""
        # Add to the buffer
        bufs = self._msg_buf.setdefault(msg_hdr.msg_type, {})
        bufs[msg_hdr.segment_num] = self._buffer.read(2 * msg_hdr.size_hw
                                                      - self.msg_hdr_fmt.size)

        # Warn for badly formatted data
        if len(bufs) != msg_hdr.segment_num:
            log.warning('Segment out of order (Got: %d Count: %d) for message type %d.',
                        msg_hdr.segment_num, len(bufs), msg_hdr.msg_type)

        # If we're complete, return the full collection of data
        if msg_hdr.num_segments == len(bufs):
            self._msg_buf.pop(msg_hdr.msg_type)
            return b''.join(bytes(item[1]) for item in sorted(bufs.items()))

    def _add_sweep(self, hdr):
        """Start a new sweep as flagged by the radial header, warning on gaps."""
        if not self.sweeps and not hdr.rad_status & START_VOLUME:
            log.warning('Missed start of volume!')

        if hdr.rad_status & START_ELEVATION:
            self.sweeps.append([])

        if len(self.sweeps) != hdr.el_num:
            log.warning('Missed elevation -- Have %d but data on %d.'
                        ' Compensating...', len(self.sweeps), hdr.el_num)
            while len(self.sweeps) < hdr.el_num:
                self.sweeps.append([])

    def _check_size(self, msg_hdr, size):
        """Warn if the decoded size disagrees with the size from the message header."""
        hdr_size = msg_hdr.size_hw * 2 - self.msg_hdr_fmt.size
        if size != hdr_size:
            log.warning('Message type %d should be %d bytes but got %d',
                        msg_hdr.msg_type, size, hdr_size)
def reduce_lists(d):
    """Replace single item lists in a dictionary with the single item."""
    for key, value in list(d.items()):
        if len(value) == 1:
            d[key] = value[0]
def two_comp16(val):
    """Return the two's-complement signed representation of a 16-bit unsigned integer."""
    # When the sign bit is on, shift down by the full 16-bit range
    return val - 0x10000 if val >> 15 else val
def float16(val):
    """Convert a 16-bit floating point value to a standard Python float."""
    # Bit layout: sign in the MSB, 5-bit exponent, then 10-bit fraction
    fraction = val & 0x03ff
    exponent = (val >> 10) & 0x1F
    if exponent:
        # Normalized value: implicit leading one, exponent biased by 16
        magnitude = 2 ** (exponent - 16) * (1 + float(fraction) / 2**10)
    else:
        # Denormalized value
        magnitude = float(fraction) / 2**9
    return -magnitude if val >> 15 else magnitude
def float32(short1, short2):
    """Unpack a pair of 16-bit integers as a Python float."""
    # Masking below in python will properly convert signed values to unsigned
    raw = struct.pack('>HH', short1 & 0xFFFF, short2 & 0xFFFF)
    return struct.unpack('>f', raw)[0]
def date_elem(ind_days, ind_minutes):
    """Create a function to parse a datetime from the product-specific blocks."""
    # Minutes since midnight are converted to milliseconds for the generic converter
    return lambda seq: nexrad_to_datetime(seq[ind_days], seq[ind_minutes] * 60 * 1000)
def scaled_elem(index, scale):
    """Create a function to scale a certain product-specific block."""
    return lambda seq: seq[index] * scale
def combine_elem(ind1, ind2):
    """Create a function to combine two specified product-specific blocks into a single int.

    The returned function treats each block as an unsigned 16-bit halfword (masking
    handles negative, signed values) and combines them into a 32-bit integer with
    ``seq[ind1]`` as the high halfword.
    """
    def inner(seq):
        # Mask to 16 bits instead of conditionally adding 2**16 in place;
        # this is equivalent for halfword values but no longer mutates seq.
        return ((seq[ind1] & 0xFFFF) << 16) | (seq[ind2] & 0xFFFF)
    return inner
def float_elem(ind1, ind2):
    """Create a function to combine two specified product-specific blocks into a float."""
    def inner(seq):
        return float32(seq[ind1], seq[ind2])
    return inner
def high_byte(ind):
    """Create a function to return the high-byte of a product-specific block."""
    return lambda seq: seq[ind] >> 8
def low_byte(ind):
    """Create a function to return the low-byte of a product-specific block."""
    return lambda seq: seq[ind] & 0x00FF
def delta_time(ind):
    """Create a function to return the delta time from a product-specific block."""
    # Delta time lives in the upper 11 bits of the halfword
    return lambda seq: seq[ind] >> 5
def supplemental_scan(ind):
    """Create a function to return the supplement scan type from a product-specific block."""
    # ICD says 1->SAILS, 2->MRLE, but testing on 2020-08-17 makes this seem inverted
    # given what's being reported by sites in the GSM.
    scan_types = {0: 'Non-supplemental scan',
                  2: 'SAILS scan', 1: 'MRLE scan'}

    def inner(seq):
        # Only the low 5 bits encode the scan type
        return scan_types.get(seq[ind] & 0x001F, 'Unknown')
    return inner
# Data mappers used to take packed data and turn into physical units
# Default is numpy array indexing into a LUT to change data bytes
# into physical values. Mappers can also have a 'labels' attribute to give
# categorical labels
class DataMapper:
    """Convert packed integer data into physical units.

    Maintains a lookup table (``lut``) of ``num`` entries, initialized to
    MISSING; subclasses fill in physical values for the valid data codes.
    """

    # Need to find way to handle range folded
    # RANGE_FOLD = -9999
    RANGE_FOLD = float('nan')
    MISSING = float('nan')

    def __init__(self, num=256):
        # The deprecated np.float alias was removed in NumPy 1.24; the builtin
        # float gives the same float64 dtype.
        self.lut = np.full(num, self.MISSING, dtype=float)

    def __call__(self, data):
        """Convert the values."""
        return self.lut[data]
class DigitalMapper(DataMapper):
    """Maps packed integers to floats using a scale and offset from the product."""

    _min_scale = 0.1
    _inc_scale = 0.1
    _min_data = 2
    _max_data = 255
    range_fold = False

    def __init__(self, prod):
        """Initialize the mapper and the lookup table."""
        super().__init__()
        base = two_comp16(prod.thresholds[0]) * self._min_scale
        step = prod.thresholds[1] * self._inc_scale

        # Sanity check on the advertised level count handles the fact that DHR
        # advertises 256 levels, which *includes* missing, differing from other
        # products
        levels = min(prod.thresholds[2], self._max_data - self._min_data + 1)
        for level in range(levels):
            self.lut[self._min_data + level] = base + level * step
class DigitalRefMapper(DigitalMapper):
    """Mapper for digital reflectivity products."""

    units = 'dBZ'
class DigitalVelMapper(DigitalMapper):
    """Mapper for digital velocity products."""

    units = 'm/s'
    range_fold = True  # overrides DigitalMapper.range_fold
class DigitalSPWMapper(DigitalVelMapper):
    """Mapper for digital spectrum width products."""

    _min_data = 129
    # ICD says up to 152, but also says max value is 19, which implies 129 + 19/0.5 -> 167
    _max_data = 167
class PrecipArrayMapper(DigitalMapper):
    """Mapper for precipitation array products."""

    _inc_scale = 0.001  # thresholds increment is in thousandths
    _min_data = 1
    _max_data = 254
    units = 'dBA'
class DigitalStormPrecipMapper(DigitalMapper):
    """Mapper for digital storm precipitation products."""

    units = 'inches'
    _inc_scale = 0.01  # thresholds increment is in hundredths
class DigitalVILMapper(DataMapper):
    """Mapper for digital VIL products."""

    def __init__(self, prod):
        """Initialize the VIL mapper."""
        super().__init__()
        # Thresholds hold two scalings packed as 16-bit floats: linear below
        # log_start, logarithmic at and above it
        lin_scale = float16(prod.thresholds[0])
        lin_offset = float16(prod.thresholds[1])
        log_start = prod.thresholds[2]
        log_scale = float16(prod.thresholds[3])
        log_offset = float16(prod.thresholds[4])

        # VIL is allowed to use 2 through 254 inclusive. 0 is thresholded,
        # 1 is flagged, and 255 is reserved
        # NOTE(review): ind has 255 entries while lut has 256, so
        # lut[log_start:-1] and ind[log_start:] line up element-for-element.
        ind = np.arange(255)
        self.lut[2:log_start] = (ind[2:log_start] - lin_offset) / lin_scale
        self.lut[log_start:-1] = np.exp((ind[log_start:] - log_offset) / log_scale)
class DigitalEETMapper(DataMapper):
    """Mapper for digital echo tops products."""

    def __init__(self, prod):
        """Initialize the mapper."""
        super().__init__()
        data_mask, scale, offset, topped_mask = prod.thresholds[:4]

        # Codes 0 and 1 are left as MISSING / not topped
        for code in range(2, 256):
            self.lut[code] = ((code & data_mask) - offset) / scale
        self.topped_lut = np.array([code >= 2 and bool(code & topped_mask)
                                    for code in range(256)])

    def __call__(self, data_vals):
        """Convert the data values."""
        return self.lut[data_vals], self.topped_lut[data_vals]
class GenericDigitalMapper(DataMapper):
    """Maps packed integers to floats using a scale and offset from the product.

    Also handles special data flags.
    """

    def __init__(self, prod):
        """Initialize the mapper by pulling out all the information from the product."""
        # Values will be [0, max] inclusive, so need to add 1 to max value to get
        # proper size.
        max_code = prod.thresholds[5]
        super().__init__(max_code + 1)

        scale = float32(prod.thresholds[0], prod.thresholds[1])
        offset = float32(prod.thresholds[2], prod.thresholds[3])
        lead_flags = prod.thresholds[6]
        trail_flags = prod.thresholds[7]

        # More than one leading flag means code 1 is range folded
        if lead_flags > 1:
            self.lut[1] = self.RANGE_FOLD

        # Need to add 1 to the end of the range so that it's inclusive
        for code in range(lead_flags, max_code - trail_flags + 1):
            self.lut[code] = (code - offset) / scale
class DigitalHMCMapper(DataMapper):
    """Mapper for hydrometeor classification products.

    Handles assigning string labels based on values.
    """

    labels = ['ND', 'BI', 'GC', 'IC', 'DS', 'WS', 'RA', 'HR',
              'BD', 'GR', 'HA', 'LH', 'GH', 'UK', 'RF']

    def __init__(self, prod):
        """Initialize the mapper."""
        super().__init__()
        # Each run of ten codes collapses to a single category index
        for code in range(10, 256):
            self.lut[code] = code // 10
        self.lut[150] = self.RANGE_FOLD
        # 156, 157
class EDRMapper(DataMapper):
    """Mapper for eddy dissipation rate products."""

    def __init__(self, prod):
        """Initialize the mapper based on the product."""
        data_levels = prod.thresholds[2]
        super().__init__(data_levels)
        scale = prod.thresholds[0] / 1000.
        offset = prod.thresholds[1] / 1000.
        leading_flags = prod.thresholds[3]
        for i in range(leading_flags, data_levels):
            # Assign into the table entry; rebinding self.lut to a scalar
            # (the previous code) clobbered the array and broke __call__'s
            # indexed lookup.
            self.lut[i] = scale * i + offset
class LegacyMapper(DataMapper):
"""Mapper for legacy products."""
lut_names = ['Blank', 'TH', 'ND', 'RF', 'BI', 'GC', 'IC', 'GR', 'WS',
'DS', 'RA', 'HR', 'BD', 'HA', 'UK']
def __init__(self, prod):
"""Initialize the values and labels from the product."""
# Don't worry about super() since we're using our own lut assembled sequentially
self.labels = []
self.lut = []
for t in prod.thresholds:
codes, val = t >> 8, t & 0xFF
label = ''
if codes >> 7:
label = self.lut_names[val]
if label in ('Blank', 'TH', 'ND'):
val = self.MISSING
elif label == 'RF':
val = self.RANGE_FOLD
elif codes >> 6:
val *= 0.01
label = f'{val:.2f}'
elif codes >> 5:
val *= 0.05
label = f'{val:.2f}'
elif codes >> 4:
val *= 0.1
label = f'{val:.1f}'
if codes & 0x1:
val *= -1
label = '-' + label
elif (codes >> 1) & 0x1:
label = '+' + label
if (codes >> 2) & 0x1:
label = '<' + label