/
statements.py
3536 lines (3167 loc) · 133 KB
/
statements.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import copy
import re
from . import util
from . import types
from . import syntax
from . import grammar
from . import xpath
from .error import err_add
### Functions that plugins can use
def add_validation_phase(phase, before=None, after=None):
    """Add a validation phase to the framework.

    The new `phase` is inserted immediately before `before` or
    immediately after `after`, whichever is found first in the phase
    list; if neither is present it is appended at the end.
    Can be used by plugins to do special validation of extensions."""
    for idx, existing in enumerate(_validation_phases):
        if existing == before:
            _validation_phases.insert(idx, phase)
            return
        if existing == after:
            _validation_phases.insert(idx + 1, phase)
            return
    # neither anchor found - append at the end
    _validation_phases.append(phase)
def _sequence(one, two):
"""Return function calling two functions in order"""
if one is None:
return two
elif two is None:
return one
return lambda *args, **kargs: (one(*args, **kargs), two(*args, **kargs))[1]
def add_validation_fun(phase, keywords, fun):
    """Add a validation function to some phase in the framework.

    Function `fun` is called for each valid occurence of each keyword in
    `keywords`.  If a function is already registered for a
    (phase, keyword) pair, the new one is chained after it.
    Can be used by plugins to do special validation of extensions."""
    for keyword in keywords:
        key = (phase, keyword)
        _validation_map[key] = _sequence(_validation_map.get(key), fun)
def add_validation_var(var_name, var_fun):
    """Add a validation variable to the framework.

    `var_fun` is a predicate on a statement keyword; in validate_module
    a statement whose keyword satisfies it is also run through the
    validation function registered under (phase, `var_name`).
    Can be used by plugins to do special validation of extensions."""
    _validation_variables.append((var_name, var_fun))
def set_phase_i_children(phase):
    """Marks that the phase is run over the expanded i_children.
    Default is to run over substmts."""
    _v_i_children[phase] = True
def add_keyword_phase_i_children(phase, keyword):
    """Marks that the stmt is run in the expanded i_children phase."""
    _v_i_children_keywords[(phase, keyword)] = True
def add_data_keyword(keyword):
    """Can be used by plugins to register extensions as data keywords."""
    data_keywords.append(keyword)
def add_keyword_with_children(keyword):
    """Register `keyword` as one whose statements carry i_children."""
    _keyword_with_children[keyword] = True
def is_keyword_with_children(keyword):
    """Return True if statements with `keyword` carry i_children."""
    return keyword in _keyword_with_children
def add_keywords_with_no_explicit_config(keyword):
    """Register `keyword` as one that takes no explicit 'config'."""
    _keywords_with_no_explicit_config.append(keyword)
def add_copy_uses_keyword(keyword):
    """Register an extension keyword to be copied on 'uses' expansion."""
    _copy_uses_keywords.append(keyword)
def add_copy_augment_keyword(keyword):
    """Register an extension keyword to be copied on 'augment' expansion."""
    _copy_augment_keywords.append(keyword)
def add_xpath_function(name, input_params, output_param):
    """Register an extra XPath function with the xpath module."""
    xpath.add_extra_xpath_function(name, input_params, output_param)
def add_refinement_element(keyword, element, merge=False, v_fun=None):
    """Add an element to the <keyword>'s list of refinements.

    If `keyword` is already registered, `element` is appended to its
    existing list (the stored `merge` / `v_fun` are kept); otherwise a
    new refinement entry is created."""
    for registered_keyword, valid_keywords, _merge, _v_fun in _refinements:
        if registered_keyword == keyword:
            valid_keywords.append(element)
            return
    _refinements.append((keyword, [element], merge, v_fun))
def add_deviation_element(keyword, element):
    """Add an element to the <keyword>'s list of deviations.
    Can be used by plugins that add support for specific extension
    statements."""
    # create the list on first use, then append
    _valid_deviations.setdefault(keyword, []).append(element)
### Exceptions
class NotFound(Exception):
    """used when a referenced item is not found"""
    pass
class Abort(Exception):
    """used to abort an iteration"""
    # raised by validate_module's iterate() when a validation function
    # returns 'stop'
    pass
### Constants
re_path = re.compile('(.*)/(.*)')
re_deref = re.compile(r'deref\s*\(\s*(.*)\s*\)/\.\./(.*)')
re_and_or = re.compile(r'\band\b|\bor\b')
data_definition_keywords = ['container', 'leaf', 'leaf-list', 'list', 'case',
'choice', 'anyxml', 'anydata', 'uses', 'augment']
# Ordered list of validation phases; validate_module runs them in this
# order.  Plugins may add phases with add_validation_phase().
_validation_phases = [
    # init phase:
    #   initializes the module/submodule statement, and maps
    #   the prefix in all extensions to their modulename
    #   from this point, extensions will be validated just as the
    #   other statements
    'init',
    # second init phase initializes statements, including extensions
    'init2',
    # grammar phase:
    #   verifies that the statement hierarchy is correct
    #   and that all arguments are of correct type
    #   complex arguments are parsed and saved in statement-specific
    #   variables
    'grammar',
    # import and include phase:
    #   tries to load each imported and included (sub)module
    'import',
    # type and grouping phase:
    #   verifies all typedefs, types and groupings
    'type',
    'type_2',
    # expansion phases:
    #   first expansion: copy data definition stmts into i_children
    'expand_1',
    # inherit properties phase:
    #   set i_config
    'inherit_properties',
    # second expansion: expand augmentations into i_children
    'expand_2',
    'expand_3',
    # unique name check phase:
    'unique_name',
    # reference phase:
    #   verifies all references; e.g. leafref, unique, key for config
    'reference_1',
    'reference_2',
    'reference_3',
    'reference_4',
    # unused definitions phase:
    #   add warnings for unused definitions
    'unused',
    # strict phase: check YANG strictness
    'strict',
]
# (phase, keyword) -> validation function, called by validate_module for
# each matching statement.  Keys may also use a special variable name
# (e.g. '$extension', '$has_children') or the wildcard '*'.  Plugins
# extend this map with add_validation_fun().
_validation_map = {
    ('init', 'module'):lambda ctx, s: v_init_module(ctx, s),
    ('init', 'submodule'):lambda ctx, s: v_init_module(ctx, s),
    ('init', '$extension'):lambda ctx, s: v_init_extension(ctx, s),
    ('init2', 'import'):lambda ctx, s: v_init_import(ctx, s),
    ('init2', '$has_children'):lambda ctx, s: v_init_has_children(ctx, s),
    ('init2', '*'):lambda ctx, s: v_init_stmt(ctx, s),
    ('grammar', 'module'):lambda ctx, s: v_grammar_module(ctx, s),
    ('grammar', 'submodule'):lambda ctx, s: v_grammar_module(ctx, s),
    ('grammar', 'typedef'):lambda ctx, s: v_grammar_typedef(ctx, s),
    ('grammar', '*'):lambda ctx, s: v_grammar_all(ctx, s),
    ('import', 'module'):lambda ctx, s: v_import_module(ctx, s),
    ('import', 'submodule'):lambda ctx, s: v_import_module(ctx, s),
    ('type', 'grouping'):lambda ctx, s: v_type_grouping(ctx, s),
    ('type', 'augment'):lambda ctx, s: v_type_augment(ctx, s),
    ('type', 'uses'):lambda ctx, s: v_type_uses(ctx, s),
    ('type', 'feature'):lambda ctx, s: v_type_feature(ctx, s),
    ('type', 'if-feature'):lambda ctx, s: v_type_if_feature(ctx, s),
    ('type', 'identity'):lambda ctx, s: v_type_identity(ctx, s),
    ('type', 'status'):lambda ctx, s: v_type_status(ctx, s),
    ('type', 'base'):lambda ctx, s: v_type_base(ctx, s),
    ('type', 'must'):lambda ctx, s: v_type_must(ctx, s),
    ('type', 'when'):lambda ctx, s: v_type_when(ctx, s),
    ('type', '$extension'): lambda ctx, s: v_type_extension(ctx, s),
    ('type_2', 'type'):lambda ctx, s: v_type_type(ctx, s),
    ('type_2', 'typedef'):lambda ctx, s: v_type_typedef(ctx, s),
    ('type_2', 'leaf'):lambda ctx, s: v_type_leaf(ctx, s),
    ('type_2', 'leaf-list'):lambda ctx, s: v_type_leaf_list(ctx, s),
    ('expand_1', 'module'):lambda ctx, s: v_expand_1_children(ctx, s),
    ('expand_1', 'submodule'):lambda ctx, s: v_expand_1_children(ctx, s),
    ('inherit_properties', 'module'): \
        lambda ctx, s: v_inherit_properties(ctx, s),
    ('inherit_properties', 'submodule'): \
        lambda ctx, s: v_inherit_properties(ctx, s),
    ('expand_2', 'augment'):lambda ctx, s: v_expand_2_augment(ctx, s),
    ('expand_3', 'augment'):lambda ctx, s: v_expand_3_augment(ctx, s),
    ('unique_name', 'module'): \
        lambda ctx, s: v_unique_name_defintions(ctx, s),
    ('unique_name', '$has_children'): \
        lambda ctx, s: v_unique_name_children(ctx, s),
    ('unique_name', 'leaf-list'): \
        lambda ctx, s: v_unique_name_leaf_list(ctx, s),
    ('reference_1', 'list'):lambda ctx, s:v_reference_list(ctx, s),
    ('reference_1', 'action'):lambda ctx, s:v_reference_action(ctx, s),
    ('reference_1', 'notification'):lambda ctx, s:v_reference_action(ctx, s),
    ('reference_1', 'choice'):lambda ctx, s: v_reference_choice(ctx, s),
    ('reference_2', 'leaf'):lambda ctx, s:v_reference_leaf_leafref(ctx, s),
    ('reference_2', 'leaf-list'):lambda ctx, s:v_reference_leaf_leafref(ctx, s),
    ('reference_2', 'must'):lambda ctx, s:v_reference_must(ctx, s),
    ('reference_2', 'when'):lambda ctx, s:v_reference_when(ctx, s),
    ## since we just check in reference_2, it means that we won't check
    ## xpaths in unused groupings.  the xpath is checked when the grouping is
    ## used.  the same is true for leafrefs
    # ('reference_3', 'must'):lambda ctx, s:v_reference_must(ctx, s),
    # ('reference_3', 'when'):lambda ctx, s:v_reference_when(ctx, s),
    ('reference_3', 'typedef'):lambda ctx, s:v_reference_leaf_leafref(ctx, s),
    ('reference_3', 'deviation'):lambda ctx, s:v_reference_deviation(ctx, s),
    ('reference_3', 'deviate'):lambda ctx, s:v_reference_deviate(ctx, s),
    ('reference_4', 'deviation'):lambda ctx, s:v_reference_deviation_4(ctx, s),
    ('reference_4', 'revision'):lambda ctx, s:v_reference_revision(ctx, s),
    ('unused', 'module'):lambda ctx, s: v_unused_module(ctx, s),
    ('unused', 'submodule'):lambda ctx, s: v_unused_module(ctx, s),
    ('unused', 'typedef'):lambda ctx, s: v_unused_typedef(ctx, s),
    ('unused', 'grouping'):lambda ctx, s: v_unused_grouping(ctx, s),
}
_v_i_children = {
    'unique_name':True,
    'expand_2':True,
    'expand_3':True,
    'reference_1':True,
    'reference_2':True,
}
"""Phases in this dict are run over the stmts that have i_children.
Note that the tests are not run in grouping definitions."""
_v_i_children_keywords = {
    ('reference_2', 'when'): True,
    ('reference_2', 'must'): True,
}
"""Keywords in this dict are iterated over in a phase in _v_i_children."""
# Keywords whose statements carry an expanded i_children list; consulted
# through the '$has_children' validation variable below.
_keyword_with_children = {
    'module':True,
    'submodule':True,
    'container':True,
    'list':True,
    'case':True,
    'choice':True,
    'grouping':True,
    'uses':True,
    'augment':True,
    'input':True,
    'output':True,
    'notification':True,
    'rpc':True,
    'action':True,
}
# Special validation variables: (<name>, <predicate on keyword>).  In
# validate_module, a statement whose keyword satisfies the predicate is
# also run through the function registered under (phase, <name>).
_validation_variables = [
    ('$has_children', lambda keyword: keyword in _keyword_with_children),
    ('$extension', lambda keyword: util.is_prefixed(keyword)),
]
# Keywords that define data nodes; plugins extend via add_data_keyword().
data_keywords = ['leaf', 'leaf-list', 'container', 'list', 'choice', 'case',
                 'anyxml', 'anydata', 'action', 'rpc', 'notification']
# Keywords that do not take an explicit 'config' substatement.
_keywords_with_no_explicit_config = ['action', 'rpc', 'notification']
# Extension keywords copied on 'uses' / 'augment' expansion; populated by
# plugins via add_copy_uses_keyword() / add_copy_augment_keyword().
_copy_uses_keywords = []
_copy_augment_keywords = []
# Valid 'refine' substatements; plugins extend with add_refinement_element().
_refinements = [
    # (<keyword>, <list of keywords for which <keyword> can be refined>,
    #  <merge>, <validation function>)
    ('description',
     ['container', 'leaf', 'leaf-list', 'list', 'choice', 'case',
      'anyxml', 'anydata', 'action', 'notification'],
     False, None),
    ('reference',
     ['container', 'leaf', 'leaf-list', 'list', 'choice', 'case',
      'anyxml', 'anydata', 'action', 'notification'],
     False, None),
    ('config',
     ['container', 'leaf', 'leaf-list', 'list', 'choice', 'anyxml', 'anydata'],
     False, None),
    ('presence', ['container'], False, None),
    ('must', ['container', 'leaf', 'leaf-list', 'list', 'anyxml', 'anydata'],
     True, None),
    # ('$1.1', kw) means the refinement is valid for kw in YANG 1.1 only
    ('default', ['leaf', ('$1.1', 'leaf-list'), 'choice'],
     False, lambda ctx, target, default: v_default(ctx, target, default)),
    ('mandatory', ['leaf', 'choice', 'anyxml', 'anydata'], False, None),
    ('min-elements', ['leaf-list', 'list'], False, None),
    ('max-elements', ['leaf-list', 'list'], False, None),
    ('if-feature',
     ['container', 'leaf', 'leaf-list', 'list', 'choice', 'case',
      'anyxml', 'anydata'],
     True, None),
]
# Keywords that may occur at most once under a refine/deviate target.
_singleton_keywords = {
    'type':True,
    'units':True,
    'default':True,
    'config':True,
    'mandatory':True,
    'min-elements':True,
    'max-elements':True
}
# Singleton keywords that 'deviate delete' may remove.
_deviate_delete_singleton_keywords = {
    'units':True,
    'default':True
}
# <keyword> -> list of target keywords for which <keyword> may be
# deviated; plugins extend with add_deviation_element().
_valid_deviations = {
    'type':['leaf', 'leaf-list'],
    'units':['leaf', 'leaf-list'],
    'default':['leaf', 'leaf-list', 'choice'],
    'config':['leaf', 'choice', 'container', 'list', 'leaf-list'],
    'mandatory':['leaf', 'choice'],
    'min-elements':['leaf-list', 'list'],
    'max-elements':['leaf-list', 'list'],
    'must':['leaf', 'choice', 'container', 'list', 'leaf-list'],
    'unique':['list'],
}
### Validation
def validate_module(ctx, module):
    """Validate `module`, which is a Statement representing a (sub)module.

    Runs every phase in _validation_phases over the statement tree,
    dispatching through _validation_map (exact keyword match, then
    validation-variable match, then '*' wildcard).  A validation
    function may return 'stop' (abort all validation, via Abort) or
    'continue' (skip recursion into this statement's substatements).
    Sets module.i_is_validated to 'in_progress' while running, then
    True."""
    if module.i_is_validated:
        return
    def iterate(stmt, phase):
        # if the grammar is not yet checked or if it is checked and
        # valid, then we continue.
        if getattr(stmt, 'is_grammatically_valid', None) is False:
            return
        # first check an exact match
        key = (phase, stmt.keyword)
        res = 'recurse'
        if key in _validation_map:
            f = _validation_map[key]
            res = f(ctx, stmt)
            if res == 'stop':
                raise Abort
        # then also run match by special variable
        for var_name, var_f in _validation_variables:
            key = phase, var_name
            if key in _validation_map and var_f(stmt.keyword) is True:
                f = _validation_map[key]
                res = f(ctx, stmt)
                if res == 'stop':
                    raise Abort
        # then run wildcard
        wildcard = (phase, '*')
        if wildcard in _validation_map:
            f = _validation_map[wildcard]
            res = f(ctx, stmt)
            if res == 'stop':
                raise Abort
        if res == 'continue':
            pass
        else:
            # default is to recurse
            if phase in _v_i_children:
                # these phases walk the expanded i_children tree instead
                # of the raw substmts
                if stmt.keyword == 'grouping':
                    return
                if stmt.i_module is not None and stmt.i_module != module:
                    # this means that the stmt is from an included, expanded
                    # submodule - already validated.
                    return
                if hasattr(stmt, 'i_children'):
                    for s in stmt.i_children:
                        iterate(s, phase)
                for s in stmt.substmts:
                    if (hasattr(s, 'i_has_i_children') or
                            (phase, s.keyword) in _v_i_children_keywords):
                        iterate(s, phase)
            else:
                for s in stmt.substmts:
                    iterate(s, phase)
    module.i_is_validated = 'in_progress'
    try:
        for phase in _validation_phases:
            iterate(module, phase)
    except Abort:
        pass
    module.i_is_validated = True
def v_init_module(ctx, stmt):
    """Initialize a 'module' or 'submodule' statement (phase 'init').

    Sets i_version and i_modulename, builds the prefix maps
    (i_prefixes, i_unused_prefixes, i_missing_prefixes, i_prefix) from
    the module's own prefix and its imports, initializes the definition
    maps (i_features, i_identities, i_extensions) and bookkeeping
    attributes, and finally points i_module / i_orig_module /
    i_main_module in every statement of the tree."""
    ## remember that the grammar is not validated
    vsn = stmt.search_one('yang-version')
    if vsn is not None:
        stmt.i_version = vsn.arg
    else:
        stmt.i_version = '1'
    # create a prefix map in the module:
    #   <prefix string> -> (<modulename>, <revision-date> | None)
    stmt.i_prefixes = {}
    # keep track of unused prefixes: <prefix string> -> <import statement>
    stmt.i_unused_prefixes = {}
    # keep track of missing prefixes, to suppress multiple errors
    stmt.i_missing_prefixes = {}
    # insert our own prefix into the map
    prefix = None
    if stmt.keyword == 'module':
        prefix = stmt.search_one('prefix')
        stmt.i_modulename = stmt.arg
        mod = stmt
    else:
        belongs_to = stmt.search_one('belongs-to')
        if belongs_to is not None and belongs_to.arg is not None:
            prefix = belongs_to.search_one('prefix')
            stmt.i_modulename = belongs_to.arg
            mod = ctx.get_module(stmt.i_modulename)
            if mod is None or not mod.i_is_validated:
                # this happens if a submodule is validated standalone
                mod = stmt
        else:
            stmt.i_modulename = ""
            mod = None
    if prefix is not None and prefix.arg is not None:
        stmt.i_prefixes[prefix.arg] = (stmt.arg, None)
        stmt.i_prefix = prefix.arg
    else:
        stmt.i_prefix = None
    # next we try to add prefixes for each import
    for i in stmt.search('import'):
        p = i.search_one('prefix')
        # verify that the prefix is not used
        if p is not None:
            prefix = p.arg
            r = i.search_one('revision-date')
            if r is not None:
                revision = r.arg
            else:
                revision = None
            # check if the prefix is already used by someone else
            if prefix in stmt.i_prefixes:
                (m, _rev) = stmt.i_prefixes[prefix]
                err_add(ctx.errors, p.pos, 'PREFIX_ALREADY_USED', (prefix, m))
            # add the prefix to the unused prefixes
            if (i.arg is not None and p.arg is not None
                    and i.arg != stmt.i_modulename):
                stmt.i_prefixes[p.arg] = (i.arg, revision)
                stmt.i_unused_prefixes[p.arg] = i
    stmt.i_features = {}
    stmt.i_identities = {}
    stmt.i_extensions = {}
    stmt.i_including_modulename = None
    # save a pointer to the context
    stmt.i_ctx = ctx
    # keep track of created augment nodes
    stmt.i_undefined_augment_nodes = {}
    # next, set the attribute 'i_module' in each statement to point to the
    # module where the statement is defined.  if the module is a submodule,
    # 'i_main_module' will point to the main module, except if a submodule is
    # validated stand-alone (then it points to the submodule)
    # 'i_orig_module' will point to the real module / submodule.
    # 'i_module' will point to the main module.
    def set_i_module(s):
        s.i_orig_module = s.top
        s.i_module = s.top
        s.i_main_module = mod
        return
    iterate_stmt(stmt, set_i_module)
def v_init_extension(ctx, stmt):
    """find the modulename of the prefix, and set `stmt.keyword`

    The raw (prefix, identifier) keyword is rewritten to
    (modulename, identifier); the extension definition itself is
    resolved later (i_extension starts as None)."""
    (prefix, identifier) = stmt.raw_keyword
    (modname, revision) = util.prefix_to_modulename_and_revision(
        stmt.i_module, prefix, stmt.pos, ctx.errors)
    stmt.keyword = (modname, identifier)
    stmt.i_extension_modulename = modname
    stmt.i_extension_revision = revision
    stmt.i_extension = None
def v_init_stmt(ctx, stmt):
    """Initialize per-statement definition maps (phase 'init2', all stmts)."""
    stmt.i_typedefs = {}
    stmt.i_groupings = {}
    stmt.i_uniques = []
def v_init_has_children(ctx, stmt):
    """Initialize the expanded-children list for statements that have one."""
    stmt.i_children = []
def v_init_import(ctx, stmt):
    """Initialize the safe-import flag on an 'import' statement."""
    stmt.i_is_safe_import = False
### grammar phase
def v_grammar_module(ctx, stmt):
    """Check (sub)module grammar and revision ordering (phase 'grammar').

    Sets stmt.i_latest_revision to the highest revision date found, and
    reports REVISION_ORDER when revisions are not in descending order."""
    # check the statement hierarchy
    canonical = (ctx.canonical and stmt.i_is_primary_module)
    grammar.chk_module_statements(ctx, stmt, canonical)
    # check revision statements order
    prev = None
    stmt.i_latest_revision = None
    for r in stmt.search('revision'):
        if stmt.i_latest_revision is None or r.arg > stmt.i_latest_revision:
            stmt.i_latest_revision = r.arg
        # revisions must appear newest-first; a later entry with a newer
        # date than the previous one is out of order
        if prev is not None and r.arg > prev:
            err_add(ctx.errors, r.pos, 'REVISION_ORDER', ())
        prev = r.arg
def v_grammar_typedef(ctx, stmt):
    """Report an error if a typedef shadows a built-in YANG type name."""
    if types.is_base_type(stmt.arg):
        err_add(ctx.errors, stmt.pos, 'BAD_TYPE_NAME', stmt.arg)
def v_grammar_all(ctx, stmt):
    """Grammar checks run for every statement (wildcard entry)."""
    v_grammar_unique_defs(ctx, stmt)
    v_grammar_identifier(ctx, stmt)
def v_grammar_unique_defs(ctx, stmt):
    """Verify that all typedefs and groupings are unique
    Called for every statement.
    Stores all typedefs in stmt.i_typedefs, groupings in stmt.i_groupings;
    at the top level (stmt.parent is None) also features, identities and
    extensions in their respective maps.
    """
    defs = [('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs),
            ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings)]
    if stmt.parent is None:
        defs.extend(
            [('feature', 'FEATURE_ALREADY_DEFINED', stmt.i_features),
             ('identity', 'IDENTITY_ALREADY_DEFINED', stmt.i_identities),
             ('extension', 'EXTENSION_ALREADY_DEFINED', stmt.i_extensions)])
    for keyword, errcode, stmtdefs in defs:
        for definition in stmt.search(keyword):
            if definition.arg in stmtdefs:
                # duplicate name at this level - report against the first
                other = stmtdefs[definition.arg]
                err_add(ctx.errors, definition.pos,
                        errcode, (definition.arg, other.pos))
            else:
                stmtdefs[definition.arg] = definition
def v_grammar_identifier(ctx, stmt):
    """Report XML_IDENTIFIER for illegally-prefixed identifiers in YANG 1.

    Only applies to statements whose argument type is 'identifier'
    according to the grammar's statement map."""
    try:
        (arg_type, _subspec) = grammar.stmt_map[stmt.keyword]
    except KeyError:
        # unknown keyword (e.g. extension) - nothing to check
        return
    if (arg_type == 'identifier' and
            grammar.re_identifier_illegal_prefix.search(stmt.arg) is not None):
        if stmt.keyword == 'module' or stmt.keyword == 'submodule':
            mod = stmt
        else:
            mod = stmt.i_module
        # the restriction applies to YANG version 1 only
        if mod.i_version == '1':
            err_add(ctx.errors, stmt.pos, 'XML_IDENTIFIER', stmt.arg)
### import and include phase
def v_import_module(ctx, stmt):
    """Load and validate modules imported/included by `stmt` (phase 'import').

    Each 'import' must resolve to a module and each 'include' to a
    submodule of matching YANG version.  An included submodule's
    expanded children and definition maps (typedefs, groupings,
    features, identities, extensions) are merged into `stmt`, with
    collisions reported."""
    imports = stmt.search('import')
    includes = stmt.search('include')
    if stmt.keyword == 'module':
        mymodulename = stmt.arg
    else:
        b = stmt.search_one('belongs-to')
        if b is not None:
            mymodulename = b.arg
        else:
            mymodulename = None
    def add_module(i, primary_module):
        # resolve the import/include statement `i` to a validated
        # (sub)module, reporting circular-dependency and version errors
        # check if the module to import is already added
        modulename = i.arg
        r = i.search_one('revision-date')
        rev = None
        if r is not None:
            rev = r.arg
        m = ctx.get_module(modulename, rev)
        if m is not None and i.keyword == 'import' and i.i_is_safe_import:
            pass
        elif m is not None and m.i_is_validated == 'in_progress':
            err_add(ctx.errors, i.pos,
                    'CIRCULAR_DEPENDENCY', ('module', modulename))
        # try to add the module to the context
        m = ctx.search_module(i.pos, modulename, rev,
                              primary_module=primary_module)
        if m is not None:
            validate_module(ctx, m)
        # a YANG 1 module may not import a YANG 1.1 module by revision
        if (m is not None and r is not None and
                stmt.i_version == '1' and m.i_version == '1.1'):
            err_add(ctx.errors, i.pos,
                    'BAD_IMPORT_YANG_VERSION',
                    (stmt.i_version, m.i_version))
        return m
    for i in imports:
        module = add_module(i, False)
        if module is not None and module.keyword != 'module':
            err_add(ctx.errors, i.pos,
                    'BAD_IMPORT', (module.keyword, i.arg))
    for i in includes:
        submodule = add_module(i, stmt.i_is_primary_module)
        if submodule is not None and submodule.keyword != 'submodule':
            err_add(ctx.errors, i.pos,
                    'BAD_INCLUDE', (submodule.keyword, i.arg))
            # NOTE: this return aborts processing of remaining includes
            return
        if submodule is not None:
            if submodule.i_version != stmt.i_version:
                err_add(ctx.errors, i.pos,
                        'BAD_INCLUDE_YANG_VERSION',
                        (submodule.i_version, stmt.i_version))
                # NOTE: this return aborts processing of remaining includes
                return
            if stmt.keyword == 'module':
                submodule.i_including_modulename = stmt.arg
            else:
                submodule.i_including_modulename = mymodulename
            b = submodule.search_one('belongs-to')
            if b is not None and b.arg != mymodulename:
                err_add(ctx.errors, b.pos,
                        'BAD_SUB_BELONGS_TO',
                        (stmt.arg, submodule.arg, submodule.arg))
            else:
                # check that each submodule included by this submodule
                # is also included by the module
                if stmt.keyword == 'module':
                    for s in submodule.search('include'):
                        if stmt.search_one('include', s.arg) is None:
                            err_add(ctx.errors, s.pos,
                                    'MISSING_INCLUDE',
                                    (s.arg, submodule.arg, stmt.arg))
                # add typedefs, groupings, nodes etc to this module
                for ch in submodule.i_children:
                    if ch not in stmt.i_children:
                        stmt.i_children.append(ch)
                # verify that the submodule's definitions do not collide
                # with the module's definitions
                defs = [
                    (submodule.i_typedefs, stmt.i_typedefs,
                     'TYPE_ALREADY_DEFINED'),
                    (submodule.i_groupings, stmt.i_groupings,
                     'GROUPING_ALREADY_DEFINED'),
                    (submodule.i_features, stmt.i_features,
                     'FEATURE_ALREADY_DEFINED'),
                    (submodule.i_identities, stmt.i_identities,
                     'IDENTITY_ALREADY_DEFINED'),
                    (submodule.i_extensions, stmt.i_extensions,
                     'EXTENSION_ALREADY_DEFINED')]
                for substmtdefs, stmtdefs, errcode in defs:
                    for name in substmtdefs:
                        subdefinition = substmtdefs[name]
                        if name in stmtdefs:
                            # when the same submodule is included twice
                            # (e.g. by the module and by another submodule)
                            # the same definition will exist multiple times.
                            other = stmtdefs[name]
                            if other != subdefinition:
                                err_add(ctx.errors, other.pos,
                                        errcode, (name, subdefinition.pos))
                        else:
                            stmtdefs[name] = subdefinition
### type phase
def v_type_typedef(ctx, stmt):
    """Validate a 'typedef' statement (phase 'type_2').

    Guards against circular typedef chains using i_is_validated ==
    'in_progress' (marking the chain i_is_circular), validates the
    contained 'type' statement, records any leafref path spec in
    i_leafref, and computes the effective default value - either the
    typedef's own 'default' or one inherited from its base typedef -
    into i_default / i_default_str."""
    if hasattr(stmt, 'i_is_validated'):
        if stmt.i_is_validated is True:
            # this type has already been validated
            return
        elif stmt.i_is_circular is True:
            return
        elif stmt.i_is_validated == 'in_progress':
            err_add(ctx.errors, stmt.pos,
                    'CIRCULAR_DEPENDENCY', ('type', stmt.arg) )
            stmt.i_is_circular = True
            return
    stmt.i_is_circular = False
    stmt.i_is_validated = 'in_progress'
    stmt.i_default = None
    stmt.i_default_str = ""
    stmt.i_is_unused = True
    stmt.i_leafref = None # path_type_spec
    stmt.i_leafref_ptr = None # pointer to the leaf the leafref refer to
    stmt.i_leafref_expanded = False
    name = stmt.arg
    if stmt.parent.parent is not None:
        # non-top-level typedef; check if it is already defined
        ptype = search_typedef(stmt.parent.parent, name)
        if ptype is not None:
            err_add(ctx.errors, stmt.pos, 'TYPE_ALREADY_DEFINED',
                    (name, ptype.pos))
    type_ = stmt.search_one('type')
    if type_ is None or type_.is_grammatically_valid is False:
        # error is already reported by grammar check
        stmt.i_is_validated = True
        return
    # ensure our type is validated
    v_type_type(ctx, type_)
    # keep track of our leafref
    type_spec = type_.i_type_spec
    if isinstance(type_spec, types.PathTypeSpec):
        stmt.i_leafref = type_spec
    def check_circular_typedef(ctx, type_):
        # recursively validate referenced typedefs (and union member
        # types) so that circular chains are detected
        # ensure the type is validated
        v_type_type(ctx, type_)
        # check the direct typedef
        if (type_.i_typedef is not None and
                type_.i_typedef.is_grammatically_valid is True):
            v_type_typedef(ctx, type_.i_typedef)
        # check all union's types
        membertypes = type_.search('type')
        for t in membertypes:
            check_circular_typedef(ctx, t)
    check_circular_typedef(ctx, type_)
    stmt.i_is_validated = True
    # check if we have a default value
    default = stmt.search_one('default')
    # ... or if we don't; check if our base typedef has one
    if (default is None and
            type_.i_typedef is not None and
            type_.i_typedef.i_default is not None):
        # validate that the base type's default value is still valid
        stmt.i_default = type_.i_typedef.i_default
        stmt.i_default_str = type_.i_typedef.i_default_str
        type_.i_type_spec.validate(ctx.errors, stmt.pos,
                                   stmt.i_default, stmt.i_module,
                                   ' for the inherited default value ')
    elif (default is not None and
          default.arg is not None and
          type_.i_type_spec is not None):
        stmt.i_default = type_.i_type_spec.str_to_val(ctx.errors,
                                                      default.pos,
                                                      default.arg,
                                                      stmt.i_module)
        stmt.i_default_str = default.arg
        if stmt.i_default is not None:
            type_.i_type_spec.validate(ctx.errors, default.pos,
                                       stmt.i_default, stmt.i_module,
                                       ' for the default value')
def v_type_type(ctx, stmt):
    """Validate a 'type' statement (phase 'type_2').

    Resolves stmt.arg to a local/imported typedef or a built-in type
    and stores the result in stmt.i_typedef / stmt.i_type_spec.  Each
    restriction substatement (fraction-digits, range, length, pattern,
    path, base, require-instance, enum, bit, and member 'type's for
    union) is checked against the base type's allowed restrictions and,
    when valid, wraps i_type_spec in a more specific TypeSpec;
    i_is_derived is set whenever a restriction applies.  Errors are
    reported via err_add; early exits return None, the normal exit
    returns False (the value is unused by callers in this file)."""
    if hasattr(stmt, 'i_is_validated'):
        # already validated
        return
    # set statement-specific variables
    stmt.i_is_validated = True
    stmt.i_is_derived = False
    stmt.i_type_spec = None
    stmt.i_typedef = None
    # Find the base type_spec
    prefix, name = util.split_identifier(stmt.arg)
    if prefix is None or stmt.i_module.i_prefix == prefix:
        # check local typedefs
        stmt.i_typedef = search_typedef(stmt, name)
        if stmt.i_typedef is None:
            # check built-in types
            try:
                stmt.i_type_spec = types.yang_type_specs[name]
            except KeyError:
                err_add(ctx.errors, stmt.pos,
                        'TYPE_NOT_FOUND', (name, stmt.i_module.arg))
                return
        else:
            # ensure the typedef is validated
            if stmt.i_typedef.is_grammatically_valid is True:
                v_type_typedef(ctx, stmt.i_typedef)
            else:
                stmt.i_typedef.i_default = None
                stmt.i_typedef.i_default_str = ""
            stmt.i_typedef.i_is_unused = False
    else:
        # this is a prefixed name, check the imported modules
        pmodule = util.prefix_to_module(
            stmt.i_module, prefix, stmt.pos, ctx.errors)
        if pmodule is None:
            return
        stmt.i_typedef = search_typedef(pmodule, name)
        if stmt.i_typedef is None:
            err_add(ctx.errors, stmt.pos, 'TYPE_NOT_FOUND', (name, pmodule.arg))
            return
        else:
            stmt.i_typedef.i_is_unused = False
    if stmt.i_typedef is not None:
        typedef_type = stmt.i_typedef.search_one('type')
        if typedef_type is not None and hasattr(typedef_type, 'i_type_spec'):
            # copy since we modify the typespec's definition
            stmt.i_type_spec = copy.copy(typedef_type.i_type_spec)
            if stmt.i_type_spec is not None:
                stmt.i_type_spec.definition = ('at ' +
                                               str(stmt.i_typedef.pos) +
                                               ' ')
    if stmt.i_type_spec is None:
        # an error has been added already; skip further validation
        return
    # check the fraction-digits - only applicable when the type is the builtin
    # decimal64
    frac = stmt.search_one('fraction-digits')
    if frac is not None and stmt.arg != 'decimal64':
        err_add(ctx.errors, frac.pos, 'BAD_RESTRICTION', 'fraction_digits')
    elif stmt.arg == 'decimal64' and frac is None:
        err_add(ctx.errors, stmt.pos, 'MISSING_TYPE_SPEC_1',
                ('decimal64', 'fraction-digits'))
    elif stmt.arg == 'decimal64' and frac.is_grammatically_valid:
        stmt.i_is_derived = True
        stmt.i_type_spec = types.Decimal64TypeSpec(frac)
    # check the range restriction
    stmt.i_ranges = []
    rangestmt = stmt.search_one('range')
    if rangestmt is not None:
        if 'range' not in stmt.i_type_spec.restrictions():
            err_add(ctx.errors, rangestmt.pos, 'BAD_RESTRICTION', 'range')
        else:
            stmt.i_is_derived = True
            ranges_spec = types.validate_range_expr(ctx.errors, rangestmt, stmt)
            if ranges_spec is not None:
                stmt.i_ranges = ranges_spec[0]
                stmt.i_type_spec = types.RangeTypeSpec(stmt.i_type_spec,
                                                       ranges_spec)
    # check the length restriction
    stmt.i_lengths = []
    length = stmt.search_one('length')
    if (length is not None and
            'length' not in stmt.i_type_spec.restrictions()):
        err_add(ctx.errors, length.pos, 'BAD_RESTRICTION', 'length')
    elif length is not None:
        stmt.i_is_derived = True
        lengths_spec = types.validate_length_expr(ctx.errors, length, stmt)
        if lengths_spec is not None:
            stmt.i_lengths = lengths_spec[0]
            stmt.i_type_spec = types.LengthTypeSpec(stmt.i_type_spec,
                                                    lengths_spec)
    # check the pattern restrictions
    patterns = stmt.search('pattern')
    if (patterns and
            'pattern' not in stmt.i_type_spec.restrictions()):
        err_add(ctx.errors, patterns[0].pos, 'BAD_RESTRICTION', 'pattern')
    elif patterns:
        stmt.i_is_derived = True
        pattern_specs = [types.validate_pattern_expr(ctx.errors, p)
                         for p in patterns]
        if None not in pattern_specs:
            # all patterns valid
            stmt.i_type_spec = types.PatternTypeSpec(stmt.i_type_spec,
                                                     pattern_specs)
    # check the path restriction
    path = stmt.search_one('path')
    if path is not None and stmt.arg != 'leafref':
        err_add(ctx.errors, path.pos, 'BAD_RESTRICTION', 'path')
    elif stmt.arg == 'leafref' and path is None:
        err_add(ctx.errors, stmt.pos, 'MISSING_TYPE_SPEC_1',
                ('leafref', 'path'))
    elif path is not None:
        stmt.i_is_derived = True
        if path.is_grammatically_valid is True:
            path_spec = types.validate_path_expr(ctx.errors, path)
            if path_spec is not None:
                stmt.i_type_spec = types.PathTypeSpec(stmt.i_type_spec,
                                                      path_spec, path, path.pos)
                stmt.i_type_spec.i_source_stmt = stmt
    # check the base restriction
    bases = stmt.search('base')
    if bases and stmt.arg != 'identityref':
        err_add(ctx.errors, bases[0].pos, 'BAD_RESTRICTION', 'base')
    elif len(bases) > 1 and stmt.i_module.i_version == '1':
        # multiple bases are a YANG 1.1 feature
        err_add(ctx.errors, bases[1].pos, 'UNEXPECTED_KEYWORD', 'base')
    elif stmt.arg == 'identityref' and not bases:
        err_add(ctx.errors, stmt.pos, 'MISSING_TYPE_SPEC',
                ('identityref', 'base'))
    else:
        idbases = []
        for base in bases:
            v_type_base(ctx, base)
            if base.i_identity is not None:
                idbases.append(base)
        if len(idbases) > 0:
            stmt.i_is_derived = True
            stmt.i_type_spec = types.IdentityrefTypeSpec(idbases)
    # check the require-instance restriction
    req_inst = stmt.search_one('require-instance')
    if (req_inst is not None and
            'require-instance' not in stmt.i_type_spec.restrictions()):
        err_add(ctx.errors, req_inst.pos, 'BAD_RESTRICTION', 'require-instance')
    if (req_inst is not None and stmt.i_type_spec.name == 'leafref' and
            stmt.i_module.i_version == '1'):
        # require-instance on leafref is a YANG 1.1 feature
        err_add(ctx.errors, req_inst.pos, 'BAD_RESTRICTION', 'require-instance')
    if req_inst is not None:
        stmt.i_type_spec.require_instance = req_inst.arg == 'true'
    # check the enums - only applicable when the type is the builtin
    # enumeration type in YANG version 1, and for derived enumerations in 1.1
    enums = stmt.search('enum')
    if (enums and
        ('enum' not in stmt.i_type_spec.restrictions() or
         stmt.i_module.i_version == '1' and stmt.arg != 'enumeration')):
        err_add(ctx.errors, enums[0].pos, 'BAD_RESTRICTION', 'enum')
    elif stmt.arg == 'enumeration' and not enums:
        err_add(ctx.errors, stmt.pos, 'MISSING_TYPE_SPEC',
                ('enumeration', 'enum'))
    elif enums:
        stmt.i_is_derived = True
        enum_spec = types.validate_enums(ctx.errors, enums, stmt)
        if enum_spec is not None:
            stmt.i_type_spec = types.EnumTypeSpec(stmt.i_type_spec,
                                                  enum_spec)
    # check the bits - only applicable when the type is the builtin
    # bits type in YANG version 1, and for derived bits in 1.1
    bits = stmt.search('bit')
    if (bits and
        ('bit' not in stmt.i_type_spec.restrictions() or
         stmt.i_module.i_version == '1' and stmt.arg != 'bits')):
        err_add(ctx.errors, bits[0].pos, 'BAD_RESTRICTION', 'bit')
    elif stmt.arg == 'bits' and not bits:
        err_add(ctx.errors, stmt.pos, 'MISSING_TYPE_SPEC',
                ('bits', 'bit'))
    elif bits:
        stmt.i_is_derived = True
        bit_spec = types.validate_bits(ctx.errors, bits, stmt)
        if bit_spec is not None:
            stmt.i_type_spec = types.BitTypeSpec(stmt.i_type_spec,
                                                 bit_spec)
    # check the union types
    membertypes = stmt.search('type')
    if membertypes and stmt.arg != 'union':
        err_add(ctx.errors, membertypes[0].pos, 'BAD_RESTRICTION', 'union')
    elif not membertypes and stmt.arg == 'union':
        err_add(ctx.errors, stmt.pos, 'MISSING_TYPE_SPEC',
                ('union', 'type'))
    elif membertypes:
        stmt.i_is_derived = True
        for t in membertypes:
            if t.is_grammatically_valid is True:
                v_type_type(ctx, t)
        stmt.i_type_spec = types.UnionTypeSpec(membertypes)
        if stmt.i_module.i_version == '1':
            # in YANG 1, 'empty' and 'leafref' may not be union members
            t = has_type(stmt, ['empty', 'leafref'])
            if t is not None:
                err_add(ctx.errors, stmt.pos, 'BAD_TYPE_IN_UNION',
                        (t.arg, t.pos))
    return False
def v_check_if_feature(ctx, type, defval):
    """Report DEFAULT_AND_IFFEATURE if the substatement of `type` whose
    argument equals `defval` carries an 'if-feature' substatement.
    Only the first matching substatement is examined."""
    for substmt in type.substmts:
        if substmt.arg != defval:
            continue
        feature = substmt.search_one('if-feature')
        if feature is not None:
            err_add(ctx.errors, feature.pos, 'DEFAULT_AND_IFFEATURE', ())
        return
def v_check_default(ctx, cur_type, def_value):
if def_value is None:
return