/
gotocjson.go
6087 lines (5740 loc) · 282 KB
/
gotocjson.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
// gotocjson is a source-code converter. It takes a Go source file containing a
// bunch of enumeration and structure declarations, and turns them into equivalent
// C code, including not just declarations but also JSON encoding and decoding
// routines that respect the field tags specified in the Go code. This mechanism
// will both allow for rapid automated changes on the C side whenever we need to
// revise the Go interface, and ensure that the conversion routines are up-to-date
// and accurate.
// Copyright (c) 2020-2021 GroundWork Open Source, Inc. ("GroundWork").
// All rights reserved.
package main
// All operations in this program assume that the source code under
// inspection fits easily into memory all at once; there is no need
// for any type of streaming in the handling of the source code.
// FIX LATER:
//
// (*) Consider implementing a layer of routines that effectively provide what C++
// calls "placement new" semantics, meaning the caller would be responsible for
// allocating the principal C-structure memory block and the called routine
// would then take a pointer to that block and fill it in. Figure out whether
// such an approach might eliminate some amount of allocation and deallocation
// at the top and bottom of the hierarchy, enough to make a switch worthwhile.
//
// (*) Consider optionally using the Custom Memory Allocation routines of Jansson
// to implement some sort of invocation-count and execution-time processing,
// so we can measure the amount of effort involved at that level of the
// implementation of JSON encoding and decoding.
//
// (*) Apply the emit_branch_detail flag in many places, default it to false, and
// provide a command-line option to set it.
import (
"bufio"
"bytes"
"errors"
"fmt"
"go/ast"
"go/parser"
"go/token"
"io"
"os"
"path/filepath"
"reflect"
"regexp"
"runtime"
"sort"
"strconv"
"strings"
"text/template"
"time"
"unicode"
"unicode/utf8"
)
// Argument parsing in Go seems to be something of a mess. The distributed Go language provides
// an {import "flag"} package (https://golang.org/pkg/flag/), but for no good reason it ignores
// longstanding conventions for how to construct long argument option names. Alternatives for
// getopt-like behavior include:
//
// import "github.com/pborman/getopt" // version 1
// import "github.com/pborman/getopt/v2" // version 2, preferred over version 1
// import "github.com/pborman/options" // improvement over "github.com/pborman/getopt/v2"
// import "github.com/mkideal/cli"
// import "github.com/galdor/go-cmdline"
// import "gopkg.in/alecthomas/kingpin.v2"
// import "github.com/docopt/docopt-go"
// import "github.com/jessevdk/go-flags"
//
// See:
//
// https://godoc.org/github.com/pborman/getopt
// https://godoc.org/github.com/pborman/getopt/v2
// https://godoc.org/github.com/pborman/options
// https://groups.google.com/forum/#!topic/golang-nuts/i8Qw9go6CnQ
// https://github.com/mkideal/cli
// https://github.com/galdor/go-cmdline
// http://snowsyn.net/2016/08/11/parsing-command-line-options-in-go/
// https://github.com/alecthomas/kingpin
// http://docopt.org/
// https://github.com/docopt/docopt.go
// https://godoc.org/github.com/jessevdk/go-flags
//
// Not wanting to get into that complexity at the moment, I have recorded those resources above for
// future reference, but for now I am restricting this program to use only short-option command-line
// arguments, and very simple parsing.
// Globals.
var PROGRAM = "gotocjson"
var VERSION = "0.5.0"
var bad_args = false
var exit_early = false
var print_help = false
var print_version = false
var print_diagnostics = false
var print_errors = true
var emit_branch_detail = true
var generate_generic_datatypes = false
var generate_generic_structures = false
var input_filepath = ""
var output_directory = ""
var diag_file = os.Stdout
// Functions.
func show_help() {
fmt.Fprintf(os.Stdout,
`usage: %[1]s [-d] [-g] [-o outdir] filename.go
%[1]s -h
%[1]s --help
%[1]s --version
where: -d produces diagnostic output on the stdout stream
-g produces output for generic datatypes, which may be shared across
multiple application input files and are normally suppressed in
the name of avoiding duplicate declarations and definitions
-o outdir specifies the directory where the generated .h and .c files
will be placed; default is the same directory in which the
filename.go file lives
filename.go path to the source-code file you wish to transform into C code
-h prints this usage message
--help prints this usage message
--version prints the version of this program
`, PROGRAM)
}
func show_version() {
fmt.Fprintf(os.Stdout, "%s version %s\n", PROGRAM, VERSION)
}
// Because of the mess that is Go's handling of command-line arguments, we centralize the parsing of
// those options so just this one routine will need replacement if/when we switch the implementation
// to some other package.
//
func parse_args() {
// Grab the full set of command-line arguments, so we can more readily manipulate them.
cmd_args := os.Args
// Skip the program name.
cmd_args = cmd_args[1:]
for {
if len(cmd_args) == 0 || cmd_args[0] == "-h" || cmd_args[0] == "--help" {
print_help = true
exit_early = true
break
}
if cmd_args[0] == "--version" {
print_version = true
exit_early = true
break
}
if cmd_args[0] == "-d" {
print_diagnostics = true
// Someday we'll give emit_branch_detail its own option flag.
// In the meantime, we can sponge off the existing flag that
// also means we want to see exactly what's going on.
emit_branch_detail = true
cmd_args = cmd_args[1:]
continue
}
if cmd_args[0] == "-g" {
generate_generic_datatypes = true
cmd_args = cmd_args[1:]
continue
}
if cmd_args[0] == "-o" {
if len(cmd_args) > 1 {
output_directory = cmd_args[1]
if len(output_directory) == 0 {
fmt.Fprintf(os.Stderr, "ERROR: Output directory is specified as an empty string.\n")
bad_args = true
print_help = true
exit_early = true
break
}
cmd_args = cmd_args[2:]
continue
} else {
bad_args = true
print_help = true
exit_early = true
break
}
}
if len(cmd_args) == 1 {
input_filepath = cmd_args[0]
if len(input_filepath) == 0 {
fmt.Fprintf(os.Stderr, "ERROR: Input filepath is specified as an empty string.\n")
bad_args = true
print_help = true
exit_early = true
break
}
if len(output_directory) == 0 {
output_directory = filepath.Dir(input_filepath)
}
break
} else {
bad_args = true
print_help = true
exit_early = true
break
}
}
if print_diagnostics && diag_file == os.Stdout {
print_errors = false
}
}
// An attempt at a function which will spill to the diag_file, and if that is not directed to the terminal
// and os.Stderr is directed to the terminal, also spill to os.Stderr so the user can see the message even
// if we are not printing full diagnostics, but in no case printing a duplicate message to the same file.
// So far this attempt fails, because we cannot find the file descriptor of the io.Writer argument.
func emit_diagnostic(w io.Writer, format string, args... interface{}) {
fmt.Fprintf(w, format, args...)
//
// "terminal" package:
// func IsTerminal(fd int) bool
//
// diagInfo, _ := w.Stat();
// errInfo, _ := os.Stderr.Stat()
// if (diagInfo.Mode() & os.ModeCharDevice) == 0 && (errInfo.Mode() & os.ModeCharDevice) != 0 {
// // w is not directed to a terminal, but stderr is a terminal
// fmt.Fprintf(os.Stderr, format, args...)
// }
}
func main() {
parse_args()
if print_help {
show_help()
}
if print_version {
show_version()
}
if exit_early {
// Go ought to have a ternary operator, but doesn't. Sigh.
if bad_args {
os.Exit(1)
} else {
os.Exit(0)
}
}
// FIX MINOR: add support for -V and --version options; automate updating the version string if that is somehow possible
fset, f, err := parse_file(input_filepath)
if err != nil {
panic(err)
os.Exit(1)
}
package_name,
simple_typedefs, enum_typedefs, const_groups, struct_typedefs, struct_field_typedefs,
simple_typedef_nodes, enum_typedef_nodes, const_group_nodes, struct_typedef_nodes,
precomputed_const_values, other_headers,
err := process_parse_nodes(fset, f)
if err != nil {
panic(err)
os.Exit(1)
}
final_type_order, err := topologically_sort_nodes(
package_name,
simple_typedefs, enum_typedefs, const_groups, struct_typedefs,
simple_typedef_nodes, enum_typedef_nodes, const_group_nodes, struct_typedef_nodes,
)
if err != nil {
panic(err)
os.Exit(1)
}
pointer_base_types, pointer_list_base_types, simple_list_base_types, list_base_types, key_value_pair_types,
struct_fields, struct_field_Go_packages, struct_field_Go_types,
struct_field_foreign_C_types, struct_field_C_types, struct_field_tags, generated_C_code,
err := print_type_declarations(
other_headers,
package_name,
final_type_order,
simple_typedefs, enum_typedefs, const_groups, struct_typedefs, struct_field_typedefs,
simple_typedef_nodes, enum_typedef_nodes, const_group_nodes, struct_typedef_nodes,
precomputed_const_values,
)
if err != nil {
panic(err)
os.Exit(1)
}
err = print_type_conversions(
generated_C_code,
package_name,
final_type_order, pointer_base_types, pointer_list_base_types, simple_list_base_types, list_base_types, key_value_pair_types,
simple_typedefs, enum_typedefs, const_groups, struct_typedefs,
simple_typedef_nodes, enum_typedef_nodes, const_group_nodes, struct_typedef_nodes,
struct_fields, struct_field_Go_packages, struct_field_Go_types,
struct_field_foreign_C_types, struct_field_C_types, struct_field_tags,
)
if err != nil {
panic(err)
os.Exit(1)
}
os.Exit(0)
}
// A routine whose output is to be used in debug messages, to precisely
// identify the source-code origin of the debug message.
func file_line() string {
var s string
if _, file_path, line_number, ok := runtime.Caller(1); ok {
// We get back the full absolute path for the file_path.
// That's much more than we need, so we extract the file
// basename and use that instead.
path_components := strings.Split(file_path, "/")
base_name := path_components[len(path_components)-1]
s = fmt.Sprintf("%s:%d", base_name, line_number)
} else {
s = ""
}
return s
}
// Routine to parse the file.
func parse_file(filepath string) (*token.FileSet, *ast.File, error) {
fset := token.NewFileSet() // positions are relative to fset
// mode := parser.ParseComments | parser.Trace | parser.DeclarationErrors
mode := parser.ParseComments | parser.DeclarationErrors
// Parse the specified file.
f, err := parser.ParseFile(fset, filepath, nil, mode)
if err != nil {
fmt.Fprintf(diag_file, "found Go-syntax parsing error in file %s: %s\n", filepath, err)
return nil, nil, err
}
return fset, f, nil
}
// FIX LATER: We could probably use a certain amount of refactoring, both to factor out similar
// code blocks and to allow for a certain degree of potential recursion in type declarations.
//
// FIX MAJOR: Make sure we test the following types separately:
//
// "foo"
// "*foo"
// "[]foo"
// "**foo"
// "*[]foo"
// "[]*foo"
// "[][]foo"
// "*[]*foo"
//
// We only track file-level (i.e., package-level) typedefs, and consts. We don't track signatures
// for generated functions because we expect that any topological sorting that would benefit from
// such tracking will be obviated by instead just putting all the necessary function declarations
// in a header file, where all the declarations will come ahead of the code that needs them.
//
// Here are the forms of the principal returned element-type maps:
//
// simple_typedefs map[ typedef_name string] typedef_type string
// enum_typedefs map[ enum_name string] enum_type string
// const_groups map[const_group_name string]constant_type string
// struct_typedefs map[ struct_name string] []field_type string
// struct_field_typedefs map[ struct_name string]map[field_name string]field_typedef string
//
// Since individual const groups are all anonymous in the Go syntax, we make up such names simply so
// we can coordinate access to multiple data structures that need to refer to the same const group.
// For simple uniqueness and to provide easy traceability, we will use the stringified content of the
// "const" keyword's parse-node "TokPos" field, such as "transit.go:21:1". (We could do the Go thing
// and just use the TokPos field directly as the map key, even if it is a structure rather than some
// simple datatype, but that would make it harder to track in development/diagnostic output which const
// group is being referred to.)
//
// Similar maps, using the same keys, are used to find the top-level parse node for each respective object:
//
// simple_typedef_nodes map[ typedef_name string]decl_node *ast.GenDecl
// enum_typedef_nodes map[ enum_name string]decl_node *ast.GenDecl
// const_group_nodes map[const_group_name string]decl_node *ast.GenDecl
// struct_typedef_nodes map[ struct_name string]decl_node *ast.GenDecl
//
func process_parse_nodes(
fset *token.FileSet,
f *ast.File,
) (
package_name string,
simple_typedefs map[string]string,
enum_typedefs map[string]string,
const_groups map[string]string,
struct_typedefs map[string][]string, // list of unique simplified types of the fields
struct_field_typedefs map[string]map[string]string,
simple_typedef_nodes map[string]*ast.GenDecl,
enum_typedef_nodes map[string]*ast.GenDecl,
const_group_nodes map[string]*ast.GenDecl,
struct_typedef_nodes map[string]*ast.GenDecl,
precomputed_const_values map[string]int64,
other_headers string,
err error,
) {
// FIX MINOR:
// Having this function in play turns out to be somewhat less than completely desirable,
// because the simple error message does not include all the failure-coordinate data that
// would have been printed by allowing the panic to proceed without interception.
defer func() {
if false {
if exception := recover(); exception != nil {
err = fmt.Errorf("internal error: %v", exception)
if print_diagnostics {
fmt.Fprintln(diag_file, err)
}
if print_errors {
fmt.Println(err)
}
}
}
}()
// struct_field_typedefs[struct_name][field_name] = full_field_typedef
// (Note that this data structure loses info about the ordering of the fields
// in any given struct, but that is fine for the uses we will make of this map.)
struct_field_typedefs = map[string]map[string]string{}
// package name
// f.Name *Ident
//
// package top-level declarations, or nil
// f.Decls []Decl
simple_typedefs = map[string]string{}
enum_typedefs = map[string]string{}
const_groups = map[string]string{}
struct_typedefs = map[string][]string{}
simple_typedef_nodes = map[string]*ast.GenDecl{}
enum_typedef_nodes = map[string]*ast.GenDecl{}
const_group_nodes = map[string]*ast.GenDecl{}
struct_typedef_nodes = map[string]*ast.GenDecl{}
precomputed_const_values = map[string]int64{}
// Print the package name.
package_name = f.Name.Name // from the "package" declaration inside the file
if print_diagnostics {
fmt.Fprintln(diag_file, "=== Package:")
fmt.Fprintln(diag_file, package_name)
}
// Print the file's imports.
if print_diagnostics {
fmt.Fprintln(diag_file, "=== Imports:")
}
// special_package_prefix := regexp.MustCompile(`^github\.com/gwos/tcg/([^/]+)$`)
special_package_prefix := regexp.MustCompile(`^github\.com/gwos/.*/([^/]+)$`)
include_headers := []string{}
for _, s := range f.Imports {
if print_diagnostics {
fmt.Fprintln(diag_file, s.Path.Value)
}
pkg := strings.ReplaceAll(s.Path.Value, "\"", "")
// We have a special cut-down version of Go's time/time.go file that we convert, and
// some of the Go application code that imports the official "time" package will in
// its converted form need to reference the result of converting our cut-down code.
if pkg == "time" {
include_headers = append(include_headers, fmt.Sprintf(`#include "%s.h"`, "time"))
}
// In general, we need to handle cross-package references in the converted Go application code.
// That said, references to the "logper" package we supply do not need any such special handling,
// because we won't actually be creating conversion routines for any datatypes in that package.
// The special-case handling here of that package prevents a #include "logper.h" directive from
// being emitted, inasmuch as that header will never be available.
special_package := special_package_prefix.FindStringSubmatch(pkg)
if special_package != nil && special_package[1] != "logper" {
include_headers = append(include_headers, fmt.Sprintf(`#include "%s.h"`, special_package[1]))
}
}
other_headers = strings.Join(include_headers, "\n")
// Print the file's documentation.
// It only prints the leading package doc, not function comments.
// For that, one needs to dig deeper (see below).
// FIX MAJOR: This is not stripping the leading "//" from comment lines.
if print_diagnostics {
fmt.Fprintln(diag_file, "=== Package Documentation:")
if f.Doc != nil {
for _, doc := range f.Doc.List {
fmt.Fprintln(diag_file, doc.Text)
}
}
}
if print_diagnostics {
fmt.Fprintln(diag_file, "=== Declarations:")
}
// Print the file-level declarations. This conveniently ignores declarations within functions,
// which we don't care about for our purposes.
panic_message := ""
node_loop:
for _, file_decl := range f.Decls {
if print_diagnostics {
// fmt.Fprintln(diag_file, d) // "&{<nil> <nil> parse_file 0xc000093660 0xc00007abd0}" and other forms
}
if func_decl, ok := file_decl.(*ast.FuncDecl); ok {
if print_diagnostics {
fmt.Fprintf(diag_file, "--- function name: %v\n", func_decl.Name.Name)
if func_decl.Doc != nil {
fmt.Fprintln(diag_file, "--- function documentation:")
// FIX MAJOR: This is not stripping the leading "//" from comment lines.
for _, doc := range func_decl.Doc.List {
fmt.Fprintln(diag_file, doc.Text)
}
}
}
}
if gen_decl, ok := file_decl.(*ast.GenDecl); ok {
if gen_decl.Tok == token.TYPE {
for _, spec := range gen_decl.Specs {
// I'm just assuming that spec.(*ast.TypeSpec).Type is of type *ast.Ident here in all cases.
// If that turns out not to be true, we'll have to fill in other cases.
if type_ident, ok := spec.(*ast.TypeSpec).Type.(*ast.Ident); ok {
if print_diagnostics {
fmt.Fprintf(diag_file, "--- simple type declaration name and type: %v %v\n", spec.(*ast.TypeSpec).Name.Name, type_ident.Name)
}
simple_typedefs[spec.(*ast.TypeSpec).Name.Name] = type_ident.Name
simple_typedef_nodes[spec.(*ast.TypeSpec).Name.Name] = gen_decl
} else if type_struct, ok := spec.(*ast.TypeSpec).Type.(*ast.StructType); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, "--- struct type: %#v\n", type_struct)
fmt.Fprintf(diag_file, "--- struct type declaration name: %v\n", spec.(*ast.TypeSpec).Name.Name)
}
struct_typedefs[spec.(*ast.TypeSpec).Name.Name] = nil
// FIX MINOR: I'm not yet sure if this is correct (though it seems to be working).
struct_field_typedefs[spec.(*ast.TypeSpec).Name.Name] = map[string]string{}
// fiX QUICK: drop the extra commented-out code here
// struct_typedefs[spec.(*ast.TypeSpec).Name.Name] = []string{nil}
// struct_typedefs[spec.(*ast.TypeSpec).Name.Name] = []string{}
struct_typedef_nodes[spec.(*ast.TypeSpec).Name.Name] = gen_decl
if type_struct.Incomplete {
// I'm not sure when this condition might be true, so let's alarm on it if we encounter it
// just to make sure we're not overlooking anything.
if print_diagnostics {
fmt.Fprintf(diag_file, " --- The list of fields is incomplete.\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
for _, field := range type_struct.Fields.List {
// FIX MAJOR: Add support for the .Doc and .Comment attributes as well.
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- field: %#v\n", field)
}
// Field elements to process:
// .Doc *ast.CommentGroup // may be nil
// .Names []*ast.Ident
if field.Names == nil {
// Here, we have an anonymous field, such as occurs with Go's structure embedding. Since
// that won't do in C, we autovivify a field name from the field type, similar to how that
// is done implicitly in Go itself but generally appending a small string to guarantee that
// there will be no confusion in C between the field name and the type name.
if type_ident, ok := field.Type.(*ast.Ident); ok {
// Old construction: just accept that we have a missing field name.
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field name and type: %#v %#v\n", "(none)", type_ident.Name)
}
// New construction: autovivify a sensible field name.
name_ident := new(ast.Ident)
// Testing shows I was wrong; modern C can handle having a variable or struct field named
// the same as a struct typedef. So to keep things simple, we don't append an underscore
// to type_ident.Name here.
name_ident.Name = type_ident.Name
field.Names = append(field.Names, name_ident)
} else if type_starexpr, ok := field.Type.(*ast.StarExpr); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field name and type: %#v %#v\n", "(none)", type_starexpr)
}
if type_ident, ok := type_starexpr.X.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field name and StarExpr type: %#v %#v\n", name.Name, type_ident.Name)
}
name_ident := new(ast.Ident)
name_ident.Name = type_ident.Name + "_ptr_"
field.Names = append(field.Names, name_ident)
} else if type_selectorexpr, ok := type_starexpr.X.(*ast.SelectorExpr); ok {
/*
var x_type_ident *ast.Ident
var ok bool
if x_type_ident, ok = type_selectorexpr.X.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field name and SelectorExpr X: %#v %#v\n", name.Name, x_type_ident.Name)
// fmt.Fprintf(diag_file, " --- struct field SelectorExpr X: %#v\n", x_type_ident.Name)
}
} else {
fmt.Fprintf(diag_file, "ERROR: when autovivifying at %s, found unexpected field.Type.X type: %T\n",
file_line(), type_selectorexpr.X)
fmt.Fprintf(diag_file, "ERROR: struct field Type.X field is not of a recognized type\n")
panic_message = "aborting due to previous errors"
break node_loop
}
*/
if type_selectorexpr.Sel == nil {
fmt.Fprintf(diag_file, "ERROR: when autovivifying at %s, struct field Type Sel field is unexpectedly nil\n", file_line())
panic_message = "aborting due to previous errors"
break node_loop
}
name_ident := new(ast.Ident)
// We used to append an underscore in this construction of name_ident.Name, but we
// are backing off from that until and unless we find it to actually be necessary.
// (The backoff is not yet done, pending testing.)
//
// We logically ought to include the x_type_ident.Name as the first part of this constructed
// field name to totally disambiguate it, but for now we are dropping that out. This improves
// our ability to identify whether a given field should be exported in JSON, by making the
// selector name (which is what we believe Go will look for when deciding on the effective
// name of the field, and thus for deciding whether the field is exported) be visible at
// the start of the field name. That way, we can just check the first rune for uppercase
// just as Go would. However, without having the x_type_ident.Name component as part of the
// name, we risk generating a field-name conflict, which would happen if we had two identical
// type_selectorexpr.Sel.Name names in the same structure originating from different packages.
// If that happens and we therefore need to put the x_type_ident.Name back into the field name,
// we could do so in some later part of the field name, even though that would look a bit ugly.
//
// name_ident.Name = x_type_ident.Name + "_" + type_selectorexpr.Sel.Name + "_ptr_"
//
name_ident.Name = type_selectorexpr.Sel.Name + "_ptr_"
if print_diagnostics {
// fmt.Fprintf(diag_file, " ==> manufactured field name: %s\n", name_ident.Name)
}
field.Names = append(field.Names, name_ident)
} else {
//
// . . . . . . . List: []*ast.Field (len = 1) {
// . . . . . . . . 0: *ast.Field {
// . . . . . . . . . Type: *ast.StarExpr {
// . . . . . . . . . . Star: transit.go:404:2
// . . . . . . . . . . X: *ast.SelectorExpr {
// . . . . . . . . . . . X: *ast.Ident {
// . . . . . . . . . . . . NamePos: transit.go:404:3
// . . . . . . . . . . . . Name: "setup"
// . . . . . . . . . . . }
// . . . . . . . . . . . Sel: *ast.Ident {
// . . . . . . . . . . . . NamePos: transit.go:404:10
// . . . . . . . . . . . . Name: "Config"
// . . . . . . . . . . . }
// . . . . . . . . . . }
// . . . . . . . . . }
// . . . . . . . . }
// . . . . . . . }
//
// The type of type_starexpr.X is a *ast.SelectorExpr, and that occurs within a field of type *ast.StarExpr .
// So once we figure out the field name we will manufacture for type_starexpr.X, we will append "_ptr_" to that name.
//
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: when autovivifying at %s, found unexpected field.Type.X type: %T\n",
file_line(), type_starexpr.X)
fmt.Fprintf(diag_file, "ERROR: struct field Type.X field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
} else if type_selectorexpr, ok := field.Type.(*ast.SelectorExpr); ok {
/*
var x_type_ident *ast.Ident
var ok bool
if x_type_ident, ok = type_selectorexpr.X.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field name and SelectorExpr X: %#v %#v\n", name.Name, x_type_ident.Name)
}
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: when autovivifying at %s, found unexpected field.Type.X type: %T\n",
file_line(), type_selectorexpr.X)
fmt.Fprintf(diag_file, "ERROR: struct field Type.X field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
*/
if type_selectorexpr.Sel == nil {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: when autovivifying at %s, struct field Type Sel field is unexpectedly nil\n", file_line())
}
panic_message = "aborting due to previous errors"
break node_loop
}
name_ident := new(ast.Ident)
// We used to append an underscore in this construction of name_ident.Name, but we
// are backing off from that until and unless we find it to actually be necessary.
// (The backoff is not yet done, pending testing.)
//
// We logically ought to include the x_type_ident.Name as the first part of this constructed
// field name to totally disambiguate it, but for now we are dropping that out. This improves
// our ability to identify whether a given field should be exported in JSON, by making the
// selector name (which is what we believe Go will look for when deciding on the effective
// name of the field, and thus for deciding whether the field is exported) be visible at
// the start of the field name. That way, we can just check the first rune for uppercase
// just as Go would. However, without having the x_type_ident.Name component as part of the
// name, we risk generating a field-name conflict, which would happen if we had two identical
// type_selectorexpr.Sel.Name names in the same structure originating from different packages.
// If that happens and we therefore need to put the x_type_ident.Name back into the field name,
// we could do so in some later part of the field name, even though that would look a bit ugly.
//
// name_ident.Name = x_type_ident.Name + "_" + type_selectorexpr.Sel.Name + "_"
//
name_ident.Name = type_selectorexpr.Sel.Name + "_"
field.Names = append(field.Names, name_ident)
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: when autovivifying at %s, found unexpected field.Type type: %T\n", file_line(), field.Type)
fmt.Fprintf(diag_file, "ERROR: struct field Type field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
}
for _, name := range field.Names {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- field name: %#v\n", name)
}
if name.Name != "" && !unicode.IsUpper([]rune(name.Name)[0]) {
if print_diagnostics {
fmt.Fprintf(diag_file, " --- skipping uncapitalized struct field name: %#v\n", name.Name)
}
continue
}
var field_type_name string
if type_ident, ok := field.Type.(*ast.Ident); ok {
field_type_name = type_ident.Name
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field name and type: %#v %#v\n", name.Name, field_type_name)
}
} else if type_starexpr, ok := field.Type.(*ast.StarExpr); ok {
if type_ident, ok := type_starexpr.X.(*ast.Ident); ok {
field_type_name = "*" + type_ident.Name
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field name and StarExpr type: %#v %#v\n", name.Name, field_type_name)
}
} else if type_array, ok := type_starexpr.X.(*ast.ArrayType); ok {
var array_type_ident *ast.Ident
// A nil type_array.Len means it's a slice type.
if type_array.Len != nil {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, a non-nil value for a StarExpr array-type Len is not yet handled (%#v)\n",
file_line(), type_array.Len)
}
panic_message = "aborting due to previous errors"
break node_loop
}
if array_type_ident, ok = type_array.Elt.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field Type X Elt array element ident %#v\n", array_type_ident)
}
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type.X.Elt type: %T\n", file_line(), type_array.Elt)
fmt.Fprintf(diag_file, "ERROR: struct field Type X Elt field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
field_type_name = "*[]" + array_type_ident.Name
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field name and type: %#v %#v\n", name.Name, field_type_name)
}
} else if type_selectorexpr, ok := type_starexpr.X.(*ast.SelectorExpr); ok {
var x_type_ident *ast.Ident
var ok bool
if x_type_ident, ok = type_selectorexpr.X.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field name and SelectorExpr X: %#v %#v\n", name.Name, x_type_ident.Name)
}
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type.X type: %T\n", file_line(), type_selectorexpr.X)
fmt.Fprintf(diag_file, "ERROR: struct field Type.X field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
if type_selectorexpr.Sel == nil {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, struct field Type Sel field is unexpectedly nil\n", file_line())
}
panic_message = "aborting due to previous errors"
break node_loop
}
// FIX MINOR: This may need work to fully and correctly reflect the complete selector.
field_type_name = "*" + x_type_ident.Name + "." + type_selectorexpr.Sel.Name
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field name and type: %#v *%v.%v\n", name.Name, x_type_ident.Name, field_type_name)
}
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type.X type: %T\n", file_line(), type_starexpr.X)
fmt.Fprintf(diag_file, "ERROR: struct field Type.X field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
} else if type_array, ok := field.Type.(*ast.ArrayType); ok {
// A nil type_array.Len means it's a slice type.
if type_array.Len != nil {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, a non-nil value for an array-type Len is not yet handled (%#v)\n",
file_line(), type_array.Len)
}
panic_message = "aborting due to previous errors"
break node_loop
}
if type_ident, ok := type_array.Elt.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- array element ident %#v\n", type_ident)
}
field_type_name = "[]" + type_ident.Name
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field name and type: %#v %#v\n", name.Name, field_type_name)
}
} else if type_starexpr, ok := type_array.Elt.(*ast.StarExpr); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- array element starexpr %#v\n", type_starexpr)
}
if type_ident, ok := type_starexpr.X.(*ast.Ident); ok {
field_type_name = "[]*" + type_ident.Name
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field name and interior StarExpr type: %#v %#v\n", name.Name, field_type_name)
}
} else if type_array, ok := type_starexpr.X.(*ast.ArrayType); ok {
if print_diagnostics {
fmt.Fprintf(diag_file, " --- UNEXPECTED interior field.Type.X Type *ast.ArrayType %#v\n", type_array)
}
// FIX MAJOR: Handle this case.
panic_message = "aborting due to previous errors"
break node_loop
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected interior field.Type.X type: %T\n", file_line(), type_starexpr.X)
fmt.Fprintf(diag_file, "ERROR: struct field interior Type.X field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type.Elt type: %T\n", file_line(), type_array.Elt)
fmt.Fprintf(diag_file, "ERROR: struct field Type Elt field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
} else if type_map, ok := field.Type.(*ast.MapType); ok {
var key_type_ident *ast.Ident
var value_type_ident *ast.Ident
var value_type_interface *ast.InterfaceType
var ok bool
if key_type_ident, ok = type_map.Key.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- map Key Ident %#v\n", key_type_ident)
}
if value_type_ident, ok = type_map.Value.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- map Value Ident %#v\n", value_type_ident)
}
field_type_name = "map[" + key_type_ident.Name + "]" + value_type_ident.Name
} else if value_type_interface, ok = type_map.Value.(*ast.InterfaceType); ok {
if print_diagnostics {
// Suppress Go's "unused variable" noisiness, when the following printed output is commented out.
value_type_interface = value_type_interface
// fmt.Fprintf(diag_file, " --- map Value InterfaceType %#v\n", value_type_interface)
}
// Logically, we would want to declare the field type name to refer to some sort of
// generic interface object, and then generate code to handle all the various types of
// objects (floats, slices, deep maps, slices of deep maps, etc.) one might encounter as
// instances of such an interface. But that is far too complex for present needs. Those
// needs currently are just to handle a structure in the Go code that has an interface
// as the value of one of its declared map values, in a structure that currently has no
// business being transferred between Go code and C code in the first place. So instead,
// we punt; we just abort this iteration of the loop, which suppresses recognition of the
// affected structure field. This will create a hiccup in later code (see "COMPENSATORY
// CONTINUE #1" below), which we handle there in a similar manner, by simply skipping further
// processing of that loop iteration as well. The net effect is that said field will not
// appear in the C structure at all, and it will be neither serialized nor deserialized.
// But that won't matter, because we don't expect to ever exchange that structure with
// any Go code anyway. This is just a workaround to avoid having to make larger changes.
// field_type_name = "map[" + key_type_ident.Name + "]" + "interface{...}"
continue
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type.Value type: %T\n", file_line(), type_map.Value)
fmt.Fprintf(diag_file, "ERROR: struct field Type Value field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
// FIX QUICK: This needs work to fully reflect the map structure; perhaps the new statements now do so.
// field_type_name = value_type_ident.Name
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field name and type: %#v map[%#v]%#v\n", name.Name, key_type_ident.Name, field_type_name)
}
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field name and type: %#v %#v\n", name.Name, field_type_name)
}
} else if key_type_starexpr, ok := type_map.Key.(*ast.StarExpr); ok {
//
// We land in this case when we have this kind of struct field as input:
//
// Re map[*regexp.Regexp][]byte
//
// and we get this error message as a consequence:
//
// found unexpected field.Type.Key type: *ast.StarExpr
//
// For now, the conversion processing has been rejiggered so we are
// no longer processing a Go file containing such a struct definition.
// So for the time being, we are not extending the conversion tool to
// cover that case, because it looks like there could be significant
// extra complexity if and when we want to process a *regexp.Regexp
// element during such conversions.
//
key_type_starexpr = key_type_starexpr
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type.Key type: %T\n", file_line(), type_map.Key)
fmt.Fprintf(diag_file, "ERROR: struct field Type Key field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type.Key type: %T\n", file_line(), type_map.Key)
fmt.Fprintf(diag_file, "ERROR: struct field Type Key field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
} else if type_selectorexpr, ok := field.Type.(*ast.SelectorExpr); ok {
var x_type_ident *ast.Ident
var ok bool
if x_type_ident, ok = type_selectorexpr.X.(*ast.Ident); ok {
if print_diagnostics {
// fmt.Fprintf(diag_file, " --- struct field name and SelectorExpr X: %#v %#v\n", name.Name, x_type_ident.Name)
}
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type.X type: %T\n", file_line(), type_selectorexpr.X)
fmt.Fprintf(diag_file, "ERROR: struct field Type.X field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
if type_selectorexpr.Sel == nil {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, struct field Type Sel field is unexpectedly nil\n", file_line())
}
panic_message = "aborting due to previous errors"
break node_loop
}
// FIX QUICK: This may need work to fully and correctly reflect the complete selector.
field_type_name = x_type_ident.Name + "." + type_selectorexpr.Sel.Name
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field name and type: %#v %v.%v\n", name.Name, x_type_ident.Name, field_type_name)
}
} else if type_interfacetype, ok := field.Type.(*ast.InterfaceType); ok {
if print_diagnostics {
// Suppress Go's "declared but not used" noisiness.
type_interfacetype = type_interfacetype
// We could analyze the declared type_interfacetype.Methods part of the abstract syntax tree
// in detail and include the interface methods within the braces in the printed field_type_name,
// but for now the complicated effort to walk the AST tree details to do so is not warranted.
// field_type_name = "interface{...}"
// fmt.Fprintf(diag_file, " --- struct field name and type: %#v %#v\n", name.Name, field_type_name)
}
// See the comment above for a map[...]interface{...} type as to why we are skipping this field --
// the data structures using an interface{} field will not be used in C/Go data transfers. See the
// companion "COMPENSATORY CONTINUE #2" below as well.
continue
} else {
if print_diagnostics {
fmt.Fprintf(diag_file, "ERROR: at %s, found unexpected field.Type type: %T\n", file_line(), field.Type)
fmt.Fprintf(diag_file, "ERROR: struct field Type field is not of a recognized type\n")
}
panic_message = "aborting due to previous errors"
break node_loop
}
struct_typedefs[spec.(*ast.TypeSpec).Name.Name] = append(struct_typedefs[spec.(*ast.TypeSpec).Name.Name], field_type_name)
struct_field_typedefs[spec.(*ast.TypeSpec).Name.Name][name.Name] = field_type_name
if field.Tag != nil {
if print_diagnostics {
fmt.Fprintf(diag_file, " --- struct field tag Value: %#v\n", field.Tag.Value)
}
}
}
// .Type *ast.Ident
// .Tag *ast.BasicLit // may be nil
// .Comment *ast.CommentGroup // likely nil
}
} else if type_interface, ok := spec.(*ast.TypeSpec).Type.(*ast.InterfaceType); ok {
if print_diagnostics {
fmt.Fprintf(diag_file, "FIX MAJOR: Handle this next case (where the type is *ast.InterfaceType)\n")
}
// This is an interface definition, which perhaps mostly declares methods, not simple types,
// enumerations, constants, or structs. Verify that assumption, and perhaps extend this case
// to process whatever it might need to. We might, for instance, at least need to emit function
// signatures, even if we don't generate full function bodies.
if print_diagnostics {
fmt.Fprintf(diag_file, "--- interface type declaration name and type: %v %#v\n", spec.(*ast.TypeSpec).Name.Name, type_interface)