[build] Share COMPILERS_VARIANTS matrix among 3 C++ builds
- osh_eval
- mycpp-unit
- mycpp-examples

We are going to make them more consistent, in order to:

- Add the .d dependencies for mycpp, fixing the incremental build
  problem, fixing #1236.
- Make the GC variant of the bin/osh_eval build.  This can share the
  same objects!
  - In the next change, they will be in _build/obj-mycpp, and only
    shared among those two.  Because of -D LEAKY_BINDINGS and so forth.
Andy C committed Jul 23, 2022
1 parent e002d4c commit 4b22991
Showing 5 changed files with 108 additions and 95 deletions.
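
In outline, the sharing pattern this commit sets up looks like the following (a condensed sketch of the diff below, not the literal file contents):

    # mycpp/NINJA_subgraph.py now defines the matrix once:
    COMPILERS_VARIANTS = [
        ('cxx', 'gcstats'), ('cxx', 'gcevery'),
        ('cxx', 'dbg'), ('cxx', 'opt'), ('cxx', 'asan'), ('cxx', 'ubsan'),
        ('clang', 'ubsan'), ('clang', 'coverage'),
    ]

    # cpp/NINJA_subgraph.py imports it and appends osh_eval-only variants:
    from mycpp import NINJA_subgraph as mycpp_subgraph

    COMPILERS_VARIANTS = mycpp_subgraph.COMPILERS_VARIANTS + [
        ('cxx', 'alloclog'), ('cxx', 'malloc'), ('cxx', 'uftrace'),
    ]

    for compiler, variant in COMPILERS_VARIANTS:
        pass  # emit preprocess / compile / link statements for this pair

The extra alloclog / malloc / uftrace pairs only make sense for the osh_eval binary, which is why they stay local to cpp/NINJA_subgraph.py.
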
139 changes: 77 additions & 62 deletions cpp/NINJA_subgraph.py
@@ -71,7 +71,8 @@
import os
import sys

# from mycpp.NINJA_subgraph import GC_RUNTIME
# TODO: could use GC_RUNTIME
from mycpp import NINJA_subgraph as mycpp_subgraph


def log(msg, *args):
@@ -115,13 +116,21 @@ def log(msg, *args):
]


# -D DUMB_ALLOC: not sure why but only the _bin/cxx-opt/osh_eval binary needs it?
# -D NO_GC_HACK: Avoid memset(). TODO: remove this hack!
# -D OSH_EVAL: hack for leaky_osh_eval_stubs.h
# -D NO_GC_HACK: Avoid memset(). -- rename GC_NO_MEMSET?
# - only applies to gc_heap.h in Space::Clear()
# -D LEAKY_BINDINGS: for QSN, which is used by the ASDL runtime
# TODO: use .leaky variant
# - _bin/cxx-leaky/osh_eval -- this means it's optimized then?
# - we still want to be able to debug it
# - $compiler-$variant-$allocator triple?
# -D DUMB_ALLOC: a speed optimization. Should be obsolete with garbage
# collector. Used for parser benchmarks.
# - can this also be controlled with the .leaky variant?

# leakyopt, leakyasan -- I guess this is good for tests

# single quoted in Ninja/shell syntax
OSH_EVAL_FLAGS_STR = "'-D DUMB_ALLOC -D NO_GC_HACK -D OSH_EVAL -D LEAKY_BINDINGS'"
OSH_EVAL_FLAGS_STR = "'-D DUMB_ALLOC -D NO_GC_HACK -D LEAKY_BINDINGS'"


def NinjaGraph(n):
@@ -180,83 +189,89 @@ def NinjaGraph(n):

n.newline()

for compiler in ['cxx', 'clang']:
for variant in [
'dbg', 'opt', 'asan', 'alloclog', 'malloc', 'uftrace',
# leave out tcmalloc since it requires system libs to be installed
# 'tcmalloc'
]:
COMPILERS_VARIANTS = mycpp_subgraph.COMPILERS_VARIANTS + [
# note: these could be clang too
('cxx', 'alloclog'),
('cxx', 'malloc'),
('cxx', 'uftrace'),

ninja_vars = [('compiler', compiler), ('variant', variant), ('more_cxx_flags', OSH_EVAL_FLAGS_STR)]
# leave out tcmalloc since it requires system libs to be installed
# 'tcmalloc'
#('cxx', 'tcmalloc')
]

sources = DEPS_CC + OLD_RUNTIME
for compiler, variant in COMPILERS_VARIANTS:

#
# See how much input we're feeding to the compiler. Test C++ template
# explosion, e.g. <unordered_map>
#
ninja_vars = [('compiler', compiler), ('variant', variant), ('more_cxx_flags', OSH_EVAL_FLAGS_STR)]

preprocessed = []
for src in sources:
# e.g. _build/obj/dbg/posix.o
base_name, _ = os.path.splitext(os.path.basename(src))
sources = DEPS_CC + OLD_RUNTIME

pre = '_build/preprocessed/%s-%s/%s.cc' % (compiler, variant, base_name)
preprocessed.append(pre)
#
# See how much input we're feeding to the compiler. Test C++ template
# explosion, e.g. <unordered_map>
#

n.build(pre, 'preprocess', [src], variables=ninja_vars)
n.newline()
preprocessed = []
for src in sources:
# e.g. _build/obj/dbg/posix.o
base_name, _ = os.path.splitext(os.path.basename(src))

pre = '_build/preprocessed/%s-%s/%s.cc' % (compiler, variant, base_name)
preprocessed.append(pre)

n.build('_build/preprocessed/%s-%s.txt' % (compiler, variant),
'line_count', preprocessed, variables=ninja_vars)
n.build(pre, 'preprocess', [src], variables=ninja_vars)
n.newline()

#
# TOGETHER
#
n.build('_build/preprocessed/%s-%s.txt' % (compiler, variant),
'line_count', preprocessed, variables=ninja_vars)
n.newline()

bin_together = '_bin/%s-%s-together/osh_eval' % (compiler, variant)
binaries.append(bin_together)
#
# TOGETHER
#

n.build(bin_together, 'compile_and_link',
sources, variables=ninja_vars)
n.newline()
bin_together = '_bin/%s-%s-together/osh_eval' % (compiler, variant)
binaries.append(bin_together)

#
# SEPARATE: Compile objects
#
n.build(bin_together, 'compile_and_link',
sources, variables=ninja_vars)
n.newline()

objects = []
for src in sources:
# e.g. _build/obj/dbg/posix.o
base_name, _ = os.path.splitext(os.path.basename(src))
#
# SEPARATE: Compile objects
#

obj = '_build/obj/%s-%s/%s.o' % (compiler, variant, base_name)
objects.append(obj)
objects = []
for src in sources:
# e.g. _build/obj/dbg/posix.o
base_name, _ = os.path.splitext(os.path.basename(src))

n.build(obj, 'compile_one', [src], variables=ninja_vars)
n.newline()
obj = '_build/obj/%s-%s/%s.o' % (compiler, variant, base_name)
objects.append(obj)

bin_separate = '_bin/%s-%s/osh_eval' % (compiler, variant)
binaries.append(bin_separate)
n.build(obj, 'compile_one', [src], variables=ninja_vars)
n.newline()

#
# SEPARATE: Link objects into binary
#
bin_separate = '_bin/%s-%s/osh_eval' % (compiler, variant)
binaries.append(bin_separate)

link_vars = [('compiler', compiler), ('variant', variant)] # no CXX flags
n.build(bin_separate, 'link', objects, variables=link_vars)
n.newline()
#
# SEPARATE: Link objects into binary
#

# Strip the .opt binary
if variant == 'opt':
for b in [bin_together, bin_separate]:
stripped = b + '.stripped'
symbols = b + '.symbols'
n.build([stripped, symbols], 'strip', [b])
n.newline()
link_vars = [('compiler', compiler), ('variant', variant)] # no CXX flags
n.build(bin_separate, 'link', objects, variables=link_vars)
n.newline()

# Strip the .opt binary
if variant == 'opt':
for b in [bin_together, bin_separate]:
stripped = b + '.stripped'
symbols = b + '.symbols'
n.build([stripped, symbols], 'strip', [b])
n.newline()

binaries.append(stripped)
binaries.append(stripped)

n.default(['_bin/cxx-dbg/osh_eval'])

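The commit message's first bullet (.d header dependencies, for incremental builds) is not part of this change. A minimal sketch of how it could be wired up with a ninja_syntax-style writer, assuming gcc-style -MD -MF flags and reusing the names from the loop above -- the repo's real rule definitions live elsewhere and may differ:

    # Hedged sketch: 'compile_one' matches the rule used above, but this
    # command line is an assumption, not the repo's actual rule.
    n.rule('compile_one',
           command='$compiler $more_cxx_flags -MD -MF $out.d -c $in -o $out',
           depfile='$out.d',
           deps='gcc',  # Ninja parses the .d file, then stores deps in its log
           description='CXX $out')

    # Each object then gets its header dependencies tracked automatically,
    # so editing a .h file re-triggers only the affected compiles.
    n.build(obj, 'compile_one', [src], variables=ninja_vars)
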
5 changes: 1 addition & 4 deletions cpp/leaky_preamble.h
@@ -35,16 +35,13 @@ using id_kind_asdl::Kind_t;
#include "leaky_frontend_tdop.h"
#include "leaky_libc.h"
#include "leaky_osh.h"
#include "leaky_osh_eval_stubs.h"
#include "leaky_pgen2.h"
#include "leaky_pylib.h"
#include "leaky_stdlib.h"
#include "qsn.h"
#include "segfault_handler.h"

#ifdef OSH_EVAL
#include "leaky_osh_eval_stubs.h"
#endif

inline bool are_equal(id_kind_asdl::Kind left, id_kind_asdl::Kind right) {
return left == right;
}
1 change: 1 addition & 0 deletions cpp/test.sh
@@ -134,6 +134,7 @@ unit() {

gc-binding-test '' gcevery
gc-binding-test '' leaky
# leakyasan?

# Has generated code
leaky-flag-spec-test '' ''
55 changes: 29 additions & 26 deletions mycpp/NINJA_subgraph.py
@@ -20,8 +20,16 @@
Output Layout:
_bin/
_build/
# TODO: combine with obj/ after we get rid of -D LEAKY_BINDINGS -D
# NO_GC_HACK, etc.
obj-mycpp/
cxx-dbg/
cxx-gcevery/
cxx-opt/
clang-coverage/
_bin/
cxx-dbg/
mycpp-examples/
cgi
@@ -35,6 +43,8 @@
mycpp-unit/
gc_heap_test
clang-coverage/
_test/
gen-mycpp/ # rewrite
varargs_raw.cc
@@ -189,14 +199,29 @@ def ShouldSkipBenchmark(name):
'parse': [], # added dynamically from mycpp/examples/parse.translate.txt
}

# Unused. Could use mycpp/examples/parse.typecheck.txt
EXAMPLES_PY = {
'parse': [], # added dynamically
'parse': [],
}

EXAMPLES_CC = {
'parse': ['_test/asdl/expr_asdl.cc'],
}

COMPILERS_VARIANTS = [
# mainly for unit tests
('cxx', 'gcstats'),
('cxx', 'gcevery'),

('cxx', 'dbg'),
('cxx', 'opt'),
('cxx', 'asan'),
('cxx', 'ubsan'),

#('clang', 'asan'),
('clang', 'ubsan'),
('clang', 'coverage'),
]

def TranslatorSubgraph(n, translator, ex, to_compare, benchmark_tasks, phony):
raw = '_test/gen-%s/%s_raw.cc' % (translator, ex)
@@ -222,15 +247,7 @@ def TranslatorSubgraph(n, translator, ex, to_compare, benchmark_tasks, phony):
phony['pea-translate'].append(cc_src)

if translator == 'mycpp':
example_matrix = [
('cxx', 'gcevery'),
('cxx', 'asan'),
('cxx', 'ubsan'),
('cxx', 'opt'),

('clang', 'ubsan'), # Finds more bugs!
('clang', 'coverage'),
]
example_matrix = COMPILERS_VARIANTS
else:
example_matrix = [
('cxx', 'gcevery')
@@ -407,21 +424,7 @@ def NinjaGraph(n):
# assume names are unique
test_name = os.path.basename(test_path)

UNIT_TEST_MATRIX = [
('cxx', 'gcstats'),
('cxx', 'gcevery'),

# Clang and GCC have different implementations of ASAN and UBSAN
('cxx', 'asan'),
('cxx', 'ubsan'),

('clang', 'asan'),
('clang', 'ubsan'),

('clang', 'coverage'),
]

for (compiler, variant) in UNIT_TEST_MATRIX:
for (compiler, variant) in COMPILERS_VARIANTS:
b = '_bin/%s-%s/mycpp-unit/%s' % (compiler, variant, test_name)

main_cc = '%s.cc' % test_path
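The second commit-message bullet (sharing objects between mycpp and the GC variant of bin/osh_eval under _build/obj-mycpp, per the layout comment above) is deferred to the next change. One possible shape, with a hypothetical ObjPath helper whose name and signature are illustrative only:

    def ObjPath(compiler, variant, src):
        # Hypothetical: objects shared by mycpp unit tests/examples and the
        # GC osh_eval build would live under _build/obj-mycpp, keyed only by
        # (compiler, variant).
        base_name, _ = os.path.splitext(os.path.basename(src))
        return '_build/obj-mycpp/%s-%s/%s.o' % (compiler, variant, base_name)

Sharing only works once both subgraphs compile with identical flags, which is why -D LEAKY_BINDINGS and friends have to go away first, as the commit message notes.
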
3 changes: 0 additions & 3 deletions mycpp/gc_heap_test.cc
@@ -41,14 +41,12 @@ using gc_heap::RoundUp;
// Variables
using gc_heap::gHeap;


#ifdef GC_STATS
#define ASSERT_NUM_LIVE_OBJS(x) ASSERT_EQ_FMT((x), gHeap.num_live_objs_, "%d")
#else
#define ASSERT_NUM_LIVE_OBJS(x)
#endif


// Hm we're getting a warning because these aren't plain old data?
// https://stackoverflow.com/questions/1129894/why-cant-you-use-offsetof-on-non-pod-structures-in-c
// https://stackoverflow.com/questions/53850100/warning-offset-of-on-non-standard-layout-type-derivedclass
@@ -538,7 +536,6 @@ TEST slab_trace_test() {
gHeap.Collect();
ASSERT_NUM_LIVE_OBJS(0);


List<Str*>* strings = nullptr;
Str* tmp = nullptr;
StackRoots _roots({&strings, &tmp});
