Permalink
Browse files

Enhance various test harnesses to prepare for release.

- Preparing to publish a log of test/osh2oil.sh
- Add osh-to-bash ratio in elapsed time table in the osh-parser
  benchmark.
- Improve the unit test harness.
  - 'all' now runs all the tests and stops at the first failure.  No
    logging.
  - 'run-for-release' does the logging.  Still need to check for
    failure.
  - Start of HTML table
- Factor out common.R so I can use it for test harnesses too.

Other:

- Delete shell function to reproduce non-deterministic CPython bug.  It
  was copied to ~/git/scratch.
  • Loading branch information...
Andy Chu
Andy Chu committed Dec 21, 2017
1 parent 3c48518 commit 588c9fb3cd0bd49f3f0e91950eb058fe0ce55c46
Showing with 197 additions and 128 deletions.
  1. +55 −0 benchmarks/common.R
  2. +7 −48 benchmarks/report.R
  3. +7 −2 scripts/release.sh
  4. +1 −1 test/gold.sh
  5. +21 −1 test/osh2oil.sh
  6. +106 −76 test/unit.sh
View
@@ -0,0 +1,55 @@
#!/usr/bin/Rscript
#
# common.R - Shared R functions.
# Print a printf-style formatted message, terminated by a newline.
Log = function(fmt, ...) {
  line = sprintf(fmt, ...)
  cat(line, '\n', sep = '')
}
# Same precision for all columns.
#
# Returns a function(column_name) that ignores its argument and always
# yields the given number of digits.
SamePrecision = function(precision = 1) {
  function(column_name) {
    precision
  }
}
# Precision by column.
#
# Returns a function(column_name) that looks the name up in precision_map,
# falling back to 'default' for columns not listed.
ColumnPrecision = function(precision_map, default = 1) {
  function(column_name) {
    found = precision_map[[column_name]]
    if (is.null(found)) default else found
  }
}
# Write a CSV file along with a schema describing each column.
#
# Args:
#   table: data frame to write
#   prefix: output path prefix; writes {prefix}.csv and {prefix}.schema.csv
#   precision_func: optional function(column_name) -> integer precision;
#     when NULL, every column gets a precision of 1.
writeCsv = function(table, prefix, precision_func = NULL) {
  data_out_path = paste0(prefix, '.csv')
  write.csv(table, data_out_path, row.names = F)

  if (is.null(precision_func)) {
    precision_func = function(column_name) { 1 }
  }

  col_names = names(table)
  # vapply pins the result type, unlike lapply + as.character.
  types = vapply(col_names,
                 function(name) { typeof(table[[name]]) },
                 character(1))
  precisions = vapply(col_names,
                      function(name) { as.character(precision_func(name)) },
                      character(1))

  # Base data.frame instead of dplyr's deprecated data_frame(), so this
  # shared file works without any package loaded.  (Also removed a leftover
  # debug print() of the precision list.)
  schema = data.frame(
    column_name = col_names,
    type = types,
    precision = precisions,
    stringsAsFactors = F
  )
  schema_out_path = paste0(prefix, '.schema.csv')
  write.csv(schema, schema_out_path, row.names = F)
}
View
@@ -9,6 +9,8 @@ library(dplyr)
library(tidyr)
library(stringr)
source('benchmarks/common.R')
options(stringsAsFactors = F)
Log = function(fmt, ...) {
@@ -34,51 +36,6 @@ benchmarkDataLink = function(subdir, name, suffix) {
subdir, name, suffix)
}
# Same precision for all columns.
#
# Builds a precision function that returns a constant regardless of the
# column name it is asked about.
SamePrecision = function(precision = 1) {
  fixed = precision
  function(column_name) fixed
}
# Precision by column.
#
# precision_map: list mapping column name -> precision; names not present
# fall back to 'default'.
ColumnPrecision = function(precision_map, default = 1) {
  function(column_name) {
    entry = precision_map[[column_name]]
    if (!is.null(entry)) {
      entry
    } else {
      default
    }
  }
}
# Write a CSV file along with a schema describing each column.
#
# Args:
#   table: data frame to write
#   prefix: output path prefix; writes {prefix}.csv and {prefix}.schema.csv
#   precision_func: optional function(column_name) -> integer precision;
#     when NULL, every column gets a precision of 1.
writeCsv = function(table, prefix, precision_func = NULL) {
  data_out_path = paste0(prefix, '.csv')
  write.csv(table, data_out_path, row.names = F)

  if (is.null(precision_func)) {
    precision_func = function(column_name) { 1 }
  }

  col_names = names(table)
  # vapply pins the result type, unlike lapply + as.character.
  types = vapply(col_names,
                 function(name) { typeof(table[[name]]) },
                 character(1))
  precisions = vapply(col_names,
                      function(name) { as.character(precision_func(name)) },
                      character(1))

  # Base data.frame instead of dplyr's deprecated data_frame(); also drops
  # the leftover debug print() of the precision list.
  schema = data.frame(
    column_name = col_names,
    type = types,
    precision = precisions,
    stringsAsFactors = F
  )
  schema_out_path = paste0(prefix, '.schema.csv')
  write.csv(schema, schema_out_path, row.names = F)
}
ParserReport = function(in_dir, out_dir) {
times = read.csv(file.path(in_dir, 'times.csv'))
lines = read.csv(file.path(in_dir, 'lines.csv'))
@@ -173,9 +130,10 @@ ParserReport = function(in_dir, out_dir) {
select(-c(lines_per_ms)) %>%
spread(key = shell_label, value = elapsed_ms) %>%
arrange(host_label, num_lines) %>%
mutate(filename = basename(path), filename_HREF = sourceUrl(path)) %>%
mutate(filename = basename(path), filename_HREF = sourceUrl(path),
osh_to_bash_ratio = `osh-ovm` / bash) %>%
select(c(host_label, bash, dash, mksh, zsh, `osh-ovm`, `osh-cpython`,
num_lines, filename, filename_HREF)) ->
osh_to_bash_ratio, num_lines, filename, filename_HREF)) ->
elapsed
Log('\n')
@@ -227,7 +185,8 @@ ParserReport = function(in_dir, out_dir) {
precision = ColumnPrecision(list(total_ms = 0)) # round to nearest millisecond
writeCsv(shell_summary, file.path(out_dir, 'summary'), precision)
precision = SamePrecision(0) # round to nearest millisecond
# Round to nearest millisecond, but the ratio has a decimal point.
precision = ColumnPrecision(list(osh_to_bash_ratio = 1), default = 0)
writeCsv(elapsed, file.path(out_dir, 'elapsed'), precision)
writeCsv(rate, file.path(out_dir, 'rate'))
View
@@ -54,6 +54,7 @@ log() {
# machine-lisa/
# wild.wwz/
# unit/
# osh2oil/
# gold/
# tarball/ # log of building and running the tarball?
# asan/ # spec tests or other?
@@ -82,7 +83,7 @@ log() {
# Delete build artifacts and test-output directories so a release build
# starts from a clean tree.  Missing paths are ignored (-f).
remove-files() {
  local -a paths=(
    _devbuild _build _release
    _tmp/{spec,wild,unit,osh2oil}
    _tmp/{osh-parser,osh-runtime,vm-baseline,oheap}
    _tmp/metrics
    _tmp/oil-tar-test
  )
  rm -r -f "${paths[@]}"
}
@@ -94,8 +95,8 @@ build-and-test() {
build/dev.sh all # for {libc,fastlex}.so, needed to crawl deps
# TODO: publish unit tests.
test/unit.sh all
test/osh2oil.sh run-for-release
build/prepare.sh configure
build/prepare.sh build-python
@@ -335,6 +336,10 @@ _link() {
compress() {
local root=$PWD/_release/VERSION/
# TODO:
#log "--- test/unit"
#log "--- test/osh2oil"
log "--- test/spec"
local out="$root/test/spec.wwz"
pushd _tmp/spec
View
# Gold test cases: each runs a script under OSH and bash and diffs the
# output via _compare (defined elsewhere in this file).
# NOTE(review): this function shadows the bash builtin 'declare' within
# this script -- any later use of the builtin would invoke this instead.
declare() { _compare gold/declare.sh demo; }
scope() { _compare gold/scope.sh; }
all() {
# This one differs by timestamp
# FLAKY: This one differs by timestamp
version-text
comments
View
@@ -885,7 +885,7 @@ time {
OIL
}
all-passing() {
readonly -a PASSING=(
simple-command
more-env
line-breaks
@@ -919,6 +919,26 @@ all-passing() {
# Builtins
bracket-builtin
)
# Run every test in the PASSING array (defined above), stopping at the
# first failure; prints a summary line when all succeed.
all-passing() {
  for t in "${PASSING[@]}"; do
    # A failing test calls 'fail', which calls 'exit 1', aborting the loop.
    $t
    echo "OK $t"
  done
  echo
  echo "All osh2oil tests passed."
}
# Run all passing tests, logging their output to _tmp/osh2oil/log.txt for
# publication with the release.  Returns non-zero if the test run failed.
run-for-release() {
  local out_dir=_tmp/osh2oil
  mkdir -p "$out_dir"
  # 'tee' would otherwise mask the exit status of all-passing; capture it
  # from PIPESTATUS so a failing run is reported to the caller.
  all-passing | tee "$out_dir/log.txt"
  local status=${PIPESTATUS[0]}
  echo "Wrote $out_dir/log.txt"
  return "$status"
}
"$@"
Oops, something went wrong.

0 comments on commit 588c9fb

Please sign in to comment.