Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
101 changes: 101 additions & 0 deletions bench/src/sjsonnet/bench/EvaluatorBenchmark.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
package sjsonnet.bench

import org.openjdk.jmh.annotations.*
import org.openjdk.jmh.infra.*
import sjsonnet.*

import java.io.{ByteArrayOutputStream, OutputStream, PrintStream, StringWriter}
import java.util.concurrent.TimeUnit

/**
 * A/B benchmark comparing old (instanceof chain) vs new (tag + tableswitch) evaluator.
 *
 * Runs the full interpret pipeline (parse → optimize → evaluate → materialize) for each benchmark
 * file, isolating the evaluator difference by using the same Settings with only `useNewEvaluator`
 * toggled. Paths are resolved against MILL_WORKSPACE_ROOT when set, falling back to the current
 * working directory.
 */
@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@Threads(1)
@Warmup(iterations = 15)
@Measurement(iterations = 20)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
class EvaluatorBenchmark {

  /** Workspace-relative path of the .jsonnet file to benchmark; JMH runs every value. */
  @Param(
    Array(
      // cpp_suite — C++ jsonnet benchmarks
      "bench/resources/cpp_suite/bench.01.jsonnet",
      "bench/resources/cpp_suite/bench.02.jsonnet",
      "bench/resources/cpp_suite/bench.03.jsonnet",
      "bench/resources/cpp_suite/bench.04.jsonnet",
      "bench/resources/cpp_suite/bench.06.jsonnet",
      "bench/resources/cpp_suite/bench.08.jsonnet",
      "bench/resources/cpp_suite/bench.09.jsonnet",
      "bench/resources/cpp_suite/gen_big_object.jsonnet",
      "bench/resources/cpp_suite/heavy_string_render.jsonnet",
      "bench/resources/cpp_suite/large_string_join.jsonnet",
      "bench/resources/cpp_suite/realistic1.jsonnet",
      "bench/resources/cpp_suite/realistic2.jsonnet",
      "bench/resources/cpp_suite/string_render_perf.jsonnet",
      // go_suite — Go jsonnet builtins
      "bench/resources/go_suite/base64_heavy.jsonnet",
      "bench/resources/go_suite/base64_mega.jsonnet",
      "bench/resources/go_suite/comparison.jsonnet",
      "bench/resources/go_suite/comparison2.jsonnet",
      "bench/resources/go_suite/foldl.jsonnet",
      "bench/resources/go_suite/reverse.jsonnet",
      "bench/resources/go_suite/substr.jsonnet",
      // bug_suite
      "bench/resources/bug_suite/assertions.jsonnet",
      // sjsonnet_suite
      "bench/resources/sjsonnet_suite/setDiff.jsonnet"
    )
  )
  var path: String = _

  // Resolved once per trial in setup(); deliberately kept out of the measured path.
  private var wd: os.Path = _
  private var filePath: OsPath = _
  private var fileContent: String = _
  private var jpaths: Seq[OsPath] = _

  /** Resolves the workspace root and reads the benchmark file once per trial. */
  @Setup(Level.Trial)
  def setup(): Unit = {
    wd = sys.env.get("MILL_WORKSPACE_ROOT").map(os.Path(_)).getOrElse(os.pwd)
    val file = wd / os.RelPath(path) // resolve once; previously computed twice
    filePath = OsPath(file)
    fileContent = os.read(file)
    jpaths = Seq(OsPath(wd))
  }

  /**
   * Runs the full interpret pipeline on the preloaded file content.
   *
   * @param useNew selects the new tag-based evaluator when true, the old one when false;
   *               all other Settings are identical between the two benchmark variants
   * @return the rendered JSON output
   * @throws RuntimeException if interpretation fails, so JMH surfaces the error instead of
   *                          silently timing a broken run
   */
  private def run(useNew: Boolean): String = {
    val settings = new Settings(
      useNewEvaluator = useNew,
      maxStack = 100000
    )
    val interp = new Interpreter(
      Map.empty[String, String],
      Map.empty[String, String],
      OsPath(wd),
      importer = new SjsonnetMainBase.SimpleImporter(jpaths, None),
      parseCache = new DefaultParseCache,
      settings = settings
    )
    val writer = new StringWriter
    val renderer = new Renderer(writer, indent = 3)
    interp.interpret0(fileContent, filePath, renderer) match {
      case Right(_) => writer.toString
      case Left(e)  => throw new RuntimeException(e)
    }
  }

  /** Baseline: old evaluator (instanceof dispatch chain). */
  @Benchmark
  def oldEvaluator(bh: Blackhole): Unit = {
    bh.consume(run(useNew = false))
  }

  /** Candidate: new evaluator (tag-based dispatch). */
  @Benchmark
  def newEvaluator(bh: Blackhole): Unit = {
    bh.consume(run(useNew = true))
  }
}
166 changes: 166 additions & 0 deletions bench/src/sjsonnet/bench/ExprTagProfile.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
package sjsonnet.bench

import sjsonnet.*

import java.io.StringWriter

/**
 * Profile the frequency of each ExprTag in visitExpr calls across benchmark workloads. Run with:
 * ./mill bench.runMain sjsonnet.bench.ExprTagProfile [file1.jsonnet file2.jsonnet ...]
 *
 * If no files are given, profiles ALL .jsonnet files under bench/resources/.
 *
 * Counting works by subclassing the evaluator and incrementing a per-file tag histogram on every
 * visitExpr call before delegating to the real implementation.
 */
object ExprTagProfile {

  // Human-readable name for each ExprTag value, indexed by tag. Must stay in sync with the
  // ExprTags constants in the main source tree; unknown tags are printed as "tag=N".
  private val tagNames = Array(
    "UNTAGGED", // 0
    "ValidId", // 1
    "BinaryOp", // 2
    "Select", // 3
    "Val.Literal", // 4
    "Val.Func", // 5
    "ApplyBuiltin0", // 6
    "ApplyBuiltin1", // 7
    "ApplyBuiltin2", // 8
    "ApplyBuiltin3", // 9
    "ApplyBuiltin4", // 10
    "And", // 11
    "Or", // 12
    "UnaryOp", // 13
    "Apply1", // 14
    "Lookup", // 15
    "Function", // 16
    "LocalExpr", // 17
    "Apply", // 18
    "IfElse", // 19
    "Apply3", // 20
    "ObjBody.MemberList", // 21
    "Apply2", // 22
    "AssertExpr", // 23
    "ApplyBuiltin", // 24
    "Comp", // 25
    "Arr", // 26
    "SelectSuper", // 27
    "LookupSuper", // 28
    "InSuper", // 29
    "ObjExtend", // 30
    "ObjBody.ObjComp", // 31
    "Slice", // 32
    "Import", // 33
    "Apply0", // 34
    "ImportStr", // 35
    "ImportBin", // 36
    "Error" // 37
  )

  def main(args: Array[String]): Unit = {
    val wd = sys.env.get("MILL_WORKSPACE_ROOT").map(os.Path(_)).getOrElse(os.pwd)
    val benchRoot = wd / "bench" / "resources"

    // Explicit file arguments are workspace-relative; with no args, walk all benchmark inputs.
    val files =
      if (args.nonEmpty) args.map(os.RelPath(_)).toSeq
      else
        os.walk(benchRoot)
          .filter(_.ext == "jsonnet")
          .map(_.relativeTo(wd))
          .sorted

    // Sized slightly larger than tagNames so unexpected tag values are still counted safely.
    val globalCounts = new Array[Long](40)
    var globalTotal = 0L
    val perFile = scala.collection.mutable.ArrayBuffer[(String, Long, Array[Long])]()

    for (rel <- files) {
      val counts = new Array[Long](40)
      val filePath = OsPath(wd / rel)
      // Unreadable files are skipped via the "" sentinel rather than aborting the whole run.
      val content =
        try os.read(wd / rel)
        catch { case _: Exception => System.err.println(s"SKIP (read error): $rel"); "" }
      if (content.nonEmpty) {
        val ok =
          try {
            val interp = new Interpreter(
              Map.empty[String, String],
              Map.empty[String, String],
              OsPath(wd),
              importer = new SjsonnetMainBase.SimpleImporter(
                Seq(OsPath(wd), OsPath(wd / "bench"), OsPath(wd / "bench" / "resources")),
                None
              ),
              parseCache = new DefaultParseCache,
              settings = new Settings(maxStack = 100000)
            ) {
              // Swap in a counting evaluator: tally the tag of every visited expression,
              // then delegate to the normal evaluation logic.
              override def createEvaluator(
                  resolver: CachedResolver,
                  extVars: String => Option[Expr],
                  wd: Path,
                  settings: Settings): Evaluator =
                new Evaluator(resolver, extVars, wd, settings) {
                  override def visitExpr(e: Expr)(implicit scope: ValScope): Val = {
                    val t = e.tag & 0xff
                    if (t < counts.length) counts(t) += 1
                    super.visitExpr(e)
                  }
                }
            }
            val writer = new StringWriter
            val renderer = new Renderer(writer, indent = 3)
            interp.interpret0(content, filePath, renderer) match {
              case Right(_) => true
              case Left(e) =>
                System.err.println(s"ERROR: $rel: $e")
                false
            }
          } catch {
            // was `case e: StackOverflowError` with `e` unused — bind as `_`
            case _: StackOverflowError =>
              System.err.println(s"SKIP (StackOverflow): $rel")
              false
            case e: Exception =>
              System.err.println(s"SKIP (${e.getClass.getSimpleName}): $rel")
              false
          }

        // Only successfully-interpreted files contribute to the per-file and global totals.
        val total = counts.sum
        if (ok && total > 0) {
          perFile += ((rel.toString, total, counts.clone()))
          var i = 0
          while (i < counts.length) {
            globalCounts(i) += counts(i)
            i += 1
          }
          globalTotal += total
        }
      }
    }

    // Per-file summary: files sorted by descending visit count, with their top 3 tags.
    println("\n" + "=" * 100)
    println("PER-FILE SUMMARY")
    println("=" * 100)
    for ((file, total, counts) <- perFile.sortBy(-_._2)) {
      val sorted = counts.zipWithIndex.filter(_._1 > 0).sortBy(-_._1)
      val top3 = sorted
        .take(3)
        .map { case (c, idx) =>
          val name = if (idx < tagNames.length) tagNames(idx) else s"tag=$idx"
          f"$name(${c * 100.0 / total}%.0f%%)"
        }
        .mkString(", ")
      println(f" $file%-65s total=$total%10d top3: $top3")
    }

    // Global aggregation: every tag ranked by count, with percentage and running cumulative share.
    println("\n" + "=" * 100)
    println(f"GLOBAL AGGREGATE (${perFile.size} files, $globalTotal%,d total visitExpr calls)")
    println("=" * 100)
    val globalSorted = globalCounts.zipWithIndex.filter(_._1 > 0).sortBy(-_._1)
    var cumPct = 0.0
    println(f" ${"Rank"}%-5s ${"ExprTag"}%-20s ${"Count"}%12s ${"Pct"}%7s ${"Cumulative"}%10s")
    println(" " + "-" * 60)
    for (((count, idx), rank) <- globalSorted.zipWithIndex) {
      val name = if (idx < tagNames.length) tagNames(idx) else s"tag=$idx"
      val pct = count * 100.0 / globalTotal
      cumPct += pct
      println(f" ${rank + 1}%-5d $name%-20s $count%,12d $pct%6.1f%% $cumPct%9.1f%%")
    }
  }
}
5 changes: 5 additions & 0 deletions sjsonnet/src-jvm-native/sjsonnet/Config.scala
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,11 @@ final case class Config(
"Profile evaluation and write results to a file. Format: --profile <file> or --profile <format>:<file> where format is 'text' (default) or 'flamegraph'"
)
profile: Option[String] = None,
@arg(
name = "new-evaluator",
doc = "Use the new tag-based evaluator (hybrid instanceof + tableswitch dispatch)"
)
newEvaluator: Flag = Flag(),
@arg(
doc = "The jsonnet file you wish to evaluate",
positional = true
Expand Down
1 change: 1 addition & 0 deletions sjsonnet/src-jvm-native/sjsonnet/SjsonnetMainBase.scala
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,7 @@ object SjsonnetMainBase {
throwErrorForInvalidSets = config.throwErrorForInvalidSets.value,
maxParserRecursionDepth = config.maxParserRecursionDepth,
brokenAssertionLogic = config.brokenAssertionLogic.value,
useNewEvaluator = config.newEvaluator.value,
maxStack = config.maxStack
),
parseCache,
Expand Down
27 changes: 16 additions & 11 deletions sjsonnet/src/sjsonnet/Evaluator.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2048,13 +2048,24 @@ class NewEvaluator(
fc: FormatCache = FormatCache.SharedDefault)
extends Evaluator(r, e, w, s, wa, ds, fc) {

// Hot path: top 7 types cover 96.1% of all visitExpr calls across benchmarks.
// ~120 bytes bytecode — within JIT FreqInlineSize=325, unlike the old evaluator's ~700 bytes.
// Order matches old evaluator's first 4 types (ValidId, BinaryOp, Select, Val) for C1 parity.
override def visitExpr(e: Expr)(implicit scope: ValScope): Val = try {
if (e.isInstanceOf[ValidId]) visitValidId(e.asInstanceOf[ValidId])
else if (e.isInstanceOf[BinaryOp]) visitBinaryOp(e.asInstanceOf[BinaryOp])
else if (e.isInstanceOf[Select]) visitSelect(e.asInstanceOf[Select])
else if (e.isInstanceOf[Val]) e.asInstanceOf[Val]
else if (e.isInstanceOf[Apply1]) visitApply1(e.asInstanceOf[Apply1])
else if (e.isInstanceOf[ObjExtend]) visitObjExtend(e.asInstanceOf[ObjExtend])
else if (e.isInstanceOf[IfElse]) visitIfElse(e.asInstanceOf[IfElse])
else visitExprCold(e)
} catch {
Error.withStackFrame(e)
}

private def visitExprCold(e: Expr)(implicit scope: ValScope): Val =
(e.tag: @switch) match {
case ExprTags.ValidId => visitValidId(e.asInstanceOf[ValidId])
case ExprTags.BinaryOp => visitBinaryOp(e.asInstanceOf[BinaryOp])
case ExprTags.Select => visitSelect(e.asInstanceOf[Select])
case ExprTags.`Val.Func` => e.asInstanceOf[Val.Func]
case ExprTags.`Val.Literal` => e.asInstanceOf[Val.Literal]
case ExprTags.ApplyBuiltin0 => visitApplyBuiltin0(e.asInstanceOf[ApplyBuiltin0])
case ExprTags.ApplyBuiltin1 => visitApplyBuiltin1(e.asInstanceOf[ApplyBuiltin1])
case ExprTags.ApplyBuiltin2 => visitApplyBuiltin2(e.asInstanceOf[ApplyBuiltin2])
Expand All @@ -2063,14 +2074,12 @@ class NewEvaluator(
case ExprTags.And => visitAnd(e.asInstanceOf[And])
case ExprTags.Or => visitOr(e.asInstanceOf[Or])
case ExprTags.UnaryOp => visitUnaryOp(e.asInstanceOf[UnaryOp])
case ExprTags.Apply1 => visitApply1(e.asInstanceOf[Apply1])
case ExprTags.Lookup => visitLookup(e.asInstanceOf[Lookup])
case ExprTags.Function =>
val f = e.asInstanceOf[Function]
visitMethod(f.body, f.params, f.pos)
case ExprTags.LocalExpr => visitLocalExpr(e.asInstanceOf[LocalExpr])
case ExprTags.Apply => visitApply(e.asInstanceOf[Apply])
case ExprTags.IfElse => visitIfElse(e.asInstanceOf[IfElse])
case ExprTags.Apply3 => visitApply3(e.asInstanceOf[Apply3])
case ExprTags.`ObjBody.MemberList` =>
val oml = e.asInstanceOf[ObjBody.MemberList]
Expand All @@ -2083,7 +2092,6 @@ class NewEvaluator(
case ExprTags.SelectSuper => visitSelectSuper(e.asInstanceOf[SelectSuper])
case ExprTags.LookupSuper => visitLookupSuper(e.asInstanceOf[LookupSuper])
case ExprTags.InSuper => visitInSuper(e.asInstanceOf[InSuper])
case ExprTags.ObjExtend => visitObjExtend(e.asInstanceOf[ObjExtend])
case ExprTags.`ObjBody.ObjComp` => visitObjComp(e.asInstanceOf[ObjBody.ObjComp], null)
case ExprTags.Slice => visitSlice(e.asInstanceOf[Slice])
case ExprTags.Import => visitImport(e.asInstanceOf[Import])
Expand All @@ -2093,9 +2101,6 @@ class NewEvaluator(
case ExprTags.Error => visitError(e.asInstanceOf[Expr.Error])
case _ => visitInvalid(e)
}
} catch {
Error.withStackFrame(e)
}
// This is only needed for --no-static-errors, otherwise these expression types do not make it past the optimizer
override def visitInvalid(e: Expr): Nothing = (e.tag: @switch) match {
case ExprTags.Id =>
Expand Down
Loading