From c8688c4122ff8b8e1dd6fec0230579458bbcd691 Mon Sep 17 00:00:00 2001 From: Tobias Schlatter Date: Sun, 27 Sep 2020 12:58:52 +0200 Subject: [PATCH] Address comments: Add Module Splitting support in the linker --- .../org/scalajs/linker/interface/Linker.scala | 113 ++++++++++++------ .../org/scalajs/linker/interface/Report.scala | 14 ++- .../unstable/OutputDirectoryImpl.scala | 4 +- .../interface/unstable/ReportImpl.scala | 4 +- .../scalajs/linker/NodeOutputDirectory.scala | 17 ++- .../scalajs/linker/PathOutputDirectory.scala | 30 ++--- .../scalajs/linker/MemOutputDirectory.scala | 4 +- .../org/scalajs/linker/analyzer/Infos.scala | 2 +- .../scalajs/linker/backend/OutputWriter.scala | 10 +- .../linker/backend/emitter/Emitter.scala | 25 ++-- .../backend/emitter/FunctionEmitter.scala | 2 +- .../backend/emitter/KnowledgeGuardian.scala | 2 +- .../backend/emitter/ModuleContext.scala | 2 +- .../linker/backend/emitter/VarGen.scala | 6 +- .../scalajs/linker/frontend/BaseLinker.scala | 2 +- .../modulesplitter/MaxModuleAnalyzer.scala | 7 +- .../modulesplitter/MinModuleAnalyzer.scala | 15 +-- .../modulesplitter/ModuleSplitter.scala | 14 ++- .../scalajs/linker/standard/ModuleSet.scala | 13 +- .../scala/org/scalajs/linker/LinkerTest.scala | 8 +- 20 files changed, 174 insertions(+), 120 deletions(-) diff --git a/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/Linker.scala b/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/Linker.scala index 0997d6e95c..ddc5b9ca65 100644 --- a/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/Linker.scala +++ b/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/Linker.scala @@ -43,45 +43,38 @@ abstract class Linker private[interface] () { moduleInitializers: Seq[ModuleInitializer], output: LinkerOutput, logger: Logger)( implicit ec: ExecutionContext): Future[Unit] = { + /* Backwards compatibility implementation for pre 1.3.0 link method. + * + * The major interface change in 1.3.0 is that the linker (and not the + * caller) determines the set of files to be written. As a consequence, the + * post 1.3.0 API does not offer as much control over cross-file references + * (i.e. source map links): it is based on patterns rather than simply + * asking the caller to verbatim provide the URI to reference in each file. + * + * To provide a backwards compatible interface, we do the following post-run + * processing: + * + * - Match and copy the produced set of files (in the OutputDirectory) to + * the files provided by the caller (in LinkerOutput). + * - Replace the pattern generated cross-file references with the ones + * provided by the caller. This is necessary as a post-processing step, + * because of the reduced flexibility of the 1.3.0 API: we cannot express + * all legacy requests in the new API. + */ + val outDir = new Linker.MemOutputDirectory() link(irFiles, moduleInitializers, outDir, logger).flatMap { report => - val module = { - if (report.publicModules.size != 1) { - throw new LinkingException( - "Linking did not return exactly one public module, but the legacy " + - "`link` method was called. Call the overload taking an " + - s"OutputDirectory instead. 
Full report:\n$report") - } - - report.publicModules.head - } - - val expectedFiles = Set(module.jsFileName) ++ module.sourceMapName - val foundFiles = outDir.content.keySet - - if (foundFiles != expectedFiles) { - if (expectedFiles.subsetOf(foundFiles)) { - throw new LinkingException( - "Linking produced more than a single JS file (and source map) but " + - "the legacy `link` method was called. Call the overload taking " + - "an OutputDirectory instead. " + - s"Expected files:\n$expectedFiles\nProduced files:\n$foundFiles") - } else { - throw new LinkingException( - "Linking did not produce the files mentioned in the report. " + - "This is a bug in the linker." + - s"Expected files:\n$expectedFiles\nProduced files:\n$foundFiles") - } - } + val (jsFileContent, optSourceMapContent) = + Linker.retrieveOutputFiles(report, outDir) val hasSourceMap = - module.sourceMapName.isDefined && output.sourceMap.isDefined + optSourceMapContent.isDefined && output.sourceMap.isDefined import StandardCharsets.UTF_8 val jsFileWrite = { - val content = new String(outDir.content(module.jsFileName), UTF_8) + val content = new String(jsFileContent, UTF_8) val patched = Linker.patchJSFileContent(content, output.sourceMapURI.filter(_ => hasSourceMap)) @@ -90,10 +83,10 @@ abstract class Linker private[interface] () { } val sourceMapWrite = for { - sourceMapName <- module.sourceMapName + sourceMapContent <- optSourceMapContent sourceMapFile <- output.sourceMap } yield { - val content = new String(outDir.content(sourceMapName), UTF_8) + val content = new String(sourceMapContent, UTF_8) val patched = Linker.patchSourceMapContent(content, output.jsFileURI) OutputFileImpl.fromOutputFile(sourceMapFile) @@ -120,7 +113,50 @@ private object Linker { */ private val fileFieldRe = """(?m)([,{])\s*"file"\s*:\s*".*"\s*([,}])""".r - /** Patches the JS file content to have the provided source map link (or none) */ + /** Retrieve the linker JS file and an optional source map */ + private def retrieveOutputFiles(report: Report, + outDir: MemOutputDirectory): (Array[Byte], Option[Array[Byte]]) = { + val module = { + if (report.publicModules.size != 1) { + throw new LinkingException( + "Linking did not return exactly one public module, but the legacy " + + "`link` method was called. Call the overload taking an " + + s"OutputDirectory instead. Full report:\n$report") + } + + report.publicModules.head + } + + val expectedFiles = Set(module.jsFileName) ++ module.sourceMapName + val foundFiles = outDir.content.keySet + + if (foundFiles != expectedFiles) { + if (expectedFiles.subsetOf(foundFiles)) { + throw new LinkingException( + "Linking produced more than a single JS file (and source map) but " + + "the legacy `link` method was called. Call the overload taking " + + "an OutputDirectory instead. " + + s"Expected files:\n$expectedFiles\nProduced files:\n$foundFiles") + } else { + throw new LinkingException( + "Linking did not produce the files mentioned in the report. " + + "This is a bug in the linker." + + s"Expected files:\n$expectedFiles\nProduced files:\n$foundFiles") + } + } + + val jsFileContent = outDir.content(module.jsFileName) + val sourceMapContent = module.sourceMapName.map(outDir.content(_)) + + (jsFileContent, sourceMapContent) + } + + /** Patches the JS file content to have the provided source map link (or none) + * + * Looks for a line of the form `//# sourceMappingURL=.*` and replaces the + * URL with the provided `sourceMapURI`. In case `sourceMapURI` is None, the + * line is replaced with an empty line. 
+ */ private def patchJSFileContent(content: String, sourceMapURI: Option[URI]): String = { @@ -145,7 +181,12 @@ private object Linker { } } - /** Patches the source map content to have the provided JS file link (or none) */ + /** Patches the source map content to have the provided JS file link (or none). + * + * Looks for a `"file":` key in the top-level source map JSON object and + * replaces it's value with `jsFileURI`. In case `jsFileURI` is None, it + * removes the key from the object. + */ private def patchSourceMapContent(content: String, jsFileURI: Option[URI]): String = { @@ -195,8 +236,8 @@ private object Linker { Future.successful(()) } - def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] = synchronized { - Future.successful(content.keys) + def listFiles()(implicit ec: ExecutionContext): Future[List[String]] = synchronized { + Future.successful(content.keys.toList) } def delete(name: String)(implicit ec: ExecutionContext): Future[Unit] = synchronized { diff --git a/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/Report.scala b/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/Report.scala index 12c7227cf5..ff5c4b5e8c 100644 --- a/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/Report.scala +++ b/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/Report.scala @@ -12,6 +12,8 @@ package org.scalajs.linker.interface +import scala.collection.immutable + import java.io._ import org.scalajs.ir.ScalaJSVersions @@ -19,13 +21,13 @@ import org.scalajs.ir.ScalaJSVersions import org.scalajs.linker.interface.unstable.ReportImpl /** Information about a linker run. */ -abstract class Report private[interface] { +abstract class Report private[interface] () { /** Public modules resulting from linking. * * An incremental linker must return all public modules here, even if some * were not re-written during this linking run. */ - def publicModules: Iterable[Report.Module] + def publicModules: immutable.Iterable[Report.Module] override def toString(): String = { s"""Report( @@ -38,7 +40,7 @@ abstract class Report private[interface] { object Report { /** Information about a module produced by the linker. */ - abstract class Module private[interface] { + abstract class Module private[interface] () { /** The module ID of this module. */ def moduleID: String @@ -65,7 +67,7 @@ object Report { /** Serializes this [[Report]] to a byte array. * - * A report serialized with the exact same linker version is guaranteed to + * A report serialized with the exact same linker version is guaranteed to be * deserializable using [[deserialize]]. If the linker version is different, * no guarantee is given. 
*/ @@ -109,8 +111,8 @@ object Report { private def writeModuleKind(kind: ModuleKind): Unit = { val i = kind match { - case ModuleKind.NoModule => 0 - case ModuleKind.ESModule => 1 + case ModuleKind.NoModule => 0 + case ModuleKind.ESModule => 1 case ModuleKind.CommonJSModule => 2 } writeByte(i) diff --git a/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/unstable/OutputDirectoryImpl.scala b/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/unstable/OutputDirectoryImpl.scala index 6b17ef6b8a..d30da0ad8f 100644 --- a/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/unstable/OutputDirectoryImpl.scala +++ b/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/unstable/OutputDirectoryImpl.scala @@ -25,13 +25,13 @@ abstract class OutputDirectoryImpl extends OutputDirectory { * * Writing should only result in a file write if the contents of the file * actually changed. Further, if the underlying filesystem allows it, the - * file should be written atomically. + * file should be written atomically. */ def writeFull(name: String, buf: ByteBuffer)( implicit ec: ExecutionContext): Future[Unit] /** Lists all the files in the directory. */ - def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] + def listFiles()(implicit ec: ExecutionContext): Future[List[String]] /** Deletes the given file. Fails if it does not exist. */ def delete(name: String)(implicit ec: ExecutionContext): Future[Unit] diff --git a/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/unstable/ReportImpl.scala b/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/unstable/ReportImpl.scala index fa27b02ec4..bde8389020 100644 --- a/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/unstable/ReportImpl.scala +++ b/linker-interface/shared/src/main/scala/org/scalajs/linker/interface/unstable/ReportImpl.scala @@ -12,10 +12,12 @@ package org.scalajs.linker.interface.unstable +import scala.collection.immutable + import org.scalajs.linker.interface._ final class ReportImpl( - val publicModules: Iterable[Report.Module]) extends Report + val publicModules: immutable.Iterable[Report.Module]) extends Report object ReportImpl { final class ModuleImpl( diff --git a/linker/js/src/main/scala/org/scalajs/linker/NodeOutputDirectory.scala b/linker/js/src/main/scala/org/scalajs/linker/NodeOutputDirectory.scala index c2d76d3bec..016b7f7bde 100644 --- a/linker/js/src/main/scala/org/scalajs/linker/NodeOutputDirectory.scala +++ b/linker/js/src/main/scala/org/scalajs/linker/NodeOutputDirectory.scala @@ -31,16 +31,21 @@ object NodeOutputDirectory { def writeFull(name: String, buf: ByteBuffer)(implicit ec: ExecutionContext): Future[Unit] = { val path = getPath(name) - val data = - if (buf.hasTypedArray()) buf.typedArray().subarray(buf.position(), buf.limit()) - else ByteBuffer.allocateDirect(buf.remaining()).put(buf).typedArray() + val data = { + if (buf.hasTypedArray()) { + val result = buf.typedArray().subarray(buf.position(), buf.limit()) + buf.position(buf.limit()) + result + } else { + ByteBuffer.allocateDirect(buf.remaining()).put(buf).typedArray() + } + } cbFuture[Unit](NodeFS.writeFile(path, data, _)) - .map(_ => buf.position(buf.limit())) } - def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] = - cbFuture[js.Array[String]](NodeFS.readdir(directory, _)).map(x => x) + def listFiles()(implicit ec: ExecutionContext): Future[List[String]] = + cbFuture[js.Array[String]](NodeFS.readdir(directory, 
_)).map(_.toList) def delete(name: String)(implicit ec: ExecutionContext): Future[Unit] = cbFuture[Unit](NodeFS.unlink(getPath(name), _)) diff --git a/linker/jvm/src/main/scala/org/scalajs/linker/PathOutputDirectory.scala b/linker/jvm/src/main/scala/org/scalajs/linker/PathOutputDirectory.scala index 14cafbe565..4cd0e77289 100644 --- a/linker/jvm/src/main/scala/org/scalajs/linker/PathOutputDirectory.scala +++ b/linker/jvm/src/main/scala/org/scalajs/linker/PathOutputDirectory.scala @@ -17,9 +17,6 @@ import scala.concurrent._ import java.nio._ import java.nio.channels._ import java.nio.file._ -import java.nio.file.attribute.BasicFileAttributes - -import java.util.EnumSet import java.io.IOException @@ -44,23 +41,12 @@ object PathOutputDirectory { } } - def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] = Future { + def listFiles()(implicit ec: ExecutionContext): Future[List[String]] = Future { blocking { - val builder = Iterable.newBuilder[String] - - /* Files.list(...) would be simpler, but it requires Java Streams which - * are, at the time of this writing, not implemented in Scala.js. - */ - val dirVisitor = new SimpleFileVisitor[Path] { - override def visitFile(path: Path, attrs: BasicFileAttributes): FileVisitResult = { - if (path != directory) - builder += directory.relativize(path).toString() - super.visitFile(path, attrs) - } + val builder = List.newBuilder[String] + Files.list(directory).forEachOrdered { entry => + builder += directory.relativize(entry).toString() } - - Files.walkFileTree(directory, EnumSet.noneOf(classOf[FileVisitOption]), 1, dirVisitor) - builder.result() } } @@ -166,11 +152,11 @@ object PathOutputDirectory { def readNext(): Unit = { readBuf.clear() - chan.read(readBuf, pos, null, Handler) + chan.read(readBuf, pos, (), Handler) } - object Handler extends CompletionHandler[Integer, Null] { - def completed(read: Integer, n: Null): Unit = { + object Handler extends CompletionHandler[Integer, Unit] { + def completed(read: Integer, unit: Unit): Unit = { if (read == -1) { /* We have checked the file size beforehand. So if we get here, * there's no diff. 
@@ -193,7 +179,7 @@ object PathOutputDirectory { } } - def failed(exc: Throwable, n: Null): Unit = + def failed(exc: Throwable, unit: Unit): Unit = promise.failure(exc) } diff --git a/linker/shared/src/main/scala/org/scalajs/linker/MemOutputDirectory.scala b/linker/shared/src/main/scala/org/scalajs/linker/MemOutputDirectory.scala index 012774be74..a8115d755d 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/MemOutputDirectory.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/MemOutputDirectory.scala @@ -46,8 +46,8 @@ object MemOutputDirectory { Future.successful(()) } - def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] = synchronized { - Future.successful(_content.keys) + def listFiles()(implicit ec: ExecutionContext): Future[List[String]] = synchronized { + Future.successful(_content.keys.toList) } def delete(name: String)(implicit ec: ExecutionContext): Future[Unit] = synchronized { diff --git a/linker/shared/src/main/scala/org/scalajs/linker/analyzer/Infos.scala b/linker/shared/src/main/scala/org/scalajs/linker/analyzer/Infos.scala index 99e429bf46..1fba54d766 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/analyzer/Infos.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/analyzer/Infos.scala @@ -390,7 +390,7 @@ object Infos { val info = new GenInfoTraverser().generateTopLevelExportInfo(enclosingClass, topLevelExportDef) new TopLevelExportInfo(enclosingClass, info, - new ModuleID(topLevelExportDef.moduleID), + ModuleID(topLevelExportDef.moduleID), topLevelExportDef.topLevelExportName) } diff --git a/linker/shared/src/main/scala/org/scalajs/linker/backend/OutputWriter.scala b/linker/shared/src/main/scala/org/scalajs/linker/backend/OutputWriter.scala index 1c47a85d0a..5a1b2c9ddb 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/backend/OutputWriter.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/backend/OutputWriter.scala @@ -43,10 +43,12 @@ private[backend] abstract class OutputWriter(output: OutputDirectory, for { currentFiles <- outputImpl.listFiles() - reports <- Future.traverse(moduleSet.modules)(m => - ioThrottler.throttle(writeModule(m.id))) - _ <- Future.traverse(filesToRemove(currentFiles, reports))(f => - ioThrottler.throttle(outputImpl.delete(f))) + reports <- Future.traverse(moduleSet.modules) { m => + ioThrottler.throttle(writeModule(m.id)) + } + _ <- Future.traverse(filesToRemove(currentFiles, reports)) { f => + ioThrottler.throttle(outputImpl.delete(f)) + } } yield { val publicModules = for { (module, report) <- moduleSet.modules.zip(reports) diff --git a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/Emitter.scala b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/Emitter.scala index 7c511f47f4..7d2bf96404 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/Emitter.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/Emitter.scala @@ -80,7 +80,9 @@ final class Emitter(config: Emitter.Config) { moduleKind match { case ModuleKind.NoModule => + assert(moduleSet.modules.size <= 1) val topLevelVars = moduleSet.modules + .headOption.toList .flatMap(_.topLevelExports) .map(_.exportName) @@ -207,9 +209,15 @@ final class Emitter(config: Emitter.Config) { } } - def classIter = moduleClasses + def classIter = moduleClasses.iterator - // Emit everything in the appropriate order. + /* Emit everything but module imports in the appropriate order. 
+ * + * We do not emit module imports to be able to assert that the + * resulting module is non-empty. This is a non-trivial condition that + * requires consistency between the Analyzer and the Emitter. As such, + * it is crucial that we verify it. + */ val defTrees = js.Block( /* The definitions of the CoreJSLib, which depend on nothing. * All classes potentially depend on it. @@ -257,8 +265,7 @@ final class Emitter(config: Emitter.Config) { /* Module imports, which depend on nothing. * All classes potentially depend on them. */ - extractWithGlobals(genModuleImports(module)) ++ - Iterator.single(defTrees) + extractWithGlobals(genModuleImports(module)) :+ defTrees )(Position.NoPosition) classIter.foreach { genClass => @@ -277,11 +284,13 @@ final class Emitter(config: Emitter.Config) { def importParts = ( ( - module.externalDependencies.map(x => - sjsGen.varGen.externalModuleFieldIdent(x) -> x) + module.externalDependencies.map { x => + sjsGen.varGen.externalModuleFieldIdent(x) -> x + } ) ++ ( - module.internalDependencies.map(x => - sjsGen.varGen.internalModuleFieldIdent(x) -> config.internalModulePattern(x)) + module.internalDependencies.map { x => + sjsGen.varGen.internalModuleFieldIdent(x) -> config.internalModulePattern(x) + } ) ).toList.sortBy(_._1.name) diff --git a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/FunctionEmitter.scala b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/FunctionEmitter.scala index ec8a47f18d..00af221cca 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/FunctionEmitter.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/FunctionEmitter.scala @@ -642,7 +642,7 @@ private[emitter] class FunctionEmitter(sjsGen: SJSGen) { if (needToUseGloballyMutableVarSetter(scope)) { unnest(rhs) { (rhs, env0) => implicit val env = env0 - js.Apply(globalVar("u", scope), transformExprNoChar(rhs) :: Nil) + js.Apply(globalVar("u", scope), transformExpr(rhs, select.tpe) :: Nil) } } else { // Assign normally. 
diff --git a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/KnowledgeGuardian.scala b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/KnowledgeGuardian.scala index 66f82bcb4e..1cfd1ea742 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/KnowledgeGuardian.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/KnowledgeGuardian.scala @@ -146,7 +146,7 @@ private[emitter] final class KnowledgeGuardian(config: Emitter.Config) { for { module <- moduleSet.modules export <- module.topLevelExports - } yield { + } { export.tree match { case TopLevelFieldExportDef(_, exportName, FieldIdent(fieldName)) => val className = export.owningClass diff --git a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/ModuleContext.scala b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/ModuleContext.scala index 292c09976c..953f8e73c0 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/ModuleContext.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/ModuleContext.scala @@ -32,7 +32,7 @@ private[emitter] final class ModuleContext private ( import scala.util.hashing.MurmurHash3._ var acc = ModuleContext.HashSeed acc = mix(acc, moduleID.##) - acc = mix(acc, public.##) + acc = mixLast(acc, public.##) finalizeHash(acc, 2) } } diff --git a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/VarGen.scala b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/VarGen.scala index 6facd6b2e2..70412c98b1 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/VarGen.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/VarGen.scala @@ -93,10 +93,10 @@ private[emitter] final class VarGen(jsGen: JSGen, nameGen: NameGen, val varDef = genLet(ident, mutable = true, value) if (config.moduleKind == ModuleKind.ESModule && !moduleContext.public) { - val setterIdent = globalVarIdent(setterField, scope, origName) + val setterIdent = globalVarIdent(setterField, scope) val x = Ident("x") val setter = FunctionDef(setterIdent, List(ParamDef(x, rest = false)), { - Assign(VarRef(ident), VarRef(x)) + Assign(VarRef(ident), VarRef(x)) }) val exports = @@ -209,7 +209,7 @@ private[emitter] final class VarGen(jsGen: JSGen, nameGen: NameGen, } else { val export = config.moduleKind match { case ModuleKind.NoModule => - throw new AssertionError("non-leaf module in NoModule mode") + throw new AssertionError("non-public module in NoModule mode") case ModuleKind.ESModule => WithGlobals(Export(genExportIdent(ident) :: Nil)) diff --git a/linker/shared/src/main/scala/org/scalajs/linker/frontend/BaseLinker.scala b/linker/shared/src/main/scala/org/scalajs/linker/frontend/BaseLinker.scala index f58dd7106a..9a124322cf 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/frontend/BaseLinker.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/frontend/BaseLinker.scala @@ -124,7 +124,7 @@ final class BaseLinker(config: CommonPhaseConfig) { topLevelExport <- classDef.topLevelExportDefs } yield { val infos = analysis.topLevelExportInfos( - (new ModuleID(topLevelExport.moduleID), topLevelExport.topLevelExportName)) + (ModuleID(topLevelExport.moduleID), topLevelExport.topLevelExportName)) new LinkedTopLevelExport(className, topLevelExport, infos.staticDependencies.toSet, infos.externalDependencies.toSet) } diff --git a/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/MaxModuleAnalyzer.scala 
b/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/MaxModuleAnalyzer.scala index b9c837ce57..06a3897271 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/MaxModuleAnalyzer.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/MaxModuleAnalyzer.scala @@ -89,6 +89,9 @@ private object MaxModuleAnalyzer { * * We sort the ModuleIDs to not depend on map iteration order (or the * order of the input files). + * + * All of this is to avoid module ID collisions, for example with the + * following set of public modules: {`a`, `b`, `a-b`}. */ val publicIDs = { // Best way I could find to create SortedSet from a Set :-/ @@ -103,10 +106,10 @@ private object MaxModuleAnalyzer { modules <- publicIDs.subsets() if modules.nonEmpty } yield { - var candidate = new ModuleID(modules.map(_.id).mkString("-")) + var candidate = ModuleID(modules.map(_.id).mkString("-")) while (seenIDs.contains(candidate)) - candidate = new ModuleID(candidate.id + "$") + candidate = ModuleID(candidate.id + "$") seenIDs.add(candidate) diff --git a/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/MinModuleAnalyzer.scala b/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/MinModuleAnalyzer.scala index 40765944f6..529f63c8a3 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/MinModuleAnalyzer.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/MinModuleAnalyzer.scala @@ -44,7 +44,7 @@ private object MinModuleAnalyzer { private[this] var nextIndex = 0 private[this] val nodes = mutable.Map.empty[ClassName, Node] - private[this] val stack = mutable.Stack.empty[Node] + private[this] val stack = mutable.ArrayBuffer.empty[Node] private[this] val moduleIndexToID = mutable.Map.empty[Int, ModuleID] def moduleForClass(className: ClassName): Option[ModuleID] = @@ -82,7 +82,7 @@ private object MinModuleAnalyzer { nextIndex += 1 nodes(className) = node - stack.push(node) + stack += node for (depName <- info.classDependencies(className)) { nodes.get(depName).fold { @@ -106,10 +106,10 @@ private object MinModuleAnalyzer { @tailrec def pop(): Unit = { - val n = stack.pop() + val n = stack.remove(stack.size - 1) n.moduleIndex = moduleIndex - /* Take the lexographically smallest name as a stable name of the + /* Take the lexicographically smallest name as a stable name of the * module, with the exception of j.l.Object which identifies the root * module. * @@ -123,7 +123,8 @@ private object MinModuleAnalyzer { name = n.className } - if (n ne node) pop() + if (n ne node) + pop() } pop() @@ -139,9 +140,9 @@ private object MinModuleAnalyzer { * Note that this is stable, because it does not depend on the order we * iterate over nodes. 
*/ - var moduleID = new ModuleID(name.nameString) + var moduleID = ModuleID(name.nameString) while (info.publicModuleDependencies.contains(moduleID)) - moduleID = new ModuleID(moduleID.id + ".") + moduleID = ModuleID(moduleID.id + ".") moduleIndexToID(moduleIndex) = moduleID } diff --git a/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/ModuleSplitter.scala b/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/ModuleSplitter.scala index 2c487a8805..23be8c468e 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/ModuleSplitter.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/frontend/modulesplitter/ModuleSplitter.scala @@ -60,7 +60,7 @@ final class ModuleSplitter private (analyzer: ModuleAnalyzer) { analysis: ModuleAnalyzer.Analysis, publicModuleDependencies: Map[ModuleID, Set[ClassName]]): ModuleSet = { - // LinkedHashMap for stability. + // LinkedHashMap for stability of module order. val builders = mutable.LinkedHashMap.empty[ModuleID, ModuleBuilder] val abstractClasses = List.newBuilder[LinkedClass] @@ -88,16 +88,18 @@ final class ModuleSplitter private (analyzer: ModuleAnalyzer) { } } - // Output TopLevelExports and module initializers first for stability. + /* Output TopLevelExports and module initializers before + * publicModuleDependencies for stable module order. + */ for (tle <- unit.topLevelExports) { - val builder = getBuilder(new ModuleID(tle.tree.moduleID)) + val builder = getBuilder(ModuleID(tle.tree.moduleID)) builder.topLevelExports += tle tle.externalDependencies.foreach(builder.externalDependencies += _) } for (mi <- unit.moduleInitializers) { - val builder = getBuilder(new ModuleID(mi.moduleID)) + val builder = getBuilder(ModuleID(mi.moduleID)) builder.initializers += mi.initializer } @@ -136,7 +138,7 @@ final class ModuleSplitter private (analyzer: ModuleAnalyzer) { * https://github.com/scala-js/scala-js/pull/4111#issuecomment-673590827 */ val classesWithStaticInits = - unit.classDefs.filter(_.hasStaticInitializer).map(_.className) + unit.classDefs.withFilter(_.hasStaticInitializer).map(_.className) // What all modules depend on. val baseDeps = classesWithStaticInits.toSet + ObjectClass @@ -144,7 +146,7 @@ final class ModuleSplitter private (analyzer: ModuleAnalyzer) { val result = mutable.Map.empty[ModuleID, Set[ClassName]] def add(moduleID: String, deps: Iterable[ClassName]) = { - val id = new ModuleID(moduleID) + val id = ModuleID(moduleID) val cur = result.getOrElse(id, baseDeps) result(id) = cur ++ deps } diff --git a/linker/shared/src/main/scala/org/scalajs/linker/standard/ModuleSet.scala b/linker/shared/src/main/scala/org/scalajs/linker/standard/ModuleSet.scala index 11e7c3a7bb..e8773b6aa2 100644 --- a/linker/shared/src/main/scala/org/scalajs/linker/standard/ModuleSet.scala +++ b/linker/shared/src/main/scala/org/scalajs/linker/standard/ModuleSet.scala @@ -24,11 +24,14 @@ import org.scalajs.linker.interface.ModuleInitializer * - External Modules: Modules that are imported through native JS * types/members. These are not generated by Scala.js but need to be provided * by the user. For example, the Node.js `fs` module. - * - Public Modules: Scala.js produced modules intended for import by user + * - Public Modules: Scala.js-produced modules intended for import by user * code. Only these modules may hold top level exports (but may contain more). * Public modules may not be imported by other Scala.js produced modules. 
- * - Internal Modules: Scala.js produced modules purely relevant for splitting. - * These are only intended to be imported by other Scala.js produced modules. + * - Internal Modules: Scala.js-produced modules only relevant for splitting. + * These are only intended to be imported by other Scala.js-produced modules. + * + * Note that a ModuleSet may contain no modules at all. This happens if there + * are no public modules. */ final class ModuleSet private[linker] ( val coreSpec: CoreSpec, @@ -50,6 +53,10 @@ object ModuleSet { override def toString(): String = s"ModuleID($id)" } + object ModuleID { + def apply(id: String): ModuleID = new ModuleID(id) + } + final class Module( val id: ModuleID, val internalDependencies: Set[ModuleID], diff --git a/linker/shared/src/test/scala/org/scalajs/linker/LinkerTest.scala b/linker/shared/src/test/scala/org/scalajs/linker/LinkerTest.scala index 18182c1ca0..76aaa9861f 100644 --- a/linker/shared/src/test/scala/org/scalajs/linker/LinkerTest.scala +++ b/linker/shared/src/test/scala/org/scalajs/linker/LinkerTest.scala @@ -119,14 +119,8 @@ class LinkerTest { @Test @deprecated("Mark deprecated to silence warnings", "never/always") def testLegacyAPI(): AsyncResult = await { - val classDefs = Seq( - mainTestClassDef({ - consoleLog(StringLiteral("Hello world!")) - }) - ) - val linker = StandardImpl.linker(StandardConfig()) - val classDefsFiles = classDefs.map(MemClassDefIRFile(_)) + val classDefsFiles = helloWorldClassDefs.map(MemClassDefIRFile(_)) val jsOutput = MemOutputFile() val smOutput = MemOutputFile()