Address comments: Add Module Splitting support in the linker
gzm0 committed Sep 27, 2020
1 parent e01c0cc commit 6ae1e52
Showing 20 changed files with 185 additions and 121 deletions.
@@ -43,45 +43,38 @@ abstract class Linker private[interface] () {
moduleInitializers: Seq[ModuleInitializer],
output: LinkerOutput, logger: Logger)(
implicit ec: ExecutionContext): Future[Unit] = {
/* Backwards compatibility implementation for pre 1.3.0 link method.
*
* The major interface change in 1.3.0 is that the linker (and not the
* caller) determines the set of files to be written. As a consequence, the
* post 1.3.0 API does not offer as much control over cross-file references
* (i.e. source map links): it is based on patterns rather than simply
* asking the caller to verbatim provide the URI to reference in each file.
*
* To provide a backwards compatible interface, we do the following post-run
* processing:
*
* - Match and copy the produced set of files (in the OutputDirectory) to
* the files provided by the caller (in LinkerOutput).
* - Replace the pattern generated cross-file references with the ones
* provided by the caller. This is necessary as a post-processing step,
* because of the reduced flexibility of the 1.3.0 API: we cannot express
* all legacy requests in the new API.
*/

val outDir = new Linker.MemOutputDirectory()

link(irFiles, moduleInitializers, outDir, logger).flatMap { report =>
val module = {
if (report.publicModules.size != 1) {
throw new LinkingException(
"Linking did not return exactly one public module, but the legacy " +
"`link` method was called. Call the overload taking an " +
s"OutputDirectory instead. Full report:\n$report")
}

report.publicModules.head
}

val expectedFiles = Set(module.jsFileName) ++ module.sourceMapName
val foundFiles = outDir.content.keySet

if (foundFiles != expectedFiles) {
if (expectedFiles.subsetOf(foundFiles)) {
throw new LinkingException(
"Linking produced more than a single JS file (and source map) but " +
"the legacy `link` method was called. Call the overload taking " +
"an OutputDirectory instead. " +
s"Expected files:\n$expectedFiles\nProduced files:\n$foundFiles")
} else {
throw new LinkingException(
"Linking did not produce the files mentioned in the report. " +
"This is a bug in the linker." +
s"Expected files:\n$expectedFiles\nProduced files:\n$foundFiles")
}
}
val (jsFileContent, optSourceMapContent) =
Linker.retrieveOutputFiles(report, outDir)

val hasSourceMap =
module.sourceMapName.isDefined && output.sourceMap.isDefined
optSourceMapContent.isDefined && output.sourceMap.isDefined

import StandardCharsets.UTF_8

val jsFileWrite = {
val content = new String(outDir.content(module.jsFileName), UTF_8)
val content = new String(jsFileContent, UTF_8)
val patched = Linker.patchJSFileContent(content,
output.sourceMapURI.filter(_ => hasSourceMap))

@@ -90,10 +83,10 @@ }
}

val sourceMapWrite = for {
sourceMapName <- module.sourceMapName
sourceMapContent <- optSourceMapContent
sourceMapFile <- output.sourceMap
} yield {
val content = new String(outDir.content(sourceMapName), UTF_8)
val content = new String(sourceMapContent, UTF_8)
val patched = Linker.patchSourceMapContent(content, output.jsFileURI)

OutputFileImpl.fromOutputFile(sourceMapFile)
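For reference, the post-1.3.0 entry point that the backwards-compatibility comment above contrasts this method with takes an `OutputDirectory` and returns a `Report`. A rough usage sketch, assuming the `StandardImpl`/`StandardConfig` factory, `ScalaConsoleLogger` and an illustrative output path, none of which are fixed by this diff:

```scala
import scala.concurrent.{ExecutionContext, Future}

import java.nio.file.Paths

import org.scalajs.linker.{PathOutputDirectory, StandardImpl}
import org.scalajs.linker.interface._
import org.scalajs.logging.ScalaConsoleLogger

// Link into a caller-chosen directory; the linker decides which files it
// writes there and describes them in the returned Report.
def linkToDirectory(irFiles: Seq[IRFile], initializers: Seq[ModuleInitializer])(
    implicit ec: ExecutionContext): Future[Report] = {
  val linker = StandardImpl.linker(StandardConfig())
  val output = PathOutputDirectory(Paths.get("target", "sjs-output")) // illustrative path
  linker.link(irFiles, initializers, output, new ScalaConsoleLogger)
}
```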
@@ -120,7 +113,50 @@ private object Linker {
*/
private val fileFieldRe = """(?m)([,{])\s*"file"\s*:\s*".*"\s*([,}])""".r

/** Patches the JS file content to have the provided source map link (or none) */
/** Retrieves the JS file produced by the linker and its optional source map. */
private def retrieveOutputFiles(report: Report,
outDir: MemOutputDirectory): (Array[Byte], Option[Array[Byte]]) = {
val module = {
if (report.publicModules.size != 1) {
throw new LinkingException(
"Linking did not return exactly one public module, but the legacy " +
"`link` method was called. Call the overload taking an " +
s"OutputDirectory instead. Full report:\n$report")
}

report.publicModules.head
}

val expectedFiles = Set(module.jsFileName) ++ module.sourceMapName
val foundFiles = outDir.content.keySet

if (foundFiles != expectedFiles) {
if (expectedFiles.subsetOf(foundFiles)) {
throw new LinkingException(
"Linking produced more than a single JS file (and source map) but " +
"the legacy `link` method was called. Call the overload taking " +
"an OutputDirectory instead. " +
s"Expected files:\n$expectedFiles\nProduced files:\n$foundFiles")
} else {
throw new LinkingException(
"Linking did not produce the files mentioned in the report. " +
"This is a bug in the linker. " +
s"Expected files:\n$expectedFiles\nProduced files:\n$foundFiles")
}
}

val jsFileContent = outDir.content(module.jsFileName)
val sourceMapContent = module.sourceMapName.map(outDir.content(_))

(jsFileContent, sourceMapContent)
}

/** Patches the JS file content to have the provided source map link (or none)
*
* Looks for a line of the form `//# sourceMappingURL=.*` and replaces the
* URL with the provided `sourceMapURI`. In case `sourceMapURI` is None, the
* line is replaced with an empty line.
*/
private def patchJSFileContent(content: String,
sourceMapURI: Option[URI]): String = {
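The body of `patchJSFileContent` is elided from this hunk. A minimal sketch of the line replacement described in the doc comment above, using a simplified pattern and a hypothetical helper name rather than the actual implementation:

```scala
import java.net.URI
import java.util.regex.Matcher

// Simplified stand-in for the real sourceMappingURL pattern; illustration only.
val sourceMappingURLRe = """(?m)^//# sourceMappingURL=.*$""".r

def patchSourceMapLink(content: String, sourceMapURI: Option[URI]): String = {
  val newLine = sourceMapURI match {
    case Some(uri) => "//# sourceMappingURL=" + uri.toASCIIString
    case None      => "" // keep the (now empty) line, drop the link
  }
  sourceMappingURLRe.replaceAllIn(content, Matcher.quoteReplacement(newLine))
}
```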

@@ -145,7 +181,12 @@ }
}
}

/** Patches the source map content to have the provided JS file link (or none) */
/** Patches the source map content to have the provided JS file link (or none).
*
* Looks for a `"file":` key in the top-level source map JSON object and
* replaces its value with `jsFileURI`. In case `jsFileURI` is None, it
* removes the key from the object.
*/
private def patchSourceMapContent(content: String,
jsFileURI: Option[URI]): String = {
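Similarly, the `"file"` key replacement can be pictured with the `fileFieldRe` pattern defined earlier in this file. A sketch under a hypothetical helper name; the key-removal strategy shown here is an assumption, not necessarily what the elided body does:

```scala
import java.net.URI
import java.util.regex.Matcher
import scala.util.matching.Regex

// Copy of the fileFieldRe pattern shown earlier in this diff.
val fileFieldRe = """(?m)([,{])\s*"file"\s*:\s*".*"\s*([,}])""".r

def patchFileField(content: String, jsFileURI: Option[URI]): String = {
  fileFieldRe.replaceAllIn(content, (m: Regex.Match) => {
    val (open, close) = (m.group(1), m.group(2))
    jsFileURI match {
      // Swap in the new URI, keeping the delimiters around the key.
      case Some(uri) =>
        Matcher.quoteReplacement(open + "\"file\": \"" + uri.toASCIIString + "\"" + close)
      // No URI: drop the key while keeping the JSON object well-formed.
      case None =>
        if (open == "{" && close == "}") "{}"
        else if (open == "{") "{"
        else close
    }
  })
}
```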

@@ -195,8 +236,8 @@ private object Linker {
Future.successful(())
}

def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] = synchronized {
Future.successful(content.keys)
def listFiles()(implicit ec: ExecutionContext): Future[List[String]] = synchronized {
Future.successful(content.keys.toList)
}

def delete(name: String)(implicit ec: ExecutionContext): Future[Unit] = synchronized {
@@ -12,20 +12,22 @@

package org.scalajs.linker.interface

import scala.collection.immutable

import java.io._

import org.scalajs.ir.ScalaJSVersions

import org.scalajs.linker.interface.unstable.ReportImpl

/** Information about a linker run. */
abstract class Report private[interface] {
abstract class Report private[interface] () {
/** Public modules resulting from linking.
*
* An incremental linker must return all public modules here, even if some
* were not re-written during this linking run.
*/
def publicModules: Iterable[Report.Module]
def publicModules: immutable.Iterable[Report.Module]

override def toString(): String = {
s"""Report(
@@ -38,7 +40,7 @@ abstract class Report private[interface] {

object Report {
/** Information about a module produced by the linker. */
abstract class Module private[interface] {
abstract class Module private[interface] () {
/** The module ID of this module. */
def moduleID: String

@@ -65,7 +67,7 @@ object Report {

/** Serializes this [[Report]] to a byte array.
*
* A report serialized with the exact same linker version is guaranteed to
* A report serialized with the exact same linker version is guaranteed to be
* deserializable using [[deserialize]]. If the linker version is different,
* no guarantee is given.
*/
@@ -109,8 +111,8 @@

private def writeModuleKind(kind: ModuleKind): Unit = {
val i = kind match {
case ModuleKind.NoModule => 0
case ModuleKind.ESModule => 1
case ModuleKind.NoModule => 0
case ModuleKind.ESModule => 1
case ModuleKind.CommonJSModule => 2
}
writeByte(i)
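Because `publicModules` must list every public module of the run, a caller can summarize a report directly from it. A small sketch, assuming only the `Report.Module` members this commit itself uses (`moduleID`, `jsFileName`, `sourceMapName`):

```scala
import org.scalajs.linker.interface.Report

// Produce one line per public module, whether or not it was re-written
// during this (possibly incremental) run.
def describeModules(report: Report): String = {
  report.publicModules.map { module =>
    val sourceMap = module.sourceMapName.fold("no source map")("source map " + _)
    s"${module.moduleID} -> ${module.jsFileName} ($sourceMap)"
  }.mkString("\n")
}
```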
@@ -25,13 +25,13 @@ abstract class OutputDirectoryImpl extends OutputDirectory {
*
* Writing should only result in a file write if the contents of the file
* actually changed. Further, if the underlying filesystem allows it, the
* file should be written atomically.
* file should be written atomically.
*/
def writeFull(name: String, buf: ByteBuffer)(
implicit ec: ExecutionContext): Future[Unit]

/** Lists all the files in the directory. */
def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]]
def listFiles()(implicit ec: ExecutionContext): Future[List[String]]

/** Deletes the given file. Fails if it does not exist. */
def delete(name: String)(implicit ec: ExecutionContext): Future[Unit]
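A rough illustration of the `writeFull` contract above (skip the write when nothing changed, go through a temporary file and an atomic move when it did), using plain java.nio in a blocking `Future`. This is a simplified sketch, not the `PathOutputDirectory` implementation changed further down:

```scala
import scala.concurrent.{blocking, ExecutionContext, Future}

import java.nio.ByteBuffer
import java.nio.file.{Files, Path, StandardCopyOption}
import java.util.Arrays

def writeFullSketch(dir: Path, name: String, buf: ByteBuffer)(
    implicit ec: ExecutionContext): Future[Unit] = Future {
  blocking {
    val target = dir.resolve(name)
    val newContent = new Array[Byte](buf.remaining())
    buf.get(newContent) // consumes the buffer, as the contract expects

    val unchanged = Files.exists(target) &&
      Arrays.equals(Files.readAllBytes(target), newContent)

    if (!unchanged) {
      // Write to a temporary file first, then move it into place atomically
      // where the filesystem supports it.
      val tmp = Files.createTempFile(dir, name, ".tmp")
      Files.write(tmp, newContent)
      Files.move(tmp, target,
          StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING)
    }
  }
}
```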
@@ -12,10 +12,12 @@

package org.scalajs.linker.interface.unstable

import scala.collection.immutable

import org.scalajs.linker.interface._

final class ReportImpl(
val publicModules: Iterable[Report.Module]) extends Report
val publicModules: immutable.Iterable[Report.Module]) extends Report

object ReportImpl {
final class ModuleImpl(
@@ -31,16 +31,21 @@ object NodeOutputDirectory {
def writeFull(name: String, buf: ByteBuffer)(implicit ec: ExecutionContext): Future[Unit] = {
val path = getPath(name)

val data =
if (buf.hasTypedArray()) buf.typedArray().subarray(buf.position(), buf.limit())
else ByteBuffer.allocateDirect(buf.remaining()).put(buf).typedArray()
val data = {
if (buf.hasTypedArray()) {
val result = buf.typedArray().subarray(buf.position(), buf.limit())
buf.position(buf.limit())
result
} else {
ByteBuffer.allocateDirect(buf.remaining()).put(buf).typedArray()
}
}

cbFuture[Unit](NodeFS.writeFile(path, data, _))
.map(_ => buf.position(buf.limit()))
}

def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] =
cbFuture[js.Array[String]](NodeFS.readdir(directory, _)).map(x => x)
def listFiles()(implicit ec: ExecutionContext): Future[List[String]] =
cbFuture[js.Array[String]](NodeFS.readdir(directory, _)).map(_.toList)

def delete(name: String)(implicit ec: ExecutionContext): Future[Unit] =
cbFuture[Unit](NodeFS.unlink(getPath(name), _))
@@ -17,9 +17,6 @@ import scala.concurrent._
import java.nio._
import java.nio.channels._
import java.nio.file._
import java.nio.file.attribute.BasicFileAttributes

import java.util.EnumSet

import java.io.IOException

@@ -44,23 +41,12 @@ object PathOutputDirectory {
}
}

def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] = Future {
def listFiles()(implicit ec: ExecutionContext): Future[List[String]] = Future {
blocking {
val builder = Iterable.newBuilder[String]

/* Files.list(...) would be simpler, but it requires Java Streams which
* are, at the time of this writing, not implemented in Scala.js.
*/
val dirVisitor = new SimpleFileVisitor[Path] {
override def visitFile(path: Path, attrs: BasicFileAttributes): FileVisitResult = {
if (path != directory)
builder += directory.relativize(path).toString()
super.visitFile(path, attrs)
}
val builder = List.newBuilder[String]
Files.list(directory).forEachOrdered { entry =>
builder += directory.relativize(entry).toString()
}

Files.walkFileTree(directory, EnumSet.noneOf(classOf[FileVisitOption]), 1, dirVisitor)

builder.result()
}
}
@@ -166,11 +152,11 @@

def readNext(): Unit = {
readBuf.clear()
chan.read(readBuf, pos, null, Handler)
chan.read(readBuf, pos, (), Handler)
}

object Handler extends CompletionHandler[Integer, Null] {
def completed(read: Integer, n: Null): Unit = {
object Handler extends CompletionHandler[Integer, Unit] {
def completed(read: Integer, unit: Unit): Unit = {
if (read == -1) {
/* We have checked the file size beforehand. So if we get here,
* there's no diff.
@@ -193,7 +179,7 @@ }
}
}

def failed(exc: Throwable, n: Null): Unit =
def failed(exc: Throwable, unit: Unit): Unit =
promise.failure(exc)
}

@@ -46,8 +46,8 @@ object MemOutputDirectory {
Future.successful(())
}

def listFiles()(implicit ec: ExecutionContext): Future[Iterable[String]] = synchronized {
Future.successful(_content.keys)
def listFiles()(implicit ec: ExecutionContext): Future[List[String]] = synchronized {
Future.successful(_content.keys.toList)
}

def delete(name: String)(implicit ec: ExecutionContext): Future[Unit] = synchronized {
@@ -390,7 +390,7 @@ object Infos {
val info = new GenInfoTraverser().generateTopLevelExportInfo(enclosingClass,
topLevelExportDef)
new TopLevelExportInfo(enclosingClass, info,
new ModuleID(topLevelExportDef.moduleID),
ModuleID(topLevelExportDef.moduleID),
topLevelExportDef.topLevelExportName)
}

@@ -43,10 +43,12 @@ private[backend] abstract class OutputWriter(output: OutputDirectory,

for {
currentFiles <- outputImpl.listFiles()
reports <- Future.traverse(moduleSet.modules)(m =>
ioThrottler.throttle(writeModule(m.id)))
_ <- Future.traverse(filesToRemove(currentFiles, reports))(f =>
ioThrottler.throttle(outputImpl.delete(f)))
reports <- Future.traverse(moduleSet.modules) { m =>
ioThrottler.throttle(writeModule(m.id))
}
_ <- Future.traverse(filesToRemove(currentFiles, reports)) { f =>
ioThrottler.throttle(outputImpl.delete(f))
}
} yield {
val publicModules = for {
(module, report) <- moduleSet.modules.zip(reports)