Skip to content

Commit

Permalink
Update source mappings to exclude filtered files.
Browse files Browse the repository at this point in the history
  • Loading branch information
Han van Venrooij committed Dec 26, 2015
1 parent 1ebf123 commit 174a366
Show file tree
Hide file tree
Showing 4 changed files with 178 additions and 6 deletions.
10 changes: 9 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,4 +40,12 @@
- Fix source map paths

## 1.3.2
- Allow including web-jarred Sass files

## 1.4.0
- Fix caret position on exceptions
- Add support for generic compilation issues (rather than line based ones)
- Reuse the same compiler instance within an assets run
- Use the libsass error code to determine whether the compilation succeeded or not
- Make sure that assets can be found (now that absolute URL's are used in the source map)
- Update the source mappings to exclude filtered files.
72 changes: 72 additions & 0 deletions src/org/irundaia/base64/Base64VLQ.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
/*
* Copyright 2015 Han van Venrooij
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.irundaia.base64

import scala.annotation.tailrec

object Base64VLQ {

  private val baseShift = 5
  private val base = 1 << baseShift
  private val baseMask = base - 1
  private val continuationBit = base
  private val base64Chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
  private val base64CharMap = base64Chars.zipWithIndex.toMap

  /**
   * Decodes a base64 VLQ string into the sequence of signed integers it encodes.
   *
   * Each value is assembled from 5-bit groups, least-significant group first; the sixth bit of
   * every base64 digit is the continuation flag, and the lowest bit of the assembled value is
   * the sign flag.
   *
   * @param encoded the base64 VLQ encoded input
   * @return the decoded signed integers, in input order
   * @throws NoSuchElementException if the input contains a character outside the base64 alphabet
   */
  def decode(encoded: String): Seq[Int] = {
    val (decoded, _, _) = encoded.map(base64CharMap).foldLeft((List.empty[Int], 0, 0)) {
      case ((values, accumulated, shift), digit) =>
        val updated = accumulated + (decodedValue(digit) << shift)

        if (shouldContinueDecode(digit))
          // More 5-bit groups follow for the current value.
          (values, updated, shift + baseShift)
        else {
          // Lowest bit is the sign; the remaining bits hold the magnitude.
          val value = if (shouldNegateDecodedValue(updated)) -(updated >> 1) else updated >> 1
          // Prepend (O(1)) instead of the previous `values ++ Seq(...)` (O(n) per element,
          // O(n^2) overall); the list is reversed once at the end.
          (value :: values, 0, 0)
        }
    }
    decoded.reverse
  }

  /**
   * Encodes a single signed integer as a base64 VLQ string.
   *
   * @param value the value to encode
   * @return the base64 VLQ representation of the value
   */
  def encode(value: Int): String = {
    // Move the sign into the lowest bit so the magnitude packs into 5-bit groups.
    val toEncodeValue =
      if (value < 0) ((-value) << 1) | 1
      else value << 1

    encodePartial(toEncodeValue)
  }

  /** Emits 5-bit groups least-significant first, setting the continuation bit on all but the last. */
  @tailrec private def encodePartial(value: Int, acc: String = ""): String = {
    val digit = value & baseMask
    val nextValue = value >>> baseShift
    val base64Char = base64Chars(if (nextValue > 0) digit | continuationBit else digit)

    if (nextValue > 0)
      encodePartial(nextValue, acc + base64Char)
    else
      acc + base64Char
  }

  /** True when the digit's continuation bit is set (more 5-bit groups follow). */
  private def shouldContinueDecode(digit: Int): Boolean = (digit & continuationBit) > 0
  /** True when the assembled value's sign bit (lowest bit) is set. */
  private def shouldNegateDecodedValue(value: Int): Boolean = (value & 1) > 0
  /** Strips the continuation bit, leaving only the 5 value bits. */
  private def decodedValue(digit: Int): Int = digit & baseMask
}
28 changes: 23 additions & 5 deletions src/org/irundaia/sbt/sass/compiler/SassCompiler.scala
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import java.util.regex.Pattern
import com.typesafe.sbt.web.incremental.OpSuccess
import io.bit3.jsass.{Output, Compiler}
import org.irundaia.sbt.sass._
import org.irundaia.sourcemap.SourceMapping
import play.api.libs.json._

import scala.util.Try
Expand Down Expand Up @@ -107,30 +108,47 @@ class SassCompiler(compilerSettings: CompilerSettings) {

// Rewrites the source map emitted by libsass so browsers can resolve the original Sass
// sources: normalizes source paths against `baseDir`, drops sources (and their mappings)
// that do not exist on disk, optionally embeds the source contents, and rewrites the
// remaining source paths to be relative.
//
// NOTE(review): this span appears to be a scraped diff with the +/- markers stripped —
// several statements occur twice (an old and a new variant), e.g. the two
// `val sourcesWithContents` definitions and the two `val updatedMap = if (...)` headers
// below. As written this will not compile; the later variant of each pair looks like the
// intended (post-change) code. Restore from the original commit before editing further.
private def fixSourceMap(originalSourceMap: String, baseDir: String, sourceDir: String): String = {
val parsedSourceMap = Json.parse(originalSourceMap).as[JsObject]
// NOTE(review): pre-change line from the diff (superseded a few lines below).
val sourcesContent = (parsedSourceMap \ "sourcesContent").toOption.map(_.as[Seq[String]])
// Combine source file references with their contents
// NOTE(review): pre-change lines from the diff (superseded by the `sources` form below).
val sourcesWithContents = normalizeFiles(baseDir, (parsedSourceMap \ "sources").as[Seq[String]])
.zip(sourcesContent.getOrElse(Stream.continually("")))
val sources = normalizeFiles(baseDir, (parsedSourceMap \ "sources").as[Seq[String]])
val sourcesWithContents = sources
.zip((parsedSourceMap \ "sourcesContent").toOption.map(_.as[Seq[String]]).getOrElse(Stream.continually("")))
.toMap
.filterKeys(_.exists) // Filter non-existing sources

// Exclude unknown files from the mappings
val excludedSources = sources.zipWithIndex.toMap.filterKeys(!_.exists)
val mappings = (parsedSourceMap \ "mappings").toOption.map(_.as[String]).getOrElse("")
val mappingsWithoutExcludedSources = excludeMappings(SourceMapping.decode(mappings), excludedSources.values.toSet)

// Use relative file names to make sure that the browser can find the files when they are moved to the target dir
val transformedSources = sourcesWithContents.keys
.map(convertToRelativePath)
.map(JsString.apply)

// Update the source map with the newly computed sources (contents)
// NOTE(review): pre-change header from the diff (superseded by the `embedSources` check).
val updatedMap = if (sourcesContent.isDefined)
val updatedMap = if (compilerSettings.embedSources)
parsedSourceMap ++
Json.obj("sources" -> JsArray(transformedSources.toSeq)) ++
Json.obj("mappings" -> JsString(mappingsWithoutExcludedSources)) ++
Json.obj("sourcesContent" -> JsArray(sourcesWithContents.values.map(JsString).toSeq))
else
parsedSourceMap ++
// NOTE(review): pre-change line from the diff (superseded by the two lines below).
Json.obj("sources" -> JsArray(transformedSources.toSeq))
Json.obj("sources" -> JsArray(transformedSources.toSeq)) ++
Json.obj("mappings" -> JsString(mappingsWithoutExcludedSources))

Json.prettyPrint(updatedMap)
}

/**
 * Drops all mappings that reference an excluded source file and renumbers the remaining
 * source-file indices so they stay contiguous, then re-encodes the result.
 *
 * @param mappings            the decoded source mappings
 * @param excludedFileIndices indices (into the "sources" array) of files to remove
 * @return the re-encoded "mappings" string without the excluded sources
 */
private def excludeMappings(mappings: Seq[SourceMapping], excludedFileIndices: Set[Int]): String = {
  // For every file index that occurs, how many excluded indices come before it;
  // that count is the amount the index must shift down after removal.
  val shiftByFileIndex: Map[Int, Int] =
    mappings
      .map(_.sourceFileIndex)
      .distinct
      .map(fileIndex => fileIndex -> excludedFileIndices.count(_ < fileIndex))
      .toMap

  val retained = for {
    mapping <- mappings
    if !excludedFileIndices.contains(mapping.sourceFileIndex)
  } yield mapping.copy(sourceFileIndex = mapping.sourceFileIndex - shiftByFileIndex(mapping.sourceFileIndex))

  SourceMapping.encode(retained)
}

private def normalizeFiles(baseDir: String, fileNames: Iterable[String]): Seq[File] =
fileNames
.map(f => normalizeFile(new File(s"""$baseDir/$f""")))
Expand Down
74 changes: 74 additions & 0 deletions src/org/irundaia/sourcemap/SourceMapping.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
/*
* Copyright 2015 Han van Venrooij
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.irundaia.sourcemap

import org.irundaia.base64.Base64VLQ

/**
 * One source-map segment in decoded (absolute) form.
 *
 * @param sourceFileIndex index into the source map's "sources" array
 * @param sourceLine      zero-based line in the original source file
 * @param sourceColumn    zero-based column in the original source file
 * @param targetLine      zero-based line in the generated file
 * @param targetColumn    zero-based column in the generated file
 */
case class SourceMapping(sourceFileIndex: Int, sourceLine: Int, sourceColumn: Int, targetLine: Int, targetColumn: Int)

object SourceMapping {
  private val lineSeparator = ";"
  private val segmentSeparator = ","

  /**
   * Decodes the "mappings" string of a source map (revision 3) into absolute positions.
   *
   * Per the source map v3 spec, the source-file index, source line and source column deltas
   * carry over across generated lines; only the target column resets at every ';'. (The
   * previous implementation reset all four fields at each line, which misreads any map whose
   * producer follows the spec.)
   *
   * @param input the raw "mappings" value
   * @return all segments with absolute field values, in encoding order
   */
  def decode(input: String): Seq[SourceMapping] = {
    // Limit -1 keeps trailing empty groups so target line numbers stay correct.
    val lines = input.split(lineSeparator, -1).toSeq.zipWithIndex

    val (decoded, _) = lines.foldLeft((Vector.empty[SourceMapping], SourceMapping(0, 0, 0, 0, 0))) {
      case ((acc, carry), (line, targetLine)) => decodeLine(line, targetLine, acc, carry)
    }
    decoded
  }

  /** Decodes one generated line, threading the carried-over source position between lines. */
  private def decodeLine(
      line: String,
      targetLine: Int,
      acc: Vector[SourceMapping],
      carry: SourceMapping): (Vector[SourceMapping], SourceMapping) =
    if (line.isEmpty)
      (acc, carry)
    else
      line.split(segmentSeparator).toSeq.map(decodeSegment)
        // Target column restarts at 0 on every line; source fields carry over.
        .foldLeft((acc, carry.copy(targetLine = targetLine, targetColumn = 0))) {
          case ((mappings, previous), (targetColumnInc, sourceFileInc, sourceLineInc, sourceColumnInc)) =>
            val next = SourceMapping(
              previous.sourceFileIndex + sourceFileInc,
              previous.sourceLine + sourceLineInc,
              previous.sourceColumn + sourceColumnInc,
              targetLine,
              previous.targetColumn + targetColumnInc)
            (mappings :+ next, next)
        }

  /**
   * Splits one segment into its four relative fields.
   *
   * Assumes at least 4 VLQ fields per segment (a 5th name-index field, if present, is
   * ignored), as emitted by libsass. 1-field segments allowed by the spec are not
   * supported here — TODO confirm libsass never emits them.
   */
  private def decodeSegment(segment: String): (Int, Int, Int, Int) = {
    val fields = Base64VLQ.decode(segment)

    (fields(0), fields(1), fields(2), fields(3))
  }

  /**
   * Encodes segments back into a "mappings" string.
   *
   * Fixes relative to the previous implementation:
   *  - generated lines are emitted in ascending target-line order (the old
   *    `groupBy(...).values` iterated an unordered Map, which could scramble lines),
   *  - lines without segments are emitted as empty groups, so later lines keep their
   *    correct ';' offset,
   *  - source position deltas carry over across lines, matching [[decode]] and the spec.
   *
   * @param mappings the segments to encode (absolute positions)
   * @return the encoded "mappings" value
   */
  def encode(mappings: Seq[SourceMapping]): String =
    if (mappings.isEmpty)
      ""
    else {
      val byTargetLine = mappings.groupBy(_.targetLine)
      val lastLine = mappings.iterator.map(_.targetLine).max

      val (encodedLines, _) = (0 to lastLine).foldLeft((Vector.empty[String], SourceMapping(0, 0, 0, 0, 0))) {
        case ((lines, carry), targetLine) =>
          val (segments, nextCarry) =
            byTargetLine.getOrElse(targetLine, Seq())
              // Target column restarts at 0 on every line; source fields carry over.
              .foldLeft((Vector.empty[String], carry.copy(targetColumn = 0))) {
                case ((encoded, previous), mapping) => (encoded :+ encodeSegment(mapping, previous), mapping)
              }
          (lines :+ segments.mkString(segmentSeparator), nextCarry)
      }
      encodedLines.mkString(lineSeparator)
    }

  /** Encodes one segment as the four VLQ deltas relative to the previous segment. */
  private def encodeSegment(currentMapping: SourceMapping, previousMapping: SourceMapping): String =
    Seq(
      currentMapping.targetColumn - previousMapping.targetColumn,       // Target column increment
      currentMapping.sourceFileIndex - previousMapping.sourceFileIndex, // Source file increment
      currentMapping.sourceLine - previousMapping.sourceLine,           // Source line increment
      currentMapping.sourceColumn - previousMapping.sourceColumn        // Source column increment
    ).map(Base64VLQ.encode).mkString
}

0 comments on commit 174a366

Please sign in to comment.