Perceptual image precision + 90% speed improvement #628

Merged: 8 commits, Sep 21, 2022
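This PR threads a new perceptualPrecision parameter through the image-based snapshot strategies. Where precision is the fraction of pixels that must match, perceptualPrecision is how closely each individual pixel must match the reference, measured as a Delta E distance in Lab color space rather than exact byte equality; per the doc comments below, 98-99% roughly corresponds to differences the human eye cannot distinguish. A minimal usage sketch (the test class, view, and values are illustrative, not part of this diff):

import SnapshotTesting
import XCTest

final class FeatureTests: XCTestCase {
  func testFeatureView() {
    let view = FeatureView() // hypothetical view under test
    // Require 99% of pixels to match, where a pixel "matches" if its
    // Delta E distance from the reference pixel is within 2% of the maximum.
    assertSnapshot(
      matching: view,
      as: .image(precision: 0.99, perceptualPrecision: 0.98)
    )
  }
}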
2 changes: 1 addition & 1 deletion Sources/SnapshotTesting/Snapshotting/Any.swift
@@ -7,7 +7,7 @@ extension Snapshotting where Format == String {
}
}

@available(macOS 10.13, watchOS 4.0, *)
@available(macOS 10.13, watchOS 4.0, tvOS 11.0, *)
extension Snapshotting where Format == String {
/// A snapshot strategy for comparing any structure based on their JSON representation.
public static var json: Snapshotting {
13 changes: 8 additions & 5 deletions Sources/SnapshotTesting/Snapshotting/CALayer.swift
@@ -9,9 +9,11 @@ extension Snapshotting where Value == CALayer, Format == NSImage {

/// A snapshot strategy for comparing layers based on pixel equality.
///
/// - Parameter precision: The percentage of pixels that must match.
public static func image(precision: Float) -> Snapshotting {
return SimplySnapshotting.image(precision: precision).pullback { layer in
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
public static func image(precision: Float, perceptualPrecision: Float = 1) -> Snapshotting {
return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { layer in
let image = NSImage(size: layer.bounds.size)
image.lockFocus()
let context = NSGraphicsContext.current!.cgContext
@@ -36,10 +38,11 @@ extension Snapshotting where Value == CALayer, Format == UIImage {
///
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - traits: A trait collection override.
public static func image(precision: Float = 1, traits: UITraitCollection = .init())
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init())
-> Snapshotting {
return SimplySnapshotting.image(precision: precision, scale: traits.displayScale).pullback { layer in
return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale).pullback { layer in
renderer(bounds: layer.bounds, for: traits).image { ctx in
layer.setNeedsLayout()
layer.layoutIfNeeded()
16 changes: 10 additions & 6 deletions Sources/SnapshotTesting/Snapshotting/CGPath.swift
@@ -9,9 +9,11 @@ extension Snapshotting where Value == CGPath, Format == NSImage {

/// A snapshot strategy for comparing bezier paths based on pixel equality.
///
/// - Parameter precision: The percentage of pixels that must match.
public static func image(precision: Float = 1, drawingMode: CGPathDrawingMode = .eoFill) -> Snapshotting {
return SimplySnapshotting.image(precision: precision).pullback { path in
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, drawingMode: CGPathDrawingMode = .eoFill) -> Snapshotting {
return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { path in
let bounds = path.boundingBoxOfPath
var transform = CGAffineTransform(translationX: -bounds.origin.x, y: -bounds.origin.y)
let path = path.copy(using: &transform)!
@@ -38,9 +40,11 @@ extension Snapshotting where Value == CGPath, Format == UIImage {

/// A snapshot strategy for comparing bezier paths based on pixel equality.
///
/// - Parameter precision: The percentage of pixels that must match.
public static func image(precision: Float = 1, scale: CGFloat = 1, drawingMode: CGPathDrawingMode = .eoFill) -> Snapshotting {
return SimplySnapshotting.image(precision: precision, scale: scale).pullback { path in
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1, drawingMode: CGPathDrawingMode = .eoFill) -> Snapshotting {
return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: scale).pullback { path in
let bounds = path.boundingBoxOfPath
let format: UIGraphicsImageRendererFormat
if #available(iOS 11.0, tvOS 11.0, *) {
8 changes: 5 additions & 3 deletions Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift
@@ -9,9 +9,11 @@ extension Snapshotting where Value == NSBezierPath, Format == NSImage {

/// A snapshot strategy for comparing bezier paths based on pixel equality.
///
/// - Parameter precision: The percentage of pixels that must match.
public static func image(precision: Float = 1) -> Snapshotting {
return SimplySnapshotting.image(precision: precision).pullback { path in
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting {
return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { path in
// Move path into frame:
let bounds = path.bounds
let transform = AffineTransform(translationByX: -bounds.origin.x, byY: -bounds.origin.y)
83 changes: 60 additions & 23 deletions Sources/SnapshotTesting/Snapshotting/NSImage.swift
@@ -1,21 +1,24 @@
#if os(macOS)
import CoreImage.CIFilterBuiltins
import Cocoa
import XCTest

extension Diffing where Value == NSImage {
/// A pixel-diffing strategy for NSImage's which requires a 100% match.
public static let image = Diffing.image(precision: 1)
public static let image = Diffing.image()

/// A pixel-diffing strategy for NSImage that allows customizing how precise the matching must be.
///
/// - Parameter precision: A value between 0 and 1, where 1 means the images must match 100% of their pixels.
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - Returns: A new diffing strategy.
public static func image(precision: Float) -> Diffing {
public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Diffing {
return .init(
toData: { NSImagePNGRepresentation($0)! },
fromData: { NSImage(data: $0)! }
) { old, new in
guard !compare(old, new, precision: precision) else { return nil }
guard !compare(old, new, precision: precision, perceptualPrecision: perceptualPrecision) else { return nil }
let difference = SnapshotTesting.diff(old, new)
let message = new.size == old.size
? "Newly-taken snapshot does not match reference."
@@ -31,16 +34,18 @@ extension Diffing where Value == NSImage {
extension Snapshotting where Value == NSImage, Format == NSImage {
/// A snapshot strategy for comparing images based on pixel equality.
public static var image: Snapshotting {
return .image(precision: 1)
return .image()
}

/// A snapshot strategy for comparing images based on pixel equality.
///
/// - Parameter precision: The percentage of pixels that must match.
public static func image(precision: Float) -> Snapshotting {
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting {
return .init(
pathExtension: "png",
diffing: .image(precision: precision)
diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision)
)
}
}
@@ -52,13 +57,11 @@ private func NSImagePNGRepresentation(_ image: NSImage) -> Data? {
return rep.representation(using: .png, properties: [:])
}

private func compare(_ old: NSImage, _ new: NSImage, precision: Float) -> Bool {
private func compare(_ old: NSImage, _ new: NSImage, precision: Float, perceptualPrecision: Float) -> Bool {
guard let oldCgImage = old.cgImage(forProposedRect: nil, context: nil, hints: nil) else { return false }
guard let newCgImage = new.cgImage(forProposedRect: nil, context: nil, hints: nil) else { return false }
guard oldCgImage.width != 0 else { return false }
guard newCgImage.width != 0 else { return false }
guard oldCgImage.width == newCgImage.width else { return false }
guard oldCgImage.height != 0 else { return false }
guard newCgImage.height != 0 else { return false }
guard oldCgImage.height == newCgImage.height else { return false }
guard let oldContext = context(for: oldCgImage) else { return false }
@@ -72,19 +75,53 @@ private func compare(_ old: NSImage, _ new: NSImage, precision: Float) -> Bool {
guard let newerContext = context(for: newerCgImage) else { return false }
guard let newerData = newerContext.data else { return false }
if memcmp(oldData, newerData, byteCount) == 0 { return true }
if precision >= 1 { return false }
let oldRep = NSBitmapImageRep(cgImage: oldCgImage)
let newRep = NSBitmapImageRep(cgImage: newerCgImage)
var differentPixelCount = 0
let pixelCount = oldRep.pixelsWide * oldRep.pixelsHigh
let threshold = (1 - precision) * Float(pixelCount)
let p1: UnsafeMutablePointer<UInt8> = oldRep.bitmapData!
let p2: UnsafeMutablePointer<UInt8> = newRep.bitmapData!
for offset in 0 ..< pixelCount * 4 {
if p1[offset] != p2[offset] {
differentPixelCount += 1
if precision >= 1, perceptualPrecision >= 1 { return false }
if perceptualPrecision < 1, #available(macOS 10.13, *) {
let deltaFilter = CIFilter(
name: "CILabDeltaE",
parameters: [
kCIInputImageKey: CIImage(cgImage: newCgImage),
"inputImage2": CIImage(cgImage: oldCgImage)
]
)
guard let deltaOutputImage = deltaFilter?.outputImage else { return false }
let extent = CGRect(x: 0, y: 0, width: oldCgImage.width, height: oldCgImage.height)
let thresholdOutputImage = try? ThresholdImageProcessorKernel.apply(
withExtent: extent,
inputs: [deltaOutputImage],
arguments: [ThresholdImageProcessorKernel.inputThresholdKey: (1 - perceptualPrecision) * 100]
)
guard let thresholdOutputImage = thresholdOutputImage else { return false }
let averageFilter = CIFilter(
name: "CIAreaAverage",
parameters: [
kCIInputImageKey: thresholdOutputImage,
kCIInputExtentKey: extent
]
)
guard let averageOutputImage = averageFilter?.outputImage else { return false }
var averagePixel: Float = 0
CIContext(options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]).render(
averageOutputImage,
toBitmap: &averagePixel,
rowBytes: MemoryLayout<Float>.size,
bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
format: .Rf,
colorSpace: nil
)
let pixelCountThreshold = 1 - precision
if averagePixel > pixelCountThreshold { return false }
} else {
let oldRep = NSBitmapImageRep(cgImage: oldCgImage).bitmapData!
let newRep = NSBitmapImageRep(cgImage: newerCgImage).bitmapData!
let byteCountThreshold = Int((1 - precision) * Float(byteCount))
var differentByteCount = 0
for offset in 0..<byteCount {
if oldRep[offset] != newRep[offset] {
differentByteCount += 1
if differentByteCount > byteCountThreshold { return false }
}
}
if Float(differentPixelCount) > threshold { return false }
}
return true
}
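The perceptual path above computes a per-pixel Delta E image with CILabDeltaE, binarizes it against the threshold (1 - perceptualPrecision) * 100 using ThresholdImageProcessorKernel, and then applies CIAreaAverage so that the single averaged value is the fraction of pixels outside the tolerance, which is finally compared against 1 - precision. The kernel itself is defined elsewhere in this PR; the following is only a sketch of what such a kernel can look like, assuming a Metal Performance Shaders binary threshold (names and details are illustrative, not the exact merged code):

import CoreImage
import Metal
import MetalPerformanceShaders

// Sketch: a processor kernel that maps each Delta E value to 0 (within tolerance)
// or 1 (out of tolerance), so that CIAreaAverage over its output equals the
// fraction of failing pixels.
@available(macOS 10.13, iOS 10.0, tvOS 10.0, *)
final class ThresholdImageProcessorKernel: CIImageProcessorKernel {
  static let inputThresholdKey = "thresholdValue"
  static let device = MTLCreateSystemDefaultDevice()

  override class func process(
    with inputs: [CIImageProcessorInput]?,
    arguments: [String: Any]?,
    output: CIImageProcessorOutput
  ) throws {
    guard
      let device = device,
      let commandBuffer = output.metalCommandBuffer,
      let input = inputs?.first,
      let sourceTexture = input.metalTexture,
      let destinationTexture = output.metalTexture,
      let thresholdValue = arguments?[inputThresholdKey] as? Float
    else { return }
    // Pixels with Delta E above thresholdValue become 1.0, all others become 0.
    let threshold = MPSImageThresholdBinary(
      device: device,
      thresholdValue: thresholdValue,
      maximumValue: 1.0,
      linearGrayColorTransform: nil
    )
    threshold.encode(
      commandBuffer: commandBuffer,
      sourceTexture: sourceTexture,
      destinationTexture: destinationTexture
    )
  }
}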
5 changes: 3 additions & 2 deletions Sources/SnapshotTesting/Snapshotting/NSView.swift
@@ -11,9 +11,10 @@ extension Snapshotting where Value == NSView, Format == NSImage {
///
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - size: A view size override.
public static func image(precision: Float = 1, size: CGSize? = nil) -> Snapshotting {
return SimplySnapshotting.image(precision: precision).asyncPullback { view in
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil) -> Snapshotting {
return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).asyncPullback { view in
let initialSize = view.frame.size
if let size = size { view.frame.size = size }
guard view.frame.width > 0, view.frame.height > 0 else {
5 changes: 3 additions & 2 deletions Sources/SnapshotTesting/Snapshotting/NSViewController.swift
@@ -11,9 +11,10 @@ extension Snapshotting where Value == NSViewController, Format == NSImage {
///
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - size: A view size override.
public static func image(precision: Float = 1, size: CGSize? = nil) -> Snapshotting {
return Snapshotting<NSView, NSImage>.image(precision: precision, size: size).pullback { $0.view }
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil) -> Snapshotting {
return Snapshotting<NSView, NSImage>.image(precision: precision, perceptualPrecision: perceptualPrecision, size: size).pullback { $0.view }
}
}

14 changes: 8 additions & 6 deletions Sources/SnapshotTesting/Snapshotting/SceneKit.swift
@@ -12,9 +12,10 @@ extension Snapshotting where Value == SCNScene, Format == NSImage {
///
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - size: The size of the scene.
public static func image(precision: Float = 1, size: CGSize) -> Snapshotting {
return .scnScene(precision: precision, size: size)
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) -> Snapshotting {
return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
}
}
#elseif os(iOS) || os(tvOS)
@@ -23,16 +24,17 @@ extension Snapshotting where Value == SCNScene, Format == UIImage {
///
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - size: The size of the scene.
public static func image(precision: Float = 1, size: CGSize) -> Snapshotting {
return .scnScene(precision: precision, size: size)
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) -> Snapshotting {
return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
}
}
#endif

fileprivate extension Snapshotting where Value == SCNScene, Format == Image {
static func scnScene(precision: Float, size: CGSize) -> Snapshotting {
return Snapshotting<View, Image>.image(precision: precision).pullback { scene in
static func scnScene(precision: Float, perceptualPrecision: Float, size: CGSize) -> Snapshotting {
return Snapshotting<View, Image>.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { scene in
let view = SCNView(frame: .init(x: 0, y: 0, width: size.width, height: size.height))
view.scene = scene
return view
14 changes: 8 additions & 6 deletions Sources/SnapshotTesting/Snapshotting/SpriteKit.swift
@@ -12,9 +12,10 @@ extension Snapshotting where Value == SKScene, Format == NSImage {
///
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - size: The size of the scene.
public static func image(precision: Float = 1, size: CGSize) -> Snapshotting {
return .skScene(precision: precision, size: size)
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) -> Snapshotting {
return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
}
}
#elseif os(iOS) || os(tvOS)
@@ -23,16 +24,17 @@ extension Snapshotting where Value == SKScene, Format == UIImage {
///
/// - Parameters:
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - size: The size of the scene.
public static func image(precision: Float = 1, size: CGSize) -> Snapshotting {
return .skScene(precision: precision, size: size)
public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) -> Snapshotting {
return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
}
}
#endif

fileprivate extension Snapshotting where Value == SKScene, Format == Image {
static func skScene(precision: Float, size: CGSize) -> Snapshotting {
return Snapshotting<View, Image>.image(precision: precision).pullback { scene in
static func skScene(precision: Float, perceptualPrecision: Float, size: CGSize) -> Snapshotting {
return Snapshotting<View, Image>.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { scene in
let view = SKView(frame: .init(x: 0, y: 0, width: size.width, height: size.height))
view.presentScene(scene)
return view
4 changes: 3 additions & 1 deletion Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift
@@ -28,11 +28,13 @@ extension Snapshotting where Value: SwiftUI.View, Format == UIImage {
/// - Parameters:
/// - drawHierarchyInKeyWindow: Utilize the simulator's key window in order to render `UIAppearance` and `UIVisualEffect`s. This option requires a host application for your tests and will _not_ work for framework test targets.
/// - precision: The percentage of pixels that must match.
/// - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
/// - layout: A view layout override.
/// - traits: A trait collection override.
public static func image(
drawHierarchyInKeyWindow: Bool = false,
precision: Float = 1,
perceptualPrecision: Float = 1,
layout: SwiftUISnapshotLayout = .sizeThatFits,
traits: UITraitCollection = .init()
)
@@ -51,7 +53,7 @@ extension Snapshotting where Value: SwiftUI.View, Format == UIImage {
config = .init(safeArea: .zero, size: size, traits: traits)
}

return SimplySnapshotting.image(precision: precision, scale: traits.displayScale).asyncPullback { view in
return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale).asyncPullback { view in
var config = config

let controller: UIViewController