diff --git a/Sources/SnapshotTesting/Snapshotting/Any.swift b/Sources/SnapshotTesting/Snapshotting/Any.swift
index 55d5155a..385c2642 100644
--- a/Sources/SnapshotTesting/Snapshotting/Any.swift
+++ b/Sources/SnapshotTesting/Snapshotting/Any.swift
@@ -7,7 +7,7 @@ extension Snapshotting where Format == String {
   }
 }
 
-@available(macOS 10.13, watchOS 4.0, *)
+@available(macOS 10.13, watchOS 4.0, tvOS 11.0, *)
 extension Snapshotting where Format == String {
   /// A snapshot strategy for comparing any structure based on their JSON representation.
   public static var json: Snapshotting {
diff --git a/Sources/SnapshotTesting/Snapshotting/CALayer.swift b/Sources/SnapshotTesting/Snapshotting/CALayer.swift
index 835095f5..237a5910 100644
--- a/Sources/SnapshotTesting/Snapshotting/CALayer.swift
+++ b/Sources/SnapshotTesting/Snapshotting/CALayer.swift
@@ -9,9 +9,11 @@ extension Snapshotting where Value == CALayer, Format == NSImage {
 
   /// A snapshot strategy for comparing layers based on pixel equality.
   ///
-  /// - Parameter precision: The percentage of pixels that must match.
-  public static func image(precision: Float) -> Snapshotting {
-    return SimplySnapshotting.image(precision: precision).pullback { layer in
+  /// - Parameters:
+  ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
+  public static func image(precision: Float, perceptualPrecision: Float = 1) -> Snapshotting {
+    return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { layer in
       let image = NSImage(size: layer.bounds.size)
       image.lockFocus()
       let context = NSGraphicsContext.current!.cgContext
@@ -36,10 +38,11 @@ extension Snapshotting where Value == CALayer, Format == UIImage {
   ///
   /// - Parameters:
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - traits: A trait collection override.
-  public static func image(precision: Float = 1, traits: UITraitCollection = .init())
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init())
     -> Snapshotting {
-      return SimplySnapshotting.image(precision: precision, scale: traits.displayScale).pullback { layer in
+      return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale).pullback { layer in
         renderer(bounds: layer.bounds, for: traits).image { ctx in
           layer.setNeedsLayout()
           layer.layoutIfNeeded()
diff --git a/Sources/SnapshotTesting/Snapshotting/CGPath.swift b/Sources/SnapshotTesting/Snapshotting/CGPath.swift
index d7ee7df1..8e96568e 100644
--- a/Sources/SnapshotTesting/Snapshotting/CGPath.swift
+++ b/Sources/SnapshotTesting/Snapshotting/CGPath.swift
@@ -9,9 +9,11 @@ extension Snapshotting where Value == CGPath, Format == NSImage {
 
   /// A snapshot strategy for comparing bezier paths based on pixel equality.
   ///
-  /// - Parameter precision: The percentage of pixels that must match.
-  public static func image(precision: Float = 1, drawingMode: CGPathDrawingMode = .eoFill) -> Snapshotting {
-    return SimplySnapshotting.image(precision: precision).pullback { path in
+  /// - Parameters:
+  ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, drawingMode: CGPathDrawingMode = .eoFill) -> Snapshotting {
+    return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { path in
       let bounds = path.boundingBoxOfPath
       var transform = CGAffineTransform(translationX: -bounds.origin.x, y: -bounds.origin.y)
       let path = path.copy(using: &transform)!
@@ -38,9 +40,11 @@ extension Snapshotting where Value == CGPath, Format == UIImage {
 
   /// A snapshot strategy for comparing bezier paths based on pixel equality.
   ///
-  /// - Parameter precision: The percentage of pixels that must match.
-  public static func image(precision: Float = 1, scale: CGFloat = 1, drawingMode: CGPathDrawingMode = .eoFill) -> Snapshotting {
-    return SimplySnapshotting.image(precision: precision, scale: scale).pullback { path in
+  /// - Parameters:
+  ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1, drawingMode: CGPathDrawingMode = .eoFill) -> Snapshotting {
+    return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: scale).pullback { path in
       let bounds = path.boundingBoxOfPath
       let format: UIGraphicsImageRendererFormat
       if #available(iOS 11.0, tvOS 11.0, *) {
diff --git a/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift b/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift
index d9d5defc..b76bf57d 100644
--- a/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift
+++ b/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift
@@ -9,9 +9,11 @@ extension Snapshotting where Value == NSBezierPath, Format == NSImage {
 
   /// A snapshot strategy for comparing bezier paths based on pixel equality.
   ///
-  /// - Parameter precision: The percentage of pixels that must match.
-  public static func image(precision: Float = 1) -> Snapshotting {
-    return SimplySnapshotting.image(precision: precision).pullback { path in
+  /// - Parameters:
+  ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting {
+    return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { path in
       // Move path info frame:
       let bounds = path.bounds
       let transform = AffineTransform(translationByX: -bounds.origin.x, byY: -bounds.origin.y)
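
The mechanical pattern above repeats for every image-based strategy in the patch: `image` gains a `perceptualPrecision: Float = 1` parameter, defaulted so existing call sites compile unchanged, and forwards it to the underlying `SimplySnapshotting.image` diffing. A test opting into the new tolerance might look like the following sketch; the layer and the chosen values are illustrative, not part of the patch:

    import SnapshotTesting
    import XCTest

    final class ExampleTests: XCTestCase {
      func testLayer() {
        // Hypothetical layer under test.
        let layer = CALayer()
        layer.frame = CGRect(x: 0, y: 0, width: 100, height: 100)
        layer.backgroundColor = NSColor.systemBlue.cgColor
        // Each pixel may drift by up to 2 Delta E units (perceptualPrecision
        // 0.98) before it counts as different; precision stays at 1, so every
        // pixel must still pass that perceptual test.
        assertSnapshot(matching: layer, as: .image(precision: 1, perceptualPrecision: 0.98))
      }
    }
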
diff --git a/Sources/SnapshotTesting/Snapshotting/NSImage.swift b/Sources/SnapshotTesting/Snapshotting/NSImage.swift
index 625639d8..886f7524 100644
--- a/Sources/SnapshotTesting/Snapshotting/NSImage.swift
+++ b/Sources/SnapshotTesting/Snapshotting/NSImage.swift
@@ -1,21 +1,24 @@
 #if os(macOS)
+import CoreImage.CIFilterBuiltins
 import Cocoa
 import XCTest
 
 extension Diffing where Value == NSImage {
   /// A pixel-diffing strategy for NSImage's which requires a 100% match.
-  public static let image = Diffing.image(precision: 1)
+  public static let image = Diffing.image()
 
   /// A pixel-diffing strategy for NSImage that allows customizing how precise the matching must be.
   ///
-  /// - Parameter precision: A value between 0 and 1, where 1 means the images must match 100% of their pixels.
+  /// - Parameters:
+  ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   /// - Returns: A new diffing strategy.
-  public static func image(precision: Float) -> Diffing {
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Diffing {
     return .init(
       toData: { NSImagePNGRepresentation($0)! },
       fromData: { NSImage(data: $0)! }
     ) { old, new in
-      guard !compare(old, new, precision: precision) else { return nil }
+      guard !compare(old, new, precision: precision, perceptualPrecision: perceptualPrecision) else { return nil }
       let difference = SnapshotTesting.diff(old, new)
       let message = new.size == old.size
         ? "Newly-taken snapshot does not match reference."
@@ -31,16 +34,18 @@ extension Diffing where Value == NSImage {
 extension Snapshotting where Value == NSImage, Format == NSImage {
   /// A snapshot strategy for comparing images based on pixel equality.
   public static var image: Snapshotting {
-    return .image(precision: 1)
+    return .image()
   }
 
   /// A snapshot strategy for comparing images based on pixel equality.
   ///
-  /// - Parameter precision: The percentage of pixels that must match.
-  public static func image(precision: Float) -> Snapshotting {
+  /// - Parameters:
+  ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting {
     return .init(
       pathExtension: "png",
-      diffing: .image(precision: precision)
+      diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision)
     )
   }
 }
@@ -52,13 +57,11 @@ private func NSImagePNGRepresentation(_ image: NSImage) -> Data? {
   return rep.representation(using: .png, properties: [:])
 }
 
-private func compare(_ old: NSImage, _ new: NSImage, precision: Float) -> Bool {
+private func compare(_ old: NSImage, _ new: NSImage, precision: Float, perceptualPrecision: Float) -> Bool {
   guard let oldCgImage = old.cgImage(forProposedRect: nil, context: nil, hints: nil) else { return false }
   guard let newCgImage = new.cgImage(forProposedRect: nil, context: nil, hints: nil) else { return false }
-  guard oldCgImage.width != 0 else { return false }
   guard newCgImage.width != 0 else { return false }
   guard oldCgImage.width == newCgImage.width else { return false }
-  guard oldCgImage.height != 0 else { return false }
   guard newCgImage.height != 0 else { return false }
   guard oldCgImage.height == newCgImage.height else { return false }
   guard let oldContext = context(for: oldCgImage) else { return false }
@@ -72,19 +75,54 @@ private func compare(_ old: NSImage, _ new: NSImage, precision: Float) -> Bool {
   guard let newerContext = context(for: newerCgImage) else { return false }
   guard let newerData = newerContext.data else { return false }
   if memcmp(oldData, newerData, byteCount) == 0 { return true }
-  if precision >= 1 { return false }
-  let oldRep = NSBitmapImageRep(cgImage: oldCgImage)
-  let newRep = NSBitmapImageRep(cgImage: newerCgImage)
-  var differentPixelCount = 0
-  let pixelCount = oldRep.pixelsWide * oldRep.pixelsHigh
-  let threshold = (1 - precision) * Float(pixelCount)
-  let p1: UnsafeMutablePointer<UInt8> = oldRep.bitmapData!
-  let p2: UnsafeMutablePointer<UInt8> = newRep.bitmapData!
-  for offset in 0 ..< pixelCount * 4 {
-    if p1[offset] != p2[offset] {
-      differentPixelCount += 1
+  if precision >= 1, perceptualPrecision >= 1 { return false }
+  if perceptualPrecision < 1, #available(macOS 10.13, *) {
+    let deltaFilter = CIFilter(
+      name: "CILabDeltaE",
+      parameters: [
+        kCIInputImageKey: CIImage(cgImage: newCgImage),
+        "inputImage2": CIImage(cgImage: oldCgImage)
+      ]
+    )
+    guard let deltaOutputImage = deltaFilter?.outputImage else { return false }
+    let extent = CGRect(x: 0, y: 0, width: oldCgImage.width, height: oldCgImage.height)
+    guard
+      let thresholdOutputImage = try? ThresholdImageProcessorKernel.apply(
+        withExtent: extent,
+        inputs: [deltaOutputImage],
+        arguments: [ThresholdImageProcessorKernel.inputThresholdKey: (1 - perceptualPrecision) * 100]
+      )
+    else { return false }
+    let averageFilter = CIFilter(
+      name: "CIAreaAverage",
+      parameters: [
+        kCIInputImageKey: thresholdOutputImage,
+        kCIInputExtentKey: extent
+      ]
+    )
+    guard let averageOutputImage = averageFilter?.outputImage else { return false }
+    var averagePixel: Float = 0
+    CIContext(options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]).render(
+      averageOutputImage,
+      toBitmap: &averagePixel,
+      rowBytes: MemoryLayout<Float>.size,
+      bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
+      format: .Rf,
+      colorSpace: nil
+    )
+    let pixelCountThreshold = 1 - precision
+    if averagePixel > pixelCountThreshold { return false }
+  } else {
+    let oldRep = NSBitmapImageRep(cgImage: oldCgImage).bitmapData!
+    let newRep = NSBitmapImageRep(cgImage: newerCgImage).bitmapData!
+    let byteCountThreshold = Int((1 - precision) * Float(byteCount))
+    var differentByteCount = 0
+    for offset in 0..<byteCount {
+      if oldRep[offset] != newRep[offset] {
+        differentByteCount += 1
+        if differentByteCount > byteCountThreshold { return false }
+      }
     }
-    if Float(differentPixelCount) > threshold { return false }
   }
   return true
 }
diff --git a/Sources/SnapshotTesting/Snapshotting/NSView.swift b/Sources/SnapshotTesting/Snapshotting/NSView.swift
index 292570f2..b0402f92 100644
--- a/Sources/SnapshotTesting/Snapshotting/NSView.swift
+++ b/Sources/SnapshotTesting/Snapshotting/NSView.swift
@@ -11,9 +11,10 @@ extension Snapshotting where Value == NSView, Format == NSImage {
   ///
   /// - Parameters:
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: A view size override.
-  public static func image(precision: Float = 1, size: CGSize? = nil) -> Snapshotting {
-    return SimplySnapshotting.image(precision: precision).asyncPullback { view in
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil) -> Snapshotting {
+    return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).asyncPullback { view in
       let initialSize = view.frame.size
       if let size = size { view.frame.size = size }
       guard view.frame.width > 0, view.frame.height > 0 else {
diff --git a/Sources/SnapshotTesting/Snapshotting/NSViewController.swift b/Sources/SnapshotTesting/Snapshotting/NSViewController.swift
index 70d97247..6f316e8a 100644
--- a/Sources/SnapshotTesting/Snapshotting/NSViewController.swift
+++ b/Sources/SnapshotTesting/Snapshotting/NSViewController.swift
@@ -11,9 +11,10 @@ extension Snapshotting where Value == NSViewController, Format == NSImage {
   ///
   /// - Parameters:
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: A view size override.
-  public static func image(precision: Float = 1, size: CGSize? = nil) -> Snapshotting {
-    return Snapshotting.image(precision: precision, size: size).pullback { $0.view }
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil) -> Snapshotting {
+    return Snapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, size: size).pullback { $0.view }
   }
 }
 
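
Note how the two tolerances compose in the comparison above: `perceptualPrecision` becomes a CIELAB Delta E cutoff applied per pixel, while `precision` caps the fraction of pixels allowed to exceed that cutoff. A worked sketch of the arithmetic, using the values this patch's own test exercises:

    let precision: Float = 0.995
    let perceptualPrecision: Float = 0.98

    // The CILabDeltaE output is thresholded at this many Delta E units.
    let deltaEThreshold = (1 - perceptualPrecision) * 100  // 2.0

    // CIAreaAverage then reports the fraction of pixels over the cutoff,
    // which must stay within this pixel-count budget for the match to pass.
    let pixelCountThreshold = 1 - precision  // 0.005
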
diff --git a/Sources/SnapshotTesting/Snapshotting/SceneKit.swift b/Sources/SnapshotTesting/Snapshotting/SceneKit.swift
index 86dc7ff5..42bd21b9 100644
--- a/Sources/SnapshotTesting/Snapshotting/SceneKit.swift
+++ b/Sources/SnapshotTesting/Snapshotting/SceneKit.swift
@@ -12,9 +12,10 @@ extension Snapshotting where Value == SCNScene, Format == NSImage {
   ///
   /// - Parameters:
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: The size of the scene.
-  public static func image(precision: Float = 1, size: CGSize) -> Snapshotting {
-    return .scnScene(precision: precision, size: size)
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) -> Snapshotting {
+    return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
   }
 }
 #elseif os(iOS) || os(tvOS)
@@ -23,16 +24,17 @@ extension Snapshotting where Value == SCNScene, Format == UIImage {
   ///
   /// - Parameters:
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: The size of the scene.
-  public static func image(precision: Float = 1, size: CGSize) -> Snapshotting {
-    return .scnScene(precision: precision, size: size)
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) -> Snapshotting {
+    return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
   }
 }
 #endif
 
 fileprivate extension Snapshotting where Value == SCNScene, Format == Image {
-  static func scnScene(precision: Float, size: CGSize) -> Snapshotting {
-    return Snapshotting.image(precision: precision).pullback { scene in
+  static func scnScene(precision: Float, perceptualPrecision: Float, size: CGSize) -> Snapshotting {
+    return Snapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { scene in
       let view = SCNView(frame: .init(x: 0, y: 0, width: size.width, height: size.height))
       view.scene = scene
       return view
diff --git a/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift b/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift
index 8d71ce13..2a1f6036 100644
--- a/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift
+++ b/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift
@@ -12,9 +12,10 @@ extension Snapshotting where Value == SKScene, Format == NSImage {
   ///
   /// - Parameters:
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: The size of the scene.
-  public static func image(precision: Float = 1, size: CGSize) -> Snapshotting {
-    return .skScene(precision: precision, size: size)
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) -> Snapshotting {
+    return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
   }
 }
 #elseif os(iOS) || os(tvOS)
@@ -23,16 +24,17 @@ extension Snapshotting where Value == SKScene, Format == UIImage {
   ///
   /// - Parameters:
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: The size of the scene.
-  public static func image(precision: Float = 1, size: CGSize) -> Snapshotting {
-    return .skScene(precision: precision, size: size)
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) -> Snapshotting {
+    return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
   }
 }
 #endif
 
 fileprivate extension Snapshotting where Value == SKScene, Format == Image {
-  static func skScene(precision: Float, size: CGSize) -> Snapshotting {
-    return Snapshotting.image(precision: precision).pullback { scene in
+  static func skScene(precision: Float, perceptualPrecision: Float, size: CGSize) -> Snapshotting {
+    return Snapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision).pullback { scene in
       let view = SKView(frame: .init(x: 0, y: 0, width: size.width, height: size.height))
       view.presentScene(scene)
       return view
diff --git a/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift b/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift
index bd02a76b..6dfb40ad 100644
--- a/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift
+++ b/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift
@@ -28,11 +28,13 @@ extension Snapshotting where Value: SwiftUI.View, Format == UIImage {
   /// - Parameters:
   ///   - drawHierarchyInKeyWindow: Utilize the simulator's key window in order to render `UIAppearance` and `UIVisualEffect`s. This option requires a host application for your tests and will _not_ work for framework test targets.
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - layout: A view layout override.
   ///   - traits: A trait collection override.
   public static func image(
     drawHierarchyInKeyWindow: Bool = false,
     precision: Float = 1,
+    perceptualPrecision: Float = 1,
     layout: SwiftUISnapshotLayout = .sizeThatFits,
     traits: UITraitCollection = .init()
   )
@@ -51,7 +53,7 @@ extension Snapshotting where Value: SwiftUI.View, Format == UIImage {
       config = .init(safeArea: .zero, size: size, traits: traits)
     }
 
-    return SimplySnapshotting.image(precision: precision, scale: traits.displayScale).asyncPullback { view in
+    return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale).asyncPullback { view in
       var config = config
 
       let controller: UIViewController
diff --git a/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift b/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift
index b9d2eb8a..d0ce9fd1 100644
--- a/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift
+++ b/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift
@@ -11,9 +11,10 @@ extension Snapshotting where Value == UIBezierPath, Format == UIImage {
   ///
   /// - Parameters:
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - scale: The scale to use when loading the reference image from disk.
-  public static func image(precision: Float = 1, scale: CGFloat = 1) -> Snapshotting {
-    return SimplySnapshotting.image(precision: precision, scale: scale).pullback { path in
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1) -> Snapshotting {
+    return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: scale).pullback { path in
       let bounds = path.bounds
       let format: UIGraphicsImageRendererFormat
       if #available(iOS 11.0, tvOS 11.0, *) {
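
UIImage.swift below carries the fullest version of the new `compare`: when `perceptualPrecision < 1` and the OS is recent enough it runs the Core Image pipeline, and otherwise it falls back to counting differing bytes, bailing out as soon as the budget is spent. The fallback logic, extracted here as a self-contained sketch (not code from the patch itself):

    func bytesMatch(_ old: [UInt8], _ new: [UInt8], precision: Float) -> Bool {
      precondition(old.count == new.count, "buffers must describe same-sized images")
      let byteCount = old.count
      // Number of bytes allowed to differ before the comparison fails.
      let byteCountThreshold = Int((1 - precision) * Float(byteCount))
      var differentByteCount = 0
      for offset in 0..<byteCount where old[offset] != new[offset] {
        differentByteCount += 1
        if differentByteCount > byteCountThreshold { return false }
      }
      return true
    }
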
diff --git a/Sources/SnapshotTesting/Snapshotting/UIImage.swift b/Sources/SnapshotTesting/Snapshotting/UIImage.swift
index 4b7c51dc..5e7f96b7 100644
--- a/Sources/SnapshotTesting/Snapshotting/UIImage.swift
+++ b/Sources/SnapshotTesting/Snapshotting/UIImage.swift
@@ -1,17 +1,20 @@
 #if os(iOS) || os(tvOS)
+import CoreImage.CIFilterBuiltins
 import UIKit
 import XCTest
 
 extension Diffing where Value == UIImage {
   /// A pixel-diffing strategy for UIImage's which requires a 100% match.
-  public static let image = Diffing.image(precision: 1, scale: nil)
+  public static let image = Diffing.image()
 
   /// A pixel-diffing strategy for UIImage that allows customizing how precise the matching must be.
   ///
-  /// - Parameter precision: A value between 0 and 1, where 1 means the images must match 100% of their pixels.
-  /// - Parameter scale: Scale to use when loading the reference image from disk. If `nil` or the `UITraitCollection`s default value of `0.0`, the screens scale is used.
+  /// - Parameters:
+  ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
+  ///   - scale: Scale to use when loading the reference image from disk. If `nil` or the `UITraitCollection`s default value of `0.0`, the screens scale is used.
   /// - Returns: A new diffing strategy.
-  public static func image(precision: Float, scale: CGFloat?) -> Diffing {
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil) -> Diffing {
     let imageScale: CGFloat
     if let scale = scale, scale != 0.0 {
       imageScale = scale
@@ -23,7 +26,7 @@ extension Diffing where Value == UIImage {
       toData: { $0.pngData() ?? emptyImage().pngData()! },
       fromData: { UIImage(data: $0, scale: imageScale)! }
     ) { old, new in
-      guard !compare(old, new, precision: precision) else { return nil }
+      guard !compare(old, new, precision: precision, perceptualPrecision: perceptualPrecision) else { return nil }
       let difference = SnapshotTesting.diff(old, new)
       let message = new.size == old.size
         ? "Newly-taken snapshot does not match reference."
@@ -56,37 +59,38 @@ extension Diffing where Value == UIImage {
 extension Snapshotting where Value == UIImage, Format == UIImage {
   /// A snapshot strategy for comparing images based on pixel equality.
   public static var image: Snapshotting {
-    return .image(precision: 1, scale: nil)
+    return .image()
   }
 
   /// A snapshot strategy for comparing images based on pixel equality.
   ///
-  /// - Parameter precision: The percentage of pixels that must match.
-  /// - Parameter scale: The scale of the reference image stored on disk.
-  public static func image(precision: Float, scale: CGFloat?) -> Snapshotting {
+  /// - Parameters:
+  ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
+  ///   - scale: The scale of the reference image stored on disk.
+  public static func image(precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil) -> Snapshotting {
     return .init(
       pathExtension: "png",
-      diffing: .image(precision: precision, scale: scale)
+      diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision, scale: scale)
     )
   }
 }
 
 // remap snapshot & reference to same colorspace
-let imageContextColorSpace = CGColorSpace(name: CGColorSpace.sRGB)
-let imageContextBitsPerComponent = 8
-let imageContextBytesPerPixel = 4
+private let imageContextColorSpace = CGColorSpace(name: CGColorSpace.sRGB)
+private let imageContextBitsPerComponent = 8
+private let imageContextBytesPerPixel = 4
 
-private func compare(_ old: UIImage, _ new: UIImage, precision: Float) -> Bool {
+private func compare(_ old: UIImage, _ new: UIImage, precision: Float, perceptualPrecision: Float) -> Bool {
   guard let oldCgImage = old.cgImage else { return false }
   guard let newCgImage = new.cgImage else { return false }
-  guard oldCgImage.width != 0 else { return false }
   guard newCgImage.width != 0 else { return false }
   guard oldCgImage.width == newCgImage.width else { return false }
-  guard oldCgImage.height != 0 else { return false }
   guard newCgImage.height != 0 else { return false }
   guard oldCgImage.height == newCgImage.height else { return false }
-  let byteCount = imageContextBytesPerPixel * oldCgImage.width * oldCgImage.height
+  let pixelCount = oldCgImage.width * oldCgImage.height
+  let byteCount = imageContextBytesPerPixel * pixelCount
   var oldBytes = [UInt8](repeating: 0, count: byteCount)
   guard let oldContext = context(for: oldCgImage, data: &oldBytes) else { return false }
   guard let oldData = oldContext.data else { return false }
@@ -99,12 +103,52 @@ private func compare(_ old: UIImage, _ new: UIImage, precision: Float) -> Bool {
   guard let newerContext = context(for: newerCgImage, data: &newerBytes) else { return false }
   guard let newerData = newerContext.data else { return false }
   if memcmp(oldData, newerData, byteCount) == 0 { return true }
-  if precision >= 1 { return false }
-  var differentPixelCount = 0
-  let threshold = 1 - precision
-  for byte in 0..<byteCount {
-    if oldBytes[byte] != newerBytes[byte] { differentPixelCount += 1 }
-    if Float(differentPixelCount) / Float(byteCount) > threshold { return false}
+  if precision >= 1, perceptualPrecision >= 1 { return false }
+  if perceptualPrecision < 1, #available(iOS 11.0, tvOS 11.0, *) {
+    let deltaFilter = CIFilter(
+      name: "CILabDeltaE",
+      parameters: [
+        kCIInputImageKey: CIImage(cgImage: newCgImage),
+        "inputImage2": CIImage(cgImage: oldCgImage)
+      ]
+    )
+    guard let deltaOutputImage = deltaFilter?.outputImage else { return false }
+    let extent = CGRect(x: 0, y: 0, width: oldCgImage.width, height: oldCgImage.height)
+    guard
+      let thresholdOutputImage = try? ThresholdImageProcessorKernel.apply(
+        withExtent: extent,
+        inputs: [deltaOutputImage],
+        arguments: [ThresholdImageProcessorKernel.inputThresholdKey: (1 - perceptualPrecision) * 100]
+      )
+    else { return false }
+    let averageFilter = CIFilter(
+      name: "CIAreaAverage",
+      parameters: [
+        kCIInputImageKey: thresholdOutputImage,
+        kCIInputExtentKey: extent
+      ]
+    )
+    guard let averageOutputImage = averageFilter?.outputImage else { return false }
+    var averagePixel: Float = 0
+    CIContext(options: [.workingColorSpace: NSNull(), .outputColorSpace: NSNull()]).render(
+      averageOutputImage,
+      toBitmap: &averagePixel,
+      rowBytes: MemoryLayout<Float>.size,
+      bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
+      format: .Rf,
+      colorSpace: nil
+    )
+    let pixelCountThreshold = 1 - precision
+    if averagePixel > pixelCountThreshold { return false }
+  } else {
+    let byteCountThreshold = Int((1 - precision) * Float(byteCount))
+    var differentByteCount = 0
+    for offset in 0..<byteCount {
+      if oldBytes[offset] != newerBytes[offset] {
+        differentByteCount += 1
+        if differentByteCount > byteCountThreshold { return false }
+      }
+    }
   }
   return true
 }
@@ -140,3 +184,40 @@ private func diff(_ old: UIImage, _ new: UIImage) -> UIImage {
   return differenceImage
 }
 #endif
+
+#if os(iOS) || os(tvOS) || os(macOS)
+import CoreImage.CIKernel
+import MetalPerformanceShaders
+
+// Copied from https://developer.apple.com/documentation/coreimage/ciimageprocessorkernel
+@available(iOS 10.0, tvOS 10.0, macOS 10.13, *)
+final class ThresholdImageProcessorKernel: CIImageProcessorKernel {
+  static let inputThresholdKey = "thresholdValue"
+  static let device = MTLCreateSystemDefaultDevice()
+
+  override class func process(with inputs: [CIImageProcessorInput]?, arguments: [String: Any]?, output: CIImageProcessorOutput) throws {
+    guard
+      let device = device,
+      let commandBuffer = output.metalCommandBuffer,
+      let input = inputs?.first,
+      let sourceTexture = input.metalTexture,
+      let destinationTexture = output.metalTexture,
+      let thresholdValue = arguments?[inputThresholdKey] as? Float else {
+      return
+    }
+
+    let threshold = MPSImageThresholdBinary(
+      device: device,
+      thresholdValue: thresholdValue,
+      maximumValue: 1.0,
+      linearGrayColorTransform: nil
+    )
+
+    threshold.encode(
+      commandBuffer: commandBuffer,
+      sourceTexture: sourceTexture,
+      destinationTexture: destinationTexture
+    )
+  }
+}
+#endif
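
The `MPSImageThresholdBinary` kernel is what makes the area average meaningful: every Delta E value above the threshold becomes 1 and everything else becomes 0, so the single `.Rf` pixel rendered from `CIAreaAverage` is exactly the fraction of pixels that failed the perceptual test. With illustrative numbers (not from the patch):

    // A 100×100 image in which 42 pixels exceed the Delta E cutoff:
    let averagePixel: Float = 42 / (100 * 100)        // 0.0042, as CIAreaAverage reports
    let pixelCountThreshold: Float = 1 - 0.995        // 0.005 failure budget
    let passes = averagePixel <= pixelCountThreshold  // true: within budget
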
diff --git a/Sources/SnapshotTesting/Snapshotting/UIView.swift b/Sources/SnapshotTesting/Snapshotting/UIView.swift
index fe1e81a5..d2ed6e0f 100644
--- a/Sources/SnapshotTesting/Snapshotting/UIView.swift
+++ b/Sources/SnapshotTesting/Snapshotting/UIView.swift
@@ -12,17 +12,19 @@ extension Snapshotting where Value == UIView, Format == UIImage {
   /// - Parameters:
   ///   - drawHierarchyInKeyWindow: Utilize the simulator's key window in order to render `UIAppearance` and `UIVisualEffect`s. This option requires a host application for your tests and will _not_ work for framework test targets.
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: A view size override.
   ///   - traits: A trait collection override.
   public static func image(
     drawHierarchyInKeyWindow: Bool = false,
     precision: Float = 1,
+    perceptualPrecision: Float = 1,
     size: CGSize? = nil,
     traits: UITraitCollection = .init()
   )
     -> Snapshotting {
 
-      return SimplySnapshotting.image(precision: precision, scale: traits.displayScale).asyncPullback { view in
+      return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale).asyncPullback { view in
         snapshotView(
           config: .init(safeArea: .zero, size: size ?? view.frame.size, traits: .init()),
           drawHierarchyInKeyWindow: drawHierarchyInKeyWindow,
diff --git a/Sources/SnapshotTesting/Snapshotting/UIViewController.swift b/Sources/SnapshotTesting/Snapshotting/UIViewController.swift
index 736bdcb2..833ec5e0 100644
--- a/Sources/SnapshotTesting/Snapshotting/UIViewController.swift
+++ b/Sources/SnapshotTesting/Snapshotting/UIViewController.swift
@@ -12,17 +12,19 @@ extension Snapshotting where Value == UIViewController, Format == UIImage {
   /// - Parameters:
   ///   - config: A set of device configuration settings.
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: A view size override.
   ///   - traits: A trait collection override.
   public static func image(
     on config: ViewImageConfig,
     precision: Float = 1,
+    perceptualPrecision: Float = 1,
     size: CGSize? = nil,
     traits: UITraitCollection = .init()
   )
     -> Snapshotting {
 
-      return SimplySnapshotting.image(precision: precision, scale: traits.displayScale).asyncPullback { viewController in
+      return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale).asyncPullback { viewController in
         snapshotView(
           config: size.map { .init(safeArea: config.safeArea, size: $0, traits: config.traits) } ?? config,
           drawHierarchyInKeyWindow: false,
@@ -38,17 +40,19 @@ extension Snapshotting where Value == UIViewController, Format == UIImage {
   /// - Parameters:
   ///   - drawHierarchyInKeyWindow: Utilize the simulator's key window in order to render `UIAppearance` and `UIVisualEffect`s. This option requires a host application for your tests and will _not_ work for framework test targets.
   ///   - precision: The percentage of pixels that must match.
+  ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a match. [98-99% mimics the precision of the human eye.](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e)
   ///   - size: A view size override.
   ///   - traits: A trait collection override.
   public static func image(
     drawHierarchyInKeyWindow: Bool = false,
     precision: Float = 1,
+    perceptualPrecision: Float = 1,
     size: CGSize? = nil,
     traits: UITraitCollection = .init()
   )
     -> Snapshotting {
 
-      return SimplySnapshotting.image(precision: precision, scale: traits.displayScale).asyncPullback { viewController in
+      return SimplySnapshotting.image(precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale).asyncPullback { viewController in
         snapshotView(
           config: .init(safeArea: .zero, size: size, traits: traits),
           drawHierarchyInKeyWindow: drawHierarchyInKeyWindow,
diff --git a/Tests/SnapshotTestingTests/SnapshotTestingTests.swift b/Tests/SnapshotTestingTests/SnapshotTestingTests.swift
index a5fe97f1..93ce66a5 100644
--- a/Tests/SnapshotTestingTests/SnapshotTestingTests.swift
+++ b/Tests/SnapshotTestingTests/SnapshotTestingTests.swift
@@ -43,7 +43,7 @@ final class SnapshotTestingTests: XCTestCase {
     """)
   }
 
-  @available(macOS 10.13, *)
+  @available(macOS 10.13, tvOS 11.0, *)
   func testAnyAsJson() throws {
     struct User: Encodable { let id: Int, name: String, bio: String }
     let user = User(id: 1, name: "Blobby", bio: "Blobbed around the world.")
@@ -289,6 +289,24 @@ final class SnapshotTestingTests: XCTestCase {
     #endif
   }
 
+  func testImagePrecision() throws {
+    #if os(iOS) || os(tvOS) || os(macOS)
+    let imageURL = URL(fileURLWithPath: String(#file), isDirectory: false)
+      .deletingLastPathComponent()
+      .appendingPathComponent("__Fixtures__/testImagePrecision.reference.png")
+    #if os(iOS) || os(tvOS)
+    let image = try XCTUnwrap(UIImage(contentsOfFile: imageURL.path))
+    #elseif os(macOS)
+    let image = try XCTUnwrap(NSImage(byReferencing: imageURL))
+    #endif
+
+    assertSnapshot(matching: image, as: .image(precision: 0.995), named: "exact")
+    if #available(iOS 11.0, tvOS 11.0, macOS 10.13, *) {
+      assertSnapshot(matching: image, as: .image(perceptualPrecision: 0.98), named: "perceptual")
+    }
+    #endif
+  }
+
   func testSCNView() {
 //    #if os(iOS) || os(macOS) || os(tvOS)
 //    // NB: CircleCI crashes while trying to instantiate SCNView.
diff --git a/Tests/SnapshotTestingTests/__Fixtures__/testImagePrecision.reference.png b/Tests/SnapshotTestingTests/__Fixtures__/testImagePrecision.reference.png
new file mode 100644
index 00000000..7f3ac46b
Binary files /dev/null and b/Tests/SnapshotTestingTests/__Fixtures__/testImagePrecision.reference.png differ
diff --git a/Tests/SnapshotTestingTests/__Snapshots__/SnapshotTestingTests/testImagePrecision.exact.png b/Tests/SnapshotTestingTests/__Snapshots__/SnapshotTestingTests/testImagePrecision.exact.png
new file mode 100644
index 00000000..a62a125a
Binary files /dev/null and b/Tests/SnapshotTestingTests/__Snapshots__/SnapshotTestingTests/testImagePrecision.exact.png differ
diff --git a/Tests/SnapshotTestingTests/__Snapshots__/SnapshotTestingTests/testImagePrecision.perceptual.png b/Tests/SnapshotTestingTests/__Snapshots__/SnapshotTestingTests/testImagePrecision.perceptual.png
new file mode 100644
index 00000000..fb72d3c9
Binary files /dev/null and b/Tests/SnapshotTestingTests/__Snapshots__/SnapshotTestingTests/testImagePrecision.perceptual.png differ
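
Because every new parameter defaults to `1`, the change is source-compatible: `.image` and `.image(precision: 1, perceptualPrecision: 1)` name the same strategy, and existing snapshot suites keep their exact-match behavior until a test opts in. Illustrative call sites:

    // Historical behavior: every byte must match.
    assertSnapshot(matching: view, as: .image)

    // Opt in per test: tolerate imperceptible rendering drift across GPUs and OS versions.
    assertSnapshot(matching: view, as: .image(perceptualPrecision: 0.98))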