From a49e2952daab434b2a46d4097401648cc71154b6 Mon Sep 17 00:00:00 2001
From: Ralf Ebert
Date: Tue, 4 Feb 2025 12:25:04 +0100
Subject: [PATCH] only support image comparison based on memcmp on the cgImage
 data

---
 .../Snapshotting/CALayer.swift                |  4 +-
 .../SnapshotTesting/Snapshotting/CGPath.swift |  4 +-
 .../Snapshotting/SceneKit.swift               | 58 -------------
 .../Snapshotting/SpriteKit.swift              | 58 -------------
 .../Snapshotting/SwiftUIView.swift            |  4 +-
 .../Snapshotting/UIBezierPath.swift           |  2 +-
 .../Snapshotting/UIImage.swift                | 81 ++++---------------
 .../SnapshotTesting/Snapshotting/UIView.swift |  4 +-
 .../Snapshotting/UIViewController.swift       |  8 +-
 9 files changed, 25 insertions(+), 198 deletions(-)
 delete mode 100644 Sources/SnapshotTesting/Snapshotting/SceneKit.swift
 delete mode 100644 Sources/SnapshotTesting/Snapshotting/SpriteKit.swift

diff --git a/Sources/SnapshotTesting/Snapshotting/CALayer.swift b/Sources/SnapshotTesting/Snapshotting/CALayer.swift
index 74c512c12..dbab2b0a8 100644
--- a/Sources/SnapshotTesting/Snapshotting/CALayer.swift
+++ b/Sources/SnapshotTesting/Snapshotting/CALayer.swift
@@ -59,12 +59,12 @@
     ///     human eye.
     ///   - traits: A trait collection override.
     public static func image(
-      precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init()
+      traits: UITraitCollection = .init()
     ) -> Snapshotting {
       return SimplySnapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale
+        scale: traits.displayScale
       ).pullback { layer in
         renderer(bounds: layer.bounds, for: traits).image { ctx in
           layer.setNeedsLayout()
diff --git a/Sources/SnapshotTesting/Snapshotting/CGPath.swift b/Sources/SnapshotTesting/Snapshotting/CGPath.swift
index 65470605c..435a0ea9f 100644
--- a/Sources/SnapshotTesting/Snapshotting/CGPath.swift
+++ b/Sources/SnapshotTesting/Snapshotting/CGPath.swift
@@ -67,11 +67,11 @@
     ///     [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
     ///     human eye.
     public static func image(
-      precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1,
+      scale: CGFloat = 1,
       drawingMode: CGPathDrawingMode = .eoFill
     ) -> Snapshotting {
       return SimplySnapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision, scale: scale
+        scale: scale
       ).pullback { path in
         let bounds = path.boundingBoxOfPath
         let format: UIGraphicsImageRendererFormat
diff --git a/Sources/SnapshotTesting/Snapshotting/SceneKit.swift b/Sources/SnapshotTesting/Snapshotting/SceneKit.swift
deleted file mode 100644
index 94ff90459..000000000
--- a/Sources/SnapshotTesting/Snapshotting/SceneKit.swift
+++ /dev/null
@@ -1,58 +0,0 @@
-#if os(iOS) || os(macOS) || os(tvOS)
-  import SceneKit
-  #if os(macOS)
-    import Cocoa
-  #elseif os(iOS) || os(tvOS)
-    import UIKit
-  #endif
-
-  #if os(macOS)
-    extension Snapshotting where Value == SCNScene, Format == NSImage {
-      /// A snapshot strategy for comparing SceneKit scenes based on pixel equality.
-      ///
-      /// - Parameters:
-      ///   - precision: The percentage of pixels that must match.
-      ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a
-      ///     match. 98-99% mimics
-      ///     [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
-      ///     human eye.
-      ///   - size: The size of the scene.
-      public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize)
-        -> Snapshotting
-      {
-        return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
-      }
-    }
-  #elseif os(iOS) || os(tvOS)
-    extension Snapshotting where Value == SCNScene, Format == UIImage {
-      /// A snapshot strategy for comparing SceneKit scenes based on pixel equality.
-      ///
-      /// - Parameters:
-      ///   - precision: The percentage of pixels that must match.
-      ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a
-      ///     match. 98-99% mimics
-      ///     [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
-      ///     human eye.
-      ///   - size: The size of the scene.
-      public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize)
-        -> Snapshotting
-      {
-        return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
-      }
-    }
-  #endif
-
-  extension Snapshotting where Value == SCNScene, Format == Image {
-    fileprivate static func scnScene(precision: Float, perceptualPrecision: Float, size: CGSize)
-      -> Snapshotting
-    {
-      return Snapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision
-      ).pullback { scene in
-        let view = SCNView(frame: .init(x: 0, y: 0, width: size.width, height: size.height))
-        view.scene = scene
-        return view
-      }
-    }
-  }
-#endif
diff --git a/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift b/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift
deleted file mode 100644
index ad515050a..000000000
--- a/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift
+++ /dev/null
@@ -1,58 +0,0 @@
-#if os(iOS) || os(macOS) || os(tvOS)
-  import SpriteKit
-  #if os(macOS)
-    import Cocoa
-  #elseif os(iOS) || os(tvOS)
-    import UIKit
-  #endif
-
-  #if os(macOS)
-    extension Snapshotting where Value == SKScene, Format == NSImage {
-      /// A snapshot strategy for comparing SpriteKit scenes based on pixel equality.
-      ///
-      /// - Parameters:
-      ///   - precision: The percentage of pixels that must match.
-      ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a
-      ///     match. 98-99% mimics
-      ///     [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
-      ///     human eye.
-      ///   - size: The size of the scene.
-      public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize)
-        -> Snapshotting
-      {
-        return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
-      }
-    }
-  #elseif os(iOS) || os(tvOS)
-    extension Snapshotting where Value == SKScene, Format == UIImage {
-      /// A snapshot strategy for comparing SpriteKit scenes based on pixel equality.
-      ///
-      /// - Parameters:
-      ///   - precision: The percentage of pixels that must match.
-      ///   - perceptualPrecision: The percentage a pixel must match the source pixel to be considered a
-      ///     match. 98-99% mimics
-      ///     [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
-      ///     human eye.
-      ///   - size: The size of the scene.
-      public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize)
-        -> Snapshotting
-      {
-        return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size)
-      }
-    }
-  #endif
-
-  extension Snapshotting where Value == SKScene, Format == Image {
-    fileprivate static func skScene(precision: Float, perceptualPrecision: Float, size: CGSize)
-      -> Snapshotting
-    {
-      return Snapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision
-      ).pullback { scene in
-        let view = SKView(frame: .init(x: 0, y: 0, width: size.width, height: size.height))
-        view.presentScene(scene)
-        return view
-      }
-    }
-  }
-#endif
diff --git a/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift b/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift
index 8d85e1f0b..0af029749 100644
--- a/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift
+++ b/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift
@@ -38,8 +38,6 @@
     ///   - traits: A trait collection override.
     public static func image(
       drawHierarchyInKeyWindow: Bool = false,
-      precision: Float = 1,
-      perceptualPrecision: Float = 1,
       layout: SwiftUISnapshotLayout = .sizeThatFits,
       traits: UITraitCollection = .init()
     )
@@ -60,7 +58,7 @@
       }

       return SimplySnapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale
+        scale: traits.displayScale
       ).asyncPullback { view in
         var config = config
diff --git a/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift b/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift
index 6b48d622d..eb0f5357b 100644
--- a/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift
+++ b/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift
@@ -20,7 +20,7 @@
       precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1
     ) -> Snapshotting {
       return SimplySnapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision, scale: scale
+        scale: scale
       ).pullback { path in
         let bounds = path.bounds
         let format: UIGraphicsImageRendererFormat
diff --git a/Sources/SnapshotTesting/Snapshotting/UIImage.swift b/Sources/SnapshotTesting/Snapshotting/UIImage.swift
index 8e99a08c0..788cba32a 100644
--- a/Sources/SnapshotTesting/Snapshotting/UIImage.swift
+++ b/Sources/SnapshotTesting/Snapshotting/UIImage.swift
@@ -18,7 +18,7 @@
     ///     `UITraitCollection`s default value of `0.0`, the screens scale is used.
     /// - Returns: A new diffing strategy.
     public static func image(
-      precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil
+      precision: Float = 1, scale: CGFloat? = nil
     ) -> Diffing {
       let imageScale: CGFloat
       if let scale = scale, scale != 0.0 {
@@ -33,23 +33,9 @@
       ) { old, new in
         guard
           let message = compare(
-            old, new, precision: precision, perceptualPrecision: perceptualPrecision)
+            old, new)
         else { return nil }
-        if isSwiftTesting {
-          return (message, [])
-        }
-        let difference = SnapshotTesting.diff(old, new)
-        let oldAttachment = XCTAttachment(image: old)
-        oldAttachment.name = "reference"
-        let isEmptyImage = new.size == .zero
-        let newAttachment = XCTAttachment(image: isEmptyImage ? emptyImage() : new)
-        newAttachment.name = "failure"
-        let differenceAttachment = XCTAttachment(image: difference)
-        differenceAttachment.name = "difference"
-        return (
-          message,
-          [oldAttachment, newAttachment, differenceAttachment]
-        )
+        return (message, [])
       }
     }
@@ -80,13 +66,11 @@
     ///     [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the
     ///     human eye.
     ///   - scale: The scale of the reference image stored on disk.
-    public static func image(
-      precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil
+    public static func image(scale: CGFloat? = nil
     ) -> Snapshotting {
       return .init(
         pathExtension: "png",
-        diffing: .image(
-          precision: precision, perceptualPrecision: perceptualPrecision, scale: scale)
+        diffing: .image(scale: scale)
       )
     }
   }
@@ -96,7 +80,7 @@
   private let imageContextBitsPerComponent = 8
   private let imageContextBytesPerPixel = 4

-  private func compare(_ old: UIImage, _ new: UIImage, precision: Float, perceptualPrecision: Float)
+  private func compare(_ old: UIImage, _ new: UIImage)
     -> String?
   {
     guard let oldCgImage = old.cgImage else {
@@ -130,36 +114,9 @@
       return "Newly-taken snapshot's data could not be loaded."
     }
     if memcmp(oldData, newerData, byteCount) == 0 { return nil }
-    if precision >= 1, perceptualPrecision >= 1 {
+    else {
       return "Newly-taken snapshot does not match reference."
     }
-    if perceptualPrecision < 1, #available(iOS 11.0, tvOS 11.0, *) {
-      return perceptuallyCompare(
-        CIImage(cgImage: oldCgImage),
-        CIImage(cgImage: newCgImage),
-        pixelPrecision: precision,
-        perceptualPrecision: perceptualPrecision
-      )
-    } else {
-      let byteCountThreshold = Int((1 - precision) * Float(byteCount))
-      var differentByteCount = 0
-      // NB: We are purposely using a verbose 'while' loop instead of a 'for in' loop. When the
-      // compiler doesn't have optimizations enabled, like in test targets, a `while` loop is
-      // significantly faster than a `for` loop for iterating through the elements of a memory
-      // buffer. Details can be found in [SR-6983](https://github.com/apple/swift/issues/49531)
-      var index = 0
-      while index < byteCount {
-        defer { index += 1 }
-        if oldBytes[index] != newerBytes[index] {
-          differentByteCount += 1
-        }
-      }
-      if differentByteCount > byteCountThreshold {
-        let actualPrecision = 1 - Float(differentByteCount) / Float(byteCount)
-        return "Actual image precision \(actualPrecision) is less than required \(precision)"
-      }
-    }
-    return nil
   }

   private func context(for cgImage: CGImage, data: UnsafeMutableRawPointer? = nil) -> CGContext? {
@@ -262,9 +219,7 @@
         }
       }
     }
-    let failingPixelPercent =
-      Float(failingPixelCount)
-      / Float(deltaOutputImage.extent.width * deltaOutputImage.extent.height)
+    let failingPixelPercent = Float(failingPixelCount) / Float(deltaOutputImage.extent.width * deltaOutputImage.extent.height)
     actualPixelPrecision = 1 - failingPixelPercent
   }
@@ -273,9 +228,9 @@
   // DeltaE is in a 0-100 scale, so we need to divide by 100 to transform it to a percentage.
   let minimumPerceptualPrecision = 1 - min(maximumDeltaE / 100, 1)
   return """
-      The percentage of pixels that match \(actualPixelPrecision) is less than required \(pixelPrecision)
-      The lowest perceptual color precision \(minimumPerceptualPrecision) is less than required \(perceptualPrecision)
-      """
+    The percentage of pixels that match \(actualPixelPrecision) is less than required \(pixelPrecision)
+    The lowest perceptual color precision \(minimumPerceptualPrecision) is less than required \(perceptualPrecision)
+    """
 }

 extension CIImage {
@@ -300,18 +255,15 @@
   }

   func renderSingleValue(in context: CIContext) -> Float? {
-      guard let buffer = render(in: context) else { return nil }
-      defer { buffer.free() }
-      return buffer.data.load(fromByteOffset: 0, as: Float.self)
+    guard let buffer = render(in: context) else { return nil }
+    defer { buffer.free() }
+    return buffer.data.load(fromByteOffset: 0, as: Float.self)
   }

   func render(in context: CIContext, format: CIFormat = CIFormat.Rh) -> vImage_Buffer? {
     // Some hardware configurations (virtualized CPU renderers) do not support 32-bit float output formats,
     // so use a compatible 16-bit float format and convert the output value to 32-bit floats.
-    guard
-      var buffer16 = try? vImage_Buffer(
-        width: Int(extent.width), height: Int(extent.height), bitsPerPixel: 16)
-    else { return nil }
+    guard var buffer16 = try? vImage_Buffer(width: Int(extent.width), height: Int(extent.height), bitsPerPixel: 16) else { return nil }
     defer { buffer16.free() }
     context.render(
       self,
@@ -322,8 +274,7 @@
       colorSpace: nil
     )
     guard
-      var buffer32 = try? vImage_Buffer(
-        width: Int(buffer16.width), height: Int(buffer16.height), bitsPerPixel: 32),
+      var buffer32 = try? vImage_Buffer(width: Int(buffer16.width), height: Int(buffer16.height), bitsPerPixel: 32),
       vImageConvert_Planar16FtoPlanarF(&buffer16, &buffer32, 0) == kvImageNoError
     else { return nil }
     return buffer32
diff --git a/Sources/SnapshotTesting/Snapshotting/UIView.swift b/Sources/SnapshotTesting/Snapshotting/UIView.swift
index 7244f67d1..b72ae7cf7 100644
--- a/Sources/SnapshotTesting/Snapshotting/UIView.swift
+++ b/Sources/SnapshotTesting/Snapshotting/UIView.swift
@@ -22,8 +22,6 @@
     ///   - traits: A trait collection override.
     public static func image(
       drawHierarchyInKeyWindow: Bool = false,
-      precision: Float = 1,
-      perceptualPrecision: Float = 1,
       size: CGSize? = nil,
       traits: UITraitCollection = .init()
     )
@@ -31,7 +29,7 @@
     {
       return SimplySnapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale
+        scale: traits.displayScale
       ).asyncPullback { view in
         snapshotView(
           config: .init(safeArea: .zero, size: size ?? view.frame.size, traits: .init()),
diff --git a/Sources/SnapshotTesting/Snapshotting/UIViewController.swift b/Sources/SnapshotTesting/Snapshotting/UIViewController.swift
index b08b8bf59..ec98ab431 100644
--- a/Sources/SnapshotTesting/Snapshotting/UIViewController.swift
+++ b/Sources/SnapshotTesting/Snapshotting/UIViewController.swift
@@ -20,8 +20,6 @@
     ///   - traits: A trait collection override.
     public static func image(
       on config: ViewImageConfig,
-      precision: Float = 1,
-      perceptualPrecision: Float = 1,
       size: CGSize? = nil,
       traits: UITraitCollection = .init()
     )
@@ -29,7 +27,7 @@
     {
       return SimplySnapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale
+        scale: traits.displayScale
       ).asyncPullback { viewController in
         snapshotView(
           config: size.map { .init(safeArea: config.safeArea, size: $0, traits: config.traits) }
@@ -57,8 +55,6 @@
     ///   - traits: A trait collection override.
     public static func image(
       drawHierarchyInKeyWindow: Bool = false,
-      precision: Float = 1,
-      perceptualPrecision: Float = 1,
       size: CGSize? = nil,
       traits: UITraitCollection = .init()
     )
@@ -66,7 +62,7 @@
     {
       return SimplySnapshotting.image(
-        precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale
+        scale: traits.displayScale
      ).asyncPullback { viewController in
         snapshotView(
           config: .init(safeArea: .zero, size: size, traits: traits),
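
Usage note (illustrative, not part of the diff): with precision and perceptualPrecision
removed, a snapshot either matches its reference byte-for-byte (memcmp over the rendered
cgImage data) or the test fails, and call sites simply drop those arguments. A minimal
sketch, assuming the library's existing assertSnapshot(of:as:) entry point and a
hypothetical HomeViewController under test:

    import SnapshotTesting
    import XCTest

    final class HomeViewControllerTests: XCTestCase {
      func testHomeScreen() {
        let vc = HomeViewController()  // hypothetical subject under test

        // Before this patch, near-match comparisons could be tuned:
        //   assertSnapshot(of: vc, as: .image(on: .iPhoneX, precision: 0.98, perceptualPrecision: 0.98))

        // After this patch, the strategy takes no precision arguments; any
        // pixel difference in the rendered image fails the assertion:
        assertSnapshot(of: vc, as: .image(on: .iPhoneX))
      }
    }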