-
Notifications
You must be signed in to change notification settings - Fork 543
VideoToolbox macOS xcode26.0 b1
Alex Soto edited this page Jun 9, 2025
·
1 revision
#VideoToolbox.framework
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTBase.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTBase.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTBase.h 2025-04-19 02:06:26
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTBase.h 2025-05-29 01:30:17
@@ -10,6 +10,7 @@
#ifndef VTBASE_H
#define VTBASE_H
+#include <TargetConditionals.h>
#include <Availability.h>
#include <stdint.h>
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTCompressionProperties.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTCompressionProperties.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTCompressionProperties.h 2025-04-19 02:36:24
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTCompressionProperties.h 2025-05-30 23:54:24
@@ -3,7 +3,7 @@
Framework: VideoToolbox
- Copyright 2007-2022 Apple Inc. All rights reserved.
+ Copyright 2007-2025 Apple Inc. All rights reserved.
Standard Video Toolbox compression properties.
*/
@@ -68,11 +68,12 @@
kVTCompressionPropertyKey_MaxKeyFrameInterval, and kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration
may be used to configure frame dependencies in the video stream.
- kVTCompressionPropertyKey_AverageBitRate, kVTCompressionPropertyKey_DataRateLimits, kVTCompressionPropertyKey_ConstantBitRate may be used to configure the video data rate.
+ kVTCompressionPropertyKey_AverageBitRate, kVTCompressionPropertyKey_DataRateLimits, kVTCompressionPropertyKey_ConstantBitRate, and kVTCompressionPropertyKey_VariableBitRate may be used to configure the video data rate.
• kVTCompressionPropertyKey_AverageBitRate specifies the desired long term average bit rate. It is a soft limit, so the encoder may overshoot or undershoot and the average bit rate of the output video may be over or under the target.
• kVTCompressionPropertyKey_DataRateLimits specifies a hard data rate cap for a given time window. The encoder will not overshoot. kVTCompressionPropertyKey_AverageBitRate and kVTCompressionPropertyKey_DataRateLimits may be used together to specify an overall target bit rate while also establishing hard limits over a smaller window.
• kVTCompressionPropertyKey_ConstantBitRate is intended for legacy content distribution networks which require constant bitrate, and is not intended for general streaming scenarios.
+ • kVTCompressionPropertyKey_VariableBitRate specifies the desired variable bitrate target. Encoder automatically allocates higher bitrate for complex segments of the video. It can be used along with kVTCompressionPropertyKey_VBVMaxBitRate to specify the maximum bitrate encoder can use.
*/
#pragma mark Buffers
@@ -330,8 +331,8 @@
The property kVTCompressionPropertyKey_ExpectedFrameRate should be set along with kVTCompressionPropertyKey_ConstantBitRate
to ensure effective CBR rate control.
- This property is not compatible with kVTCompressionPropertyKey_DataRateLimits and
- kVTCompressionPropertyKey_AverageBitRate.
+ This property is not compatible with kVTCompressionPropertyKey_DataRateLimits,
+ kVTCompressionPropertyKey_AverageBitRate, and kVTCompressionPropertyKey_VariableBitRate.
The encoder will pad the frame if they are smaller than they need to be based on the Constant BitRate. This
property is not recommended for general streaming or export scenarios. It is intended for interoperability with
@@ -354,7 +355,68 @@
*/
VT_EXPORT const CFStringRef kVTCompressionPropertyKey_EstimatedAverageBytesPerFrame API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), visionos(1.0)) API_UNAVAILABLE(watchos); // Read Only, CFNumber (bytes per frame)
+/*!
+ @constant kVTCompressionPropertyKey_VariableBitRate
+ @abstract
+ Requires that the encoder use a variable bitrate (VBR) rate control algorithm and specifies the desired variable bitrate in bits per second.
+ @discussion
+ The actual peak bitrate present in the bitstream may be above or below this value based on other parameters such as kVTCompressionPropertyKey_VBVMaxBitRate.
+ This property key needs to be set to achieve Variable Bitrate (VBR) rate control.
+ This property key is not compatible with:
+ 1. kVTCompressionPropertyKey_AverageBitRate,
+ 2. kVTCompressionPropertyKey_ConstantBitRate,
+ 3. kVTCompressionPropertyKey_DataRateLimits,
+ 4. VTVideoEncoderSpecification_EnableLowLatencyRateControl = True.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPropertyKey_VariableBitRate API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFNumber<UInt32>, Optional
+/*!
+ @constant kVTCompressionPropertyKey_VBVMaxBitRate
+ @abstract
+ Defines the maximum bitrate that can enter the video buffering verifier (VBV) model at any time in variable bitrate (VBR) mode.
+ @discussion
+ The value of this property must be greater than zero.
+ This property key is not compatible with:
+ 1. kVTCompressionPropertyKey_AverageBitRate,
+ 2. kVTCompressionPropertyKey_ConstantBitRate,
+ 3. kVTCompressionPropertyKey_DataRateLimits,
+ 4. VTVideoEncoderSpecification_EnableLowLatencyRateControl=True.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPropertyKey_VBVMaxBitRate API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFNumber<UInt32>, Optional
+
+/*!
+ @constant kVTCompressionPropertyKey_VBVBufferDuration
+ @abstract
+ Capacity of the video buffering verifier (VBV) model in seconds.
+ @discussion
+ VBV model allows for larger variations in bitrates while avoiding decoder-side overflows or underflows.
+ A larger VBV model size may improve compression quality, but it requires more memory and may introduce delay.
+ The value of this property must be greater than 0.0.
+ The default value is set as 2.5 seconds.
+ This property key is compatible with constant bitrate (CBR) or variable bitrate (VBR) rate control.
+ This property key is incompatible with:
+ 1. kVTCompressionPropertyKey_AverageBitRate,
+ 2. kVTCompressionPropertyKey_DataRateLimits,
+ 3. VTVideoEncoderSpecification_EnableLowLatencyRateControl=True.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPropertyKey_VBVBufferDuration API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFNumber<Float>, Optional
+
+/*!
+ @constant kVTCompressionPropertyKey_VBVInitialDelayPercentage
+ @abstract
+ Initial delay of the VBV model between storing the picture in the VBV buffer model and decoding of that picture, as a percentage of VBV buffer duration.
+ @discussion
+ This value should be specified as a number in the range of 0 to 100.
+ Larger value increases the delay but results in smoother playback.
+ Default value is 90, meaning 90% of the VBV buffer duration.
+ This property key is incompatible with:
+ 1. kVTCompressionPropertyKey_AverageBitRate,
+ 2. kVTCompressionPropertyKey_DataRateLimits,
+ 3. VTVideoEncoderSpecification_EnableLowLatencyRateControl=True.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPropertyKey_VBVInitialDelayPercentage API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFNumber<Float>, Optional
+
+
#pragma mark Bitstream configuration
/*!
@@ -445,10 +507,16 @@
to insert based on the output color space. e.g. DolbyVision, HDR10, etc.
This property has no effect if the output color space is not HDR, or if
there is currently no underlying support for the HDR format.
+ kVTHDRMetadataInsertionMode_RequestSDRRangePreservation will
+ only insert metadata when the following is true:
+ transfer function is kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ
+ color primaries is kCVImageBufferColorPrimaries_ITU_R_2020
+ color matrix is kCVImageBufferYCbCrMatrix_ITU_R_2020
*/
VT_EXPORT const CFStringRef kVTCompressionPropertyKey_HDRMetadataInsertionMode API_AVAILABLE(macos(11.0), ios(14.0), tvos(14.0), visionos(1.0)) API_UNAVAILABLE(watchos); // Read/write, CFString, Optional, default is kVTHDRMetadataInsertionMode_Auto
VT_EXPORT const CFStringRef kVTHDRMetadataInsertionMode_None API_AVAILABLE(macos(11.0), ios(14.0), tvos(14.0), visionos(1.0)) API_UNAVAILABLE(watchos);
VT_EXPORT const CFStringRef kVTHDRMetadataInsertionMode_Auto API_AVAILABLE(macos(11.0), ios(14.0), tvos(14.0), visionos(1.0)) API_UNAVAILABLE(watchos);
+ VT_EXPORT const CFStringRef kVTHDRMetadataInsertionMode_RequestSDRRangePreservation API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
/*!
@constant kVTCompressionPropertyKey_H264EntropyMode
@@ -1090,7 +1158,7 @@
See also kVTCompressionPropertyKey_RecommendedParallelizationLimit
See also kVTCompressionPropertyKey_RecommendedParallelizedSubdivisionMinimumDuration
*/
-VT_EXPORT const CFStringRef kVTCompressionPropertyKey_RecommendedParallelizedSubdivisionMinimumFrameCount API_AVAILABLE(macos(14.0)) API_UNAVAILABLE(ios, tvos, watchos, visionos); // Read-only, CFNumber<uint64_t>
+VT_EXPORT const CFStringRef kVTCompressionPropertyKey_RecommendedParallelizedSubdivisionMinimumFrameCount API_AVAILABLE(macos(14.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read-only, CFNumber<uint64_t>
/*!
@constant kVTCompressionPropertyKey_RecommendedParallelizedSubdivisionMinimumDuration
@@ -1101,7 +1169,7 @@
See also kVTCompressionPropertyKey_RecommendedParallelizationLimit
See also kVTCompressionPropertyKey_RecommendedParallelizedSubdivisionMinimumFrameCount
*/
-VT_EXPORT const CFStringRef kVTCompressionPropertyKey_RecommendedParallelizedSubdivisionMinimumDuration API_AVAILABLE(macos(14.0)) API_UNAVAILABLE(ios, tvos, watchos, visionos); // Read-only, CMTime as CFDictionary
+VT_EXPORT const CFStringRef kVTCompressionPropertyKey_RecommendedParallelizedSubdivisionMinimumDuration API_AVAILABLE(macos(14.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read-only, CMTime as CFDictionary
/*!
@constant kVTCompressionPropertyKey_PreserveDynamicHDRMetadata
@@ -1270,6 +1338,8 @@
The value will be set on the format description for output samples and may affect the decoded frame presentation.
*/
VT_EXPORT const CFStringRef kVTCompressionPropertyKey_HeroEye API_AVAILABLE(macos(14.0), ios(17.0), visionos(1.0)) API_UNAVAILABLE(tvos, watchos); // CFString, see kCMFormatDescriptionExtension_HeroEye
+VT_EXPORT const CFStringRef kVTHeroEye_Left API_AVAILABLE(macos(26.0), ios(26.0), visionos(26.0)) API_UNAVAILABLE(tvos, watchos);
+VT_EXPORT const CFStringRef kVTHeroEye_Right API_AVAILABLE(macos(26.0), ios(26.0), visionos(26.0)) API_UNAVAILABLE(tvos, watchos);
/*!
@constant kVTCompressionPropertyKey_StereoCameraBaseline
@@ -1326,7 +1396,10 @@
The value will be set on the format description for output samples and may affect the decoded frame presentation.
*/
VT_EXPORT const CFStringRef kVTCompressionPropertyKey_ProjectionKind API_AVAILABLE(macos(15.0), ios(18.0), visionos(2.0)) API_UNAVAILABLE(tvos, watchos); // CFString, see kCMFormatDescriptionExtension_ProjectionKind.
-
+VT_EXPORT const CFStringRef kVTProjectionKind_Rectilinear API_AVAILABLE(macos(26.0), ios(26.0), visionos(26.0)) API_UNAVAILABLE(tvos, watchos);
+VT_EXPORT const CFStringRef kVTProjectionKind_Equirectangular API_AVAILABLE(macos(26.0), ios(26.0), visionos(26.0)) API_UNAVAILABLE(tvos, watchos);
+VT_EXPORT const CFStringRef kVTProjectionKind_HalfEquirectangular API_AVAILABLE(macos(26.0), ios(26.0), visionos(26.0)) API_UNAVAILABLE(tvos, watchos);
+VT_EXPORT const CFStringRef kVTProjectionKind_ParametricImmersive API_AVAILABLE(macos(26.0), ios(26.0), visionos(26.0)) API_UNAVAILABLE(tvos, watchos);
/*!
@constant kVTCompressionPropertyKey_ViewPackingKind
@abstract
@@ -1335,8 +1408,110 @@
The value will be set on the format description for output samples and may affect the decoded frame presentation.
*/
VT_EXPORT const CFStringRef kVTCompressionPropertyKey_ViewPackingKind API_AVAILABLE(macos(15.0), ios(18.0), visionos(2.0)) API_UNAVAILABLE(tvos, watchos); // CFString, see kCMFormatDescriptionExtension_ViewPackingKind.
+VT_EXPORT const CFStringRef kVTViewPackingKind_SideBySide API_AVAILABLE(macos(26.0), ios(26.0), visionos(26.0)) API_UNAVAILABLE(tvos, watchos);
+VT_EXPORT const CFStringRef kVTViewPackingKind_OverUnder API_AVAILABLE(macos(26.0), ios(26.0), visionos(26.0)) API_UNAVAILABLE(tvos, watchos);
/*!
+ @constant kVTCompressionPropertyKey_CameraCalibrationDataLensCollection
+ @abstract Specifies intrinsic and extrinsic parameters for single or multiple lenses.
+ @discussion
+ The property value is an array of dictionaries describing the camera calibration data for each lens. The camera calibration data includes intrinsics and extrinsics with other parameters.
+ For a stereoscopic camera system, the left and right lens signaling can be done with the kVTCompressionPropertyCameraCalibrationKey_LensRole key and its value.
+ */
+VT_EXPORT const CFStringRef kVTCompressionPropertyKey_CameraCalibrationDataLensCollection API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFArray of CFDictionaries
+
+/*!
+ The following keys are required in each kVTCompressionPropertyKey_CameraCalibrationDataLensCollection dictionary.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_LensAlgorithmKind
+ @abstract Specifies the camera calibration methodology.
+ @discussion
+ If the algorithm kind is ParametricLens, the camera lens collection requires camera intrinsic and extrinsic parameters.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_LensDomain
+ @abstract Specifies the kind of lens (e.g., color).
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_LensIdentifier
+ @abstract Specifies a unique number associated with a lens.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_LensRole
+ @abstract Specifies the particular use of the lens in the camera system (e.g., left or right for a stereo system).
+ @discussion
+ For a stereoscopic camera system, one lens should have the left role and another should have the right role.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_LensDistortions
+ @abstract Specifies the first and second radial distortion coefficients(k1 and k2) used to correct the distortion that appeared as curved lines for straight lines and the first and second tangential distortion coefficients(p1 and p2) used to correct the distortion caused by a lens's improper alignment of physical elements.
+ @discussion
+ The values are in a CFArray of four CFNumbers in k1, k2, p1 and p2 order.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_LensFrameAdjustmentsPolynomialX
+ @abstract Specifies a three element polynomial for mapping x axis UV parameters with an adjustment using the equation `x' = polynomialX[0] + polynomialX[1]*x + polynomialX[2]*x^3`.
+ @discussion
+ The values are in a CFArray of three CFNumbers(float) in the order polynomialX[0], polynomialX[1] & polynomialX[2].
+ The polynomial transform origin is at the center of the frame. The default values of elements of polynomialX[] are [0.0, 1.0, 0.0].
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_LensFrameAdjustmentsPolynomialY
+ @abstract Specifies a three element polynomial for mapping y axis UV parameters with an adjustment using the equation `y' = polynomialY[0] + polynomialY[1]*y + polynomialY[2]*y^3`.
+ @discussion
+ The values are in a CFArray of three CFNumbers(float) in the order polynomialY[0], polynomialY[1] & polynomialY[2].
+ The polynomial transform origin is at the center of the frame. The default values of elements of polynomialY[] are [0.0, 1.0, 0.0].
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_RadialAngleLimit
+ @abstract Specifies the outer limit of the calibration validity in degrees of angle eccentric from the optical axis.
+ @discussion
+ The value is linked to radial distortion corrections with k1 and k2.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_IntrinsicMatrix
+ @abstract Specifies the 3x3 camera intrinsic matrix for camera calibration.
+ @discussion
+ Camera intrinsic matrix is a CFData containing a matrix_float3x3, which is column-major. Each element is in IEEE754 native-endian 32-bit floating point. It has the following contents:
+ fx s cx
+ 0 fy cy
+ 0 0 1
+ fx and fy are the focal length in pixels. For square pixels, they will have the same value.
+ cx and cy are the coordinates of the principal point. The origin is the upper left of the frame.
+ s is an optional skew factor.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_IntrinsicMatrixProjectionOffset
+ @abstract Specifies the offset of the point of perspective relative to the rectilinear projection.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_IntrinsicMatrixReferenceDimensions
+ @abstract Specifies the image dimensions to which the camera’s intrinsic matrix values are relative.
+ @discussion
+ Values are width and height in a CFDictionary. Dictionary keys are compatible with CGSize dictionary, namely "Width" and "Height".
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_ExtrinsicOriginSource
+ @abstract Identifies how the origin of the camera system's extrinsics are determined.
+ @discussion
+ The 'blin' value indicates the center of transform is determined by the point mid way along the dimensions indicated by the StereoCameraSystemBaselineBox held in the StereoCameraSystemBox.
+ Each left and right lens within a stereoscopic camera system is equidistant from this point, so the 'blin' value is halved when associated with the respective left and right lenses.
+
+ @constant kVTCompressionPropertyCameraCalibrationKey_ExtrinsicOrientationQuaternion
+ @abstract Specifies a camera’s orientation to a world or scene coordinate system. The orientation value is a unit quaternion(ix, iy, and iz) instead of the classical 3x3 matrix.
+ @discussion
+ The values are in a CFArray of three CFNumbers in ix, iy, and iz order.
+ */
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_LensAlgorithmKind API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFString one of
+VT_EXPORT const CFStringRef kVTCameraCalibrationLensAlgorithmKind_ParametricLens API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_LensDomain API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFString one of
+VT_EXPORT const CFStringRef kVTCameraCalibrationLensDomain_Color API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_LensIdentifier API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFNumber(int32)
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_LensRole API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFString one of
+VT_EXPORT const CFStringRef kVTCameraCalibrationLensRole_Mono API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+VT_EXPORT const CFStringRef kVTCameraCalibrationLensRole_Left API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+VT_EXPORT const CFStringRef kVTCameraCalibrationLensRole_Right API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_LensDistortions API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFArray[CFNumber(float)]
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_RadialAngleLimit API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFNumber(float)
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_LensFrameAdjustmentsPolynomialX API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFArray[CFNumber(float)]
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_LensFrameAdjustmentsPolynomialY API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFArray[CFNumber(float)]
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_IntrinsicMatrix API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFData(matrix_float3x3)
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_IntrinsicMatrixProjectionOffset API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFNumber(float)
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_IntrinsicMatrixReferenceDimensions API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CGSize dictionary
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_ExtrinsicOriginSource API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFString one of
+VT_EXPORT const CFStringRef kVTCameraCalibrationExtrinsicOriginSource_StereoCameraSystemBaseline API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+VT_EXPORT const CFStringRef kVTCompressionPropertyCameraCalibrationKey_ExtrinsicOrientationQuaternion API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // CFArray[CFNumber(float)], ix, iy & iz order
+
+/*!
@constant kVTCompressionPropertyKey_SuggestedLookAheadFrameCount
@abstract
Requests that the encoder retain the specified number of frames during encoding. These frames will be used for additional analysis and statistics
@@ -1356,15 +1531,79 @@
@constant kVTCompressionPropertyKey_SpatialAdaptiveQPLevel
@abstract
Control spatial adaptation of the quantization parameter (QP) based on per-frame statistics.
- If set to kVTAdaptiveQPLevel_Disable, spatial QP adaptation is not applied based on per-frame statistics.
- If set to kVTAdaptiveQPLevel_Default, video encoder is allowed to apply spatial QP adaptation for each macro block (or coding unit) within a video frame.
- QP adaptation is based on spatial characteristics of a frame and the level of spatial QP adaptation is decided internally by the rate controller.
+ If set to kVTQPModulationLevel_Disable, spatial QP adaptation is not applied based on per-frame statistics.
+ If set to kVTQPModulationLevel_Default, video encoder is allowed to apply spatial QP adaptation for each macro block (or coding unit) within a video frame.
+ QP adaptation is based on spatial characteristics of a frame and the level of spatial QP adaptation is decided internally by the rate controller.
+ @discussion
+ This property must be disabled when low latency rate control is enabled. Support for this property is codec dependent.
*/
VT_EXPORT const CFStringRef kVTCompressionPropertyKey_SpatialAdaptiveQPLevel API_AVAILABLE(macos(15.0)) API_UNAVAILABLE(ios, tvos, watchos, visionos); // Read/write, CFNumberRef, Optional
enum {
kVTQPModulationLevel_Default = -1,
kVTQPModulationLevel_Disable = 0,
};
+
+#pragma mark Encoder Settings Assistant
+
+/*!
+@constant kVTCompressionPropertyKey_SupportedPresetDictionaries
+@abstract
+ Where supported by video encoders, returns a dictionary whose keys are the available compression presets (prefixed by `kVTCompressionPreset_`) and the values are dictionaries containing the corresponding settings property key/value pairs.
+@discussion
+ Clients can select a compression preset for their encoding needs and use its encoder settings to configure the encoder.
+ Clients may also use the encoder settings as a base configuration that they can customize as they require.
+
+ See also kVTCompressionPreset_HighQuality, kVTCompressionPreset_Balanced, kVTCompressionPreset_HighSpeed, kVTCompressionPreset_VideoConferencing.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPropertyKey_SupportedPresetDictionaries API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0)); // Read-only, CFDictionary
+
+/*!
+@constant kVTCompressionPreset_HighQuality
+@abstract
+ A preset to achieve a high compression quality.
+@discussion
+ An encoder configured using this preset is expected to achieve a higher quality with a slower encoding than an encoder configured with the preset kVTCompressionPreset_Balanced or kVTCompressionPreset_HighSpeed.
+ The presets kVTCompressionPreset_Balanced and kVTCompressionPreset_HighSpeed may be preferred for a faster encoding.
+
+ See also kVTCompressionPreset_Balanced, kVTCompressionPreset_HighSpeed, kVTCompressionPreset_VideoConferencing.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPreset_HighQuality API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+
+/*!
+@constant kVTCompressionPreset_Balanced
+@abstract
+ A preset to provide a balanced compression quality and encoding speed.
+@discussion
+ An encoder configured using this preset is expected to achieve a higher quality than an encoder configured with the preset kVTCompressionPreset_HighSpeed.
+ The preset kVTCompressionPreset_HighSpeed may be preferred for a faster encoding.
+ The preset kVTCompressionPreset_HighQuality may be preferred for a higher compression quality.
+
+ See also kVTCompressionPreset_HighQuality, kVTCompressionPreset_HighSpeed, kVTCompressionPreset_VideoConferencing.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPreset_Balanced API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+
+/*!
+@constant kVTCompressionPreset_HighSpeed
+@abstract
+ A preset to provide a high-speed encoding.
+@discussion
+ An encoder configured using this preset is expected to achieve a faster encoding at a lower compression quality than an encoder configured with the preset kVTCompressionPreset_HighQuality or kVTCompressionPreset_Balanced.
+ The presets kVTCompressionPreset_HighQuality and kVTCompressionPreset_Balanced may be preferred for a higher compression quality.
+
+ See also kVTCompressionPreset_HighQuality, kVTCompressionPreset_Balanced, kVTCompressionPreset_VideoConferencing.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPreset_HighSpeed API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
+
+/*!
+@constant kVTCompressionPreset_VideoConferencing
+@abstract
+ A preset to achieve low-latency encoding for real-time communication applications.
+@discussion
+ This preset requires setting kVTVideoEncoderSpecification_EnableLowLatencyRateControl to kCFBooleanTrue for encoding in the low-latency mode.
+
+ See also kVTCompressionPreset_HighQuality, kVTCompressionPreset_Balanced, kVTCompressionPreset_HighSpeed.
+*/
+VT_EXPORT const CFStringRef kVTCompressionPreset_VideoConferencing API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), watchos(26.0), visionos(26.0));
CM_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTErrors.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTErrors.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTErrors.h 2025-04-19 02:06:25
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTErrors.h 2025-05-31 00:15:18
@@ -114,12 +114,17 @@
frames following the sync frame that cannot be decoded due to missing references. Dropping these frames
has no impact to playback since the non-decodeable frames will not be rendered.
If kVTDecodeInfo_SkippedLeadingFrameDropped is set, kVTDecodeInfo_FrameDropped will also be set.
+ @constant kVTDecodeInfo_FrameInterrupted
+ The kVTDecodeInfo_FrameInterrupted bit may be set if the frame was decoded successfully but the decoded
+ content was not provided in the output callback. When this bit is set, the imageBuffer provided to the output
+ handler may either be NULL or contain only black pixels.
*/
typedef CF_OPTIONS(UInt32, VTDecodeInfoFlags) {
kVTDecodeInfo_Asynchronous = 1UL << 0,
kVTDecodeInfo_FrameDropped = 1UL << 1,
kVTDecodeInfo_ImageBufferModifiable = 1UL << 2,
kVTDecodeInfo_SkippedLeadingFrameDropped = 1UL << 3,
+ kVTDecodeInfo_FrameInterrupted = 1UL << 4,
};
// Informational status for encoding -- non-error flags
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h 2025-04-19 02:06:27
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h 2025-05-31 00:15:19
@@ -10,13 +10,19 @@
#ifndef VTFRAMEPROCESSOR_H
#define VTFRAMEPROCESSOR_H
+#ifdef __OBJC__
+
+#import <CoreMedia/CMBase.h>
#import <VideoToolbox/VTFrameProcessorConfiguration.h>
#import <VideoToolbox/VTFrameProcessorParameters.h>
#import <VideoToolbox/VTFrameProcessorFrame.h>
#import <VideoToolbox/VTFrameProcessorErrors.h>
+#if ! TARGET_OS_SIMULATOR
#import <VideoToolbox/VTFrameProcessor_MotionBlur.h>
#import <VideoToolbox/VTFrameProcessor_FrameRateConversion.h>
#import <VideoToolbox/VTFrameProcessor_OpticalFlow.h>
+#import <VideoToolbox/VTFrameProcessor_TemporalNoiseFilter.h>
+#endif // ! TARGET_OS_SIMULATOR
#import <Metal/Metal.h>
/*!
@@ -42,8 +48,7 @@
@discussion The VTFrameProcessor class is the main class to perform frame processing. Users can specify a video effect by passing a VTFrameProcessorConfiguration based object to the startSessionWithConfiguration call. Once a session is created, the processWithParameters method is called in a loop to process the frames one by one. Once all the frames are processed, endSession needs to called to finish all pending processing. The caller needs to ensure that all buffers passed to the processWithParameters interface are unmodified (including attachments) until the function returns or the callback is received in the case of asynchronous mode.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-__attribute__((objc_subclassing_restricted))
+API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
@interface VTFrameProcessor : NSObject
- (instancetype) init;
@@ -85,7 +90,19 @@
- (void) processWithParameters:(id<VTFrameProcessorParameters>)parameters
completionHandler:(void (^)(id<VTFrameProcessorParameters> , NSError * _Nullable) )completionHandler NS_SWIFT_NAME(process(parameters:completionHandler:));
+/*!
+ @method processWithParameters:frameOutputHandler
+ @abstract Used with VTFrameProcessor configurations which allow multiple output frames from a single processing call, such as frame rate conversion processor cases when the client needs access to output frames as they become available, rather than waiting for all output frames to be complete.
+ @discussion This interface is suitable for low-latency scenarios when a call would generate multiple output frames, but waiting for all frames to be generated before beginning to use the frames is not ideal. Because the frames that are returned may be used as references for frames still being generated, the output frames are strictly read-only. If you want to modify the frames, you must create a copy first.
+ @param parameters
+ A VTFrameProcessorParameters based object to specify additional frame based parameters to be used during processing. it needs to match the configuration type used during start session.
+ @param frameOutputHandler
+ This frame output handler will be called once for each destination frame in the provided parameters if no errors are encountered. The output handler will receive the same parameters object that was provided to the original call, a flag indicating if this is the final output to be called for this processing request, and the CMTime value associated with the VTFrameProcessorFrame that it is being called for. An NSError parameter will contain an error code if processing was not successful.
+ */
+- (void) processWithParameters:(id<VTFrameProcessorParameters>)parameters
+ frameOutputHandler:(void (^)(id<VTFrameProcessorParameters> , CMTime, BOOL, NSError * _Nullable) )frameOutputHandler NS_REFINED_FOR_SWIFT;
+
/*!
@method processWithCommandBuffer:parameters
@abstract This API provides a Metal API friendly version of processWithParameters.
@@ -111,5 +128,7 @@
@end
NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
#endif // VTFRAMEPROCESSOR_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorConfiguration.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorConfiguration.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorConfiguration.h 2025-04-19 02:06:27
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorConfiguration.h 2025-05-29 01:30:18
@@ -10,6 +10,10 @@
#ifndef VTFRAMEPROCESSORCONFIGURATION_H
#define VTFRAMEPROCESSORCONFIGURATION_H
+#ifdef __OBJC__
+
+#import <CoreMedia/CMBase.h>
+#import <Foundation/Foundation.h>
#import <CoreMedia/CMFormatDescription.h>
NS_HEADER_AUDIT_BEGIN(nullability)
@@ -21,67 +25,69 @@
 @discussion VTFrameProcessorConfiguration protocol conformance is used to start a frame processing session. These properties can be queried on an implementation conforming to VTFrameProcessorConfiguration without starting a session.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
NS_SWIFT_SENDABLE
@protocol VTFrameProcessorConfiguration <NSObject>
@required
/*!
- @property processorSupported
+ @property supported
@abstract Returns a Boolean indicating whether the processor supported on the current config.
*/
-@property (class, nonatomic, readonly) Boolean processorSupported;
+@property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
/*!
@property frameSupportedPixelFormats
@abstract Returns a list of supported pixel formats for the current configuration
*/
-@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats;
+@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
/*!
@property sourcePixelBufferAttributes
@abstract Returns a dictionary of CVPixelBuffer attributes which source and reference frames passed to the processor must conform to.
*/
-@property (nonatomic, readonly) NSDictionary * sourcePixelBufferAttributes;
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
/*!
@property destinationPixelBufferAttributes
@abstract Returns a dictionary of CVPixelBuffer attributes which output frames passed to the processor must conform to.
*/
-@property (nonatomic, readonly) NSDictionary * destinationPixelBufferAttributes;
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
-@optional
+@optional // WARNING: Optional properties must be refined for swift
/*!
@property nextFrameCount
@abstract returns the number of "next" frames that this processor requires for processing.
*/
-@property (nonatomic, readonly) NSInteger nextFrameCount;
+@property (nonatomic, readonly) NSInteger nextFrameCount NS_REFINED_FOR_SWIFT;
/*!
@property previousFrameCount
@abstract returns the number of "previous" frames that this processor requires for processing.
*/
-@property (nonatomic, readonly) NSInteger previousFrameCount;
+@property (nonatomic, readonly) NSInteger previousFrameCount NS_REFINED_FOR_SWIFT;
/*!
@property maximumDimensions
- @abstract returns the maximum dimension for a sourceFrame for the processor
+ @abstract returns the maximum dimensions for a sourceFrame for the processor
*/
-@property (class, nonatomic, readonly) CMVideoDimensions maximumDimensions;
+@property (class, nonatomic, readonly) CMVideoDimensions maximumDimensions NS_REFINED_FOR_SWIFT;
/*!
@property minimumDimensions
- @abstract returns the minimum dimension for a sourceFrame for the processor
+ @abstract returns the minimum dimensions for a sourceFrame for the processor
*/
-@property (class, nonatomic, readonly) CMVideoDimensions minimumDimensions;
+@property (class, nonatomic, readonly) CMVideoDimensions minimumDimensions NS_REFINED_FOR_SWIFT;
@end
NS_HEADER_AUDIT_END(nullability)
-#endif // VTFRAMEPROCESSORPARAMETERS_H
+#endif // __OBJC__
+
+#endif // VTFRAMEPROCESSORCONFIGURATION_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorErrors.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorErrors.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorErrors.h 2025-04-19 02:06:26
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorErrors.h 2025-05-29 01:30:17
@@ -10,8 +10,13 @@
#ifndef VTFRAMEPROCESSORERRORS_H
#define VTFRAMEPROCESSORERRORS_H
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+#ifdef __OBJC__
+#import <CoreMedia/CMBase.h>
+#import <Foundation/Foundation.h>
+
+API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
+
extern NSErrorDomain _Nonnull const VTFrameProcessorErrorDomain;
/*!
@@ -44,6 +49,8 @@
Returned if one of the provided parameters is not valid.
@constant VTFrameProcessorInvalidFrameTiming
Returned if one of the provided VTFrameProcessorFrame objects has a PTS which is not supported by the processor, either invalid or out-of-order.
+ @constant VTFrameProcessorAssetDownloadFailed
+ Returned if download of a required model asset for the processor failed
*/
@@ -62,7 +69,10 @@
VTFrameProcessorProcessingError = -19740,
VTFrameProcessorInvalidParameterError = -19741,
VTFrameProcessorInvalidFrameTiming = -19742,
+ VTFrameProcessorAssetDownloadFailed = -19743,
};
+
+#endif // __OBJC__
#endif // VTFRAMEPROCESSORERRORS_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h 2025-04-19 02:06:27
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h 2025-05-29 01:30:18
@@ -10,6 +10,9 @@
#ifndef VTFRAMEPROCESSORFRAME_H
#define VTFRAMEPROCESSORFRAME_H
+#ifdef __OBJC__
+
+#import <CoreMedia/CMBase.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CMTime.h>
@@ -19,9 +22,8 @@
@class VTFrameProcessorFrame
@abstract Helper class to wrap video frames that will be sent to the processor, as source frames, reference frames, or output frames. Instances retain the buffer backing them.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-__attribute__((objc_subclassing_restricted))
+API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
@interface VTFrameProcessorFrame : NSObject
/*!
@@ -57,9 +59,7 @@
@class VTFrameProcessorOpticalFlow
@abstract Helper class to wrap optical flow that will be sent to the processor. Instances retain the buffers backing them.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-__attribute__((objc_subclassing_restricted))
-
+API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
@interface VTFrameProcessorOpticalFlow : NSObject
/*!
@@ -92,5 +92,7 @@
NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
#endif // VTFRAMEPROCESSORFRAME_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorParameters.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorParameters.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorParameters.h 2025-04-19 02:06:26
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorParameters.h 2025-05-29 01:30:18
@@ -10,6 +10,10 @@
#ifndef VTFRAMEPROCESSORPARAMETERS_H
#define VTFRAMEPROCESSORPARAMETERS_H
+#ifdef __OBJC__
+
+#import <CoreMedia/CMBase.h>
+#import <Foundation/Foundation.h>
#import <VideoToolbox/VTFrameProcessorFrame.h>
NS_HEADER_AUDIT_BEGIN(nullability)
@@ -19,7 +23,7 @@
 @abstract VTFrameProcessorParameters is the base protocol for input and output processing parameters for a VTFrameProcessor processing implementation. An instance of a class corresponding to this protocol is passed to processFrameWithParameters calls, and in async versions of those APIs, the same instance is returned in the completion.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
@protocol VTFrameProcessorParameters <NSObject>
@required
@@ -31,9 +35,28 @@
@property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
+@optional // WARNING: Optional properties must be refined for swift
+
+/**
+ * @property destinationFrame
+ * @abstract VTFrameProcessorFrame that contains the destination frame for processors which output a single processed frame.
+*/
+
+@property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame NS_REFINED_FOR_SWIFT;
+
+/**
+ * @property destinationFrames
+ * @abstract NSArray of VTFrameProcessorFrame that contains the destination frames for processors which may output more than one processed frame.
+*/
+
+@property(nonatomic, readonly) NSArray<VTFrameProcessorFrame *> * destinationFrames NS_REFINED_FOR_SWIFT;
+
+
@end
NS_HEADER_AUDIT_END(nullability)
+
+#endif // __OBJC__
#endif // VTFRAMEPROCESSORPARAMETERS_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h 2025-04-19 02:06:26
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h 2025-05-31 21:37:04
@@ -10,6 +10,11 @@
#ifndef VTFRAMEPROCESSOR_FRAMERATECONVERSION_H
#define VTFRAMEPROCESSOR_FRAMERATECONVERSION_H
+#include <CoreMedia/CMBase.h>
+
+#if ! TARGET_OS_SIMULATOR
+#ifdef __OBJC__
+
#import <VideoToolbox/VTFrameProcessorConfiguration.h>
#import <VideoToolbox/VTFrameProcessorParameters.h>
#import <VideoToolbox/VTFrameProcessorFrame.h>
@@ -23,7 +28,7 @@
/*!
@brief Quality prioritization levels to favor quality or performance.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTFrameRateConversionConfigurationQualityPrioritization) {
VTFrameRateConversionConfigurationQualityPrioritizationNormal = 1,
VTFrameRateConversionConfigurationQualityPrioritizationQuality = 2,
@@ -33,7 +38,7 @@
@brief List of existing algorithm revisions with the highest being the latest. Clients can read defaultRevision property to find the default revision.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTFrameRateConversionConfigurationRevision) {
VTFrameRateConversionConfigurationRevision1 = 1, // revision 1
} NS_SWIFT_NAME(VTFrameRateConversionConfiguration.Revision);
@@ -41,7 +46,7 @@
/*!
 @brief Hint to let the processor know whether frames are being submitted in presentation sequence, allowing performance optimizations based on previous processing requests
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTFrameRateConversionParametersSubmissionMode) {
VTFrameRateConversionParametersSubmissionModeRandom = 1, // Frames are submitted in non-sequential order
VTFrameRateConversionParametersSubmissionModeSequential = 2, // Frames are submitted sequentially following presentation time order
@@ -59,8 +64,8 @@
@discussion This configuration enables the FrameRateConversion on a VTFrameProcesing session.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-NS_SWIFT_SENDABLE __attribute__((objc_subclassing_restricted))
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+NS_SWIFT_SENDABLE
@interface VTFrameRateConversionConfiguration : NSObject <VTFrameProcessorConfiguration>
#pragma mark --- init function(s).
@@ -136,7 +141,7 @@
* @property frameSupportedPixelFormats
* @abstract list of source frame supported pixel formats for current configuration
*/
-@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats;
+@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
/**
* @property sourcePixelBufferAttributes
@@ -151,10 +156,11 @@
@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
/*!
- @property processorSupported
- @abstract reports that this processor is supported
+ @property supported
+ @abstract reports whether this processor is supported
*/
-@property (class, nonatomic, readonly) Boolean processorSupported;
+@property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
+@property (class, nonatomic, readonly) Boolean processorSupported API_DEPRECATED_WITH_REPLACEMENT("isSupported", macos(15.4, 26.0)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos);
@end
@@ -166,9 +172,7 @@
@discussion VTFrameRateConversionParameters are frame level parameters.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-__attribute__((objc_subclassing_restricted))
-
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
@interface VTFrameRateConversionParameters : NSObject <VTFrameProcessorParameters>
/*!
@@ -242,5 +246,8 @@
@end
NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
+#endif // ! TARGET_OS_SIMULATOR
#endif // VTFRAMEPROCESSOR_FRAMERATECONVERSION_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencyFrameInterpolation.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencyFrameInterpolation.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencyFrameInterpolation.h 1969-12-31 19:00:00
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencyFrameInterpolation.h 2025-05-31 21:14:04
@@ -0,0 +1,185 @@
+/*
+ File: VTFrameProcessor_LowLatencyFrameInterpolation.h
+
+ Framework: VideoToolbox
+
+ Copyright © 2024-2025 Apple Inc. All rights reserved.
+
+*/
+
+#ifndef VTFRAMEPROCESSOR_LOWLATENCYFRAMEINTERPOLATION_H
+#define VTFRAMEPROCESSOR_LOWLATENCYFRAMEINTERPOLATION_H
+
+#include <CoreMedia/CMBase.h>
+
+#if ! TARGET_OS_SIMULATOR
+#ifdef __OBJC__
+
+#import <VideoToolbox/VTFrameProcessorConfiguration.h>
+#import <VideoToolbox/VTFrameProcessorParameters.h>
+#import <VideoToolbox/VTFrameProcessorFrame.h>
+
+
+NS_HEADER_AUDIT_BEGIN(nullability, sendability)
+
+
+/*!
+ @class VTLowLatencyFrameInterpolationConfiguration
+ @abstract Configuration that is used to program VTFrameProcessor for Low Latency Frame Interpolation. This can either do purely temporal interpolation (Frame Rate Conversion) or it can do temporal and spatial interpolation (Scaling and Frame Rate Conversion).
+
+ @discussion This processor requires a source frame and a previous frame. It does temporal scaling, interpolating frames between the previous frame and the source frame. When performing both temporal and spatial interpolation, the processor can only perform 2x upscaling, and a single frame of temporal interpolation. When performing spatial scaling, the processor will produce upscaled intermediate frames as well as an upscaled sourceFrame but will not upscale the previous reference frame provided.
+*/
+
+API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
+NS_SWIFT_SENDABLE
+@interface VTLowLatencyFrameInterpolationConfiguration : NSObject <VTFrameProcessorConfiguration>
+
+#pragma mark --- init function(s).
+/*!
+ @abstract Creates a new VTLowLatencyFrameInterpolationConfiguration with specified frame width and height, configured for temporal interpolation (Frame Rate Conversion).
+
+ @param frameWidth Width of source frame in pixels.
+
+ @param frameHeight Height of source frame in pixels.
+
+ @param numberOfInterpolatedFrames The number of uniformly spaced frames that you want to have available for interpolation.
+
+ @discussion The available interpolation points will be the next value of (2^x -1) which is greater than or equal to numberOfInterpolatedFrames. For example, if 1 interpolated frame is requested, 1 interpolation point at 0.5 is available. If 2 interpolated frames are requested, 3 interpolation points at 0.25, 0.5 and 0.75 are available. Not all available interpolation points need to be used. Setting a higher numberOfInterpolatedFrames increases the resolution of interpolation in some cases, but will also increase latency.
+*/
+- (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
+ frameHeight:(NSInteger)frameHeight
+ numberOfInterpolatedFrames:(NSInteger)numberOfInterpolatedFrames;
+
+
+/*!
+ @abstract Creates a new VTLowLatencyFrameInterpolationConfiguration with specified frame width and height, configured for spatial scaling as well as temporal scaling.
+
+ @param frameWidth Width of source frame in pixels.
+
+ @param frameHeight Height of source frame in pixels.
+
+ @param spatialScaleFactor The requested spatial scale factor as an integer. Currently, only 2x spatial scaling is supported.
+
+ @discussion When configured for spatial scaling, the VTLowLatencyFrameInterpolation processor only supports 2x spatial upscaling and a single frame of temporal interpolation at a 0.5 interpolation phase. Setting the numberOfInterpolatedFrames property will be ignored in this case.
+*/
+- (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
+ frameHeight:(NSInteger)frameHeight
+ spatialScaleFactor:(NSInteger)spatialScaleFactor;
+
+- (instancetype) init NS_UNAVAILABLE;
++ (instancetype) new NS_UNAVAILABLE;
+
+/**
+ * @property frameWidth
+ * @abstract Returns the width of source frames in pixels.
+*/
+@property (nonatomic, readonly) NSInteger frameWidth;
+
+/**
+ * @property frameHeight
+ * @abstract Returns the height of source frames in pixels.
+*/
+@property (nonatomic, readonly) NSInteger frameHeight;
+
+/**
+ * @property spatialScaleFactor
+ * @abstract Returns the configured spatial scale factor as an integer.
+*/
+@property (nonatomic, readonly) NSInteger spatialScaleFactor;
+
+/**
+ * @property numberOfInterpolatedFrames
+ * @abstract Returns the number of uniformly spaced frames that the processor is configured for.
+*/
+@property (nonatomic, readonly) NSInteger numberOfInterpolatedFrames;
+
+/**
+ * @property frameSupportedPixelFormats
+ * @abstract Returns a list of supported pixel formats for current configuration
+ */
+
+@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
+
+/**
+ * @property sourcePixelBufferAttributes
+ * @abstract Returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source and reference frames
+*/
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
+
+/**
+ * @property destinationPixelBufferAttributes
+ * @abstract Returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames
+*/
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
+
+/*!
+ @property supported
+ @abstract reports whether this processor is supported
+*/
+@property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
+
+@end
+
+
+/*!
+ @class VTLowLatencyFrameInterpolationParameters
+ @abstract VTLowLatencyFrameInterpolationParameters object contains both input and output parameters needed for the Low Latency Frame Interpolation Frame Processor. This object is used in the processWithParameters call of VTFrameProcessor class.
+
+ @discussion VTLowLatencyFrameInterpolationParameters are frame level parameters.
+*/
+
+API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
+@interface VTLowLatencyFrameInterpolationParameters : NSObject <VTFrameProcessorParameters>
+
+/*!
+ @abstract Creates a new VTLowLatencyFrameInterpolationParameters used to generate interpolated frames between a previous frame and a sourceFrame.
+
+ @param sourceFrame Current source frame. Must be non nil.
+
+ @param previousFrame Previous frame used for interpolation. Must be non nil.
+
+ @param interpolationPhase The list of interpolation phase locations for the frames to be interpolated. Each value must be greater than 0 and less than 1.0; 0.5 is midway between the previous frame and the source frame. If spatial scaling has been enabled, the only supported interpolation phase is 0.5.
+
+ @param destinationFrames The list of VTFrameProcessorFrame to receive the interpolated frames. This must have the same number of elements as the interpolationPhase. If spatial scaling is enabled, it must also contain an element to hold the scaled version of sourceFrame.
+
+*/
+- (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
+ previousFrame:(VTFrameProcessorFrame *)previousFrame
+ interpolationPhase:(NSArray<NSNumber *> *) interpolationPhase
+ destinationFrames:(NSArray<VTFrameProcessorFrame *> *)destinationFrames NS_REFINED_FOR_SWIFT;
+
+- (instancetype) init NS_UNAVAILABLE;
++ (instancetype) new NS_UNAVAILABLE;
+
+/**
+ * @property sourceFrame
+ * @abstract Returns the source frame that was provided when the VTLowLatencyFrameInterpolationParameters object was created.
+*/
+@property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
+
+/**
+ * @property previousFrame
+ * @abstract Returns the previous frame that was provided when the VTLowLatencyFrameInterpolationParameters object was created.
+*/
+@property(nonatomic, readonly) VTFrameProcessorFrame * previousFrame;
+
+/**
+ * @property interpolationPhase
+ * @abstract Returns the array of interpolation phases that were provided when the VTLowLatencyFrameInterpolationParameters object was created.
+*/
+@property (nonatomic, readonly) NSArray<NSNumber *> * interpolationPhase NS_REFINED_FOR_SWIFT;
+
+/**
+ * @property destinationFrames
+ * @abstract Returns the array of destination frames that were provided when the VTLowLatencyFrameInterpolationParameters object was created.
+*/
+@property(nonatomic, readonly) NSArray<VTFrameProcessorFrame *> * destinationFrames;
+
+@end
+
+NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
+#endif // ! TARGET_OS_SIMULATOR
+
+#endif // VTFRAMEPROCESSOR_LOWLATENCYFRAMEINTERPOLATION_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencySuperResolutionScaler.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencySuperResolutionScaler.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencySuperResolutionScaler.h 1969-12-31 19:00:00
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencySuperResolutionScaler.h 2025-05-31 21:14:04
@@ -0,0 +1,164 @@
+/*
+ File: VTFrameProcessor_LowLatencySuperResolutionScaler.h
+
+ Framework: VideoToolbox
+
+ Copyright © 2024-2025 Apple Inc. All rights reserved.
+
+*/
+
+#ifndef VTFRAMEPROCESSOR_LOWLATENCYSUPERRESOLUTIONSCALER_H
+#define VTFRAMEPROCESSOR_LOWLATENCYSUPERRESOLUTIONSCALER_H
+
+#include <CoreMedia/CMBase.h>
+
+#if ! TARGET_OS_SIMULATOR
+#ifdef __OBJC__
+
+#import <VideoToolbox/VTFrameProcessorConfiguration.h>
+#import <VideoToolbox/VTFrameProcessorParameters.h>
+#import <VideoToolbox/VTFrameProcessorFrame.h>
+
+
+NS_HEADER_AUDIT_BEGIN(nullability, sendability)
+
+
+/*!
+ @class VTLowLatencySuperResolutionScalerConfiguration
+ @abstract Creates an object which is used to configure VTFrameProcessor for Low Latency Super Resolution Scaler processing.
+
+ @discussion VTLowLatencySuperResolutionScalerConfiguration is used to configure a VTFrameProcessor. This interface can also queried for important operating details, like the pixel buffer attributes required for frames submitted to the processor.
+*/
+
+API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
+NS_SWIFT_SENDABLE
+@interface VTLowLatencySuperResolutionScalerConfiguration : NSObject <VTFrameProcessorConfiguration>
+
+#pragma mark --- init function(s).
+/*!
+ @abstract Creates a new VTLowLatencySuperResolutionScalerConfiguration with specified frame width and height.
+
+ @param frameWidth Width of source frame in pixels.
+
+ @param frameHeight Height of source frame in pixels.
+
+ @param scaleFactor The scale factor to be applied. This must be a supported value returned by supportedScaleFactorsForFrameWidth:frameHeight.
+*/
+- (instancetype)initWithFrameWidth:(NSInteger)frameWidth
+ frameHeight:(NSInteger)frameHeight
+ scaleFactor:(float)scaleFactor;
+
+- (instancetype) init NS_UNAVAILABLE;
++ (instancetype) new NS_UNAVAILABLE;
+
+/**
+ * @property frameWidth
+ * @abstract Width of source frame in pixels.
+*/
+@property (nonatomic, readonly) NSInteger frameWidth;
+
+/**
+ * @property frameHeight
+ * @abstract Height of source frame in pixels.
+*/
+@property (nonatomic, readonly) NSInteger frameHeight;
+
+/**
+ * @property frameSupportedPixelFormats
+ * @abstract list of pixel formats for source frames for the current configuration
+ */
+
+@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
+
+/**
+ * @property sourcePixelBufferAttributes
+ * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source and reference frames
+*/
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
+
+/**
+ * @property destinationPixelBufferAttributes
+ * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames
+*/
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
+
+/**
+ * @property scaleFactor
+ * @abstract Returns the scale factor that the configuration was initialized with.
+*/
+@property (nonatomic, readonly) float scaleFactor;
+
+/*!
+ @property maximumDimensions
+ @abstract returns the maximum dimensions for a sourceFrame for the processor
+*/
+@property (class, nonatomic, readonly) CMVideoDimensions maximumDimensions;
+
+/*!
+ @property minimumDimensions
+ @abstract returns the minimum dimensions for a sourceFrame for the processor
+*/
+@property (class, nonatomic, readonly) CMVideoDimensions minimumDimensions;
+
+/*!
+ @property supported
+ @abstract reports whether this processor is supported on the current config.
+*/
+@property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
+
+/**
+ * Returns the supported scale factors for the provided input dimensions.
+ *
+ * @abstract returns an array of supported scale factors values, or an empty list if the dimensions are unsupported.
+*/
++ (NSArray<NSNumber*>*) supportedScaleFactorsForFrameWidth:(NSInteger)frameWidth
+ frameHeight:(NSInteger)frameHeight NS_REFINED_FOR_SWIFT;
+
+@end
+
+
+/*!
+ @class VTLowLatencySuperResolutionScalerParameters
+ @abstract VTLowLatencySuperResolutionScalerParameters object contains both input and output parameters needed for the Low Latency Super Resolution Scaler Frame Processor. This object is used in the processWithParameters call of VTFrameProcessor class.
+
+ @discussion VTLowLatencySuperResolutionScalerParameters are frame level parameters.
+*/
+
+API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
+
+@interface VTLowLatencySuperResolutionScalerParameters : NSObject <VTFrameProcessorParameters>
+
+/*!
+ @abstract Creates a new VTLowLatencySuperResolutionScalerParameters object.
+
+ @param sourceFrame Current source frame. Must be non nil.
+
+ @param destinationFrame User allocated pixel buffer that will receive the results. Must be non nil.
+*/
+- (instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
+ destinationFrame:(VTFrameProcessorFrame *)destinationFrame;
+
+- (instancetype) init NS_UNAVAILABLE;
++ (instancetype) new NS_UNAVAILABLE;
+
+/**
+ * @property sourceFrame
+ * @abstract sourceFrame Current source frame. Must be non nil
+*/
+@property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
+
+/**
+ * @property destinationFrame
+ * @abstract VTFrameProcessorFrame that contains user allocated pixel buffer that will receive the results.
+*/
+
+@property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame;
+
+@end
+
+NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
+#endif // ! TARGET_OS_SIMULATOR
+
+#endif // VTFRAMEPROCESSOR_LOWLATENCYSUPERRESOLUTIONSCALER_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h 2025-04-19 02:06:26
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h 2025-05-31 21:41:33
@@ -10,6 +10,11 @@
#ifndef VTFRAMEPROCESSOR_MOTIONBLUR_H
#define VTFRAMEPROCESSOR_MOTIONBLUR_H
+#include <CoreMedia/CMBase.h>
+
+#if ! TARGET_OS_SIMULATOR
+#ifdef __OBJC__
+
#import <VideoToolbox/VTFrameProcessorConfiguration.h>
#import <VideoToolbox/VTFrameProcessorParameters.h>
#import <VideoToolbox/VTFrameProcessorFrame.h>
@@ -23,7 +28,7 @@
/*!
@brief Quality prioritization levels to favor quality or performance.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTMotionBlurConfigurationQualityPrioritization) {
VTMotionBlurConfigurationQualityPrioritizationNormal = 1,
VTMotionBlurConfigurationQualityPrioritizationQuality = 2,
@@ -32,7 +37,7 @@
/*!
@brief List of existing algorithm revisions with the highest being the latest. Clients can read defaultRevision property to find the default revision.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTMotionBlurConfigurationRevision) {
VTMotionBlurConfigurationRevision1 = 1, // revision 1
} NS_SWIFT_NAME(VTMotionBlurConfiguration.Revision);
@@ -40,7 +45,7 @@
/*!
@brief Hint to let the processor know whether frames are being submitted in presenatation sequence, allowing performance optimizations based on previous processing requests
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTMotionBlurParametersSubmissionMode) {
VTMotionBlurParametersSubmissionModeRandom = 1, // Frames are submitted in non-sequential order
VTMotionBlurParametersSubmissionModeSequential = 2, // Frames are submitted sequentially following presentation time order
@@ -55,8 +60,8 @@
@discussion This configuration enables the MotionBlur on a VTFrameProcesing session.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-NS_SWIFT_SENDABLE __attribute__((objc_subclassing_restricted))
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+NS_SWIFT_SENDABLE
@interface VTMotionBlurConfiguration : NSObject <VTFrameProcessorConfiguration>
#pragma mark --- init function(s).
@@ -132,7 +137,7 @@
* @property frameSupportedPixelFormats
* @abstract list of source frame supported pixel formats for current configuration
*/
-@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats;
+@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
/**
* @property sourcePixelBufferAttributes
@@ -148,14 +153,14 @@
/*!
- @property processorSupported
- @abstract reports that this processor is supported
+ @property supported
+ @abstract reports whether this processor is supported
*/
-@property (class, nonatomic, readonly) Boolean processorSupported;
+@property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
+@property (class, nonatomic, readonly) Boolean processorSupported API_DEPRECATED_WITH_REPLACEMENT("isSupported", macos(15.4, 26.0)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos);
@end
-
/*!
@class VTMotionBlurParameters
@abstract VTMotionBlurParameters object contains both input and output parameters needed to run the MotionBlur processor on a frame. This object is used in the processWithParameters call of VTFrameProcessor class. The output parameter for this class is destinationFrame where the output frame is returned (as VTFrameProcessorFrame) back to the caller function once the processWithParameters completes.
@@ -163,9 +168,7 @@
@discussion VTMotionBlurParameters are frame level parameters.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-__attribute__((objc_subclassing_restricted))
-
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
@interface VTMotionBlurParameters : NSObject <VTFrameProcessorParameters>
/*!
@@ -258,5 +261,8 @@
@end
NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
+#endif // ! TARGET_OS_SIMULATOR
#endif // VTFRAMEPROCESSOR_MOTIONBLUR_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h 2025-04-19 02:06:26
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h 2025-05-31 21:01:54
@@ -10,6 +10,11 @@
#ifndef VTFRAMEPROCESSOR_OPTICALFLOW_H
#define VTFRAMEPROCESSOR_OPTICALFLOW_H
+#include <CoreMedia/CMBase.h>
+
+#if ! TARGET_OS_SIMULATOR
+#ifdef __OBJC__
+
#import <VideoToolbox/VTFrameProcessorConfiguration.h>
#import <VideoToolbox/VTFrameProcessorParameters.h>
#import <VideoToolbox/VTFrameProcessorFrame.h>
@@ -23,7 +28,7 @@
/*!
@brief Quality prioritization levels to favor quality or performance.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTOpticalFlowConfigurationQualityPrioritization) {
VTOpticalFlowConfigurationQualityPrioritizationNormal = 1,
VTOpticalFlowConfigurationQualityPrioritizationQuality = 2,
@@ -32,7 +37,7 @@
/*!
@brief List of existing algorithm revisions with the highest being the latest. Clients can read defaultRevision property to find the default revision.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTOpticalFlowConfigurationRevision) {
VTOpticalFlowConfigurationRevision1 = 1, // revision 1
} NS_SWIFT_NAME(VTOpticalFlowConfiguration.Revision);
@@ -40,7 +45,7 @@
/*!
@brief Hint to let the processor know whether frames are being submitted in presenatation sequence, allowing performance optimizations based on previous processing requests
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
typedef NS_ENUM(NSInteger, VTOpticalFlowParametersSubmissionMode) {
VTOpticalFlowParametersSubmissionModeRandom = 1, // Frames are submitted in non-sequential order
VTOpticalFlowParametersSubmissionModeSequential = 2, // Frames are submitted sequentially following presentation time order
@@ -56,8 +61,8 @@
@discussion This configuration enables the OpticalFlow on a VTFrameProcessing session.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-NS_SWIFT_SENDABLE __attribute__((objc_subclassing_restricted))
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+NS_SWIFT_SENDABLE
@interface VTOpticalFlowConfiguration : NSObject <VTFrameProcessorConfiguration>
#pragma mark --- init function(s).
@@ -126,7 +131,7 @@
* @property frameSupportedPixelFormats
* @abstract list of source frame supported pixel formats for current configuration
*/
-@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats;
+@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
/**
* @property sourcePixelBufferAttributes
@@ -140,11 +145,13 @@
*/
@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
+
/*!
- @property processorSupported
- @abstract reports that this processor is supported
+ @property supported
+ @abstract reports whether this processor is supported
*/
-@property (class, nonatomic, readonly) Boolean processorSupported;
+@property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
+@property (class, nonatomic, readonly) Boolean processorSupported API_DEPRECATED_WITH_REPLACEMENT("isSupported", macos(15.4, 26.0)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos);
@end
@@ -156,9 +163,7 @@
@discussion VTOpticalFlowParameters are frame level parameters.
*/
-API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
-__attribute__((objc_subclassing_restricted))
-
+API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
@interface VTOpticalFlowParameters : NSObject <VTFrameProcessorParameters>
/*!
@@ -212,5 +217,8 @@
@end
NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
+#endif // ! TARGET_OS_SIMULATOR
#endif // VTFRAMEPROCESSOR_OPTICALFLOW_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_SuperResolutionScaler.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_SuperResolutionScaler.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_SuperResolutionScaler.h 1969-12-31 19:00:00
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_SuperResolutionScaler.h 2025-05-31 00:15:19
@@ -0,0 +1,296 @@
+/*
+ File: VTFrameProcessor_SuperResolutionScaler.h
+
+ Framework: VideoToolbox
+
+ Copyright 2023-2024 Apple Inc. All rights reserved.
+
+*/
+
+#ifndef VTFRAMEPROCESSOR_SUPERRESOLUTION_H
+#define VTFRAMEPROCESSOR_SUPERRESOLUTION_H
+
+#include <CoreMedia/CMBase.h>
+
+#if ! TARGET_OS_SIMULATOR
+#ifdef __OBJC__
+
+#import <VideoToolbox/VTFrameProcessorConfiguration.h>
+#import <VideoToolbox/VTFrameProcessorParameters.h>
+#import <VideoToolbox/VTFrameProcessorFrame.h>
+
+/*!
+ @brief Interfaces for creating and using a SuperResolution processor
+
+ @details The VTSuperResolutionScaler processor Configuration and Parameters objects are used with the VTFrameProcessor interface defined in VTFrameProcessor.h.
+ */
+
+/*!
+ @brief Quality prioritization levels to favor quality or performance.
+*/
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+typedef NS_ENUM(NSInteger, VTSuperResolutionScalerConfigurationQualityPrioritization) {
+ VTSuperResolutionScalerConfigurationQualityPrioritizationNormal = 1,
+} NS_SWIFT_NAME(VTSuperResolutionScalerConfiguration.QualityPrioritization);
+
+/*!
+ @brief List of existing algorithm revisions with the highest being the latest. Clients can read defaultRevision property to find the default revision.
+ */
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+typedef NS_ENUM(NSInteger, VTSuperResolutionScalerConfigurationRevision) {
+ VTSuperResolutionScalerConfigurationRevision1 = 1, // revision 1
+} NS_SWIFT_NAME(VTSuperResolutionScalerConfiguration.Revision);
+
+/*!
+@brief List of SuperResolution input types.
+*/
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+typedef NS_ENUM(NSInteger, VTSuperResolutionScalerConfigurationInputType) {
+ VTSuperResolutionScalerConfigurationInputTypeVideo = 1,
+ VTSuperResolutionScalerConfigurationInputTypeImage = 2,
+} NS_SWIFT_NAME(VTSuperResolutionScalerConfiguration.InputType);
+
+/*!
+@brief List of SuperResolution input types.
+*/
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+typedef NS_ENUM(NSInteger, VTSuperResolutionScalerConfigurationModelStatus) {
+ VTSuperResolutionScalerConfigurationModelStatusDownloadRequired = 0,
+ VTSuperResolutionScalerConfigurationModelStatusDownloading = 1,
+ VTSuperResolutionScalerConfigurationModelStatusReady = 2,
+} NS_SWIFT_NAME(VTSuperResolutionScalerConfiguration.ModelStatus);
+
+/*!
+ @brief Hint to let the processor know whether frames are being submitted in presenatation sequence, allowing performance optimizations based on previous processing requests
+ */
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+typedef NS_ENUM(NSInteger, VTSuperResolutionScalerParametersSubmissionMode) {
+ VTSuperResolutionScalerParametersSubmissionModeRandom = 1, // Frames are submitted in non-sequential order
+ VTSuperResolutionScalerParametersSubmissionModeSequential = 2, // Frames are submitted sequentially following presentation time order
+} NS_SWIFT_NAME(VTSuperResolutionScalerParameters.SubmissionMode);
+
+
+NS_HEADER_AUDIT_BEGIN(nullability, sendability)
+
+/*!
+ @class VTSuperResolutionScalerConfiguration
+ @abstract Configuration that is used to set up the SuperResolution Processor.
+
+ @discussion This configuration enables the SuperResolution on a VTFrameProcessing session. IMPORTANT: The VTSuperResolutionScaler processor may require ML models which need to be downloaded by the framework in order to operate. Before using calling startSessionWithConfiguration with a VTSuperResolutionScalerConfiguration, it is important that you verify that the necessary models are present by checking the configurationModelStatus on the configuration object. If models are not available, model download can be triggered using the downloadConfigurationModelWithCompletionHandler method on the configuration object. Best practice is to confirm availability of models and drive download with user awareness and interaction before engaging workflows where the processor is needed.
+*/
+
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+NS_SWIFT_SENDABLE
+@interface VTSuperResolutionScalerConfiguration : NSObject <VTFrameProcessorConfiguration>
+
+#pragma mark --- init function(s).
+/*!
+ @abstract Creates a new VTSuperResolutionScalerConfiguration with specified flow width and height.
+ @discussion init will return nil if dimensions are out of range or revision is unsupported.
+ @param frameWidth Width of source frame in pixels. Maximum value is 8192 for macOS, and 4096 for iOS.
+ @param frameHeight Height of source frame in pixels. Maximum value is 4320 for macOS, and 2160 for iOS.
+ @param scaleFactor Indicates the scale factor between input and output.
+ @param inputType Indicates the type of input (video / image ).
+ @param usePrecomputedFlow Boolean value to indicate that Optical Flow will be provided by the user, if false this configuration will compute the optical flow on the fly.
+ @param qualityPrioritization Used to control quality and performance levels. See VTSuperResolutionScalerConfigurationQualityPrioritization for more info.
+ @param revision The specific algorithm or configuration revision that is to be used to perform the request.
+*/
+- (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
+ frameHeight:(NSInteger)frameHeight
+ scaleFactor:(NSInteger)scaleFactor
+ inputType:(VTSuperResolutionScalerConfigurationInputType)inputType
+ usePrecomputedFlow:(BOOL)usePrecomputedFlow
+ qualityPrioritization:(VTSuperResolutionScalerConfigurationQualityPrioritization)qualityPrioritization
+ revision:(VTSuperResolutionScalerConfigurationRevision)revision;
+
+- (instancetype) init NS_UNAVAILABLE;
++ (instancetype) new NS_UNAVAILABLE;
+
+/**
+ * @property frameWidth
+ * @abstract Width of source frame in pixels.
+ */
+@property (nonatomic, readonly) NSInteger frameWidth;
+
+/**
+ * @property frameHeight
+ * @abstract Height of source frame in pixels.
+ */
+@property (nonatomic, readonly) NSInteger frameHeight;
+
+/**
+ * @property inputType
+ * @abstract Indicates the type of input.
+*/
+@property (nonatomic, readonly) VTSuperResolutionScalerConfigurationInputType inputType;
+
+/**
+ * @property precomputedFlow
+ * @abstract Indicates that caller will provide optical flow.
+*/
+@property (nonatomic, readonly, getter=usesPrecomputedFlow) BOOL precomputedFlow;
+
+/**
+ * @property scaleFactor
+ * @abstract Indicates the scale factor between input and output.
+*/
+@property (nonatomic, readonly) NSInteger scaleFactor;
+
+/**
+ * @property qualityPrioritization
+ * @abstract parameter used to control quality and performance levels. See VTSuperResolutionScalerConfigurationQualityPrioritization for more info.
+*/
+@property (nonatomic, readonly) VTSuperResolutionScalerConfigurationQualityPrioritization qualityPrioritization;
+
+/*!
+ @property revision
+ @abstract The specific algorithm or configuration revision that is to be used to perform the request.
+ */
+@property (nonatomic, readonly) VTSuperResolutionScalerConfigurationRevision revision;
+
+/*!
+ @property supportedRevisions
+ @abstract Provides the collection of currently-supported algorithm or configuration revisions for the class of configuration.
+ @discussion This property allows clients to introspect at runtime what revisions are available for each configuration.
+ */
+@property (class, nonatomic, readonly) NSIndexSet* supportedRevisions;
+
+/*!
+ @property defaultRevision
+ @abstract Provides the default revision of a particular algorithm or configuration.
+ */
+@property (class, nonatomic, readonly) VTSuperResolutionScalerConfigurationRevision defaultRevision;
+
+/**
+ * @property frameSupportedPixelFormats
+ * @abstract list of source frame supported pixel formats for current configuration
+ */
+@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
+
+/**
+ * @property sourcePixelBufferAttributes
+ * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source frames and reference frames.
+*/
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
+
+/**
+ * @property destinationPixelBufferAttributes
+ * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames.
+*/
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
+
+/*!
+ @property configurationModelStatus
+ @abstract reports the download status of models required to use VTSuperResolutionScaler for the current configuration.
+*/
+@property (nonatomic, readonly) VTSuperResolutionScalerConfigurationModelStatus configurationModelStatus;
+
+/*!
+ @abstract This interface requests that models associated with the VTSuperResolutionScalerConfiguration be downloaded.
+
+ @discussion This interface can be used to download model assets required for the current VTSuperResolutionScalerConfiguration if the state is currently VTSuperResolutionScalerConfigurationModelStatusDownloadRequired. The processorModelStatus class property can be queried to see if models are all already present. If a download has been initiated, processorModelPercentageAvailable can be queried to determine what percentage of the model models are avialable.
+ If the download fails, the completion handler will return an NSError, and the status will go back to VTSuperResolutionScalerConfigurationModelStatusDownloadRequired. If the download succeeds, the NSError return value will be nil.
+*/
+- (void)downloadConfigurationModelWithCompletionHandler:(void (^)( NSError * _Nullable error))completionHandler;
+
+/*!
+ @property configurationModelPercentageAvailable
+ @abstract Returns a floating point value between 0.0 and 1.0 indicating the percentage of required model assets that have been downloaded.
+*/
+@property (nonatomic, readonly) float configurationModelPercentageAvailable;
+
+
+/*!
+ @property supported
+ @abstract reports whether this processor is supported
+*/
+@property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
+
+/*!
+ @property supportedScaleFactors
+ @abstract reports the set of supported scale factors that can be used when initializing a VTSuperResolutionScalerConfiguration.
+*/
+@property (class, nonatomic, readonly) NSArray<NSNumber*> * supportedScaleFactors NS_REFINED_FOR_SWIFT;
+
+@end
+
+
+/*!
+ @class VTSuperResolutionScalerParameters
+ @abstract VTSuperResolutionScalerParameters object contains both input and output parameters needed to run the SuperResolution processor on a frame. This object is used in the processWithParameters call of VTFrameProcessor class. The output parameter for this class is destinationFrame where the output frame is returned (as VTFrameProcessorFrame) back to the caller function once the processWithParameters completes.
+
+ @discussion VTSuperResolutionScalerParameters are frame level parameters.
+*/
+
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
+@interface VTSuperResolutionScalerParameters : NSObject <VTFrameProcessorParameters>
+
+/*!
+ @abstract Creates a new VTSuperResolutionScalerParameters .
+ @discussion init will return nil if sourceFrame or destinationFrame is nil, sourceFrame and reference frames are different pixelFormats.
+ @param sourceFrame Current source frame. Must be non nil.
+ @param previousFrame The Previous source frame in presentation time order. For the first frame this can be set to nil.
+ @param previousOutputFrame The Previous output frame in presentation time order. For the first frame this can be set to nil.
+ @param opticalFlow Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow between sourceFrame and previousFrame frame. Only needed if optical flow is pre-computed.
+ @param submissionMode Set to VTSuperResolutionScalerParametersSubmissionModeSequential to indicate that current submission follow presentation time order without jump or skip when compared to previous submission. VTSuperResolutionScalerParametersSubmissionModeSequential will yield better performance. Set to VTSuperResolutionScalerParametersSubmissionModeRandom to indicate a skip or a jump in frame sequence.
+@param destinationFrame User allocated pixel buffer that will receive the results.
+*/
+- (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
+ previousFrame:(VTFrameProcessorFrame * _Nullable)previousFrame
+ previousOutputFrame:(VTFrameProcessorFrame * _Nullable)previousOutputFrame
+ opticalFlow:(VTFrameProcessorOpticalFlow * _Nullable)opticalFlow
+ submissionMode:(VTSuperResolutionScalerParametersSubmissionMode)submissionMode
+ destinationFrame:(VTFrameProcessorFrame *)destinationFrame;
+
+- (instancetype) init NS_UNAVAILABLE;
++ (instancetype) new NS_UNAVAILABLE;
+
+/**
+ * @property sourceFrame
+ * @abstract sourceFrame Current source frame. Must be non nil
+*/
+
+@property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
+
+/**
+ * @property previousFrame
+ * @abstract Previous source frame in presentation time order. For the first frame this will be nil.
+*/
+
+@property(nonatomic, readonly, nullable) VTFrameProcessorFrame * previousFrame;
+
+/**
+ * @property previousOutputFrame
+ * @abstract Previous output frame in presentation time order. For the first frame this will be nil.
+*/
+
+@property(nonatomic, readonly, nullable) VTFrameProcessorFrame * previousOutputFrame;
+
+/**
+ * @property opticalFlow
+ * @abstract Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow with the previous frame. Only needed if optical flow is pre-computed. For the first frame this will be nil.
+*/
+
+@property(nonatomic, readonly, nullable) VTFrameProcessorOpticalFlow * opticalFlow;
+
+/**
+ * @property submissionMode
+ * @abstract A VTSuperResolutionScalerSubmissionMode value describing the processing request in this Parameters object .
+*/
+@property (nonatomic, readonly) VTSuperResolutionScalerParametersSubmissionMode submissionMode;
+
+/**
+ * @property destinationFrame
+ * @abstract VTFrameProcessorFrame that contains user allocated pixel buffer that will receive the results.
+*/
+
+@property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame;
+
+@end
+
+NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
+#endif // ! TARGET_OS_SIMULATOR
+
+#endif // VTFRAMEPROCESSOR_SUPERRESOLUTION_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_TemporalNoiseFilter.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_TemporalNoiseFilter.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_TemporalNoiseFilter.h 1969-12-31 19:00:00
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_TemporalNoiseFilter.h 2025-05-30 23:54:24
@@ -0,0 +1,197 @@
+/*
+ File: VTFrameProcessor_TemporalNoiseFilter.h
+
+ Framework: VideoToolbox
+
+ Copyright © 2024-2025 Apple Inc. All rights reserved.
+
+*/
+
+#ifndef VTFRAMEPROCESSOR_TEMPORALNOISEFILTER_H
+#define VTFRAMEPROCESSOR_TEMPORALNOISEFILTER_H
+
+#include <CoreMedia/CMBase.h>
+
+#if ! TARGET_OS_SIMULATOR
+#ifdef __OBJC__
+
+#import <VideoToolbox/VTFrameProcessorConfiguration.h>
+#import <VideoToolbox/VTFrameProcessorParameters.h>
+#import <VideoToolbox/VTFrameProcessorFrame.h>
+
+
+NS_HEADER_AUDIT_BEGIN(nullability, sendability)
+/*!
+ @class VTTemporalNoiseFilterConfiguration
+ @abstract A configuration object to initiate VTFrameProcessor and use Temporal Noise Filter processor.
+
+ @discussion
+ The class properties of VTTemporalNoiseFilterConfiguration help to identify the capabilities of Temporal Noise Filter Processor on the current platform, prior to initiating a session.
+ The availability of Temporal Noise Filter processor in the current platform can be confirmed by checking the VTTemporalNoiseFilterConfiguration.isSupported class property.
+ Verify the processor's capability to process source frames by ensuring that the dimensions are no less than VTTemporalNoiseFilterConfiguration.minimumDimensions and no greater than VTTemporalNoiseFilterConfiguration.maximumDimensions.
+ Use the instance properties such as frameSupportedPixelFormats, sourcePixelBufferAttributes, and destinationPixelBufferAttributes to ensure that the input and output pixel buffer formats and attributes of the processor align with the client's specific requirements.
+ The properties previousFrameCount and nextFrameCount represent the maximum number of preceding and subsequent reference frames, used in the processing of a source frame, to achieve optimum noise reduction quality.
+ */
+
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(tvos, watchos)
+NS_SWIFT_SENDABLE
+VT_EXPORT @interface VTTemporalNoiseFilterConfiguration : NSObject <VTFrameProcessorConfiguration>
+
+#pragma mark --- init function(s).
+/*!
+ @abstract Creates a new VTTemporalNoiseConfiguration with specified width and height.
+
+ @param frameWidth Width of source frame in pixels.
+
+ @param frameHeight Height of source frame in pixels.
+ */
+- (instancetype)initWithFrameWidth:(NSInteger)frameWidth
+ frameHeight:(NSInteger)frameHeight;
+
+- (instancetype) init NS_UNAVAILABLE;
++ (instancetype) new NS_UNAVAILABLE;
+
+/*!
+ @property frameWidth
+ @abstract Width of source frame in pixels.
+ */
+@property (nonatomic, readonly) NSInteger frameWidth;
+
+/*!
+ @property frameHeight
+ @abstract Height of source frame in pixels.
+ */
+@property (nonatomic, readonly) NSInteger frameHeight;
+
+/*!
+ @property frameSupportedPixelFormats
+ @abstract List of supported pixel formats for source frames.
+ */
+@property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
+
+/*!
+ @property sourcePixelBufferAttributes
+ @abstract Supported pixel buffer attributes for source frames.
+ */
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
+
+/*!
+ @property destinationPixelBufferAttributes
+ @abstract Supported pixel buffer attributes for destination frames.
+ */
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
+
+/*!
+ @property nextFrameCount
+ @abstract Maximum number of future reference frames used to process a source frame.
+ */
+@property (nonatomic, readonly) NSInteger nextFrameCount;
+
+/*!
+ @property previousFrameCount
+ @abstract Maximum number of past reference frames used to process a source frame.
+ */
+@property (nonatomic, readonly) NSInteger previousFrameCount;
+
+/*!
+ @property maximumDimensions
+ @abstract The maximum dimensions of a source frame, supported by the processor.
+ */
+@property (class, nonatomic, readonly) CMVideoDimensions maximumDimensions;
+
+/*!
+ @property minimumDimensions
+ @abstract The minimum dimensions of a source frame, supported by the processor.
+ */
+@property (class, nonatomic, readonly) CMVideoDimensions minimumDimensions;
+
+/*!
+ @property supported
+ @abstract reports whether this processor is supported
+*/
+@property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
+
+@end
+
+
+/*!
+ @class VTTemporalNoiseFilterParameters
+ @abstract VTTemporalNoiseFilterParameters object encapsulates the frame-level parameters necessary for processing a source frame using Temporal Noise Filter processor.
+
+ @discussion
+ This object is intended for sending input parameters into the processWithParameters method of the VTFrameProcessor class.
+ Temporal Noise Filter processor utilizes past and future reference frames, provided in presentation time order, to reduce noise from the source frame. The previousFrameCount and nextFrameCount properties in VTTemporalNoiseFilterConfiguration represent the maximum number of past and future reference frames that can be used by the processor to achieve optimum noise reduction quality. The number of reference frames provided shall depend on their availability, but at a minimum, one reference frame, either past or future, must be provided.
+ The parameter destinationFrame stores the output frame that is returned to the caller upon the successful completion of the processWithParameters operation.
+ */
+API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(tvos, watchos)
+VT_EXPORT @interface VTTemporalNoiseFilterParameters : NSObject <VTFrameProcessorParameters>
+
+/*!
+ @abstract Creates a new VTTemporalNoiseFilterParameters object.
+
+ @param sourceFrame Current source frame. Must be non nil.
+
+ @param nextFrames Future reference frames in presentation time order to be used for processing the source frame. The number of frames can vary from 0 to the number specified by the nextFrameCount property in VTTemporalNoiseFilterConfiguration.
+
+ @param previousFrames Past reference frames in presentation time order to be used for processing the source frame. The number of frames can vary from 0 to the number specified by the previousFrameCount property in VTTemporalNoiseFilterConfiguration.
+
+ @param destinationFrame User allocated pixel buffer that will receive the output frame. The pixel format of the destinationFrame must match with that of the sourceFrame.
+
+ @param filterStrength Used to control strength of the noise filtering. The value can range from the minimum strength of 0.0 to the maximum strength of 1.0. Change in filter strength causes the processor to flush all frames in the queue prior to processing the source frame.
+
+ @param discontinuity Marks sequence discontinuity, forcing the processor to reset prior to processing the source frame.
+ */
+- (instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
+ nextFrames:(NSArray<VTFrameProcessorFrame *> * _Nullable)nextFrames
+ previousFrames:(NSArray<VTFrameProcessorFrame *> * _Nullable)previousFrames
+ destinationFrame:(VTFrameProcessorFrame *)destinationFrame
+ filterStrength:(float)filterStrength
+ discontinuity:(Boolean)discontinuity;
+
+- (instancetype) init NS_UNAVAILABLE;
++ (instancetype) new NS_UNAVAILABLE;
+
+/*!
+ @property sourceFrame
+ @abstract Current source frame. Must be non-nil.
+ */
+@property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
+
+/*!
+ @property nextFrames
+ @abstract Future reference frames in presentation time order to be used for processing the source frame. The number of frames can vary from 0 to the number specified by the nextFrameCount property in VTTemporalNoiseFilterConfiguration.
+ */
+@property(nonatomic, readonly, nullable) NSArray<VTFrameProcessorFrame *> * nextFrames;
+
+/*!
+ @property previousFrames
+ @abstract Past reference frames in presentation time order to be used for processing the source frame. The number of frames can vary from 0 to the number specified by the previousFrameCount property in VTTemporalNoiseFilterConfiguration.
+ */
+@property(nonatomic, readonly, nullable) NSArray<VTFrameProcessorFrame *> * previousFrames;
+
+/*!
+ @property filterStrength
+ @abstract Parameter used to control strength of the noise filtering. The value can range from the minimum strength of 0.0 to the maximum strength of 1.0. Change in filter strength causes the processor to flush all frames in the queue prior to processing the source frame.
+ */
+@property (nonatomic) float filterStrength;
+
+/*!
+ @property discontinuity
+ @abstract Marks sequence discontinuity, forcing the processor to reset prior to processing the source frame.
+ */
+@property (nonatomic) Boolean discontinuity;
+
+/*!
+ @property destinationFrame
+ @abstract VTFrameProcessorFrame that contains user allocated pixel buffer that will receive the output frame.
+ */
+@property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame;
+
+@end
+
+NS_HEADER_AUDIT_END(nullability, sendability)
+
+#endif // __OBJC__
+#endif // ! TARGET_OS_SIMULATOR
+
+#endif // VTFRAMEPROCESSOR_TEMPORALNOISEFILTER_H
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSession.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSession.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSession.h 1969-12-31 19:00:00
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSession.h 2025-05-31 21:14:04
@@ -0,0 +1,230 @@
+/*
+ File: VTMotionEstimationSession.h
+
+ Framework: VideoToolbox
+
+ Copyright 2022-2023 Apple Inc. All rights reserved.
+
+ Video Toolbox client API for generating motion vectors from CVPixelBuffers.
+*/
+
+#ifndef VTMotionEstimationSession_h
+#define VTMotionEstimationSession_h
+
+#include <CoreFoundation/CoreFoundation.h>
+#include <CoreVideo/CoreVideo.h>
+#include <CoreMedia/CMBase.h>
+#include <VideoToolbox/VTBase.h>
+#include <VideoToolbox/VTSession.h>
+
+#if defined(__cplusplus)
+extern "C"
+{
+#endif
+
+#pragma pack(push)
+#pragma pack()
+
+#if __BLOCKS__
+
+CF_IMPLICIT_BRIDGING_ENABLED
+
+/*!
+ @enum VTMotionEstimationFrameFlags
+ @abstract Directives for the motion estimation session and the motion estimation processor passed from the client into
+ motionEstimationFrameFlags parameter of VTMotionEstimationSessionEstimateMotionVectors.
+
+ @constant kVTMotionEstimationFrameFlags_CurrentBufferWillBeNextReferenceBuffer
+ A hint to the motion estimation session that the client will reuse the currentBuffer as referenceBuffer in the next call
+ to VTMotionEstimationSessionEstimateMotionVectors. Using this flag allows the motion estimation processor to make some
+ optimizations.
+*/
+typedef CF_OPTIONS(uint32_t, VTMotionEstimationFrameFlags) {
+ kVTMotionEstimationFrameFlags_CurrentBufferWillBeNextReferenceBuffer = 1<<0,
+} CF_REFINED_FOR_SWIFT;
+
+/*!
+ @enum VTMotionEstimationInfoFlags
+ @abstract Directives for the client passed into the VTMotionEstimationOutputHandler from the
+ motion estimation session or the motion estimation processor.
+*/
+typedef CF_OPTIONS(uint32_t, VTMotionEstimationInfoFlags) {
+ kVTMotionEstimationInfoFlags_Reserved0 = 1<<0,
+} CF_REFINED_FOR_SWIFT;
+
+/*!
+ @typedef VTMotionEstimationSessionRef
+ @abstract A reference to a Video Toolbox Motion Estimation Session.
+ @discussion
+ A motion estimation session supports two CVPixelBuffers of the same size and type,
+ and returns motion vectors in the form of a CVPixelBuffer. The session is a
+ reference-counted CF object. To create a motion estimation session, call
+ VTMotionEstimationSessionCreate; then you can optionally configure the session using
+ VTSessionSetProperty; then to create motion estimations, call
+ VTMotionEstimationSessionEstimateMotionVectors. When you are done with the session,
+ you should call VTMotionEstimationSessionInvalidate to tear it down and CFRelease to
+ release your object reference.
+ */
+typedef struct CM_BRIDGED_TYPE(id) OpaqueVTMotionEstimationSession* VTMotionEstimationSessionRef CF_REFINED_FOR_SWIFT
+ API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
+
+/*!
+ @function VTMotionEstimationSessionGetTypeID
+ @abstract Get the CFTypeID for a VTMotionEstimationSession.
+ @discussion
+ Get the CFTypeID for a VTMotionEstimationSession.
+*/
+VT_EXPORT CFTypeID VTMotionEstimationSessionGetTypeID( void ) CF_REFINED_FOR_SWIFT
+ API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
+
+/*!
+ @function VTMotionEstimationSessionCreate
+ @abstract Creates a session for creating CVPixelBuffer of motion vectors from two CVPixelBuffers.
+ @discussion
+ The function creates a session for generating motion vectors from two CVPixelBuffers.
+ @param allocator
+ An allocator for the session. Pass NULL to use the default allocator.
+ @param motionVectorProcessorSelectionOptions
+ Available creation Options:
+ kVTMotionEstimationSessionCreationOption_MotionVectorSize CFNumber 16 or 4.
+ The size of the block of pixels 16x16 or 4x4. Default is 16x16.
+ kVTMotionEstimationSessionCreationOption_UseMultiPassSearch can be supplied with kCFBooleanTrue to provide higher quality motion estimation.
+ True motion achieves higher quality by running the motion estimator in multiple passes. The default is kCFBooleanFalse.
+ kVTMotionEstimationSessionCreationOption_Label CFString
+ This option assigns a label for logging and resource tracking.
+ @param width
+ The width of frames, in pixels.
+ @param height
+ The height of frames in pixels.
+ @param motionEstimationSessionOut
+ Points to a variable to receive the new motion estimation session.
+
+*/
+VT_EXPORT OSStatus
+VTMotionEstimationSessionCreate(
+ CM_NULLABLE CFAllocatorRef allocator,
+ CM_NULLABLE CFDictionaryRef motionVectorProcessorSelectionOptions,
+ uint32_t width,
+ uint32_t height,
+ CM_RETURNS_RETAINED_PARAMETER CM_NULLABLE VTMotionEstimationSessionRef * CM_NONNULL motionEstimationSessionOut) CF_REFINED_FOR_SWIFT
+ API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
+
+/*!
+ @function VTMotionEstimationSessionCopySourcePixelBufferAttributes
+ @abstract Copy the expected attributes for source pixel buffers
+ @discussion
+ The function provides a CF dictionary of attributes that must be released. This
+ routine is for clients to query the VTMotionEstimationSession for the native source
+ attributes. If a client provides an input CVPixelBuffer that is not compatible with the
+ the attributes returned by this function, VTMotionEstimationSession will automatically
+ convert the input pixel buffer into a compatible pixel buffer for processing.
+ @param session
+ The motion estimation session.
+ @param attributesOut
+ Points to a variable to receive the attributes dictionary.
+
+*/
+VT_EXPORT OSStatus
+VTMotionEstimationSessionCopySourcePixelBufferAttributes(
+ CM_NONNULL VTMotionEstimationSessionRef motionEstimationSession,
+ CM_RETURNS_RETAINED_PARAMETER CM_NULLABLE CFDictionaryRef * CM_NONNULL attributesOut) CF_REFINED_FOR_SWIFT
+ API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
+
+/*!
+ @function VTMotionEstimationSessionInvalidate
+ @abstract Tears down a motion estimation session.
+ @discussion
+ When you are done with a motion estimation session you created, call VTMotionEstimationSessionInvalidate
+ to tear it down and then CFRelease to release your object reference. When a motion estimation session's
+ retain count reaches zero, it is automatically invalidated, but since sessions may be retained by multiple
+ parties, it can be hard to predict when this will happen. Calling VTMotionEstimationSessionInvalidate
+ ensures a deterministic, orderly teardown.
+*/
+VT_EXPORT void
+VTMotionEstimationSessionInvalidate(
+ CM_NONNULL VTMotionEstimationSessionRef session ) CF_REFINED_FOR_SWIFT
+ API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
+
+/*!
+ @typedef VTMotionEstimationOutputHandler
+ @abstract Block invoked when frame processing is complete.
+ @discussion
+ When the client requests a motion estimation, the client passes in a callback block to be called
+ for the result of that request. If the VTMotionEstimationSessionEstimateMotionVectors call returns
+ an error, the block will not be called.
+ @param status
+ noErr if processing request was successful; an error code if motion estimation was not successful.
+ @param infoFlags
+ A bit field containing information about the processing operation.
+ @param additionalInfo
+ Additional processing information about the processing operation that can not fit in infoFlags.
+ Currently, this is expected to be NULL.
+ @param motionVectors
+ A CVPixelBuffer containing the motion vector information, if processing request was successful;
+ otherwise, NULL.
+ */
+typedef void (^VTMotionEstimationOutputHandler)(
+ OSStatus status,
+ VTMotionEstimationInfoFlags infoFlags,
+ CM_NULLABLE CFDictionaryRef additionalInfo,
+ CM_NULLABLE CVPixelBufferRef motionVectors);
+
+/*!
+ @function VTMotionEstimationSessionEstimateMotionVectors
+ @abstract Given two CVPixelBuffers, creates a CVPixelBuffer representing the motion estimate.
+ @discussion
+ The motion estimation session will compare the reference frame to the current frame, and
+ generate motion vectors in the form of a CVPixelBuffer.
+ @param session
+ The motion estimation session.
+ @param referenceImage
+ The reference image.
+ @param currentImage
+ The current image.
+ @param motionEstimationFrameFlags
+ A bit field with per-frame options. See kVTMotionEstimationFrameFlags_CurrentBufferWillBeNextReferenceBuffer.
+ @param additionalFrameOptions
+ A way to pass additional information that will not fit in motionEstimationFrameFlags; currently expected to be NULL.
+ @param outputHandler
+ The block to be called when the processing request is completed. If the
+ VTMotionEstimationSessionEstimateMotionVectors call returns an error, the block will not
+ be called.
+ @result
+ If the call was successful, noErr; otherwise an error code, such as kVTMotionEstimationNotSupportedErr.
+*/
+VT_EXPORT OSStatus
+VTMotionEstimationSessionEstimateMotionVectors(
+ CM_NONNULL VTMotionEstimationSessionRef session,
+ CM_NONNULL CVPixelBufferRef referenceImage,
+ CM_NONNULL CVPixelBufferRef currentImage,
+ VTMotionEstimationFrameFlags motionEstimationFrameFlags,
+ CM_NULLABLE CFDictionaryRef additionalFrameOptions,
+ CM_NONNULL VTMotionEstimationOutputHandler outputHandler ) CF_REFINED_FOR_SWIFT
+ API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
+
+/*!
+ @function VTMotionEstimationSessionCompleteFrames
+ @abstract Directs the motion estimation session to emit all pending frames and waits for completion.
+ @discussion
+ Directs the motion estimation session to emit all pending frames, then waits for all outstanding
+ requests to complete, then returns.
+*/
+VT_EXPORT OSStatus
+VTMotionEstimationSessionCompleteFrames(
+ CM_NONNULL VTMotionEstimationSessionRef session) CF_REFINED_FOR_SWIFT
+ API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
+
+CF_IMPLICIT_BRIDGING_DISABLED
+
+// See VTSession.h for property access APIs on VTMotionEstimationSession.
+// See VTMotionEstimationSessionProperties.h for standard property keys and values for motion estimation sessions.
+
+#endif // __BLOCKS__
+
+#pragma pack(pop)
+
+#if defined(__cplusplus)
+}
+#endif
+
+#endif /* VTMotionEstimationSession_h */
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSessionProperties.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSessionProperties.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSessionProperties.h 1969-12-31 19:00:00
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSessionProperties.h 2025-05-29 01:30:18
@@ -0,0 +1,98 @@
+/*
+ File: VTMotionEstimationSessionProperties.h
+
+ Framework: VideoToolbox
+
+ Copyright 2022 Apple Inc. All rights reserved.
+
+*/
+
+#ifndef _VT_MOTION_ESTIMATION_SESSION_PROPERTIES_H_
+#define _VT_MOTION_ESTIMATION_SESSION_PROPERTIES_H_
+
+#include <CoreMedia/CMBase.h>
+#include <CoreFoundation/CoreFoundation.h>
+
+#if defined(__cplusplus)
+extern "C"
+{
+#endif
+
+#pragma pack(push)
+#pragma pack()
+
+#pragma mark Creation Options
+
+/*!
+ @header
+ @abstract
+ Video Toolbox motion estimation session creation options
+
+ @discussion
+ These keys may be used in the motionVectorProcessorSelectionOptions parameter to
+ VTMotionEstimationSessionCreate to configure the created session.
+*/
+
+/*!
+ @constant kVTMotionEstimationSessionCreationOption_MotionVectorSize
+ @abstract
+ The size of the search blocks used in VTMotionEstimationSession.
+ @discussion
+ VTMotionEstimationSessionCreate takes a dictionary of creation options, motionVectorProcessorSelectionOptions.
+ kVTMotionEstimationSessionCreationOption_MotionVectorSize can be supplied with CFNumber to override the default search block size.
+ Currently supported motion vector size is 4 or 16, meaning 4x4 or 16x16 respectively. 16x16 is the default if this key is not provided.
+*/
+VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_MotionVectorSize API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFNumber
+
+/*!
+ @constant kVTMotionEstimationSessionCreationOption_UseMultiPassSearch
+ @abstract
+ An option used for higher quality motion estimation
+ @discussion
+ VTMotionEstimationSessionCreate takes a dictionary of creation options, motionVectorProcessorSelectionOptions.
+ kVTMotionEstimationSessionCreationOption_UseMultiPassSearch can be supplied with kCFBooleanTrue to provide higher quality motion estimation.
+ True motion achieves higher quality by running the motion estimator in multiple passes. The default is kCFBooleanFalse.
+*/
+VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_UseMultiPassSearch API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFBoolean
+
+/*!
+ @constant kVTMotionEstimationSessionCreationOption_DetectTrueMotion
+ @abstract
+ Renamed to kVTMotionEstimationSessionCreationOption_UseMultiPassSearch
+*/
+VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_DetectTrueMotion API_UNAVAILABLE(macos, ios, tvos, visionos, watchos); // Read/write, CFBoolean DEPRECATED
+
+/*!
+ @constant kVTMotionEstimationSessionCreationOption_Label
+ @abstract
+ A label used for logging and resource tracking.
+ @discussion
+ VTMotionEstimationSessionCreate takes a dictionary of creation options, motionVectorProcessorSelectionOptions.
+ kVTMotionEstimationSessionCreationOption_Label can be supplied with CFString to specify a label used in logging and
+ resource tracking.
+*/
+VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_Label API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFString
+
+#pragma mark Properties
+
+/*!
+ @header
+ @abstract
+ Video Toolbox motion estimation session properties
+
+ @discussion
+ This file defines private properties used to configure motion estimation sessions after creation.
+
+ Clients can query supported properties by calling VTSessionCopySupportedPropertyDictionary,
+ and use VTSessionSetProperty and VTSessionCopyProperty.
+*/
+
+// there are none yet
+
+#pragma pack(pop)
+
+#if defined(__cplusplus)
+}
+#endif
+
+#endif // _VT_MOTION_ESTIMATION_SESSION_PROPERTIES_H_
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTPixelRotationSession.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTPixelRotationSession.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTPixelRotationSession.h 2025-04-19 05:05:37
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTPixelRotationSession.h 2025-05-29 01:30:18
@@ -56,7 +56,7 @@
@abstract Tears down a pixel rotation session.
@discussion
When you are done with an image rotation session you created, call VTPixelRotationSessionInvalidate
- to tear it down and then VTPixelRotationSessionRelease to release your object reference.
+ to tear it down and then CFRelease to release your object reference.
When an pixel rotation session's retain count reaches zero, it is automatically invalidated, but
since sessions may be retained by multiple parties, it can be hard to predict when this will happen.
Calling VTPixelRotationSessionInvalidate ensures a deterministic, orderly teardown.
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTRAWProcessingProperties.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTRAWProcessingProperties.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTRAWProcessingProperties.h 2025-04-19 02:06:26
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTRAWProcessingProperties.h 2025-05-29 01:30:17
@@ -58,6 +58,14 @@
*/
VT_EXPORT const CFStringRef kVTRAWProcessingPropertyKey_OutputColorAttachments API_AVAILABLE(macos(15.0)) API_UNAVAILABLE(ios, tvos, watchos); // Read Only, CFDictionaryRef
+/*!
+ @constant kVTRAWProcessingPropertyKey_MetadataForSidecarFile
+ @abstract
+ This property, if supported, returns the current processing metadata on the RAW Processor. The returned value can be used by the caller to create, or overwrite an existing sidecar file.
+ @discussion
+ This property is not supported by all RAWProcessors. The metadata returned represents a fully-formed sidecar file, and should be compatible with the MediaExtension FormatReader.
+*/
+VT_EXPORT const CFStringRef kVTRAWProcessingPropertyKey_MetadataForSidecarFile API_AVAILABLE(macos(26.0)) API_UNAVAILABLE(ios, tvos, watchos, visionos); // Read Only, CFData
#pragma pack(pop)
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTRAWProcessingSession.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTRAWProcessingSession.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTRAWProcessingSession.h 2025-04-19 02:06:27
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTRAWProcessingSession.h 2025-05-29 01:30:18
@@ -39,6 +39,7 @@
The session reference is a reference-counted CF object.
*/
typedef struct CM_BRIDGED_TYPE(id) OpaqueVTRAWProcessingSession* VTRAWProcessingSessionRef API_AVAILABLE(macos(15.0)) API_UNAVAILABLE(ios, tvos, watchos, visionos) CM_SWIFT_NONSENDABLE;
+CM_SWIFT_INIT_FOR_CF_TYPE(VTRAWProcessingSession, API_AVAILABLE(macos(15.0)) API_UNAVAILABLE(ios, tvos, watchos, visionos));
/*!
@typedef VTRAWProcessingParameterChangeHandler
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTUtilities.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTUtilities.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTUtilities.h 2025-04-19 03:05:37
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTUtilities.h 2025-05-29 01:30:17
@@ -67,7 +67,7 @@
The format description for the video format for which information is being requested.
@param mediaExtensionPropertiesOut
If a Media Extension video decoder will be used to decode the specified format, this pointer will return a dictionary with a set of properties describing the extension video decoder. The dictionary keys are VTExtensionPropertiesKey values.
- @result If the function succeeds and a Media Extension video decoder will be used to decode this format, the return value will be noErr. If the function succeeds but a Media Extension video decoder will not be used to decode this format, the return value will be kVTCouldNotFindExtensionErr. Otherwise, the return value will be an error code describing the failure.
+ @result If the function succeeds and a Media Extension video decoder will be used to decode this format, the return value will be noErr. If the function succeeds but a Media Extension video decoder will not be used to decode this format, the return value will be kVTCouldNotFindExtensionErr. If a Media Extension video decoder for the format was found but is disabled, the function will return kVTExtensionDisabledErr. Otherwise, the return value will be an error code describing the failure.
*/
VT_EXPORT OSStatus VTCopyVideoDecoderExtensionProperties( CMFormatDescriptionRef CM_NONNULL formatDesc, CM_RETURNS_RETAINED_PARAMETER CM_NULLABLE CFDictionaryRef * CM_NONNULL mediaExtensionPropertiesOut ) CF_REFINED_FOR_SWIFT API_AVAILABLE(macos(15.0)) API_UNAVAILABLE(ios, tvos, watchos, visionos);
@@ -79,7 +79,7 @@
The format description for the video format for which information is being requested.
@param mediaExtensionPropertiesOut
If a Media Extension RAW processor will be used to process the specified format, this pointer will return a dictionary with a set of properties describing the extension RAW processor. The dictionary keys VTExtensionPropertiesKey values.
- @result If the function succeeds and a Media Extension RAW processor will be used to process this format, the return value will be noErr. If the function succeeds but a Media Extension RAW processor will not be used to process this format, the return value will be kVTCouldNotFindExtensionErr. Otherwise, the return value will be an error code describing the failure.
+ @result If the function succeeds and a Media Extension RAW processor will be used to process this format, the return value will be noErr. If the function succeeds but a Media Extension RAW processor will not be used to process this format, the return value will be kVTCouldNotFindExtensionErr. If a Media Extension RAW processor for the format was found but is disabled, the function will return kVTExtensionDisabledErr. Otherwise, the return value will be an error code describing the failure.
*/
VT_EXPORT OSStatus VTCopyRAWProcessorExtensionProperties( CMFormatDescriptionRef CM_NONNULL formatDesc, CM_RETURNS_RETAINED_PARAMETER CM_NULLABLE CFDictionaryRef * CM_NONNULL mediaExtensionPropertiesOut ) CF_REFINED_FOR_SWIFT API_AVAILABLE(macos(15.0)) API_UNAVAILABLE(ios, tvos, watchos, visionos);
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VideoToolbox.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VideoToolbox.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VideoToolbox.h 2025-04-19 02:06:25
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VideoToolbox.h 2025-05-29 01:30:16
@@ -21,10 +21,19 @@
#include <VideoToolbox/VTRAWProcessingSession.h>
#include <VideoToolbox/VTRAWProcessingProperties.h>
#include <VideoToolbox/VTProfessionalVideoWorkflow.h>
-#ifdef __OBJC__
-#include <VideoToolbox/VTFrameProcessor.h>
-#endif // __OBJC__
#endif // ( !TARGET_OS_IPHONE || TARGET_OS_MACCATALYST )
-#if ( TARGET_OS_OSX || TARGET_OS_IOS || TARGET_OS_TV || TARGET_OS_VISION )
#include <VideoToolbox/VTHDRPerFrameMetadataGenerationSession.h>
-#endif // ( TARGET_OS_OSX || TARGET_OS_IOS || TARGET_OS_TVOS || TARGET_OS_XR )
+#include <VideoToolbox/VTMotionEstimationSession.h>
+#include <VideoToolbox/VTMotionEstimationSessionProperties.h>
+#include <VideoToolbox/VTFrameProcessor.h>
+#include <VideoToolbox/VTFrameProcessorConfiguration.h>
+#include <VideoToolbox/VTFrameProcessorErrors.h>
+#include <VideoToolbox/VTFrameProcessorFrame.h>
+#include <VideoToolbox/VTFrameProcessorParameters.h>
+#include <VideoToolbox/VTFrameProcessor_FrameRateConversion.h>
+#include <VideoToolbox/VTFrameProcessor_MotionBlur.h>
+#include <VideoToolbox/VTFrameProcessor_OpticalFlow.h>
+#include <VideoToolbox/VTFrameProcessor_TemporalNoiseFilter.h>
+#include <VideoToolbox/VTFrameProcessor_SuperResolutionScaler.h>
+#include <VideoToolbox/VTFrameProcessor_LowLatencySuperResolutionScaler.h>
+#include <VideoToolbox/VTFrameProcessor_LowLatencyFrameInterpolation.h>