Skip to content

ARKit iOS xcode26.0 b1

Alex Soto edited this page Jun 9, 2025 · 1 revision

# ARKit.framework

diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARAnchor.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARAnchor.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARAnchor.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARAnchor.h	2025-05-23 10:44:14
@@ -11,8 +11,6 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-#define AR_ANCHOR_PROTOCOLS ARAnchorCopying, NSSecureCoding
-
 @class ARAnchor;
 /**
  An anchor object that can be copied from values of an existing anchor.
@@ -45,13 +43,14 @@
 
 @end
 
-
 /**
  Object representing a physical location and orientation in 3D space.
  */
 API_AVAILABLE(ios(11.0))
 NS_SWIFT_SENDABLE
-@interface ARAnchor : NSObject <AR_ANCHOR_PROTOCOLS>
+@interface ARAnchor : NSObject <ARAnchorCopying,
+                                NSSecureCoding
+                                >
 
 /**
  Unique identifier of the anchor.
@@ -65,7 +64,7 @@
 
 /**
  Identifier of the session that owns the anchor.
- 
+
  @discussion The session identifier will be assigned to anchor when added to the session.
  */
 @property (nonatomic, nullable, readonly) NSUUID *sessionIdentifier API_AVAILABLE(ios(13.0));
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARAppClipCodeAnchor.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARAppClipCodeAnchor.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARAppClipCodeAnchor.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARAppClipCodeAnchor.h	2025-05-23 10:44:14
@@ -8,8 +8,6 @@
 
 #import <ARKit/ARAnchor.h>
 
-#define AR_APPCLIPCODE_ANCHOR_PROTOCOLS <ARTrackable>
-
 NS_ASSUME_NONNULL_BEGIN
 
 /**
@@ -19,10 +17,10 @@
 typedef NS_ENUM(NSInteger, ARAppClipCodeURLDecodingState) {
     /** App clip code tracking is attempting to decode the URL. */
     ARAppClipCodeURLDecodingStateDecoding,
-	
+
     /** App clip code tracking failed to decode the URL. */
     ARAppClipCodeURLDecodingStateFailed,
-    
+
     /** App clip code tracking decoded the URL. */
     ARAppClipCodeURLDecodingStateDecoded
 } NS_SWIFT_NAME(ARAppClipCodeAnchor.URLDecodingState);
@@ -32,7 +30,7 @@
  */
 API_AVAILABLE(ios(14.3))
 NS_SWIFT_SENDABLE
-@interface ARAppClipCodeAnchor : ARAnchor AR_APPCLIPCODE_ANCHOR_PROTOCOLS
+@interface ARAppClipCodeAnchor : ARAnchor <ARTrackable>
 
 
 /**
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARBodyAnchor.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARBodyAnchor.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARBodyAnchor.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARBodyAnchor.h	2025-05-23 10:44:14
@@ -15,15 +15,12 @@
 @class ARSkeleton3D;
 
 
-#define AR_BODY_ANCHOR_PROTOCOLS <ARTrackable>
-
-
 /**
  An anchor representing a body in the world.
  */
 API_AVAILABLE(ios(13.0))
 NS_SWIFT_SENDABLE
-@interface ARBodyAnchor : ARAnchor AR_BODY_ANCHOR_PROTOCOLS
+@interface ARBodyAnchor : ARAnchor <ARTrackable>
 
 /**
  The tracked skeleton in 3D.
@@ -34,7 +31,7 @@
 /**
  The factor between estimated physical size and default size of the skeleton.
  @see -[ARSkeletonDefinition neutralBodySkeleton3D]
- 
+
  @discussion This value will be estimated if automaticSkeletonScaleEstimationEnabled is set to true on the ARBodyTrackingConfiguration.
  It is used to correct the transform's translation. Default value is 1.0.
  */
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARCamera.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARCamera.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARCamera.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARCamera.h	2025-05-23 10:44:14
@@ -6,11 +6,12 @@
 //  Copyright © 2016-2021 Apple Inc. All rights reserved.
 //
 
-#import <Foundation/Foundation.h>
+#import <TargetConditionals.h>
+
+#import <ARKit/ARTrackingStatusTypes.h>
 #import <CoreGraphics/CoreGraphics.h>
-#import <UIKit/UIKit.h>
+#import <Foundation/Foundation.h>
 #import <simd/simd.h>
-#import <ARKit/ARTrackingStatusTypes.h>
 
 NS_ASSUME_NONNULL_BEGIN
 
@@ -28,7 +29,7 @@
 
 /**
  The camera’s orientation defined as Euler angles.
- 
+
  @dicussion The order of components in this vector matches the axes of rotation:
                1. Pitch (the x component) is the rotation about the node’s x-axis (in radians)
                2. Yaw   (the y component) is the rotation about the node’s y-axis (in radians)
@@ -83,9 +84,12 @@
 */
 @property (nonatomic, readonly) simd_float4x4 projectionMatrix;
 
+
+typedef NS_ENUM(NSInteger, UIInterfaceOrientation);
+
 /**
  Creates a projection matrix for the camera given rendering parameters.
- 
+
  @discussion The projection matrix returned provides an aspect fill for the provided viewport size and orientation.
  If zFar is set to 0, an infinite projection matrix will be returned.
  @param orientation Viewport orientation.
@@ -93,11 +97,14 @@
  @param zNear Near depth limit.
  @param zFar Far depth limit.
  */
-- (simd_float4x4)projectionMatrixForOrientation:(UIInterfaceOrientation)orientation viewportSize:(CGSize)viewportSize zNear:(CGFloat)zNear zFar:(CGFloat)zFar;
+- (simd_float4x4)projectionMatrixForOrientation:(UIInterfaceOrientation)orientation
+                                   viewportSize:(CGSize)viewportSize
+                                          zNear:(CGFloat)zNear
+                                           zFar:(CGFloat)zFar;
 
 /**
  Project a 3D point in world coordinate system into 2D viewport space.
- 
+
  @param point 3D point in world coordinate system.
  @param orientation Viewport orientation.
  @param viewportSize Viewport (or image) size.
@@ -107,7 +114,7 @@
 
 /**
  Unproject a 2D point from the viewport onto a plane in 3D world coordinates.
- 
+
  @discussion A 2D point in the viewport coordinate space can refer to any point along a line segment
  in the 3D coordinate space. Unprojecting calculates the 3D position of the point along this line segment that intersects the provided plane.
  @param point A point in the viewport coordinate system with origin at top-left.
@@ -115,16 +122,19 @@
  The coordinate system’s positive Y axis is assumed to be the normal of the plane.
  @return 3D position in world coordinates or a NAN values if unprojection is not possible.
  */
-- (simd_float3)unprojectPoint:(CGPoint)point ontoPlaneWithTransform:(simd_float4x4)planeTransform orientation:(UIInterfaceOrientation)orientation viewportSize:(CGSize)viewportSize
-API_AVAILABLE(ios(12.0)) NS_REFINED_FOR_SWIFT;
+- (simd_float3)unprojectPoint:(CGPoint)point
+       ontoPlaneWithTransform:(simd_float4x4)planeTransform
+                  orientation:(UIInterfaceOrientation)orientation
+                 viewportSize:(CGSize)viewportSize API_AVAILABLE(ios(12.0))NS_REFINED_FOR_SWIFT;
 
 /**
  Returns the view matrix for the camera with a given interface orientation.
- 
+
  @discussion The view matrix can be used to transform geometry from world space into camera space for a given orientation.
  @param orientation The interface orientation that will be used to render the camera’s view.
  */
 - (simd_float4x4)viewMatrixForOrientation:(UIInterfaceOrientation)orientation;
+
 
 /** Unavailable */
 - (instancetype)init NS_UNAVAILABLE;
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARCoachingOverlayView.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARCoachingOverlayView.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARCoachingOverlayView.h	2025-04-19 10:18:10
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARCoachingOverlayView.h	2025-05-25 02:37:41
@@ -132,7 +132,6 @@
 @end
 
 NS_ASSUME_NONNULL_END
-
 #else
 #import <ARKitUI/ARCoachingOverlayView.h> 
 #endif // #if (defined(USE_ARKIT_PUBLIC_HEADERS) && USE_ARKIT_PUBLIC_HEADERS) || !__has_include(<ARKitUI/ARCoachingOverlayView.h>)
\ No newline at end of file
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARConfiguration.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARConfiguration.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARConfiguration.h	2025-04-19 05:18:13
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARConfiguration.h	2025-05-23 10:44:14
@@ -6,9 +6,9 @@
 //  Copyright © 2016-2021 Apple Inc. All rights reserved.
 //
 
-#import <Foundation/Foundation.h>
 #import <AVFoundation/AVFoundation.h>
 #import <CoreLocation/CoreLocation.h>
+#import <Foundation/Foundation.h>
 
 #import <ARKit/ARPlaneDetectionTypes.h>
 
@@ -19,14 +19,13 @@
 @class ARVideoFormat;
 @class ARWorldMap;
 
-
 /**
 Option set indicating semantic understanding types of the image frame.
 */
 API_AVAILABLE(ios(13.0))
 typedef NS_OPTIONS(NSUInteger, ARFrameSemantics) {
     /** No semantic operation is run. */
-    ARFrameSemanticNone                                = 0,
+    ARFrameSemanticNone = 0,
 
     /**
      Person segmentation.
@@ -34,8 +33,8 @@
      @see -[ARFrame segmentationBuffer]
      @see ARSegmentationClass
     */
-    ARFrameSemanticPersonSegmentation                  = (1 << 0),
-    
+    ARFrameSemanticPersonSegmentation = (1 << 0),
+
     /**
      Person segmentation with depth.
      @discussion A pixel in the image frame that gets classified as person will have an intensity value equal to 'ARSegmentationClassPerson'.
@@ -43,30 +42,30 @@
      @see -[ARFrame estimatedDepthData]
      @see -[ARFrame segmentationBuffer]
      */
-    ARFrameSemanticPersonSegmentationWithDepth         = (1 << 1) | (1 << 0),
-    
+    ARFrameSemanticPersonSegmentationWithDepth = (1 << 1) | (1 << 0),
+
     /**
      Body detection.
      @discussion Once activated an ARFrame will contain information about a detected body.
      @see -[ARFrame detectedBody]
      @see ARBody2D
      */
-    ARFrameSemanticBodyDetection                       = (1 << 2),
-    
+    ARFrameSemanticBodyDetection = (1 << 2),
+
     /**
      Scene Depth.
      @discussion Each capturedImage will have an associated scene depth data.
      @see - [ARFrame sceneDepth]
     */
     ARFrameSemanticSceneDepth API_AVAILABLE(ios(14.0)) = (1 << 3),
-    
+
     /**
      Smoothed Scene Depth.
      @discussion Each capturedImage will have an associated scene depth data that is temporally smoothed.
      @see - [ARFrame smoothedSceneDepth]
     */
     ARFrameSemanticSmoothedSceneDepth API_AVAILABLE(ios(14.0)) = (1 << 4),
-    
+
 } NS_SWIFT_NAME(ARConfiguration.FrameSemantics);
 
 /**
@@ -76,16 +75,15 @@
 typedef NS_ENUM(NSInteger, ARWorldAlignment) {
     /** Aligns the world with gravity that is defined by vector (0, -1, 0). */
     ARWorldAlignmentGravity,
-    
+
     /** Aligns the world with gravity that is defined by the vector (0, -1, 0)
      and heading (w.r.t. True North) that is given by the vector (0, 0, -1). */
     ARWorldAlignmentGravityAndHeading,
-    
+
     /** Aligns the world with the camera’s orientation. */
     ARWorldAlignmentCamera
 } NS_SWIFT_NAME(ARConfiguration.WorldAlignment);
 
-
 /**
  Enum constants for indicating the mode of environment texturing to run.
  */
@@ -93,32 +91,30 @@
 typedef NS_ENUM(NSInteger, AREnvironmentTexturing) {
     /** No texture information is gathered. */
     AREnvironmentTexturingNone,
-    
+
     /** Texture information is gathered for the environment.
      Environment textures will be generated for AREnvironmentProbes added to the session. */
     AREnvironmentTexturingManual,
-    
+
     /** Texture information is gathered for the environment and probes automatically placed in the scene. */
     AREnvironmentTexturingAutomatic
 } NS_SWIFT_NAME(ARWorldTrackingConfiguration.EnvironmentTexturing);
 
-
 /**
  Types of scene reconstruction.
  */
 API_AVAILABLE(ios(13.4))
 typedef NS_OPTIONS(NSUInteger, ARSceneReconstruction) {
     /** No scene reconstruction is run. */
-    ARSceneReconstructionNone                   = 0,
-    
+    ARSceneReconstructionNone = 0,
+
     /** Scene reconstruction generates a mesh of the world */
-    ARSceneReconstructionMesh                   = (1 << 0),
-    
+    ARSceneReconstructionMesh = (1 << 0),
+
     /** Scene reconstruction generates a mesh of the world with classification for each face. */
     ARSceneReconstructionMeshWithClassification = (1 << 1) | (1 << 0)
 } NS_SWIFT_NAME(ARConfiguration.SceneReconstruction);
 
-
 /**
  An object to describe and configure the Augmented Reality techniques to be used in an ARSession.
  */
@@ -162,10 +158,9 @@
 /**
  The type of semantic understanding to provide with each frame.
 
- @discussion Use the `supportsFrameSemantics` class method to check if the configuration type you intend to run supports the set of frame semantics. For example, when running a session with
- a configuration of type ARWorldTrackingConfiguration one would need to use `+[ ARWorldTrackingConfiguration supportsFrameSemantics:]` to perform said check.
- An exception is thrown if the option
- is not supported. Defaults to ARFrameSemanticNone.
+ @discussion Use the `supportsFrameSemantics` class method to check if the configuration type you intend to run supports the set of frame semantics.
+ For example, when running a session with a configuration of type ARWorldTrackingConfiguration one would need to use `+[ ARWorldTrackingConfiguration
+ supportsFrameSemantics:]` to perform said check. An exception is thrown if the option is not supported. Defaults to ARFrameSemanticNone.
  @see ARFrameSemantics
  @see +[ARConfiguration supportsFrameSemantics:]
 */
@@ -174,20 +169,20 @@
 /**
  Determines whether the type of frame semantics is supported by the device and ARConfiguration class.
 
- @discussion Semantic frame understanding is not supported on all devices. Use the `supportsFrameSemantics` class method to check if the configuration type you intend to run supports the
- set of frame semantics. For example, when running a session with a configuration of type ARWorldTrackingConfiguration one would need to use
+ @discussion Semantic frame understanding is not supported on all devices. Use the `supportsFrameSemantics` class method to check if the configuration
+ type you intend to run supports the set of frame semantics. For example, when running a session with a configuration of type
+ ARWorldTrackingConfiguration one would need to use
  `+[ ARWorldTrackingConfiguration supportsFrameSemantics:]` to perform said check.
  @see ARFrameSemantics
 */
 + (BOOL)supportsFrameSemantics:(ARFrameSemantics)frameSemantics API_AVAILABLE(ios(13.0));
 
-
 /**
  Returns a pointer to the capture device of the camera that's used for rendering, so developers can adjust capture settings.
  @discussion May return nil if it is not recommended to modify capture settings, for example if the primary camera is used for tracking.
  */
 @property (class, nonatomic, nullable, readonly) AVCaptureDevice *configurableCaptureDeviceForPrimaryCamera
-API_AVAILABLE(ios(16.0));
+        API_AVAILABLE(ios(16.0));
 
 /**
  Returns a video format using a 4K resolution from the list of supported video formats.
@@ -196,8 +191,10 @@
 @property (class, nonatomic, nullable, readonly) ARVideoFormat *recommendedVideoFormatFor4KResolution API_AVAILABLE(ios(16.0));
 
 /**
- Returns a recommended video format that supports capturing high resolution frames with a significantly higher resolution than the streaming camera resolution.
- @discussion Using this format may consume more power. Other video formats may support capturing high resolution frames as well, albeit at a lower quality or resolution.
+ Returns a recommended video format that supports capturing high resolution frames with a significantly higher resolution than the streaming camera
+ resolution.
+ @discussion Using this format may consume more power. Other video formats may support capturing high resolution frames as well, albeit at a lower
+ quality or resolution.
  @see [ARSession captureHighResolutionFrameWithCompletion:]
  */
 @property (class, nonatomic, nullable, readonly) ARVideoFormat *recommendedVideoFormatForHighResolutionFrameCapturing API_AVAILABLE(ios(16.0));
@@ -213,10 +210,9 @@
 
 @end
 
-
 /**
  A configuration for running world tracking.
- 
+
  @discussion World tracking provides 6 degrees of freedom tracking of the device.
  By finding feature points in the scene, world tracking enables performing hit-tests against the frame.
  Tracking can no longer be resumed once the session is paused.
@@ -283,13 +279,14 @@
 
 /**
  Objects to detect in the scene.
- @discussion If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the session.
+ @discussion If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the
+ session.
  */
 @property (nonatomic, copy) NSSet<ARReferenceObject *> *detectionObjects API_AVAILABLE(ios(12.0));
 
 /**
  Enable/disable a collaborative session. Disabled by default.
- 
+
  @discussion When enabled, ARSession will output collaboration data for other participants using its delegate didOutputCollaborationData.
  It is the responsibility of the caller to send the data to each participant. When data is received by a participant, it
  should be passed to the ARSession by calling updateWithCollaborationData.
@@ -305,7 +302,7 @@
  Enable or disable running Face Tracking using the front facing camera. Disabled by default.
  When enabled, ARSession detects faces (if visible in the front-facing camera image) and adds to its list of anchors,
  an ARFaceAnchor object representing each face.
- 
+
  @discussion The transform of the ARFaceAnchor objects will be in the world coordinate space.
  @see ARFaceAnchor
  */
@@ -341,7 +338,7 @@
 
 /**
  A configuration for running orientation tracking.
- 
+
  @discussion Orientation tracking provides 3 degrees of freedom tracking of the device.
  */
 API_AVAILABLE(ios(11.0))
@@ -358,10 +355,9 @@
 
 @end
 
-
 /**
  A configuration for running face tracking.
- 
+
  @discussion Face tracking uses the front facing camera to track the face in 3D providing details on the topology and expression of the face.
  A detected face will be added to the session as an ARFaceAnchor object which contains information about head pose, mesh, eye pose, and blend shape
  coefficients. If light estimation is enabled the detected face will be treated as a light probe and used to estimate the direction of incoming light.
@@ -389,8 +385,9 @@
 
 /**
  Enable or disable World Tracking. Disabled by default.
- 
- @discussion When enabled, ARSession uses the back facing camera to track the device's orientation and position in the world. The camera transform and the ARFaceAnchor transform will be in the world coordinate space.
+
+ @discussion When enabled, ARSession uses the back facing camera to track the device's orientation and position in the world. The camera transform and
+ the ARFaceAnchor transform will be in the world coordinate space.
  */
 @property (nonatomic, assign, getter=isWorldTrackingEnabled) BOOL worldTrackingEnabled API_AVAILABLE(ios(13.0));
 
@@ -399,10 +396,9 @@
 
 @end
 
-
 /**
  A configuration for running image tracking.
- 
+
  @discussion Image tracking provides 6 degrees of freedom tracking of known images. Four images may be tracked simultaneously.
  */
 API_AVAILABLE(ios(12.0))
@@ -432,10 +428,9 @@
 
 @end
 
-
 /**
  A configuration for scanning objects.
- 
+
  @discussion The object scanning configuration runs world tracking, capturing additional detail in order to create reference objects.
  Running object scanning will consume additional power in order to provide more detailed features.
  The createReferenceObject method can be called on the session to capture a scan of an object in the world.
@@ -461,10 +456,9 @@
 
 @end
 
-
 /**
  A configuration for running body tracking.
- 
+
  @discussion Body tracking provides 6 degrees of freedom tracking of a detected body in the scene. By default, ARFrameSemanticBodyDetection will be
  enabled.
  @see ARBodyAnchor
@@ -554,8 +548,9 @@
 
 /**
  A configuration for running positional tracking.
- 
- @discussion Positional tracking provides 6 degrees of freedom tracking of the device by running the camera at lowest possible resolution and frame rate.
+
+ @discussion Positional tracking provides 6 degrees of freedom tracking of the device by running the camera at lowest possible resolution and frame
+ rate.
  */
 API_AVAILABLE(ios(13.0))
 @interface ARPositionalTrackingConfiguration : ARConfiguration
@@ -581,7 +576,6 @@
 
 @end
 
-
 /**
  A configuration for running geographical world tracking.
 
@@ -591,7 +585,7 @@
 @interface ARGeoTrackingConfiguration : ARConfiguration
 
 /** Unavailable */
-@property(nonatomic, assign) ARWorldAlignment worldAlignment NS_UNAVAILABLE;
+@property (nonatomic, assign) ARWorldAlignment worldAlignment NS_UNAVAILABLE;
 
 /**
  The mode of environment texturing to run.
@@ -637,7 +631,8 @@
 
 /**
  Objects to detect in the scene.
- @discussion If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the session.
+ @discussion If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the
+ session.
  */
 @property (nonatomic, copy) NSSet<ARReferenceObject *> *detectionObjects;
 
@@ -656,27 +651,27 @@
 
  @discussion This method will attempt to acquire a location fix on a background thread, then check availability.
 
- @param completionHandler Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial queue. It takes the following parameters:
-        isAvailable - True if geo tracking is available at the current location, otherwise false.
-        error - An error that indicates why geo tracking is not available at the current location.
+ @param completionHandler Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial
+ queue. It takes the following parameters: isAvailable - True if geo tracking is available at the current location, otherwise false. error - An error
+ that indicates why geo tracking is not available at the current location.
  */
-+ (void)checkAvailabilityWithCompletionHandler:(void (^)(BOOL isAvailable, NSError * _Nullable error))completionHandler NS_SWIFT_DISABLE_ASYNC;
++ (void)checkAvailabilityWithCompletionHandler:(void (^)(BOOL isAvailable, NSError *_Nullable error))completionHandler NS_SWIFT_DISABLE_ASYNC;
 
 /**
 Determines the availability of geo tracking at the given location.
 
 @param coordinate Location at which to check.
-@param completionHandler Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial queue. It takes the following parameters:
-       isAvailable - True if geo tracking is available at the given location, otherwise false.
-       error - An error that indicates why geo tracking is not available at the given location.
+@param completionHandler Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial
+queue. It takes the following parameters: isAvailable - True if geo tracking is available at the given location, otherwise false. error - An error
+that indicates why geo tracking is not available at the given location.
 */
-+ (void)checkAvailabilityAtCoordinate:(CLLocationCoordinate2D)coordinate completionHandler:(void (^)(BOOL isAvailable, NSError * _Nullable error))completionHandler NS_SWIFT_DISABLE_ASYNC;
++ (void)checkAvailabilityAtCoordinate:(CLLocationCoordinate2D)coordinate
+                    completionHandler:(void (^)(BOOL isAvailable, NSError *_Nullable error))completionHandler NS_SWIFT_DISABLE_ASYNC;
 
 - (instancetype)init;
 + (instancetype)new NS_SWIFT_UNAVAILABLE("Use init() instead");
 
 @end
-
 
 NS_ASSUME_NONNULL_END
 #else
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARError.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARError.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARError.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARError.h	2025-05-23 10:44:14
@@ -14,68 +14,68 @@
 FOUNDATION_EXTERN NSString *const ARErrorDomain;
 
 API_AVAILABLE(ios(11.0))
-typedef NS_ERROR_ENUM(ARErrorDomain, ARErrorCode) {
+typedef NS_ERROR_ENUM(ARErrorDomain, ARErrorCode){
     /** Unsupported configuration. */
-    ARErrorCodeUnsupportedConfiguration                               = 100,
-    
+    ARErrorCodeUnsupportedConfiguration = 100,
+
     /** A sensor required to run the session is not available. */
-    ARErrorCodeSensorUnavailable                                      = 101,
-    
+    ARErrorCodeSensorUnavailable = 101,
+
     /** A sensor failed to provide the required input. */
-    ARErrorCodeSensorFailed                                           = 102,
-    
+    ARErrorCodeSensorFailed = 102,
+
     /** App does not have permission to use the camera. The user may change this in settings. */
-    ARErrorCodeCameraUnauthorized                                     = 103,
-    
+    ARErrorCodeCameraUnauthorized = 103,
+
     /** App does not have permission to use the microphone. The user may change this in settings. */
-    ARErrorCodeMicrophoneUnauthorized                                 = 104,
-    
+    ARErrorCodeMicrophoneUnauthorized = 104,
+
     /** App does not have permission to use the location data of the device. The user may change this in settings. */
-    ARErrorCodeLocationUnauthorized          API_AVAILABLE(ios(14.0)) = 105,
-    
+    ARErrorCodeLocationUnauthorized API_AVAILABLE(ios(14.0)) = 105,
+
     /** A high-resolution frame is requested while another one is being captured. */
     ARErrorCodeHighResolutionFrameCaptureInProgress API_AVAILABLE(ios(16.0)) = 106,
-    
+
     /** High-resolution frame capture failed. */
     ARErrorCodeHighResolutionFrameCaptureFailed API_AVAILABLE(ios(16.0)) = 107,
-    
+
     /** World tracking has encountered a fatal error. */
-    ARErrorCodeWorldTrackingFailed                                    = 200,
+    ARErrorCodeWorldTrackingFailed = 200,
 
     /** Geo tracking is not available at this location. */
-    ARErrorCodeGeoTrackingNotAvailableAtLocation  API_AVAILABLE(ios(14.0)) = 201,
+    ARErrorCodeGeoTrackingNotAvailableAtLocation API_AVAILABLE(ios(14.0)) = 201,
 
     /** Geo tracking has encountered a runtime error. */
-    ARErrorCodeGeoTrackingFailed             API_AVAILABLE(ios(14.0)) = 202,
+    ARErrorCodeGeoTrackingFailed API_AVAILABLE(ios(14.0)) = 202,
 
     /** Invalid reference image */
-    ARErrorCodeInvalidReferenceImage         API_AVAILABLE(ios(11.3)) = 300,
+    ARErrorCodeInvalidReferenceImage API_AVAILABLE(ios(11.3)) = 300,
 
     /** Invalid reference object. */
-    ARErrorCodeInvalidReferenceObject        API_AVAILABLE(ios(12.0)) = 301,
-    
+    ARErrorCodeInvalidReferenceObject API_AVAILABLE(ios(12.0)) = 301,
+
     /** Invalid world map. */
-    ARErrorCodeInvalidWorldMap               API_AVAILABLE(ios(12.0)) = 302,
-    
+    ARErrorCodeInvalidWorldMap API_AVAILABLE(ios(12.0)) = 302,
+
     /** Invalid configuration. */
-    ARErrorCodeInvalidConfiguration          API_AVAILABLE(ios(12.0)) = 303,
+    ARErrorCodeInvalidConfiguration API_AVAILABLE(ios(12.0)) = 303,
 
     /** Invalid collaboration data.*/
-    ARErrorCodeInvalidCollaborationData      API_AVAILABLE(ios(13.2)) = 304,
-    
+    ARErrorCodeInvalidCollaborationData API_AVAILABLE(ios(13.2)) = 304,
+
     /** Insufficient features. */
-    ARErrorCodeInsufficientFeatures          API_AVAILABLE(ios(12.0)) = 400,
-    
+    ARErrorCodeInsufficientFeatures API_AVAILABLE(ios(12.0)) = 400,
+
     /** Object merge failed. */
-    ARErrorCodeObjectMergeFailed             API_AVAILABLE(ios(12.0)) = 401,
-    
+    ARErrorCodeObjectMergeFailed API_AVAILABLE(ios(12.0)) = 401,
+
     /** Unable to read or write to file. */
-    ARErrorCodeFileIOFailed                  API_AVAILABLE(ios(12.0)) = 500,
+    ARErrorCodeFileIOFailed API_AVAILABLE(ios(12.0)) = 500,
 
     /** Generic request failure. */
-    ARErrorCodeRequestFailed                 API_AVAILABLE(ios(14.0)) = 501,
+    ARErrorCodeRequestFailed API_AVAILABLE(ios(14.0)) = 501,
 
-    
+
 };
 
 NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARFaceAnchor.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARFaceAnchor.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARFaceAnchor.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARFaceAnchor.h	2025-05-23 10:44:14
@@ -9,70 +9,69 @@
 #import <ARKit/ARAnchor.h>
 #import <CoreGraphics/CoreGraphics.h>
 
-#define AR_FACE_ANCHOR_PROTOCOLS <ARTrackable>
-
 NS_ASSUME_NONNULL_BEGIN
 
 /**
  Blend shape locations of a face geometry.
  @discussion Each location defines an area of the face that can be displaced with a provided coefficient.
- @note Blend shapes are computed based on the captured image. For example, if the detected person has a closed right eye, the eye on the left side of the captured image will appear closed and reported as
  blend shape activation  by ARKit.
+ @note Blend shapes are computed based on the captured image. For example, if the detected person has a closed right eye, the eye on the left side of
+ the captured image will appear closed and reported as blend shape activation  by ARKit.
  @see -[ARFrame capturedImage]
  */
 typedef NSString *ARBlendShapeLocation NS_STRING_ENUM NS_SWIFT_NAME(ARFaceAnchor.BlendShapeLocation) API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowDownLeft           API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowDownRight          API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowInnerUp            API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowOuterUpLeft        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowOuterUpRight       API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationCheekPuff              API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationCheekSquintLeft        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationCheekSquintRight       API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeBlinkLeft           API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeBlinkRight          API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookDownLeft        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookDownRight       API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookInLeft          API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookInRight         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookOutLeft         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookOutRight        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookUpLeft          API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookUpRight         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeSquintLeft          API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeSquintRight         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeWideLeft            API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeWideRight           API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationJawForward             API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationJawLeft                API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationJawOpen                API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationJawRight               API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthClose             API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthDimpleLeft        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthDimpleRight       API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthFrownLeft         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthFrownRight        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthFunnel            API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthLeft              API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthLowerDownLeft     API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthLowerDownRight    API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthPressLeft         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthPressRight        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthPucker            API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthRight             API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthRollLower         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthRollUpper         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthShrugLower        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthShrugUpper        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthSmileLeft         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthSmileRight        API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthStretchLeft       API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthStretchRight      API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthUpperUpLeft       API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthUpperUpRight      API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationNoseSneerLeft          API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationNoseSneerRight         API_AVAILABLE(ios(11.0));
-FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationTongueOut              API_AVAILABLE(ios(12.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowDownLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowDownRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowInnerUp API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowOuterUpLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationBrowOuterUpRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationCheekPuff API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationCheekSquintLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationCheekSquintRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeBlinkLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeBlinkRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookDownLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookDownRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookInLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookInRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookOutLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookOutRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookUpLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeLookUpRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeSquintLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeSquintRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeWideLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationEyeWideRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationJawForward API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationJawLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationJawOpen API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationJawRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthClose API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthDimpleLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthDimpleRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthFrownLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthFrownRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthFunnel API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthLowerDownLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthLowerDownRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthPressLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthPressRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthPucker API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthRollLower API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthRollUpper API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthShrugLower API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthShrugUpper API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthSmileLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthSmileRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthStretchLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthStretchRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthUpperUpLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationMouthUpperUpRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationNoseSneerLeft API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationNoseSneerRight API_AVAILABLE(ios(11.0));
+FOUNDATION_EXTERN ARBlendShapeLocation const ARBlendShapeLocationTongueOut API_AVAILABLE(ios(12.0));
 
 @class ARFaceGeometry;
 
@@ -81,7 +80,7 @@
  */
 API_AVAILABLE(ios(11.0))
 NS_SWIFT_SENDABLE
-@interface ARFaceAnchor : ARAnchor AR_FACE_ANCHOR_PROTOCOLS
+@interface ARFaceAnchor : ARAnchor <ARTrackable>
 
 /**
  The face geometry updated based on the computed blend shapes.
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARFrame.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARFrame.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARFrame.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARFrame.h	2025-05-23 10:44:14
@@ -6,15 +6,14 @@
 //  Copyright © 2016-2021 Apple Inc. All rights reserved.
 //
 
-#import <Foundation/Foundation.h>
 #import <CoreVideo/CoreVideo.h>
-#import <UIKit/UIKit.h>
+#import <Foundation/Foundation.h>
 #import <simd/simd.h>
+
+#import <ARKit/ARGeoTrackingTypes.h>
 #import <ARKit/ARHitTestResult.h>
 #import <ARKit/ARRaycastQuery.h>
 
-#import <ARKit/ARGeoTrackingTypes.h>
-
 @class ARAnchor;
 @class ARCamera;
 @class ARLightEstimate;
@@ -36,12 +35,11 @@
 */
 API_AVAILABLE(ios(13.0))
 typedef NS_ENUM(uint8_t, ARSegmentationClass) {
+    /* Pixel has been not been classified. */
+    ARSegmentationClassNone = 0,
 
-     /* Pixel has been not been classified. */
-    ARSegmentationClassNone      = 0,
-
     /* Pixel has been classified as person. */
-    ARSegmentationClassPerson    = 255
+    ARSegmentationClassPerson = 255
 
 } NS_SWIFT_NAME(ARFrame.SegmentationClass);
 
@@ -52,15 +50,15 @@
 typedef NS_ENUM(NSInteger, ARWorldMappingStatus) {
     /** World mapping is not available. */
     ARWorldMappingStatusNotAvailable,
-    
+
     /** World mapping is available but has limited features.
      For the device's current position, the session’s world map is not recommended for relocalization. */
     ARWorldMappingStatusLimited,
-    
+
     /** World mapping is actively extending the map with the user's motion.
      The world map will be relocalizable for previously visited areas but is still being updated for the current space. */
     ARWorldMappingStatusExtending,
-    
+
     /** World mapping has adequately mapped the visible area.
      The map can be used to relocalize for the device's current position. */
     ARWorldMappingStatusMapped
@@ -92,7 +90,7 @@
 /**
  A tileable texture that contains image noise matching the current camera streams
  noise properties.
- 
+
  @discussion A camera stream depicts image noise that gives the captured image
     a grainy look and varies with light conditions.
  The variations are stored along the depth dimension of the camera grain texture
@@ -102,7 +100,7 @@
 
 /**
  The frame’s camera grain intensity in range 0 to 1.
- 
+
  @discussion A camera stream depicts image noise that gives the captured image
  a grainy look and varies with light conditions.
  The camera grain intensity can be used to select a texture slice from the frames
@@ -195,7 +193,7 @@
 
 /**
  Searches the frame for objects corresponding to a point in the captured image.
- 
+
  @discussion A 2D point in the captured image’s coordinate space can refer to any point along a line segment
  in the 3D coordinate space. Hit-testing is the process of finding objects in the world located along this line segment.
  @param point A point in the image-space coordinate system of the captured image.
@@ -207,17 +205,23 @@
 
 /**
  Creates a raycast query originating from the point on the captured image, aligned along the center of the field of view of the camera.
- @discussion A 2D point in the captured image’s coordinate space and the field of view of the frame's camera is used to create a ray in the 3D cooridnate space originating at the point.
+ @discussion A 2D point in the captured image’s coordinate space and the field of view of the frame's camera is used to create a ray in the 3D
+ cooridnate space originating at the point.
  @param point A point in the image-space coordinate system of the captured image.
  Values should range from (0,0) - upper left corner to (1,1) - lower right corner.
  @param target Type of target where the ray should terminate.
  @param alignment Alignment of the target.
  */
-- (ARRaycastQuery *)raycastQueryFromPoint:(CGPoint)point allowingTarget:(ARRaycastTarget)target alignment:(ARRaycastTargetAlignment)alignment API_AVAILABLE(ios(13.0));
+- (ARRaycastQuery *)raycastQueryFromPoint:(CGPoint)point
+                           allowingTarget:(ARRaycastTarget)target
+                                alignment:(ARRaycastTargetAlignment)alignment API_AVAILABLE(ios(13.0));
 
+
+typedef NS_ENUM(NSInteger, UIInterfaceOrientation);
+
 /**
  Returns a display transform for the provided viewport size and orientation.
- 
+
  @discussion The display transform can be used to convert normalized points in the image-space coordinate system
  of the captured image to normalized points in the view’s coordinate space. The transform provides the correct rotation
  and aspect-fill for presenting the captured image in the given orientation and size.
@@ -225,6 +229,7 @@
  @param viewportSize The size of the viewport.
  */
 - (CGAffineTransform)displayTransformForOrientation:(UIInterfaceOrientation)orientation viewportSize:(CGSize)viewportSize;
+
 
 /** Unavailable */
 - (instancetype)init NS_UNAVAILABLE;
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARGeoAnchor.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARGeoAnchor.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARGeoAnchor.h	2025-04-19 05:18:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARGeoAnchor.h	2025-05-23 10:44:14
@@ -10,8 +10,6 @@
 #import <ARKit/ARGeoTrackingTypes.h>
 #import <CoreLocation/CoreLocation.h>
 
-#define AR_GEO_ANCHOR_PROTOCOLS <ARTrackable>
-
 NS_ASSUME_NONNULL_BEGIN
 
 /**
@@ -21,30 +19,31 @@
  */
 API_AVAILABLE(ios(14.0))
 NS_SWIFT_SENDABLE
-@interface ARGeoAnchor : ARAnchor AR_GEO_ANCHOR_PROTOCOLS
+@interface ARGeoAnchor : ARAnchor <ARTrackable>
 
 /**
  The coordinate where this anchor will be placed.
- 
+
  @discussion The anchor's transform will be automatically updated by the session when ARGeoTrackingConfiguration is set.
  */
 @property (nonatomic, readonly) CLLocationCoordinate2D coordinate;
 
 /**
  The distance to mean sea level, in meters (negative values indicate it's below sea level).
- 
+
  @discussion Only valid when altitudeSource is not ARAltitudeSourceUnknown.
  */
 @property (nonatomic, readonly) CLLocationDistance altitude NS_REFINED_FOR_SWIFT;
 
 /**
- The source of altitude information. If the user did not provide the altitude, ARKit populates this property to indicate the expected accuracy depending on the available altitude data.
+ The source of altitude information. If the user did not provide the altitude, ARKit populates this property to indicate the expected accuracy
+ depending on the available altitude data.
  */
 @property (nonatomic, readonly) ARAltitudeSource altitudeSource;
 
 /**
  Initializes a new ARGeoAnchor with the given coordinates.
- 
+
  @discussion ARKit will query the ground level altitude during runtime, and populate the altitude as soon as that information becomes available.
  @param coordinate Coordinates.
  */
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARImageAnchor.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARImageAnchor.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARImageAnchor.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARImageAnchor.h	2025-05-23 10:44:14
@@ -9,8 +9,6 @@
 #import <ARKit/ARAnchor.h>
 #import <CoreGraphics/CoreGraphics.h>
 
-#define AR_IMAGE_ANCHOR_PROTOCOLS <ARTrackable>
-
 @class ARReferenceImage;
 
 NS_ASSUME_NONNULL_BEGIN
@@ -20,7 +18,7 @@
  */
 API_AVAILABLE(ios(11.3))
 NS_SWIFT_SENDABLE
-@interface ARImageAnchor : ARAnchor AR_IMAGE_ANCHOR_PROTOCOLS
+@interface ARImageAnchor : ARAnchor <ARTrackable>
 
 /**
  Reference to the detected image.
@@ -29,7 +27,7 @@
 
 /**
  The factor between estimated physical size and provided size.
- 
+
  @discussion This value will be estimated if automaticImageScaleEstimationEnabled is set to true on the ARWorldTrackingConfiguration. It is used to
  correct the transform's translation. Default value is 1.0.
  */
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKit.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKit.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKit.h	2025-04-19 10:18:09
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKit.h	2025-05-25 02:37:41
@@ -4,6 +4,8 @@
 //  Copyright © 2016-2023 Apple Inc. All rights reserved.
 //
 
+#include <TargetConditionals.h>
+
 /** @framework ARKit
     @abstract A high-level Augmented Reality framework.
     @discussion ARKit lets you easily configure and use Augmented Reality techniques to track, detect, and render.
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitCore.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitCore.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitCore.h	2025-04-19 05:18:13
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitCore.h	2025-05-23 10:44:13
@@ -4,54 +4,54 @@
 //
 //  Copyright © 2020 Apple Inc. All rights reserved.
 //
+
+#include <TargetConditionals.h>
+
 #ifdef __OBJC__
+
+
 #import <ARKit/ARKitFoundation.h>
 
+#import <ARKit/ARDepthData.h>
+
+#import <ARKit/ARAnchor.h>
+#import <ARKit/ARBody2D.h>
+#import <ARKit/ARBodyAnchor.h>
 #import <ARKit/ARCamera.h>
 #import <ARKit/ARCollaborationData.h>
 #import <ARKit/ARConfiguration.h>
+#import <ARKit/AREnvironmentProbeAnchor.h>
 #import <ARKit/ARError.h>
-#import <ARKit/ARPlaneDetectionTypes.h>
-#import <ARKit/ARTrackingStatusTypes.h>
+#import <ARKit/ARFaceAnchor.h>
+#import <ARKit/ARFaceGeometry.h>
 #import <ARKit/ARFrame.h>
+#import <ARKit/ARGeoAnchor.h>
+#import <ARKit/ARGeoTrackingTypes.h>
 #import <ARKit/ARHitTestResult.h>
+#import <ARKit/ARImageAnchor.h>
 #import <ARKit/ARLightEstimate.h>
+#import <ARKit/ARMatteGenerator.h>
+#import <ARKit/ARMeshAnchor.h>
+#import <ARKit/ARMeshGeometry.h>
+#import <ARKit/ARObjectAnchor.h>
+#import <ARKit/ARParticipantAnchor.h>
+#import <ARKit/ARPlaneAnchor.h>
+#import <ARKit/ARPlaneDetectionTypes.h>
+#import <ARKit/ARPlaneGeometry.h>
 #import <ARKit/ARPointCloud.h>
 #import <ARKit/ARRaycastQuery.h>
 #import <ARKit/ARRaycastResult.h>
 #import <ARKit/ARReferenceImage.h>
 #import <ARKit/ARReferenceObject.h>
 #import <ARKit/ARSession.h>
+#import <ARKit/ARSkeleton.h>
+#import <ARKit/ARSkeletonDefinition.h>
 #import <ARKit/ARTrackedRaycast.h>
+#import <ARKit/ARTrackingStatusTypes.h>
 #import <ARKit/ARVideoFormat.h>
 #import <ARKit/ARWorldMap.h>
 
-
-#import <ARKit/ARAnchor.h>
-#import <ARKit/AREnvironmentProbeAnchor.h>
-#import <ARKit/ARFaceAnchor.h>
-#import <ARKit/ARFaceGeometry.h>
-#import <ARKit/ARGeoAnchor.h>
-#import <ARKit/ARGeoTrackingTypes.h>
-#import <ARKit/ARImageAnchor.h>
-#import <ARKit/ARMeshAnchor.h>
-#import <ARKit/ARMeshGeometry.h>
-#import <ARKit/ARObjectAnchor.h>
-#import <ARKit/ARParticipantAnchor.h>
-#import <ARKit/ARPlaneAnchor.h>
-#import <ARKit/ARPlaneGeometry.h>
 #import <ARKit/ARAppClipCodeAnchor.h>
 
-
-
-#import <ARKit/ARBody2D.h>
-#import <ARKit/ARBodyAnchor.h>
-#import <ARKit/ARMatteGenerator.h>
-#import <ARKit/ARSkeleton.h>
-#import <ARKit/ARSkeletonDefinition.h>
-
-
-#import <ARKit/ARDepthData.h>
 #endif // __OBJC__
-
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitFoundation.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitFoundation.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitFoundation.h	2025-04-19 04:25:10
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitFoundation.h	2025-05-23 10:28:32
@@ -4,6 +4,9 @@
 //
 //  Copyright © 2020 Apple Inc. All rights reserved.
 //
+#include <TargetConditionals.h>
 
+
 #import <ARKit/ARDepthData.h>
+
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitUI.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitUI.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitUI.h	2025-04-19 10:18:10
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARKitUI.h	2025-05-25 02:37:41
@@ -4,6 +4,9 @@
 //  Copyright © 2016-2021 Apple Inc. All rights reserved.
 //
 
+#include <TargetConditionals.h>
+
+
 #import <ARKit/ARSCNView.h>
 #import <ARKit/ARSCNFaceGeometry.h>
 #import <ARKit/ARSCNPlaneGeometry.h>
@@ -12,5 +15,4 @@
 #endif //__has_include(<ARKit/ARSKView.h>)
 
 #import <ARKit/ARCoachingOverlayView.h>
-#import <ARKit/ARQuickLookPreviewItem.h>
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARPlaneAnchor.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARPlaneAnchor.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARPlaneAnchor.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARPlaneAnchor.h	2025-05-23 10:44:14
@@ -8,7 +8,6 @@
 
 #import <ARKit/ARAnchor.h>
 
-#define AR_PLANE_ANCHOR_PROTOCOLS
 
 NS_ASSUME_NONNULL_BEGIN
 
@@ -21,10 +20,10 @@
 typedef NS_ENUM(NSInteger, ARPlaneAnchorAlignment) {
     /** A plane that is horizontal with respect to gravity. */
     ARPlaneAnchorAlignmentHorizontal,
-    
+
     /** A plane that is vertical with respect to gravity. */
     ARPlaneAnchorAlignmentVertical API_AVAILABLE(ios(11.3)),
-    
+
 } NS_SWIFT_NAME(ARPlaneAnchor.Alignment);
 
 /**
@@ -88,7 +87,7 @@
  */
 API_AVAILABLE(ios(11.0))
 NS_SWIFT_SENDABLE
-@interface ARPlaneAnchor : ARAnchor AR_PLANE_ANCHOR_PROTOCOLS
+@interface ARPlaneAnchor : ARAnchor
 
 /**
  Determines whether plane classification is supported on this device.
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARQuickLookPreviewItem.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARQuickLookPreviewItem.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARQuickLookPreviewItem.h	2025-04-19 10:18:10
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARQuickLookPreviewItem.h	1969-12-31 19:00:00
@@ -1,61 +0,0 @@
-#if (defined(USE_ARKIT_PUBLIC_HEADERS) && USE_ARKIT_PUBLIC_HEADERS) || !__has_include(<ARKitUI/ARQuickLookPreviewItem.h>)
-//
-//  ARQuickLookPreviewItem.h
-//  ARKit
-//
-//  Copyright © 2019 Apple Inc. All rights reserved.
-//
-
-#import <Foundation/Foundation.h>
-
-#import <QuickLook/QLPreviewItem.h>
-
-NS_ASSUME_NONNULL_BEGIN
-
-API_AVAILABLE(ios(13.0))
-@interface ARQuickLookPreviewItem : NSObject
-<QLPreviewItem>
-
-/**
- Creates an object representing the 3D content that will be previewed in AR Quick Look.
- 
- @discussion This object will be previewed in AR Quick Look using the default
- behavior:
- 
- - Start in Object mode without camera passthrough
- 
- - Display the Share button for 3D content sharing
- 
- - Allow scaling content in both Object and AR mode
- 
- This is a promise to the ARQuickLookPreviewItem class that the URL points to a 3D content file. This class does not deal with the file's existence or content, and leaves it up to QuickLook to handle and process the URL.
- 
- @param url A file URL to 3D content file (e.g. usdz).
- @return The preview object to display in AR Quick Look.
- */
-- (instancetype)initWithFileAtURL:(NSURL *)url NS_DESIGNATED_INITIALIZER;
-
-/**
- An optional canonical web page URL for the 3D content that will be shared.
- 
- @discussion If this is supplied, the URL to the canonical web page is shared instead of the 3D content file.
- For example, providing https://developer.apple.com/arkit/gallery/ as the canonical web page URL string will be shared via the Share button. If the web page URL string is malformed or not provided, then AR Quick Look will default to sharing the 3D content.
- */
-@property (nonatomic, strong, nullable) NSURL *canonicalWebPageURL;
-
-/**
- Whether or not AR Quick Look allows content scaling in AR mode.
- Defaults to `YES` which allows scaling content in AR mode.
- */
-@property (nonatomic, assign) BOOL allowsContentScaling;
-
-/** Unavailable */
-- (instancetype)init NS_UNAVAILABLE;
-+ (instancetype)new NS_UNAVAILABLE;
-
-@end
-
-NS_ASSUME_NONNULL_END
-#else
-#import <ARKitUI/ARQuickLookPreviewItem.h> 
-#endif // #if (defined(USE_ARKIT_PUBLIC_HEADERS) \&\& USE_ARKIT_PUBLIC_HEADERS) || !__has_include(<ARKitUI/ARQuickLookPreviewItem.h>)
\ No newline at end of file
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARReferenceObject.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARReferenceObject.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARReferenceObject.h	2025-04-19 05:18:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARReferenceObject.h	2025-05-23 10:44:15
@@ -7,9 +7,10 @@
 //
 
 #import <Foundation/Foundation.h>
-#import <UIKit/UIKit.h>
 #import <simd/simd.h>
 
+@class UIImage;
+
 NS_ASSUME_NONNULL_BEGIN
 
 API_AVAILABLE(ios(12.0))
@@ -22,7 +23,7 @@
  */
 API_AVAILABLE(ios(12.0))
 NS_SWIFT_SENDABLE
-@interface ARReferenceObject : NSObject<NSSecureCoding>
+@interface ARReferenceObject : NSObject <NSSecureCoding>
 
 /**
  An optional name used to identify the object.
@@ -59,7 +60,7 @@
 
 /**
  Returns the set of ARReferenceObjects in the specified resource group and bundle.
- 
+
  @param name The name of the resource group.
  @param bundle The bundle containing the image file or asset catalog. Specify nil to search the app’s main bundle.
  @return The set of reference objects or nil on error.
@@ -75,7 +76,7 @@
 
 /**
  Exports the object as an archive at the given URL.
- 
+
  @discussion The URL path should use ARReferenceObjectArchiveExtension (.arobject) for the file extension.
  If serialization across devices is desired, NSKeyedArchiver should be used instead.
  @param url The URL at which to write the exported object.
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSession.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSession.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSession.h	2025-04-19 05:18:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSession.h	2025-05-23 10:44:15
@@ -6,11 +6,10 @@
 //  Copyright © 2016-2021 Apple Inc. All rights reserved.
 //
 
-#import <Foundation/Foundation.h>
 #import <ARKit/ARConfiguration.h>
 #import <ARKit/ARGeoTrackingTypes.h>
+#import <Foundation/Foundation.h>
 
-
 NS_ASSUME_NONNULL_BEGIN
 
 @class ARAnchor;
@@ -23,8 +22,6 @@
 @class ARRaycastResult;
 @class ARTrackedRaycast;
 
-
-
 @protocol ARSessionDelegate;
 
 /**
@@ -36,16 +33,16 @@
 API_AVAILABLE(ios(11.0))
 typedef NS_OPTIONS(NSUInteger, ARSessionRunOptions) {
     /** The session will reset tracking. */
-    ARSessionRunOptionResetTracking           = (1 << 0),
-    
+    ARSessionRunOptionResetTracking = (1 << 0),
+
     /** The session will remove existing anchors. */
-    ARSessionRunOptionRemoveExistingAnchors   = (1 << 1),
-    
+    ARSessionRunOptionRemoveExistingAnchors = (1 << 1),
+
     /** The session will stop currently active tracked raycasts. */
-    ARSessionRunOptionStopTrackedRaycasts     = (1 << 2),
+    ARSessionRunOptionStopTrackedRaycasts = (1 << 2),
 
     /** The session will reset scene reconstruction. */
-    ARSessionRunOptionResetSceneReconstruction     = (1 << 3)
+    ARSessionRunOptionResetSceneReconstruction = (1 << 3)
 } NS_SWIFT_NAME(ARSession.RunOptions);
 
 /**
@@ -56,7 +53,7 @@
 
 /**
  Unique identifier of the running session.
- 
+
  @discussion The identifier may change after calling runWithConfiguration.
  */
 @property (atomic, strong, readonly) NSUUID *identifier API_AVAILABLE(ios(13.0));
@@ -64,7 +61,7 @@
 /**
  A delegate for receiving ARSession updates.
  */
-@property (nonatomic, weak, nullable) id <ARSessionDelegate> delegate;
+@property (nonatomic, weak, nullable) id<ARSessionDelegate> delegate;
 
 /**
  The dispatch queue on which the delegate calls are performed.
@@ -82,8 +79,6 @@
  */
 @property (nonatomic, copy, nullable, readonly) ARConfiguration *configuration;
 
-
-
 /**
  Runs the session with the provided configuration.
  @discussion Calling run on a session that has already started will
@@ -93,7 +88,6 @@
 
 - (void)runWithConfiguration:(ARConfiguration *)configuration NS_SWIFT_UNAVAILABLE("Use run(_:options:) instead");
 
-
 /**
  Runs the session with the provided configuration and options.
  @discussion Calling run on a session that has already started will
@@ -141,11 +135,12 @@
  worldMap - The current world map or nil if unavailable.
  error - An error that indicates why the world map is unavailable, or nil if a world map was provided.
  */
-- (void)getCurrentWorldMapWithCompletionHandler:(void (^)(ARWorldMap * _Nullable worldMap, NSError * _Nullable error))completionHandler API_AVAILABLE(ios(12.0));
+- (void)getCurrentWorldMapWithCompletionHandler:(void (^)(ARWorldMap *_Nullable worldMap, NSError *_Nullable error))completionHandler
+    API_AVAILABLE(ios(12.0));
 
 /**
  Creates a new reference object from scanned features within the provided bounds.
- 
+
  @discussion Reference objects can be stored and used to track 3D objects from previously scanned data.
  Creation requires that an ARObjectScanningConfiguration is used so that sufficient features are scanned.
  @param transform The transformation matrix that defines the rotation and translation of the bounds in
@@ -160,10 +155,10 @@
  error - An error that indicates why creation failed, or nil if a reference object was provided.
  */
 - (void)createReferenceObjectWithTransform:(simd_float4x4)transform
-                                     center:(simd_float3)center
-                                     extent:(simd_float3)extent
-                          completionHandler:(void (^)(ARReferenceObject * _Nullable referenceObject, NSError * _Nullable error))completionHandler
-NS_SWIFT_NAME(createReferenceObject(transform:center:extent:completionHandler:)) API_AVAILABLE(ios(12.0));
+                                    center:(simd_float3)center
+                                    extent:(simd_float3)extent
+                         completionHandler:(void (^)(ARReferenceObject *_Nullable referenceObject, NSError *_Nullable error))completionHandler
+    NS_SWIFT_NAME(createReferenceObject(transform:center:extent:completionHandler:)) API_AVAILABLE(ios(12.0));
 
 #pragma mark - Raycasting
 
@@ -184,19 +179,16 @@
  @return Tracked raycast object used to update or stop raycasting. This could be nil if the raycast fails or if the
          configuration is not `ARWorldTrackingConfiguration` or its subclasses.
  */
-- (nullable ARTrackedRaycast *)trackedRaycast:(ARRaycastQuery *)query updateHandler:(void (^)(NSArray<ARRaycastResult *> *))updateHandler API_AVAILABLE(ios(13.0));
+- (nullable ARTrackedRaycast *)trackedRaycast:(ARRaycastQuery *)query
+                                updateHandler:(void (^)(NSArray<ARRaycastResult *> *))updateHandler API_AVAILABLE(ios(13.0));
 
-
-#pragma mark - Scene Graph Query
-
-
 #pragma mark - Collaboration
 
 /**
  Update session with collaboration data.
- 
+
  @discussion Use this to update the session with collaboration data received from other participants.
- 
+
  @param collaborationData Collaboration data for updating the session.
  @see ARCollaborationData
  */
@@ -206,33 +198,46 @@
 
 /**
  Converts a position in world coordinate system into latitude, longitude and altitude.
- 
+
  @discussion This method requires the session to be running a geo tracking configuration which is in state ARGeoTrackingStateLocalized.
- 
+
  @param position Position in world coordinate system to be converted.
- @param completionHandler Completion handler to be called with the result. This handler is executed on the session's delegate queue. It has the parameters:
-        coordinate - Location coordinates.
-        altitude - Altitude.
-        error - Error if conversion is not available.
+ @param completionHandler Completion handler to be called with the result. This handler is executed on the session's delegate queue. It has the
+ parameters: coordinate - Location coordinates. altitude - Altitude. error - Error if conversion is not available.
  */
-- (void)getGeoLocationForPoint:(simd_float3)position completionHandler:(void (^)(CLLocationCoordinate2D coordinate, CLLocationDistance altitude, NSError * _Nullable error))completionHandler API_AVAILABLE(ios(14.0));
+- (void)getGeoLocationForPoint:(simd_float3)position
+             completionHandler:(void (^)(CLLocationCoordinate2D coordinate, CLLocationDistance altitude, NSError *_Nullable error))completionHandler
+    API_AVAILABLE(ios(14.0));
 
-
 #pragma mark - High Resolution Frame Capturing
 
 /**
- Requests a single, high resolution frame be captured at that moment in time.
- @discussion Some video formats do not support a significantly higher resolution than the streaming camera resolution. Use the @c isRecommendedForHighResolutionFrameCapturing method on the video format to check if the format is recommended.
+ Requests a single, high resolution frame to be captured.
+ @discussion Some video formats do not support a significantly higher still image resolution than the streaming camera resolution. Use the @c
+ isRecommendedForHighResolutionFrameCapturing method on the video format to check if the format is recommended.
  @see -[ARVideoFormat isRecommendedForHighResolutionFrameCapturing]
  @param completion Block being called when the call completes.
  */
 - (void)captureHighResolutionFrameWithCompletion:(void (^)(ARFrame *_Nullable frame, NSError *_Nullable error))completion API_AVAILABLE(ios(16.0));
 
+/**
+ Requests a single, high resolution frame to be captured.
+ @discussion Some video formats do not support a significantly higher still image resolution than the streaming camera resolution. Use the @c
+ isRecommendedForHighResolutionFrameCapturing method on the video format to check if the format is recommended. For passing customized photo settings
+ to this method, obtain a @c defaultPhotoSettings object from the video format and modify it.
+ @see -[ARVideoFormat isRecommendedForHighResolutionFrameCapturing]
+ @see -[ARVideoFormat defaultPhotoSettings]
+ @param photoSettings Custom AVCapturePhotoSettings to be used.
+ @param completion Block being called when the call completes.
+ */
+- (void)captureHighResolutionFrameUsingPhotoSettings:(nullable AVCapturePhotoSettings *)photoSettings
+                                          completion:(void (^)(ARFrame *_Nullable frame, NSError *_Nullable error))completion
+    API_AVAILABLE(ios(26.0));
+
 @end
 
 #pragma mark - ARSessionObserver
 
-
 API_AVAILABLE(ios(11.0))
 @protocol ARSessionObserver <NSObject>
 
@@ -240,26 +245,24 @@
 
 /**
  This is called when a session fails.
- 
+
  @discussion On failure the session will be paused.
  @param session The session that failed.
  @param error The error being reported (see ARError.h).
  */
 - (void)session:(ARSession *)session didFailWithError:(NSError *)error;
 
-
 /**
  This is called when the camera’s tracking state has changed.
- 
+
  @param session The session being run.
  @param camera The camera that changed tracking states.
  */
 - (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera;
 
-
 /**
  This is called when a session is interrupted.
- 
+
  @discussion A session will be interrupted and no longer able to track when
  it fails to receive required sensor data. This happens when video capture is interrupted,
  for example when the application is sent to the background or when there are
@@ -271,7 +274,7 @@
 
 /**
  This is called when a session interruption has ended.
- 
+
  @discussion A session will continue running from the last known state once
  the interruption has ended. If the device has moved, anchors will be misaligned.
  To avoid this, some applications may want to reset tracking (see ARSessionRunOptions)
@@ -283,7 +286,7 @@
 /**
  This is called after a session resumes from a pause or interruption to determine
  whether or not the session should attempt to relocalize.
- 
+
  @discussion To avoid misaligned anchors, apps may wish to attempt a relocalization after
  a session pause or interruption. If YES is returned: the session will begin relocalizing
  and tracking state will switch to limited with reason relocalizing. If successful, the
@@ -297,7 +300,7 @@
 
 /**
  This is called when the session outputs a new audio sample buffer.
- 
+
  @param session The session being run.
  @param audioSampleBuffer The captured audio sample buffer.
  */
@@ -305,9 +308,9 @@
 
 /**
  This is called when the session generated new collaboration data.
- 
+
  @discussion This data should be sent to all participants.
- 
+
  @param session The session that produced world tracking collaboration data.
  @param data Collaboration data to be sent to participants.
  @see ARCollaborationData
@@ -320,14 +323,12 @@
  @param session The session being run.
  @param geoTrackingStatus Latest geo tracking status.
  */
-- (void)session:(ARSession *)session didChangeGeoTrackingStatus:(ARGeoTrackingStatus*)geoTrackingStatus API_AVAILABLE(ios(14.0));
+- (void)session:(ARSession *)session didChangeGeoTrackingStatus:(ARGeoTrackingStatus *)geoTrackingStatus API_AVAILABLE(ios(14.0));
 
-
 @end
 
 #pragma mark - ARSessionDelegate
 
-
 API_AVAILABLE(ios(11.0))
 @protocol ARSessionDelegate <ARSessionObserver>
 
@@ -335,7 +336,7 @@
 
 /**
  This is called when a new frame has been updated.
- 
+
  @param session The session being run.
  @param frame The frame that has been updated.
  */
@@ -343,29 +344,27 @@
 
 /**
  This is called when new anchors are added to the session.
- 
+
  @param session The session being run.
  @param anchors An array of added anchors.
  */
-- (void)session:(ARSession *)session didAddAnchors:(NSArray<__kindof ARAnchor*>*)anchors;
+- (void)session:(ARSession *)session didAddAnchors:(NSArray<__kindof ARAnchor *> *)anchors;
 
 /**
  This is called when anchors are updated.
- 
+
  @param session The session being run.
  @param anchors An array of updated anchors.
  */
-- (void)session:(ARSession *)session didUpdateAnchors:(NSArray<__kindof ARAnchor*>*)anchors;
+- (void)session:(ARSession *)session didUpdateAnchors:(NSArray<__kindof ARAnchor *> *)anchors;
 
 /**
  This is called when anchors are removed from the session.
- 
+
  @param session The session being run.
  @param anchors An array of removed anchors.
  */
-- (void)session:(ARSession *)session didRemoveAnchors:(NSArray<__kindof ARAnchor*>*)anchors;
-
-
+- (void)session:(ARSession *)session didRemoveAnchors:(NSArray<__kindof ARAnchor *> *)anchors;
 
 @end
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSkeleton.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSkeleton.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSkeleton.h	2025-04-19 05:18:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSkeleton.h	2025-05-23 10:44:15
@@ -36,7 +36,7 @@
  Tracking state for a given joint.
  
  @param jointIndex The index of the joint.
- @return True if the joint is tracked. False otherwise.
+ @return YES if the joint is tracked. NO otherwise.
  */
 - (BOOL)isJointTracked:(NSInteger)jointIndex;
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSkeletonDefinition.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSkeletonDefinition.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSkeletonDefinition.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARSkeletonDefinition.h	2025-05-23 10:44:14
@@ -10,18 +10,17 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
+typedef NSString *ARSkeletonJointName NS_TYPED_ENUM NS_SWIFT_NAME(ARSkeleton.JointName) API_AVAILABLE(ios(13.0));
+FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameRoot API_AVAILABLE(ios(13.0));
+FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameHead API_AVAILABLE(ios(13.0));
+FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameLeftHand API_AVAILABLE(ios(13.0));
+FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameRightHand API_AVAILABLE(ios(13.0));
+FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameLeftFoot API_AVAILABLE(ios(13.0));
+FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameRightFoot API_AVAILABLE(ios(13.0));
+FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameLeftShoulder API_AVAILABLE(ios(13.0));
+FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameRightShoulder API_AVAILABLE(ios(13.0));
 
-typedef NSString *ARSkeletonJointName NS_TYPED_ENUM NS_SWIFT_NAME(ARSkeleton.JointName)        API_AVAILABLE(ios(13.0));
-FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameRoot                            API_AVAILABLE(ios(13.0));
-FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameHead                            API_AVAILABLE(ios(13.0));
-FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameLeftHand                        API_AVAILABLE(ios(13.0));
-FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameRightHand                       API_AVAILABLE(ios(13.0));
-FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameLeftFoot                        API_AVAILABLE(ios(13.0));
-FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameRightFoot                       API_AVAILABLE(ios(13.0));
-FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameLeftShoulder                    API_AVAILABLE(ios(13.0));
-FOUNDATION_EXTERN ARSkeletonJointName const ARSkeletonJointNameRightShoulder                   API_AVAILABLE(ios(13.0));
 
-
 @class ARSkeleton2D;
 @class ARSkeleton3D;
 
@@ -75,7 +74,7 @@
 
 /**
  Returns the index for a given joint identifier.
- 
+
  @param jointName Name of a given joint.
  @discussion This function returns NSNotFound if an invalid joint name is passed.
  @return Joint index.
@@ -89,7 +88,6 @@
 @end
 
 NS_ASSUME_NONNULL_END
-
 #else
 #import <ARKitCore/ARSkeletonDefinition.h> 
 #endif // #if (defined(USE_ARKIT_PUBLIC_HEADERS) && USE_ARKIT_PUBLIC_HEADERS) || !__has_include(<ARKitCore/ARSkeletonDefinition.h>)
\ No newline at end of file
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARTrackingStatusTypes.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARTrackingStatusTypes.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARTrackingStatusTypes.h	2025-04-19 05:18:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARTrackingStatusTypes.h	2025-05-23 10:44:14
@@ -15,10 +15,10 @@
 typedef NS_ENUM(NSInteger, ARTrackingState) {
     /** Tracking is not available. */
     ARTrackingStateNotAvailable,
-    
+
     /** Tracking is limited. See tracking reason for details. */
     ARTrackingStateLimited,
-    
+
     /** Tracking is Normal. */
     ARTrackingStateNormal,
 } NS_REFINED_FOR_SWIFT;
@@ -30,16 +30,16 @@
 typedef NS_ENUM(NSInteger, ARTrackingStateReason) {
     /** Tracking is not limited. */
     ARTrackingStateReasonNone,
-    
+
     /** Tracking is limited due to initialization in progress. */
     ARTrackingStateReasonInitializing,
-    
+
     /** Tracking is limited due to a excessive motion of the camera. */
     ARTrackingStateReasonExcessiveMotion,
-    
+
     /** Tracking is limited due to a lack of features visible to the camera. */
     ARTrackingStateReasonInsufficientFeatures,
-    
+
     /** Tracking is limited due to a relocalization in progress. */
     ARTrackingStateReasonRelocalizing API_AVAILABLE(ios(11.3)),
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARVideoFormat.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARVideoFormat.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARVideoFormat.h	2025-04-19 05:18:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/ARKit.framework/Headers/ARVideoFormat.h	2025-05-23 10:44:14
@@ -6,12 +6,14 @@
 //  Copyright © 2016-2021 Apple Inc. All rights reserved.
 //
 
-#import <Foundation/Foundation.h>
-#import <CoreGraphics/CoreGraphics.h>
 #import <AVFoundation/AVCaptureDevice.h>
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
 
 NS_ASSUME_NONNULL_BEGIN
 
+@class AVCapturePhotoSettings;
+
 API_AVAILABLE(ios(11.3))
 NS_SWIFT_NAME(ARConfiguration.VideoFormat)
 @interface ARVideoFormat : NSObject <NSCopying>
@@ -20,13 +22,13 @@
  Indicates the physical position of an AVCaptureDevice's hardware on the system.
  */
 @property (nonatomic, readonly) AVCaptureDevicePosition captureDevicePosition
-API_AVAILABLE(ios(13.0));
+        API_AVAILABLE(ios(13.0));
 
 /**
  Indicates the type of AVCaptureDevice.
  */
 @property (nonatomic, readonly) AVCaptureDeviceType captureDeviceType
-API_AVAILABLE(ios(14.5));
+        API_AVAILABLE(ios(14.5));
 
 /**
  Image resolution.
@@ -47,6 +49,20 @@
  Indicates if the video format supports high dynamic range (HDR) streaming.
  */
 @property (nonatomic, readonly, getter=isVideoHDRSupported) BOOL videoHDRSupported API_AVAILABLE(ios(16.0));
+
+/**
+ The color space ARKit uses to configure the capture session when this video format is selected.
+ */
+@property (nonatomic, readonly) AVCaptureColorSpace defaultColorSpace API_AVAILABLE(ios(26.0));
+
+/**
+ The default AVCapturePhotoSettings object that ARKit uses when capturing a high resolution frame using this video format.
+ @discussion Calling this getter will return a new instance that may be mutated to customize settings. Pass that instance to
+ `captureHighResolutionFrameUsingPhotoSettings:completion:` to capture a high resolution frame with custom settings.
+ @see [ARSession captureHighResolutionFrameUsingPhotoSettings:completion:]
+ @return An AVCapturePhotoSettings object.
+ */
+@property (nonatomic, readonly) AVCapturePhotoSettings *defaultPhotoSettings API_AVAILABLE(ios(26.0));
 
 /** Unavailable */
 - (instancetype)init NS_UNAVAILABLE;
Clone this wiki locally