Skip to content

AVFAudio macOS xcode26.0 b1

Alex Soto edited this page Jun 9, 2025 · 1 revision

# AVFAudio.framework

diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioApplication.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioApplication.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioApplication.h	2025-04-19 03:36:09
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioApplication.h	2025-05-24 05:09:10
@@ -2,7 +2,7 @@
 /*!
 	@file		AVAudioApplication.h
 	@framework	AudioSession.framework
-	@copyright	(c) 2009-2023 Apple Inc. All rights reserved.
+	@copyright	(c) 2009-2024 Apple Inc. All rights reserved.
 */
 
 #ifndef AVAudioApplication_h
@@ -53,18 +53,18 @@
 	@brief	Notification sent to registered listeners when the application's input is muted
 			or unmuted.
 
-	Check the notification's userInfo dictionary for the mute state AVAudioApplicationInputMuteStateKey
+	Check the notification's userInfo dictionary for the mute state `AVAudioApplicationMuteStateKey`
 	which will have a boolean value 0 for unmuted or value 1 for muted.
 	@note this notification will only be dispatched for state changes when there is an active record session (i.e. record or playAndRecord category).
 		Setting the `inputMuted` state while the record session is not active is allowed and will be stored, but it will not trigger a notification for the
 		state change. When the record session subsequently goes active, the `inputMuted` state will be applied, and this notification will be dispatched
 		with the latest input muted state.
 */
-OS_EXPORT NSNotificationName const AVAudioApplicationInputMuteStateChangeNotification API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0)) NS_SWIFT_NAME(AVAudioApplication.inputMuteStateChangeNotification);
+OS_EXPORT NSNotificationName const AVAudioApplicationInputMuteStateChangeNotification API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0), visionos(26.0)) NS_SWIFT_NAME(AVAudioApplication.inputMuteStateChangeNotification);
 
 /// Keys for AVAudioApplicationInputMuteStateChangeNotification
 /// Value is NSNumber type with boolean value 0 for unmuted or value 1 for muted (samples zeroed out)
-OS_EXPORT NSString *const AVAudioApplicationMuteStateKey API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0)) NS_SWIFT_NAME(AVAudioApplication.muteStateKey);
+OS_EXPORT NSString *const AVAudioApplicationMuteStateKey API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0), visionos(26.0)) NS_SWIFT_NAME(AVAudioApplication.muteStateKey);
 
 
 /// Class containing methods that relate to an application bundle's audio (i.e. a collection of one or more AVAudioSession instances)
@@ -81,9 +81,9 @@
 /// @brief Set the muted/unmuted state of the application's audio input. When set true, inputs
 /// (microphone etc.) of all audio clients relating to this application will have their samples zeroed out.
 /// @note - this is per-application input muting and doesn't affect the hardware mute state.
-- (BOOL)setInputMuted:(BOOL)muted error:(NSError**)outError API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0));
+- (BOOL)setInputMuted:(BOOL)muted error:(NSError**)outError API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0), visionos(26.0));
 /// Get the input muted state - return value is boolean 0 for unmuted or value 1 for muted (input samples zeroed out)
-@property(readonly, nonatomic, getter=isInputMuted) BOOL inputMuted API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0));
+@property(readonly, nonatomic, getter=isInputMuted) BOOL inputMuted API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0), visionos(26.0));
 
 
 /*!
@@ -101,7 +101,7 @@
 			perform your input muting logic within this block, and to perform your UI updates for input mute state changes within the handler
 			for AVAudioApplicationInputMuteStateChangeNotification. This handler should be set by the process doing the call's audio I/O.
  */
-- (BOOL)setInputMuteStateChangeHandler:(BOOL (^_Nullable)(BOOL inputShouldBeMuted))inputMuteHandler error:(NSError**)outError API_AVAILABLE(macos(14.0)) API_UNAVAILABLE(ios, watchos, tvos, macCatalyst);
+- (BOOL)setInputMuteStateChangeHandler:(BOOL (^_Nullable)(BOOL inputShouldBeMuted))inputMuteHandler error:(NSError**)outError API_AVAILABLE(macos(14.0)) API_UNAVAILABLE(ios, watchos, tvos, macCatalyst, visionos);
 
 
 /// Returns an enum indicating whether the user has granted or denied permission to record, or has
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioConverter.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioConverter.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioConverter.h	2025-04-19 02:13:48
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioConverter.h	2025-05-24 01:25:08
@@ -150,13 +150,14 @@
 		convertToBuffer:error:withInputFromBlock: will return as much output as could be converted
 		with the input already supplied.
 */
-typedef AVAudioBuffer * __nullable (^AVAudioConverterInputBlock)(AVAudioPacketCount inNumberOfPackets, AVAudioConverterInputStatus* outStatus);
+typedef AVAudioBuffer * __nullable (^ NS_SWIFT_SENDABLE AVAudioConverterInputBlock)(AVAudioPacketCount inNumberOfPackets, AVAudioConverterInputStatus* outStatus);
 
 /*!
 	@class AVAudioConverter
 	@abstract
 		Converts streams of audio between various formats.
 */
+NS_SWIFT_SENDABLE
 API_AVAILABLE(macos(10.11), ios(9.0), watchos(2.0), tvos(9.0))
 @interface AVAudioConverter : NSObject {
 @private
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFile.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFile.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFile.h	2025-04-19 03:09:46
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFile.h	2025-05-24 01:25:08
@@ -25,8 +25,9 @@
 		Reads and writes are always sequential, but random access is possible by setting the
 		framePosition property.
 */
-API_AVAILABLE(macos(10.10), ios(8.0), watchos(2.0), tvos(9.0))
+NS_SWIFT_SENDABLE API_AVAILABLE(macos(10.10), ios(8.0), watchos(2.0), tvos(9.0))
 @interface AVAudioFile : NSObject
+- (instancetype)init API_DEPRECATED("Deprecated - use initForReading or initForWriting", macos(10.10, 26.0), ios(8.0, 26.0), watchos(2.0, 26.0), tvos(9.0, 26.0), macCatalyst(10.10, 26.0));
 
 /*! @method initForReading:error:
 	@abstract Open a file for reading.
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFormat.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFormat.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFormat.h	2025-04-19 02:33:06
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFormat.h	2025-05-24 01:32:31
@@ -45,7 +45,7 @@
 	
 		Instances of this class are immutable.
 */
-API_AVAILABLE(macos(10.10), ios(8.0), watchos(2.0), tvos(9.0))
+NS_SWIFT_SENDABLE API_AVAILABLE(macos(10.10), ios(8.0), watchos(2.0), tvos(9.0))
 @interface AVAudioFormat : NSObject <NSSecureCoding> {
 @private
 	AudioStreamBasicDescription _asbd;
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioIONode.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioIONode.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioIONode.h	2025-04-19 03:09:46
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioIONode.h	2025-05-24 01:32:32
@@ -85,6 +85,8 @@
 	AVAudioVoiceProcessingOtherAudioDuckingLevel duckingLevel;
 } AVAudioVoiceProcessingOtherAudioDuckingConfiguration API_AVAILABLE(macos(14.0), ios(17.0)) API_UNAVAILABLE(tvos, watchos);
 
+@class CASpatialAudioExperience;
+
 /*!	@class AVAudioIONode
 	@abstract 
 		Base class for a node that performs audio input or output in the engine.
@@ -271,6 +273,18 @@
 API_AVAILABLE(macos(10.10), ios(8.0), watchos(2.0), tvos(9.0))
 @interface AVAudioOutputNode : AVAudioIONode
 - (instancetype)init NS_UNAVAILABLE; // fetch instance via -[AVAudioEngine outputNode].
+
+/*! @property intendedSpatialExperience
+    @abstract
+        The AVAudioEngine output node's intended spatial experience.
+    @discussion
+        Only useful for engines that have an output node and are not
+        configured in any manual rendering mode. The default value of
+        CAAutomaticSpatialAudio means the engine uses its AVAudioSession's intended
+        spatial experience. See CASpatialAudioExperience for more details.
+ */
+@property (nonnull, copy) CASpatialAudioExperience *intendedSpatialExperience API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos) NS_REFINED_FOR_SWIFT;
+
 @end
 
 NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioPlayer.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioPlayer.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioPlayer.h	2025-04-19 04:39:40
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioPlayer.h	2025-05-24 01:17:36
@@ -16,6 +16,7 @@
 
 @class NSData, NSURL, NSError;
 @class AVAudioSessionChannelDescription;
+@class CASpatialAudioExperience;
 @protocol AVAudioPlayerDelegate;
 
 API_AVAILABLE(macos(10.7), ios(2.2), watchos(3.0), tvos(9.0))
@@ -114,6 +115,13 @@
 /* This property is nil valued until set. */
 /* The array must have the same number of channels as returned by the numberOfChannels property. */
 @property(nonatomic, copy, nullable) NSArray<AVAudioSessionChannelDescription *> *channelAssignments API_AVAILABLE(ios(7.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos) ; /* Array of AVAudioSessionChannelDescription objects */
+
+/* The AVAudioPlayer's intended spatial experience.
+ 
+ The default value of CAAutomaticSpatialAudio means the player uses its
+ AVAudioSession's intended spatial experience. See CASpatialAudioExperience
+ for more details */
+@property (nonnull, copy) CASpatialAudioExperience *intendedSpatialExperience API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos) NS_REFINED_FOR_SWIFT;
 
 @end
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h	2025-04-19 02:40:07
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h	2025-05-24 04:07:12
@@ -2,7 +2,7 @@
 /*!
 	@file		AVAudioSession.h
 	@framework	AudioSession.framework
-	@copyright	(c) 2009-2023 Apple Inc. All rights reserved.
+	@copyright	(c) 2009-2024 Apple Inc. All rights reserved.
 */
 
 #ifndef AudioSession_AVAudioSession_h
@@ -20,6 +20,7 @@
 // Forward declarations
 @class NSError, NSString, NSNumber;
 @class AVAudioChannelLayout;
+@class AVAudioSessionSwiftImpl;
 
 // =================================================================================================
 #pragma mark-- iOS/tvOS/watchOS AVAudioSession interface --
@@ -28,6 +29,8 @@
 API_AVAILABLE(ios(3.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos) 
 @interface AVAudioSession : NSObject {
 @private
+//	Reenable once rdar://135815013 is unblocked
+//	AVAudioSessionSwiftImpl *_swiftImpl;
 	void *_impl;
 }
 
@@ -206,6 +209,26 @@
 /// Returns YES if device model supports echo cancellation and the audio category is PlayAndRecord and the mode is Default.
 @property(readonly, nonatomic) BOOL isEchoCancelledInputAvailable API_AVAILABLE(ios(18.2)) API_UNAVAILABLE(watchos, tvos) API_UNAVAILABLE(macos);
 
+/// Sets a Boolean value to inform the system to mute the session's output audio. The default value is false (unmuted).
+///
+/// This property is supported with all categories and modes, except for
+/// ``AVAudioSessionCategoryPlayAndRecord`` where it is only supported with ``AVAudioSessionModeDefault``.
+/// Changing the mode to non-default mode with ``AVAudioSessionCategoryPlayAndRecord``
+/// category will cause the session to unmute.
+///
+/// Changes in output mute state can be observed via ``AVAudioSessionOutputMuteStateChangeNotification``.
+/// If this value is set to true, ``AVAudioSessionUserIntentToUnmuteOutputNotification``
+/// may be sent when a user hints to unmute by changing the volume.
+///
+/// - Note: This will not mute system sounds and haptics.
+///
+/// - Parameters:
+/// - `muted`: A Boolean value to set the audio output to the desired muted state.
+/// - `error`: A pointer to an error object. If an error occurs, the framework sets the pointer to an error object that describes the failure.
+- (BOOL)setOutputMuted:(BOOL)muted error:(NSError **)outError API_AVAILABLE(ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos, macos);
+/// A Boolean value that indicates whether audio output is in a muted state.
+@property(readonly, getter=isOutputMuted) BOOL outputMuted API_AVAILABLE(ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos, macos);
+
 @end
 
 // -------------------------------------------------------------------------------------------------
@@ -230,7 +253,8 @@
 	deactivation is requested, the session will be deactivated, but the method will return NO and
 	populate the NSError with the code property set to AVAudioSessionErrorCodeIsBusy to indicate the
 	misuse of the API. Prior to iOS 8, the session would have remained active if it had running I/Os
-	at the time of the deactivation request.
+	at the time of the deactivation request. Starting in iOS 19.0, deactivating while IO is running will
+	no longer return AVAudioSessionErrorCodeIsBusy.
 */
 - (BOOL)setActive:(BOOL)active error:(NSError **)outError API_AVAILABLE(ios(3.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 - (BOOL)setActive:(BOOL)active withOptions:(AVAudioSessionSetActiveOptions)options error:(NSError **)outError API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
@@ -372,7 +396,7 @@
 /// The current hardware output latency in seconds.
 @property (readonly) NSTimeInterval outputLatency API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
-/// The current hardware IO buffer duration in seconds.
+/// The current hardware IO buffer duration in seconds. Is key-value observable.
 @property (readonly) NSTimeInterval IOBufferDuration API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /// Get an array of channel layouts that the current route supports.
@@ -432,6 +456,9 @@
 	Note that this property only applies to the session's current category and mode. For
     example, if the session's current category is AVAudioSessionCategoryPlayback, there will be
     no available inputs.
+ 
+	On iOS, clients can listen to AVAudioSessionAvailableInputsChangeNotification to
+	be notified when this changes.
 */
 @property (readonly, nullable) NSArray<AVAudioSessionPortDescription *> *availableInputs API_AVAILABLE(ios(7.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
@@ -490,79 +517,7 @@
 @end
 
 #if TARGET_OS_VISION
-/*!
- The perceived "size" or "immersivity" of the sound. Use Small for least
- immersive and Large for most immersive.
- */
-typedef NS_ENUM(NSInteger, AVAudioSessionSoundStageSize) {
-    /// The audio session determines its own sound stage size based on
-    /// a handful of factors
-    AVAudioSessionSoundStageSizeAutomatic = 0,
-    
-    /// A smaller, front-focused sound stage
-    AVAudioSessionSoundStageSizeSmall     = 1,
-    
-    /// A medium-immersive sound stage
-    AVAudioSessionSoundStageSizeMedium    = 2,
-    
-    /// A fully-immersive sound stage
-    AVAudioSessionSoundStageSizeLarge     = 3,
-} NS_SWIFT_NAME(AVAudioSession.SoundStageSize);
 
-/*!
- When the intended spatial experience is HeadTracked, the anchoring strategy
- provides additional information about the reference point for spatialization.
- */
-typedef NS_ENUM(NSInteger, AVAudioSessionAnchoringStrategy) {
-    /// The audio session determines its own anchoring strategy based on
-    /// a handful of factors
-    AVAudioSessionAnchoringStrategyAutomatic   = 0,
-    
-    /// The session is anchored to the developer-provided scene
-    /// identifier (i.e. UIScene.session.persistentIdentifier)
-    AVAudioSessionAnchoringStrategyScene       = 1,
-    
-    /// The session is anchored to the user's concept of "front"
-    /// which the user can move with an intentional gesture.
-    AVAudioSessionAnchoringStrategyFront       = 2
-} NS_REFINED_FOR_SWIFT;
-
-typedef NS_ENUM(NSInteger, AVAudioSessionSpatialExperience) {
-    /// A fully head-tracked spatial experience parameterized by
-    /// a sound stage size and anchoring strategy
-    AVAudioSessionSpatialExperienceHeadTracked = 0,
-    
-    /// An unanchored, non-head-tracked spatial experience parameterized
-    /// by a sound stage size
-    AVAudioSessionSpatialExperienceFixed       = 1,
-    
-    /// An experience that bypasses any system-provided spatialization and
-    /// instead mixes the application's sound straight to the output
-    AVAudioSessionSpatialExperienceBypassed  = 2,
-} NS_REFINED_FOR_SWIFT;
-
-typedef NSString * const AVAudioSessionSpatialExperienceOption NS_TYPED_ENUM NS_REFINED_FOR_SWIFT;
-
-/// Associated value is NSNumber with AVAudioSessionSoundStageSize. Only used if
-/// SpatialExperience is HeadTracked or Fixed. If not provided for
-/// those SpatialExperiences, the default will be
-/// AVAudioSessionSoundStageSizeAutomatic.
-OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionSoundStageSize API_AVAILABLE(visionos(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
-
-/// Associated value is NSNumber with AVAudioSessionAnchoringStrategy. Only used if
-/// SpatialExperience is HeadTracked. If not provided for a head-tracked
-/// spatial experience, the default will be
-/// AVAudioSessionAnchoringStrategyAutomatic.
-OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionAnchoringStrategy API_AVAILABLE(visionos(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
-
-/// Associated value is NSString from UIScene.session.persistentIdentifier. Only
-/// used if SpatialExperience is HeadTracked and AnchoringStrategy is
-/// Scene. If not provided for a scene-anchored spatial experience, the
-/// session will fail to set the intended spatial experience and
-/// return an error.
-OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionSceneIdentifier API_AVAILABLE(visionos(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
-
-
 @interface AVAudioSession (SpatialPreference)
 
 
@@ -592,138 +547,6 @@
 @end // AVAudioSession (NowPlayingCandidacy)
 
 #endif // TARGET_OS_VISION
-
-
-#pragma mark-- Names for NSNotifications --
-
-/*!
-	@brief	Notification sent to registered listeners when the system has interrupted the audio
-			session and when the interruption has ended.
-
-    Check the notification's userInfo dictionary for the interruption type, which is either
-    Begin or End. In the case of an end interruption notification, check the userInfo dictionary
-    for AVAudioSessionInterruptionOptions that indicate whether audio playback should resume.
-    In the case of a begin interruption notification, the reason for the interruption can be found
-    within the info dictionary under the key AVAudioSessionInterruptionReasonKey.
-*/
-OS_EXPORT NSNotificationName const  AVAudioSessionInterruptionNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/*!
-	@brief	Notification sent to registered listeners when an audio route change has occurred.
-
-	Check the notification's userInfo dictionary for the route change reason and for a description
-	of the previous audio route.
-*/
-OS_EXPORT NSNotificationName const  AVAudioSessionRouteChangeNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/*!
-	@brief	Notification sent to registered listeners if the media server is killed.
-
-	In the event that the server is killed, take appropriate steps to handle requests that come in
-	before the server resets.  See Technical Q&A QA1749.
-*/
-OS_EXPORT NSNotificationName const  AVAudioSessionMediaServicesWereLostNotification API_AVAILABLE(ios(7.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/*!
-	@brief	Notification sent to registered listeners when the media server restarts.
-
-	In the event that the server restarts, take appropriate steps to re-initialize any audio objects
-	used by your application.  See Technical Q&A QA1749.
-*/
-OS_EXPORT NSNotificationName const  AVAudioSessionMediaServicesWereResetNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/*!
-	@brief	Notification sent to registered listeners when they are in the foreground with an active
-		audio session and primary audio from other applications starts and stops.
-
-	Check the notification's userInfo dictionary for the notification type, which is either Begin or
-	End. Foreground applications may use this notification as a hint to enable or disable audio that
-	is secondary to the functionality of the application. For more information, see the related
-	property secondaryAudioShouldBeSilencedHint.
-*/
-OS_EXPORT NSNotificationName const  AVAudioSessionSilenceSecondaryAudioHintNotification API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/*!
-    @brief  Notification sent to registered listeners when spatial playback capabilities are changed due to a
-    change in user preference.
-
-    Check the notification's userInfo dictionary for AVAudioSessionSpatialAudioEnabledKey to check if spatial
-    audio is enabled.
-    
-    Observers of this notification should also observe AVAudioSessionRouteChangeNotification since a route change
-    may also result in a change in the ability for the system to play spatial audio. Use
-    AVAudioSessionPortDescription's isSpatialAudioEnabled property to check if the current route supports
-    spatialized playback.
-*/
-OS_EXPORT NSNotificationName const  AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos) NS_SWIFT_NAME(AVAudioSession.spatialPlaybackCapabilitiesChangedNotification);
-
-/// Notification sent to registered listeners when the resolved rendering mode changes.
-OS_EXPORT NSNotificationName const  AVAudioSessionRenderingModeChangeNotification API_AVAILABLE(ios(17.2), tvos(17.2)) API_UNAVAILABLE(watchos, macos, visionos) NS_SWIFT_NAME(AVAudioSession.renderingModeChangeNotification);
-
-/*!
-	 @brief Notification sent to registered listeners when the rendering capabilities change.
- */
-OS_EXPORT NSNotificationName const AVAudioSessionRenderingCapabilitiesChangeNotification API_AVAILABLE(ios(17.2), tvos(17.2)) API_UNAVAILABLE(watchos, macos, visionos) NS_SWIFT_NAME(AVAudioSession.renderingCapabilitiesChangeNotification);
-
-/*!
-     @brief Notification sent to registered listeners when the system's capability to inject audio into input stream is changed
- 
- Check the notification's userInfo dictionary for AVAudioSessionMicrophoneInjectionIsAvailableKey to check if microphone
- injection is available. Use AVAudioSession's isMicrophoneInjectionAvailable property to check if microphone injection is available
- */
-OS_EXPORT NSNotificationName const AVAudioSessionMicrophoneInjectionCapabilitiesChangeNotification API_AVAILABLE(ios(18.2), visionos(2.2)) API_UNAVAILABLE(tvos, watchos, macos) NS_SWIFT_NAME(AVAudioSession.microphoneInjectionCapabilitiesChangeNotification);
-
-#pragma mark-- Keys for NSNotification userInfo dictionaries --
-
-/// keys for AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification
-/// value is an NSNumber whose boolean value indicates if spatial audio enabled.
-OS_EXPORT NSString *const AVAudioSessionSpatialAudioEnabledKey API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos);
-
-/// keys for AVAudioSessionInterruptionNotification
-/// Value is an NSNumber representing an AVAudioSessionInterruptionType
-OS_EXPORT NSString *const AVAudioSessionInterruptionTypeKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/// Only present for end interruption events.  Value is of type AVAudioSessionInterruptionOptions.
-OS_EXPORT NSString *const AVAudioSessionInterruptionOptionKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/// Only present in begin interruption events. Value is of type AVAudioSessionInterruptionReason.
-OS_EXPORT NSString *const AVAudioSessionInterruptionReasonKey API_AVAILABLE(ios(14.5), watchos(7.3)) API_UNAVAILABLE(tvos, macos);
-
-/*!
-	Only present in begin interruption events, where the interruption is a direct result of the
-	application being suspended by the operating sytem. Value is a boolean NSNumber, where a true
-	value indicates that the interruption is the result of the application being suspended, rather
-	than being interrupted by another audio session.
-
-	Starting in iOS 10, the system will deactivate the audio session of most apps in response to the
-	app process being suspended. When the app starts running again, it will receive the notification
-	that its session has been deactivated by the system. Note that the notification is necessarily
-	delayed in time, due to the fact that the application was suspended at the time the session was
-	deactivated by the system and the notification can only be delivered once the app is running
-	again.
-*/
-OS_EXPORT NSString *const AVAudioSessionInterruptionWasSuspendedKey API_DEPRECATED("No longer supported - see AVAudioSessionInterruptionReasonKey", ios(10.3, 14.5), watchos(3.2, 7.3), tvos(10.3, 14.5));
-
-/// keys for AVAudioSessionRouteChangeNotification
-/// value is an NSNumber representing an AVAudioSessionRouteChangeReason
-OS_EXPORT NSString *const AVAudioSessionRouteChangeReasonKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-/// value is AVAudioSessionRouteDescription *
-OS_EXPORT NSString *const AVAudioSessionRouteChangePreviousRouteKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/// keys for AVAudioSessionSilenceSecondaryAudioHintNotification
-/// value is an NSNumber representing an AVAudioSessionSilenceSecondaryAudioHintType
-OS_EXPORT NSString *const AVAudioSessionSilenceSecondaryAudioHintTypeKey API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
-
-/// keys for AVAudioSessionRenderingModeChangeNotification
-/// Contains a payload of NSInteger representing the new resolved rendering mode
-OS_EXPORT NSString *const AVAudioSessionRenderingModeNewRenderingModeKey API_AVAILABLE(ios(17.2), tvos(17.2)) API_UNAVAILABLE(watchos, macos, visionos);
-
-/*!
-    Keys for AVAudioSessionMicrophoneInjectionCapabilitiesChangeNotification
-*/
-/// Indicates if microphone injection is available.
-/// Value is an NSNumber whose boolean value indicates if microphone injection is available.
-OS_EXPORT NSString *const AVAudioSessionMicrophoneInjectionIsAvailableKey API_AVAILABLE(ios(18.2), visionos(2.2)) API_UNAVAILABLE(tvos, watchos, macos);
 
 NS_ASSUME_NONNULL_END
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionRoute.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionRoute.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionRoute.h	2025-04-19 02:41:41
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionRoute.h	2025-05-24 06:44:29
@@ -125,6 +125,29 @@
 
 @end // AVAudioSessionDataSourceDescription
 
+/// Describes whether a specific capability is supported and if that capability is currently enabled
+NS_SWIFT_SENDABLE
+API_AVAILABLE(ios(26.0), watchos(26.0), tvos(26.0), macCatalyst(26.0), macos(26.0), visionos(26.0))
+@interface AVAudioSessionCapability : NSObject
+
+/// A Boolean value that indicates whether the capability is supported.
+@property (readonly, nonatomic, getter=isSupported) BOOL supported;
+/// A Boolean value that indicates whether the capability is enabled.
+@property (readonly, nonatomic, getter=isEnabled) BOOL enabled;
+
+@end
+
+/// An object that describes capabilities of Bluetooth microphone ports.
+NS_SWIFT_SENDABLE
+API_AVAILABLE(ios(26.0), watchos(26.0), tvos(26.0), macCatalyst(26.0), visionos(26.0)) API_UNAVAILABLE(macos)
+@interface AVAudioSessionPortExtensionBluetoothMicrophone : NSObject
+
+/// Describes whether this port supports Bluetooth high-quality recording.
+///
+/// Please see ``AVAudioSessionCategoryOptions/AVAudioSessionCategoryOptionBluetoothHighQualityRecording`` for details.
+@property (readonly, strong, nonatomic, nonnull) AVAudioSessionCapability* highQualityRecording;
+@end
+
 /// Information about a port, a physical connector or audio device.
 NS_SWIFT_SENDABLE
 API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos) 
@@ -196,6 +219,15 @@
 - (BOOL)setPreferredDataSource:(nullable AVAudioSessionDataSourceDescription *)dataSource error:(NSError **)outError API_AVAILABLE(ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos, macos);
 
 @end // AVAudioSessionPortDescription
+
+@interface AVAudioSessionPortDescription (BluetoothMicrophoneExtension)
+
+/// An optional port extension that describes capabilities relevant to Bluetooth microphone ports.
+///
+/// This property is optional and will be `nil` for all ports for which this capability set doesn't apply.
+@property (readonly, nonatomic, nullable) AVAudioSessionPortExtensionBluetoothMicrophone* bluetoothMicrophoneExtension API_AVAILABLE(ios(26.0), watchos(26.0), tvos(26.0), macCatalyst(26.0), visionos(26.0)) API_UNAVAILABLE(macos);
+
+@end
 
 /// A description of the input and output ports which comprise a route.
 NS_SWIFT_SENDABLE
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionTypes.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionTypes.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionTypes.h	2025-04-19 03:36:09
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionTypes.h	2025-05-24 01:25:07
@@ -2,7 +2,7 @@
 /*!
 	@file		AVAudioSessionTypes.h
 	@framework	AudioSession.framework
-	@copyright	(c) 2009-2023 Apple Inc. All rights reserved.
+	@copyright	(c) 2009-2024 Apple Inc. All rights reserved.
 */
 
 #ifndef AudioSession_AVAudioSessionTypes_h
@@ -138,7 +138,7 @@
 /*! Only valid with AVAudioSessionCategoryPlayAndRecord.  Appropriate for Voice over IP
  (VoIP) applications.  Reduces the number of allowable audio routes to be only those
  that are appropriate for VoIP applications and may engage appropriate system-supplied
- signal processing.  Has the side effect of setting AVAudioSessionCategoryOptionAllowBluetooth.
+ signal processing.  Has the side effect of setting AVAudioSessionCategoryOptionAllowBluetoothHFP.
  Using this mode without the VoiceProcessing IO unit or AVAudioEngine with voice processing enabled will result in the following:
 - Chat-specific signal processing such as echo cancellation or automatic gain correction will not be loaded
 - Dynamic processing on input and output will be disabled resulting in a lower output playback level. */
@@ -167,7 +167,7 @@
 /*! Only valid with kAudioSessionCategory_PlayAndRecord. Reduces the number of allowable audio
  routes to be only those that are appropriate for video chat applications. May engage appropriate
  system-supplied signal processing.  Has the side effect of setting
- AVAudioSessionCategoryOptionAllowBluetooth and AVAudioSessionCategoryOptionDefaultToSpeaker. 
+ AVAudioSessionCategoryOptionAllowBluetoothHFP and AVAudioSessionCategoryOptionDefaultToSpeaker. 
  Using this mode without the VoiceProcessing IO unit or AVAudioEngine with voice processing enabled will result in the following:
 - Chat-specific signal processing such as echo cancellation or automatic gain correction will not be loaded
 - Dynamic processing on input and output will be disabled resulting in a lower output playback level. */
@@ -185,6 +185,170 @@
 OS_EXPORT AVAudioSessionMode const AVAudioSessionModeVoicePrompt API_AVAILABLE(ios(12.0), watchos(5.0), tvos(12.0)) API_UNAVAILABLE(macos);
 
 
+/// Appropriate for applications playing short-form video content.
+///
+/// Only valid with ``AVAudioSessionCategoryPlayback``.
+/// Not applicable with ``AVAudioSessionRouteSharingPolicy/AVAudioSessionRouteSharingPolicyLongFormAudio``,
+/// or ``AVAudioSessionRouteSharingPolicy/AVAudioSessionRouteSharingPolicyLongFormVideo``.
+///
+/// When this mode is set:
+/// - system will make informed decisions to automatically unmute the output of the media if the user shows intention of unmuting.
+/// 	- When auto-unmuted, ``AVAudioSessionUserIntentToUnmuteOutputNotification`` and ``AVAudioSessionOutputMuteStateChangeNotification`` will be sent.
+/// - if the session is output muted, system may prevent interrupting other active audio apps.
+OS_EXPORT AVAudioSessionMode const AVAudioSessionModeShortFormVideo API_AVAILABLE(ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos, macos);
+
+
+#pragma mark-- Names for NSNotifications --
+
+/*!
+	@brief	Notification sent to registered listeners when the system has interrupted the audio
+			session and when the interruption has ended.
+
+	Check the notification's userInfo dictionary for the interruption type, which is either
+	Begin or End. In the case of an end interruption notification, check the userInfo dictionary
+	for AVAudioSessionInterruptionOptions that indicate whether audio playback should resume.
+	In the case of a begin interruption notification, the reason for the interruption can be found
+	within the info dictionary under the key AVAudioSessionInterruptionReasonKey.
+*/
+OS_EXPORT NSNotificationName const  AVAudioSessionInterruptionNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/*!
+	@brief	Notification sent to registered listeners when an audio route change has occurred.
+
+	Check the notification's userInfo dictionary for the route change reason and for a description
+	of the previous audio route.
+*/
+OS_EXPORT NSNotificationName const  AVAudioSessionRouteChangeNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/*!
+	@brief	Notification sent to registered listeners if the media server is killed.
+
+	In the event that the server is killed, take appropriate steps to handle requests that come in
+	before the server resets.  See Technical Q&A QA1749.
+*/
+OS_EXPORT NSNotificationName const  AVAudioSessionMediaServicesWereLostNotification API_AVAILABLE(ios(7.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/*!
+	@brief	Notification sent to registered listeners when the media server restarts.
+
+	In the event that the server restarts, take appropriate steps to re-initialize any audio objects
+	used by your application.  See Technical Q&A QA1749.
+*/
+OS_EXPORT NSNotificationName const  AVAudioSessionMediaServicesWereResetNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/*!
+	@brief	Notification sent to registered listeners when they are in the foreground with an active
+		audio session and primary audio from other applications starts and stops.
+
+	Check the notification's userInfo dictionary for the notification type, which is either Begin or
+	End. Foreground applications may use this notification as a hint to enable or disable audio that
+	is secondary to the functionality of the application. For more information, see the related
+	property secondaryAudioShouldBeSilencedHint.
+*/
+OS_EXPORT NSNotificationName const  AVAudioSessionSilenceSecondaryAudioHintNotification API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/*!
+	@brief  Notification sent to registered listeners when spatial playback capabilities are changed due to a
+	change in user preference.
+
+	Check the notification's userInfo dictionary for AVAudioSessionSpatialAudioEnabledKey to check if spatial
+	audio is enabled.
+
+	Observers of this notification should also observe AVAudioSessionRouteChangeNotification since a route change
+	may also result in a change in the ability for the system to play spatial audio. Use
+	AVAudioSessionPortDescription's isSpatialAudioEnabled property to check if the current route supports
+	spatialized playback.
+*/
+OS_EXPORT NSNotificationName const  AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos) NS_SWIFT_NAME(AVAudioSession.spatialPlaybackCapabilitiesChangedNotification);
+
+/// Notification sent to registered listeners when the resolved rendering mode changes.
+OS_EXPORT NSNotificationName const  AVAudioSessionRenderingModeChangeNotification API_AVAILABLE(ios(17.2), tvos(17.2)) API_UNAVAILABLE(watchos, macos, visionos) NS_SWIFT_NAME(AVAudioSession.renderingModeChangeNotification);
+
+/*!
+	 @brief Notification sent to registered listeners when the rendering capabilities change.
+ */
+OS_EXPORT NSNotificationName const AVAudioSessionRenderingCapabilitiesChangeNotification API_AVAILABLE(ios(17.2), tvos(17.2)) API_UNAVAILABLE(watchos, macos, visionos) NS_SWIFT_NAME(AVAudioSession.renderingCapabilitiesChangeNotification);
+
+/*!
+     @brief Notification sent to registered listeners when the system's capability to inject audio into input stream is changed
+ 
+ Check the notification's userInfo dictionary for AVAudioSessionMicrophoneInjectionIsAvailableKey to check if microphone
+ injection is available. Use AVAudioSession's isMicrophoneInjectionAvailable property to check if microphone injection is available
+ */
+OS_EXPORT NSNotificationName const AVAudioSessionMicrophoneInjectionCapabilitiesChangeNotification API_AVAILABLE(ios(18.2), visionos(2.2)) API_UNAVAILABLE(tvos, watchos, macos) NS_SWIFT_NAME(AVAudioSession.microphoneInjectionCapabilitiesChangeNotification);
+
+/// Notification sent to registered listeners when session's output mute state changes.
+///
+/// The userInfo dictionary will contain the updated output mute value as accessed by ``AVAudioSessionMuteStateKey``
+OS_EXPORT NSNotificationName const AVAudioSessionOutputMuteStateChangeNotification API_AVAILABLE(ios(26.0), macos(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) NS_SWIFT_NAME(AVAudioSession.outputMuteStateChangeNotification);
+
+/// Keys for ``AVAudioSessionOutputMuteStateChangeNotification``
+/// Value is `NSNumber` type with boolean value 0 for unmuted or value 1 for muted (samples zeroed out)
+OS_EXPORT NSString *const AVAudioSessionMuteStateKey API_AVAILABLE(ios(26.0), macos(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) NS_SWIFT_NAME(AVAudioSession.muteStateKey);
+
+/// Notification sent to registered listeners when the application's output is muted and user hints to unmute.
+OS_EXPORT NSNotificationName const AVAudioSessionUserIntentToUnmuteOutputNotification API_AVAILABLE(ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos, macos) NS_SWIFT_NAME(AVAudioSession.userIntentToUnmuteOutputNotification);
+
+#pragma mark-- Keys for NSNotification userInfo dictionaries --
+
+/// keys for AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification
+/// value is an NSNumber whose boolean value indicates if spatial audio enabled.
+OS_EXPORT NSString *const AVAudioSessionSpatialAudioEnabledKey API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos);
+
+/// keys for AVAudioSessionInterruptionNotification
+/// Value is an NSNumber representing an AVAudioSessionInterruptionType
+OS_EXPORT NSString *const AVAudioSessionInterruptionTypeKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/// Only present for end interruption events.  Value is of type AVAudioSessionInterruptionOptions.
+OS_EXPORT NSString *const AVAudioSessionInterruptionOptionKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/// Only present in begin interruption events. Value is of type AVAudioSessionInterruptionReason.
+OS_EXPORT NSString *const AVAudioSessionInterruptionReasonKey API_AVAILABLE(ios(14.5), watchos(7.3)) API_UNAVAILABLE(tvos, macos);
+
+/*!
+	Only present in begin interruption events, where the interruption is a direct result of the
+	application being suspended by the operating system. Value is a boolean NSNumber, where a true
+	value indicates that the interruption is the result of the application being suspended, rather
+	than being interrupted by another audio session.
+
+	Starting in iOS 10, the system will deactivate the audio session of most apps in response to the
+	app process being suspended. When the app starts running again, it will receive the notification
+	that its session has been deactivated by the system. Note that the notification is necessarily
+	delayed in time, due to the fact that the application was suspended at the time the session was
+	deactivated by the system and the notification can only be delivered once the app is running
+	again.
+*/
+OS_EXPORT NSString *const AVAudioSessionInterruptionWasSuspendedKey API_DEPRECATED("No longer supported - see AVAudioSessionInterruptionReasonKey", ios(10.3, 14.5), watchos(3.2, 7.3), tvos(10.3, 14.5));
+
+/// keys for AVAudioSessionRouteChangeNotification
+/// value is an NSNumber representing an AVAudioSessionRouteChangeReason
+OS_EXPORT NSString *const AVAudioSessionRouteChangeReasonKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+/// value is AVAudioSessionRouteDescription *
+OS_EXPORT NSString *const AVAudioSessionRouteChangePreviousRouteKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/// keys for AVAudioSessionSilenceSecondaryAudioHintNotification
+/// value is an NSNumber representing an AVAudioSessionSilenceSecondaryAudioHintType
+OS_EXPORT NSString *const AVAudioSessionSilenceSecondaryAudioHintTypeKey API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
+
+/// keys for AVAudioSessionRenderingModeChangeNotification
+/// Contains a payload of NSInteger representing the new resolved rendering mode
+OS_EXPORT NSString *const AVAudioSessionRenderingModeNewRenderingModeKey API_AVAILABLE(ios(17.2), tvos(17.2)) API_UNAVAILABLE(watchos, macos, visionos);
+
+/*!
+    Keys for AVAudioSessionMicrophoneInjectionCapabilitiesChangeNotification
+*/
+/// Indicates if microphone injection is available.
+/// Value is an NSNumber whose boolean value indicates if microphone injection is available.
+OS_EXPORT NSString *const AVAudioSessionMicrophoneInjectionIsAvailableKey API_AVAILABLE(ios(18.2), visionos(2.2)) API_UNAVAILABLE(tvos, watchos, macos);
+
+/*!
+	@brief  Notification sent to registered listeners when there are changes in ``availableInputs``.
+
+	There is no payload (userInfo dictionary) associated with the ``AVAudioSessionAvailableInputsChangeNotification`` notification.
+*/
+OS_EXPORT NSNotificationName const AVAudioSessionAvailableInputsChangeNotification API_AVAILABLE(ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos, macos)
+NS_SWIFT_NAME(AVAudioSession.availableInputsChangeNotification);
+
 #pragma mark-- enumerations --
 
 /*!
@@ -239,160 +403,172 @@
     AVAudioSessionRouteChangeReasonRouteConfigurationChange = 8
 };
 
-/*!
-    @enum        AVAudioSessionCategoryOptions
-    @brief        Customization of various aspects of a category's behavior. Use with
-                setCategory:mode:options:error:.
 
-    Applications must be prepared for changing category options to fail as behavior may change
-    in future releases. If an application changes its category, it should reassert the options,
-    since they are not sticky across category changes. Introduced in iOS 6.0 / watchOS 2.0 /
-    tvOS 9.0.
+///		Customization of various aspects of a category's behavior.
+///		Use with ``AVAudioSession/setCategory:mode:options:error:``.
+///
+///    Applications must be prepared for changing category options to fail as behavior may change
+///    in future releases. If an application changes its category, it should reassert the options,
+///    since they are not sticky across category changes. Introduced in iOS 6.0 / watchOS 2.0 /
+///    tvOS 9.0.
+typedef NS_OPTIONS(NSUInteger, AVAudioSessionCategoryOptions) {
 
-    @var AVAudioSessionCategoryOptionMixWithOthers
-        Controls whether other active audio apps will be interrupted or mixed with when your app's
-        audio session goes active. Details depend on the category.
+	///		Controls whether other active audio apps will be interrupted or mixed with when your app's
+	///		audio session goes active. Details depend on the category.
+	///
+	///		- ``AVAudioSessionCategoryPlayAndRecord`` or ``AVAudioSessionCategoryMultiRoute``:
+	///			MixWithOthers defaults to false, but can be set to true, allowing other applications to
+	///			play in the background while your app has both audio input and output enabled.
+	///
+	///		- ``AVAudioSessionCategoryPlayback``:
+	///			MixWithOthers defaults to false, but can be set to true, allowing other applications to
+	///			play in the background. Your app will still be able to play regardless of the setting
+	///			of the ringer switch.
+	///
+	///		- Other categories:
+	///			MixWithOthers defaults to false and cannot be changed.
+	///
+	///		MixWithOthers is only valid with ``AVAudioSessionCategoryPlayAndRecord``,
+	///		``AVAudioSessionCategoryPlayback``, and ``AVAudioSessionCategoryMultiRoute``.
+    AVAudioSessionCategoryOptionMixWithOthers            = 0x1,
 
-        AVAudioSessionCategoryPlayAndRecord or AVAudioSessionCategoryMultiRoute:
-             MixWithOthers defaults to false, but can be set to true, allowing other applications to
-             play in the background while your app has both audio input and output enabled.
+	///		Controls whether or not other active audio apps will be ducked when your app's audio
+	///		session goes active. An example of this is a workout app, which provides periodic updates to
+	///		the user. It reduces the volume of any music currently being played while it provides its
+	///		status.
+	///
+	///		Defaults to off. Note that the other audio will be ducked for as long as the current session
+	///		is active. You will need to deactivate your audio session when you want to restore full
+	///		volume playback (un-duck) other sessions.
+	///
+	///		Setting this option will also make your session mixable with others
+	///		(``AVAudioSessionCategoryOptionMixWithOthers`` will be set).
+	///
+	///		DuckOthers is only valid with ``AVAudioSessionCategoryAmbient``,
+	///		``AVAudioSessionCategoryPlayAndRecord``, ``AVAudioSessionCategoryPlayback``, and
+	///		``AVAudioSessionCategoryMultiRoute``.
+    AVAudioSessionCategoryOptionDuckOthers               = 0x2,
 
-        AVAudioSessionCategoryPlayback:
-             MixWithOthers defaults to false, but can be set to true, allowing other applications to
-             play in the background. Your app will still be able to play regardless of the setting
-             of the ringer switch.
+	/// Deprecated - please see ``AVAudioSessionCategoryOptionAllowBluetoothHFP``
+	AVAudioSessionCategoryOptionAllowBluetooth API_DEPRECATED_WITH_REPLACEMENT("AVAudioSessionCategoryOptionAllowBluetoothHFP", ios(1.0, 8.0), watchos(11.0, 11.0), tvos(17.0, 17.0), visionos(1.0, 1.0)) API_UNAVAILABLE(macos) = 0x4,
 
-        Other categories:
-             MixWithOthers defaults to false and cannot be changed.
+	///		Allows an application to change the default behavior of some audio session categories with
+	///		regard to whether Bluetooth Hands-Free Profile (HFP) devices are available for routing. The
+	///		exact behavior depends on the category.
+	///
+	///		- ``AVAudioSessionCategoryPlayAndRecord``:
+	///			AllowBluetoothHFP defaults to false, but can be set to true, allowing a paired bluetooth
+	///			HFP device to appear as an available route for input, while playing through the
+	///			category-appropriate output.
+	///
+	///		- ``AVAudioSessionCategoryRecord``:
+	///			AllowBluetoothHFP defaults to false, but can be set to true, allowing a paired Bluetooth
+	///			HFP device to appear as an available route for input.
+	///
+	///		- Other categories:
+	///			AllowBluetoothHFP defaults to false and cannot be changed. Enabling Bluetooth for input in
+	///			these categories is not allowed.
+	AVAudioSessionCategoryOptionAllowBluetoothHFP API_AVAILABLE(ios(1.0), watchos(11.0), tvos(17.0), visionos(1.0)) API_UNAVAILABLE(macos) = 0x4,
 
-        MixWithOthers is only valid with AVAudioSessionCategoryPlayAndRecord,
-        AVAudioSessionCategoryPlayback, and AVAudioSessionCategoryMultiRoute.
+	///		Allows an application to change the default behavior of some audio session categories with
+	///		regard to the audio route. The exact behavior depends on the category.
+	///
+	///		- ``AVAudioSessionCategoryPlayAndRecord``:
+	///			DefaultToSpeaker will default to false, but can be set to true, routing to Speaker
+	///			(instead of Receiver) when no other audio route is connected.
+	///
+	///		- Other categories:
+	///			DefaultToSpeaker is always false and cannot be changed.
+	AVAudioSessionCategoryOptionDefaultToSpeaker API_UNAVAILABLE(tvos, watchos, macos) = 0x8,
 
-    @var AVAudioSessionCategoryOptionDuckOthers
-        Controls whether or not other active audio apps will be ducked when when your app's audio
-        session goes active. An example of this is a workout app, which provides periodic updates to
-        the user. It reduces the volume of any music currently being played while it provides its
-        status.
+	///		When a session with InterruptSpokenAudioAndMixWithOthers set goes active, then if there is
+	///		another playing app whose session mode is ``AVAudioSessionModeSpokenAudio`` (for podcast
+	///		playback in the background, for example), then the spoken-audio session will be
+	///		interrupted. A good use of this is for a navigation app that provides prompts to its user:
+	///		it pauses any spoken audio currently being played while it plays the prompt.
+	///
+	///		InterruptSpokenAudioAndMixWithOthers defaults to off. Note that the other app's audio will
+	///		be paused for as long as the current session is active. You will need to deactivate your
+	///		audio session to allow the other session to resume playback. Setting this option will also
+	///		make your category mixable with others (``AVAudioSessionCategoryOptionMixWithOthers``
+	///		will be set). If you want other non-spoken audio apps to duck their audio when your app's session
+	///		goes active, also set ``AVAudioSessionCategoryOptionDuckOthers``.
+	///
+	///		Only valid with ``AVAudioSessionCategoryPlayAndRecord``,
+	///		``AVAudioSessionCategoryPlayback``, and ``AVAudioSessionCategoryMultiRoute``.
+	AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers API_AVAILABLE(ios(9.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos) = 0x11,
 
-        Defaults to off. Note that the other audio will be ducked for as long as the current session
-        is active. You will need to deactivate your audio session when you want to restore full
-        volume playback (un-duck) other sessions.
+	///		Allows an application to change the default behavior of some audio session categories with
+	///		regard to whether Bluetooth Advanced Audio Distribution Profile (A2DP) devices are
+	///		available for routing. The exact behavior depends on the category.
+	///
+	///		- ``AVAudioSessionCategoryPlayAndRecord``:
+	///			AllowBluetoothA2DP defaults to false, but can be set to true, allowing a paired
+	///			Bluetooth A2DP device to appear as an available route for output, while recording
+	///			through the category-appropriate input.
+	///
+	///		- ``AVAudioSessionCategoryMultiRoute`` and ``AVAudioSessionCategoryRecord``:
+	///			AllowBluetoothA2DP is false, and cannot be set to true.
+	///
+	///		- Other categories:
+	///			AllowBluetoothA2DP is always implicitly true and cannot be changed; Bluetooth A2DP ports
+	///			are always supported in output-only categories.
+	///
+	///		Setting both ``AVAudioSessionCategoryOptionAllowBluetoothHFP``
+	///		and ``AVAudioSessionCategoryOptionAllowBluetoothA2DP`` is
+	///		allowed. In cases where a single Bluetooth device supports both HFP and A2DP, the HFP
+	///		ports will be given a higher priority for routing. For HFP and A2DP ports on separate
+	///		hardware devices, the last-in wins rule applies.
+	AVAudioSessionCategoryOptionAllowBluetoothA2DP API_AVAILABLE(ios(10.0), watchos(3.0), tvos(10.0)) API_UNAVAILABLE(macos) = 0x20,
 
-        Setting this option will also make your session mixable with others
-        (AVAudioSessionCategoryOptionMixWithOthers will be set).
+	///		Allows an application to change the default behavior of some audio session categories
+	///		with regard to showing AirPlay devices as available routes. This option applies to
+	///		various categories in the same way as ``AVAudioSessionCategoryOptionAllowBluetoothA2DP``;
+	///		see above for details. Only valid with ``AVAudioSessionCategoryPlayAndRecord``.
+	AVAudioSessionCategoryOptionAllowAirPlay API_AVAILABLE(ios(10.0), tvos(10.0)) API_UNAVAILABLE(watchos, macos) = 0x40,
 
-        DuckOthers is only valid with AVAudioSessionCategoryAmbient,
-        AVAudioSessionCategoryPlayAndRecord, AVAudioSessionCategoryPlayback, and
-        AVAudioSessionCategoryMultiRoute.
+	///		Some devices include a privacy feature that mutes the built-in microphone at a hardware level
+	///		under certain conditions e.g. when the Smart Folio of an iPad is closed. The default behavior is
+	///		to interrupt the session using the built-in microphone when that microphone is muted in hardware.
+	///		This option allows an application to opt out of the default behavior if it is using a category that
+	///		supports both input and output, such as ``AVAudioSessionCategoryPlayAndRecord``, and wants to
+	///		allow its session to stay activated even when the microphone has been muted. The result would be
+	///		that playback continues as normal, and microphone sample buffers would continue to be produced
+	///		but all microphone samples would have a value of zero.
+	///
+	///		This may be useful if an application knows that it wants to allow playback to continue and
+	///		recording/monitoring a muted microphone will not lead to a poor user experience. Attempting to use
+	///		this option with a session category that doesn't support the use of audio input will result in an error.
+	///
+	///		- Note Under the default policy, a session will be interrupted if it is running input at the time when
+	///		the microphone is muted in hardware. Similarly, attempting to start input when the microphone is
+	///		muted will fail.
+	///		- Note This option has no relation to the recordPermission property, which indicates whether or
+	///		not the user has granted permission to use microphone input.
+	AVAudioSessionCategoryOptionOverrideMutedMicrophoneInterruption API_AVAILABLE(ios(14.5), watchos(7.3)) API_UNAVAILABLE(tvos, macos) = 0x80,
 
-    @var AVAudioSessionCategoryOptionAllowBluetooth
-        Allows an application to change the default behavior of some audio session categories with
-        regard to whether Bluetooth Hands-Free Profile (HFP) devices are available for routing. The
-        exact behavior depends on the category.
-
-        AVAudioSessionCategoryPlayAndRecord:
-            AllowBluetooth defaults to false, but can be set to true, allowing a paired bluetooth
-            HFP device to appear as an available route for input, while playing through the
-            category-appropriate output.
-
-        AVAudioSessionCategoryRecord:
-            AllowBluetooth defaults to false, but can be set to true, allowing a paired Bluetooth
-            HFP device to appear as an available route for input
-
-        Other categories:
-            AllowBluetooth defaults to false and cannot be changed. Enabling Bluetooth for input in
-            these categories is not allowed.
-
-    @var AVAudioSessionCategoryOptionDefaultToSpeaker
-        Allows an application to change the default behavior of some audio session categories with
-        regard to the audio route. The exact behavior depends on the category.
-
-        AVAudioSessionCategoryPlayAndRecord:
-            DefaultToSpeaker will default to false, but can be set to true, routing to Speaker
-            (instead of Receiver) when no other audio route is connected.
-
-        Other categories:
-            DefaultToSpeaker is always false and cannot be changed.
-
-    @var AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers
-        When a session with InterruptSpokenAudioAndMixWithOthers set goes active, then if there is
-        another playing app whose session mode is AVAudioSessionModeSpokenAudio (for podcast
-        playback in the background, for example), then the spoken-audio session will be
-        interrupted. A good use of this is for a navigation app that provides prompts to its user:
-        it pauses any spoken audio currently being played while it plays the prompt.
-
-        InterruptSpokenAudioAndMixWithOthers defaults to off. Note that the other app's audio will
-        be paused for as long as the current session is active. You will need to deactivate your
-        audio session to allow the other session to resume playback. Setting this option will also
-        make your category mixable with others (AVAudioSessionCategoryOptionMixWithOthers will be
-        set). If you want other non-spoken audio apps to duck their audio when your app's session
-        goes active, also set AVAudioSessionCategoryOptionDuckOthers.
-
-        Only valid with AVAudioSessionCategoryPlayAndRecord, AVAudioSessionCategoryPlayback, and
-        AVAudioSessionCategoryMultiRoute. Introduced in iOS 9.0 / watchOS 2.0 / tvOS 9.0.
-
-    @var AVAudioSessionCategoryOptionAllowBluetoothA2DP
-        Allows an application to change the default behavior of some audio session categories with
-        regard to whether Bluetooth Advanced Audio Distribution Profile (A2DP) devices are
-        available for routing. The exact behavior depends on the category.
-
-        AVAudioSessionCategoryPlayAndRecord:
-            AllowBluetoothA2DP defaults to false, but can be set to true, allowing a paired
-            Bluetooth A2DP device to appear as an available route for output, while recording
-            through the category-appropriate input.
-
-        AVAudioSessionCategoryMultiRoute and AVAudioSessionCategoryRecord:
-            AllowBluetoothA2DP is false, and cannot be set to true.
-
-        Other categories:
-            AllowBluetoothA2DP is always implicitly true and cannot be changed; Bluetooth A2DP ports
-            are always supported in output-only categories.
-
-        Setting both AVAudioSessionCategoryOptionAllowBluetooth and
-        AVAudioSessionCategoryOptionAllowBluetoothA2DP is allowed. In cases where a single
-        Bluetooth device supports both HFP and A2DP, the HFP ports will be given a higher priority
-        for routing. For HFP and A2DP ports on separate hardware devices, the last-in wins rule
-        applies.
-
-        Introduced in iOS 10.0 / watchOS 3.0 / tvOS 10.0.
-
-    @var AVAudioSessionCategoryOptionAllowAirPlay
-        Allows an application to change the default behavior of some audio session categories
-        with regard to showing AirPlay devices as available routes. This option applies to
-        various categories in the same way as AVAudioSessionCategoryOptionAllowBluetoothA2DP;
-        see above for details.
-
-        Only valid with AVAudioSessionCategoryPlayAndRecord. Introduced in iOS 10.0 / tvOS 10.0.
-
-    @var AVAudioSessionCategoryOptionOverrideMutedMicrophoneInterruption
-        Some devices include a privacy feature that mutes the built-in microphone at a hardware level
-        under certain conditions e.g. when the Smart Folio of an iPad is closed. The default behavior is
-        to interrupt the session using the built-in microphone when that microphone is muted in hardware.
-        This option allows an application to opt out of the default behavior if it is using a category that
-        supports both input and output, such as AVAudioSessionCategoryPlayAndRecord, and wants to
-        allow its session to stay activated even when the microphone has been muted. The result would be
-        that playback continues as normal, and microphone sample buffers would continue to be produced
-        but all microphone samples would have a value of zero.
-
-        This may be useful if an application knows that it wants to allow playback to continue and
-        recording/monitoring a muted microphone will not lead to a poor user experience. Attempting to use
-        this option with a session category that doesn't support the use of audio input will result in an error.
-
-        Note that under the default policy, a session will be interrupted if it is running input at the time when
-        the microphone is muted in hardware. Similarly, attempting to start input when the microphone is
-        muted will fail.
-        Note that this option has no relation to the recordPermission property, which indicates whether or
-        not the user has granted permission to use microphone input.
-*/
-typedef NS_OPTIONS(NSUInteger, AVAudioSessionCategoryOptions) {
-    AVAudioSessionCategoryOptionMixWithOthers            = 0x1,
-    AVAudioSessionCategoryOptionDuckOthers               = 0x2,
-    AVAudioSessionCategoryOptionAllowBluetooth API_AVAILABLE(tvos(17.0), watchos(11.0)) API_UNAVAILABLE(macos) = 0x4,
-    AVAudioSessionCategoryOptionDefaultToSpeaker API_UNAVAILABLE(tvos, watchos, macos) = 0x8,
-    AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers API_AVAILABLE(ios(9.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos) = 0x11,
-    AVAudioSessionCategoryOptionAllowBluetoothA2DP API_AVAILABLE(ios(10.0), watchos(3.0), tvos(10.0)) API_UNAVAILABLE(macos) = 0x20,
-    AVAudioSessionCategoryOptionAllowAirPlay API_AVAILABLE(ios(10.0), tvos(10.0)) API_UNAVAILABLE(watchos, macos) = 0x40,
-    AVAudioSessionCategoryOptionOverrideMutedMicrophoneInterruption API_AVAILABLE(ios(14.5), watchos(7.3)) API_UNAVAILABLE(tvos, macos) = 0x80,
+	///		When this option is specified with a category that supports both input and output, the session
+	///		will enable full-bandwidth audio in both input & output directions, if the Bluetooth route supports
+	///		it (e.g. certain AirPods models). It is currently compatible only with mode ``AVAudioSessionModeDefault``.
+	///
+	///		- Support for this can be queried on input ports via the BluetoothMicrophone interface on a port,
+	///			via its member `highQualityRecording.isSupported`.
+	///
+	///		- Active sessions can see if full-bandwidth Bluetooth audio was successfully enabled by querying
+	///		the BluetoothMicrophone interface of the input port of the current route for:
+	///		`highQualityRecording.isEnabled`.
+	///
+	///		- When this option is provided alone, it will be enabled if the route supports it, otherwise the option
+	///		will be ignored. This option may be combined with ``AVAudioSessionCategoryOptionAllowBluetoothHFP``,
+	///		in which case HFP will be used as a fallback if the route does not support this
+	///		``AVAudioSessionCategoryOptionBluetoothHighQualityRecording`` option.
+	///
+	///		- Note This option may increase input latency when enabled and is therefore not recommended for
+	///			real-time communication usage.
+	///		- Note Apps using ``AVAudioSessionCategoryOptionBluetoothHighQualityRecording``
+	///		may want to consider setting ``AVAudioSession/setPrefersNoInterruptionsFromSystemAlerts:error:``
+	///		while recording, to avoid the recording session being interrupted by an incoming call ringtone.
+	AVAudioSessionCategoryOptionBluetoothHighQualityRecording API_AVAILABLE(ios(26.0)) API_UNAVAILABLE(watchos, tvos, macCatalyst, visionos, macos) = 1 << 19
 };
 
 /// Values for AVAudioSessionInterruptionTypeKey in AVAudioSessionInterruptionNotification's
@@ -670,6 +846,81 @@
     /// Inject Spoken Audio, like synthesized speech, with microphone audio
     AVAudioSessionMicrophoneInjectionModeSpokenAudio = 1,
 } NS_SWIFT_NAME(AVAudioSession.MicrophoneInjectionMode);
+
+#if TARGET_OS_VISION
+/*!
+ The perceived "size" or "immersivity" of the sound. Use Small for least
+ immersive and Large for most immersive.
+ */
+typedef NS_ENUM(NSInteger, AVAudioSessionSoundStageSize) {
+	/// The audio session determines its own sound stage size based on
+	/// a handful of factors
+	AVAudioSessionSoundStageSizeAutomatic = 0,
+
+	/// A smaller, front-focused sound stage
+	AVAudioSessionSoundStageSizeSmall     = 1,
+
+	/// A medium-immersive sound stage
+	AVAudioSessionSoundStageSizeMedium    = 2,
+
+	/// A fully-immersive sound stage
+	AVAudioSessionSoundStageSizeLarge     = 3,
+} NS_SWIFT_NAME(AVAudioSession.SoundStageSize);
+
+/*!
+ When the intended spatial experience is HeadTracked, the anchoring strategy
+ provides additional information about the reference point for spatialization.
+ */
+typedef NS_ENUM(NSInteger, AVAudioSessionAnchoringStrategy) {
+	/// The audio session determines its own anchoring strategy based on
+	/// a handful of factors
+	AVAudioSessionAnchoringStrategyAutomatic   = 0,
+
+	/// The session is anchored to the developer-provided scene
+	/// identifier (i.e. UIScene.session.persistentIdentifier)
+	AVAudioSessionAnchoringStrategyScene       = 1,
+
+	/// The session is anchored to the user's concept of "front"
+	/// which the user can move with an intentional gesture.
+	AVAudioSessionAnchoringStrategyFront       = 2
+} NS_REFINED_FOR_SWIFT;
+
+typedef NS_ENUM(NSInteger, AVAudioSessionSpatialExperience) {
+	/// A fully head-tracked spatial experience parameterized by
+	/// a sound stage size and anchoring strategy
+	AVAudioSessionSpatialExperienceHeadTracked = 0,
+
+	/// An unanchored, non-head-tracked spatial experience parameterized
+	/// by a sound stage size
+	AVAudioSessionSpatialExperienceFixed       = 1,
+
+	/// An experience that bypasses any system-provided spatialization and
+	/// instead mixes the application's sound straight to the output
+	AVAudioSessionSpatialExperienceBypassed  = 2,
+} NS_REFINED_FOR_SWIFT;
+
+typedef NSString * const AVAudioSessionSpatialExperienceOption NS_TYPED_ENUM NS_REFINED_FOR_SWIFT;
+
+/// Associated value is NSNumber with AVAudioSessionSoundStageSize. Only used if
+/// SpatialExperience is HeadTracked or Fixed. If not provided for
+/// those SpatialExperiences, the default will be
+/// AVAudioSessionSoundStageSizeAutomatic.
+OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionSoundStageSize API_AVAILABLE(visionos(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+/// Associated value is NSNumber with AVAudioSessionAnchoringStrategy. Only used if
+/// SpatialExperience is HeadTracked. If not provided for a head-tracked
+/// spatial experience, the default will be
+/// AVAudioSessionAnchoringStrategyAutomatic.
+OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionAnchoringStrategy API_AVAILABLE(visionos(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+/// Associated value is NSString from UIScene.session.persistentIdentifier. Only
+/// used if SpatialExperience is HeadTracked and AnchoringStrategy is
+/// Scene. If not provided for a scene-anchored spatial experience, the
+/// session will fail to set the intended spatial experience and
+/// return an error.
+OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionSceneIdentifier API_AVAILABLE(visionos(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+#endif // TARGET_OS_VISION
 
 #endif // AudioSession_AVAudioSessionTypes_h
 #else
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSettings.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSettings.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSettings.h	2025-04-19 01:01:42
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSettings.h	2025-05-24 01:17:36
@@ -39,6 +39,13 @@
 extern NSString *const AVEncoderBitRateStrategyKey                  API_AVAILABLE(macos(10.9), ios(7.0), watchos(2.0), tvos(9.0)); /* value is an AVAudioBitRateStrategy constant. see below. */
 extern NSString *const AVEncoderBitDepthHintKey					    API_AVAILABLE(macos(10.7), ios(3.0), watchos(3.0), tvos(9.0)); /* value is an integer from 8 to 32 */
 
+/* DRC/loudness encoder property keys */
+extern NSString *const AVEncoderDynamicRangeControlConfigurationKey        API_AVAILABLE(macos(26.0), ios(26.0), watchos(26.0), tvos(26.0)); /* value is an AVAudioDynamicRangeControlConfiguration constant - see below. */
+extern NSString *const AVEncoderContentSourceKey                    API_AVAILABLE(macos(26.0), ios(26.0), watchos(26.0), tvos(26.0)); /* value is an AVAudioContentSource constant - see below. */
+
+/* Audio Synchronization Packet encoder property keys */
+extern NSString *const AVEncoderASPFrequencyKey                     API_AVAILABLE(macos(26.0), ios(26.0), watchos(26.0), tvos(26.0)); /* value is an integer larger than 2. Recommended value is 75 */
+
 /* sample rate converter property keys */
 extern NSString *const AVSampleRateConverterAlgorithmKey            API_AVAILABLE(macos(10.9), ios(7.0), watchos(2.0), tvos(9.0)); /* value is an AVSampleRateConverterAlgorithm constant. see below. */
 extern NSString *const AVSampleRateConverterAudioQualityKey 		API_AVAILABLE(macos(10.9), ios(7.0), watchos(2.0), tvos(9.0)); /* value is an integer from enum AVAudioQuality */
@@ -68,3 +75,37 @@
 	AVAudioQualityMax    = 0x7F
 };
 
+typedef NS_ENUM(NSInteger, AVAudioDynamicRangeControlConfiguration) {
+    AVAudioDynamicRangeControlConfiguration_None    = 0,
+    AVAudioDynamicRangeControlConfiguration_Music   = 1,
+    AVAudioDynamicRangeControlConfiguration_Speech  = 2,
+    AVAudioDynamicRangeControlConfiguration_Movie   = 3,
+    AVAudioDynamicRangeControlConfiguration_Capture = 4
+};
+
+/*  Constants to be used with AVAudioContentSource to indicate the content type */
+typedef NS_ENUM(NSInteger, AVAudioContentSource) {
+    AVAudioContentSource_Unspecified                   = -1,
+    AVAudioContentSource_Reserved                      = 0,
+    AVAudioContentSource_AppleCapture_Traditional      = 1,  /* Traditional Apple device capture */
+    AVAudioContentSource_AppleCapture_Spatial          = 2,  /* Spatial Apple device capture */
+    AVAudioContentSource_AppleCapture_Spatial_Enhanced = 3,  /* Reserved for Apple use */
+    AVAudioContentSource_AppleMusic_Traditional        = 4,  /* Traditional Apple music and music video content such as stereo and multichannel */
+    AVAudioContentSource_AppleMusic_Spatial            = 5,  /* Spatial Apple music and music video content */
+    AVAudioContentSource_AppleAV_Traditional_Offline   = 6,  /* Traditional Apple professional AV offline encoded content such as stereo and multichannel */
+    AVAudioContentSource_AppleAV_Spatial_Offline       = 7,  /* Spatial Apple professional AV offline encoded content */
+    AVAudioContentSource_AppleAV_Traditional_Live      = 8,  /* Traditional Apple professional AV live content such as stereo and multichannel */
+    AVAudioContentSource_AppleAV_Spatial_Live          = 9,  /* Spatial Apple professional AV live content */
+    AVAudioContentSource_ApplePassthrough              = 10, /* Apple passthrough content (use only if source information is not available) */
+
+    AVAudioContentSource_Capture_Traditional           = 33, /* Traditional device capture */
+    AVAudioContentSource_Capture_Spatial               = 34, /* Spatial device capture */
+    AVAudioContentSource_Capture_Spatial_Enhanced      = 35, /* Reserved for future use */
+    AVAudioContentSource_Music_Traditional             = 36, /* Traditional music and music video content such as stereo and multichannel */
+    AVAudioContentSource_Music_Spatial                 = 37, /* Spatial music and music video content */
+    AVAudioContentSource_AV_Traditional_Offline        = 38, /* Traditional professional AV offline encoded content such as stereo and multichannel */
+    AVAudioContentSource_AV_Spatial_Offline            = 39, /* Spatial professional AV offline encoded content */
+    AVAudioContentSource_AV_Traditional_Live           = 40, /* Traditional professional AV live content such as stereo and multichannel */
+    AVAudioContentSource_AV_Spatial_Live               = 41, /* Spatial professional AV live content */
+    AVAudioContentSource_Passthrough                   = 42  /* Passthrough content (use only if source information is not available) */
+}   API_AVAILABLE(macos(26.0), ios(26.0), watchos(26.0), tvos(26.0));
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitMIDIInstrument.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitMIDIInstrument.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitMIDIInstrument.h	2025-04-19 03:09:46
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitMIDIInstrument.h	2025-05-24 01:17:36
@@ -19,171 +19,126 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-/*!
- @class AVAudioUnitMIDIInstrument
- @abstract Base class for sample synthesizers.
- @discussion
-    This base class represents audio units of type kAudioUnitType_MusicDevice or kAudioUnitType_RemoteInstrument. This can be used in a chain
-    that processes realtime input (live) and has general concept of music events i.e. notes.
- */
+/// Base class for MIDI instruments.
 API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0)) API_UNAVAILABLE(watchos)
 @interface AVAudioUnitMIDIInstrument : AVAudioUnit AVAudioUnitMIDIInstrument_MixingConformance
 
 #if AVAUDIOUNIT_HAVE_AUDIOUNIT
-/*! @method initWithAudioComponentDescription:
- @abstract initialize the node with the component description
- @param description
-    audio component description structure that describes the audio component of type kAudioUnitType_MusicDevice
-    or kAudioUnitType_RemoteInstrument.
- */
+/**
+ Initialize the node with the component description for an AUv2 Audio Unit.
+
+ - Parameter description: audio component description structure that describes the audio component of type kAudioUnitType_MusicDevice
+   or kAudioUnitType_RemoteInstrument.
+
+ - note: To load AUv3 audio units (or any audio unit asynchronously), use the class
+ method ``AVAudioUnit/instantiateWithComponentDescription:options:completionHandler:`` instead.
+*/
 - (instancetype)initWithAudioComponentDescription:(AudioComponentDescription)description;
 #endif
+/**
+ Sends a MIDI Note On event to the instrument
 
-/*! @method startNote:withVelocity:onChannel:
- @abstract sends a MIDI Note On event to the instrument
- @param note
-    the note number (key) to play.
-    Range: 0 -> 127
- @param velocity
-    specifies the volume with which the note is played.
-    Range: 0 -> 127
- @param channel
-    the channel number to which the event is sent
-	Range: 0 -> 15
- */
+ - Parameters:
+   - note: the note number (key) to play. Range: 0 -> 127
+   - velocity: specifies the volume with which the note is played. Range: 0 -> 127
+   - channel: the channel number to which the event is sent. Range: 0 -> 15
+*/
 - (void)startNote:(uint8_t)note withVelocity:(uint8_t)velocity onChannel:(uint8_t)channel;
+/**
+ Sends a MIDI Note Off event to the instrument
 
-/*! @method stopNote:onChannel:
- @abstract sends a MIDI Note Off event to the instrument
- @param note
-    the note number (key) to stop
-    Range: 0 -> 127
- @param channel
-    the channel number to which the event is sent.
-	Range: 0 -> 15
- 
- */
+ - Parameters:
+   - note: the note number (key) to stop. Range: 0 -> 127
+   - channel: the channel number to which the event is sent. Range: 0 -> 15
+*/
 - (void)stopNote:(uint8_t)note onChannel:(uint8_t)channel;
+/**
+ Sends a MIDI controller event to the instrument.
 
-/*! @method sendController:withValue:onChannel:
- @abstract send a MIDI controller event to the instrument.
- @param controller
-    a standard MIDI controller number. 
-    Range: 0 -> 127
- @param  value
-    value for the controller. 
-    Range: 0 -> 127
- @param channel
-    the channel number to which the event is sent.
-	Range: 0 -> 15
-
- */
+ - Parameters:
+   - controller: a standard MIDI controller number. Range: 0 -> 127
+   - value: value for the controller. Range: 0 -> 127
+   - channel: the channel number to which the event is sent. Range: 0 -> 15
+*/
 - (void)sendController:(uint8_t)controller withValue:(uint8_t)value onChannel:(uint8_t)channel;
+/**
+ Sends a MIDI Pitch Bend event to the instrument.
 
-/*! @method sendPitchBend:onChannel:
- @abstract sends MIDI Pitch Bend event to the instrument.
- @param pitchbend
-    value of the pitchbend
-    Range: 0 -> 16383
- @param channel
-    the channel number to which the event is sent.
-	Range: 0 -> 15
-
- */
+ - Parameters:
+   - pitchbend: value of the pitchbend. Range: 0 -> 16383
+   - channel: the channel number to which the event is sent. Range: 0 -> 15
+*/
 - (void)sendPitchBend:(uint16_t)pitchbend onChannel:(uint8_t)channel;
+/**
+ Sends MIDI channel pressure event to the instrument.
 
-/*! @method sendPressure:onChannel:
- @abstract sends MIDI channel pressure event to the instrument.
- @param pressure 
-    value of the pressure.
-    Range: 0 -> 127
- @param channel
-    the channel number to which the event is sent.
-	Range: 0 -> 15
-
- */
+ - Parameters:
+   - pressure: value of the pressure. Range: 0 -> 127
+   - channel: the channel number to which the event is sent. Range: 0 -> 15
+*/
 - (void)sendPressure:(uint8_t)pressure onChannel:(uint8_t)channel;
+/**
+ Sends MIDI Polyphonic key pressure event to the instrument
 
-/*! @method sendPressureForKey:withValue:onChannel:
- @abstract sends MIDI Polyphonic key pressure event to the instrument
- @param key
-    the key (note) number to which the pressure event applies
-    Range: 0 -> 127
- @param value
-    value of the pressure
-    Range: 0 -> 127
- @param channel
-    the channel number to which the event is sent.
-	Range: 0 -> 15
-
- */
+ - Parameters:
+   - key: the key (note) number to which the pressure event applies. Range: 0 -> 127
+   - value: value of the pressure. Range: 0 -> 127
+   - channel: the channel number to which the event is sent. Range: 0 -> 15
+*/
 - (void)sendPressureForKey:(uint8_t)key withValue:(uint8_t)value onChannel:(uint8_t)channel;
+/**
+ Sends MIDI Program Change event to the instrument
 
-/*! @method sendProgramChange:onChannel:
- @abstract sends MIDI Program Change event to the instrument
- @param program
-    the program number.
-    Range: 0 -> 127
- @param channel
-    the channel number to which the event is sent.
-	Range: 0 -> 15
- @discussion
-    the instrument will be loaded from the bank that has been previous set by MIDI Bank Select
-    controller messages (0 and 31). If none has been set, bank 0 will be used. 
- */
+   The instrument will be loaded from the bank that has been previously set by MIDI Bank Select
+   controller messages (0 and 31). If none has been set, bank 0 will be used.
+ - Parameters:
+   - program: the program number. Range: 0 -> 127
+   - channel: the channel number to which the event is sent. Range: 0 -> 15
+*/
 - (void)sendProgramChange:(uint8_t)program onChannel:(uint8_t)channel;
+/**
+ Sends a MIDI Program Change and Bank Select events to the instrument
 
-/*! @method sendProgramChange:bankMSB:bankLSB:onChannel:
- @abstract sends a MIDI Program Change and Bank Select events to the instrument
- @param program
-    specifies the program (preset) number within the bank to load.
-    Range: 0 -> 127
- @param bankMSB
-    specifies the most significant byte value for the bank to select.
-    Range: 0 -> 127
- @param bankLSB
-    specifies the least significant byte value for the bank to select.
-    Range: 0 -> 127
- @param channel
-    the channel number to which the event is sent.
-	Range: 0 -> 15
- */
+ - Parameters:
+   - program: specifies the program (preset) number within the bank to load. Range: 0 -> 127
+   - bankMSB: specifies the most significant byte value for the bank to select. Range: 0 -> 127
+   - bankLSB: specifies the least significant byte value for the bank to select. Range: 0 -> 127
+   - channel: the channel number to which the event is sent. Range: 0 -> 15
+*/
 - (void)sendProgramChange:(uint8_t)program bankMSB:(uint8_t)bankMSB bankLSB:(uint8_t)bankLSB onChannel:(uint8_t)channel;
+/**
+ Sends a MIDI event which contains two data bytes to the instrument.
 
-/*! @method sendMIDIEvent:data1:data2:
- @abstract sends a MIDI event which contains two data bytes to the instrument.
- @param midiStatus
-    the STATUS value of the MIDI event
- @param data1
-    the first data byte of the MIDI event
- @param data2
-    the second data byte of the MIDI event.
-  */
+ - Parameters:
+   - midiStatus: the STATUS value of the MIDI event
+   - data1: the first data byte of the MIDI event
+   - data2: the second data byte of the MIDI event.
+*/
 - (void)sendMIDIEvent:(uint8_t)midiStatus data1:(uint8_t)data1 data2:(uint8_t)data2;
+/**
+ Sends a MIDI event which contains one data byte to the instrument.
 
-/*! @method sendMIDIEvent:data1:
- @abstract sends a MIDI event which contains one data byte to the instrument.
- @param midiStatus
-    the STATUS value of the MIDI event
- @param data1
-    the first data byte of the MIDI event
- */
+ - Parameters:
+   - midiStatus: the STATUS value of the MIDI event
+   - data1: the first data byte of the MIDI event
+*/
 - (void)sendMIDIEvent:(uint8_t)midiStatus data1:(uint8_t)data1;
+/**
+ Sends a MIDI System Exclusive event to the instrument.
 
-/*! @method sendMIDISysExEvent:
- @abstract sends a MIDI System Exclusive event to the instrument.
- @param midiData
-    a NSData object containing the complete SysEx data including start(F0) and termination(F7) bytes.
- 
- */
+ - Parameters:
+   - midiData: a NSData object containing the complete SysEx data including start(F0) and termination(F7) bytes.
+*/
 - (void)sendMIDISysExEvent:(NSData *)midiData;
 
 #if AVAUDIOUNIT_HAVE_AUDIOUNIT
-/*! @method sendMIDIEventList:eventList:
- @abstract sends a MIDI event list to the instrument.
- @param eventList
-	the MIDIEventList
-  */
+/**
+ Sends a MIDI event list to the instrument.
+
+ - Parameters:
+   - eventList: the MIDIEventList
+*/
 - (void)sendMIDIEventList:(const struct MIDIEventList *)eventList API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
 #endif
 
Clone this wiki locally