
AVFAudio watchOS xcode15.0 b1


# AVFAudio.framework

## mandel

diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioApplication.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioApplication.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioApplication.h	1969-12-31 19:00:00
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioApplication.h	2023-05-19 19:53:34
@@ -0,0 +1,107 @@
+#if (defined(USE_AVFAUDIO_PUBLIC_HEADERS) && USE_AVFAUDIO_PUBLIC_HEADERS) || !__has_include(<AudioSession/AVAudioApplication.h>)
+/*!
+	@file		AVAudioApplication.h
+	@framework	AudioSession.framework
+	@copyright	(c) 2009-2023 Apple Inc. All rights reserved.
+*/
+
+#ifndef AVAudioApplication_h
+#define AVAudioApplication_h
+
+#import <AVFAudio/AVAudioSessionTypes.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*!
+	@enum AVAudioApplicationRecordPermission
+	@brief	These are the values returned by recordPermission.
+	@var	AVAudioApplicationRecordPermissionUndetermined
+	The user has not yet been asked for permission.
+	@var	AVAudioApplicationRecordPermissionDenied
+	The user has been asked and has denied permission.
+	@var	AVAudioApplicationRecordPermissionGranted
+	The user has been asked and has granted permission.
+*/
+typedef NS_ENUM(NSInteger, AVAudioApplicationRecordPermission) {
+	AVAudioApplicationRecordPermissionUndetermined = 'undt',
+	AVAudioApplicationRecordPermissionDenied = 'deny',
+	AVAudioApplicationRecordPermissionGranted = 'grnt'
+} NS_SWIFT_NAME(AVAudioApplication.recordPermission);
+
+
+/*!
+	@brief	Notification sent to registered listeners when the application's input is muted
+			or unmuted.
+
+	Check the notification's userInfo dictionary for the mute state under AVAudioApplicationMuteStateKey,
+	which will have a boolean value: 0 for unmuted, 1 for muted.
+	@note this notification will only be dispatched for state changes when there is an active record session (i.e. record or playAndRecord category).
+		Setting the `inputMuted` state while the record session is not active is allowed and will be stored, but it will not trigger a notification for the
+		state change. When the record session subsequently goes active, the `inputMuted` state will be applied, and this notification will be dispatched
+		with the latest input muted state.
+*/
+OS_EXPORT NSNotificationName const AVAudioApplicationInputMuteStateChangeNotification API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0)) NS_SWIFT_NAME(AVAudioApplication.inputMuteStateChangeNotification);
+
+/// Keys for AVAudioApplicationInputMuteStateChangeNotification
+/// Value is NSNumber type with boolean value 0 for unmuted or value 1 for muted (samples zeroed out)
+OS_EXPORT NSString *const AVAudioApplicationMuteStateKey API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0)) NS_SWIFT_NAME(AVAudioApplication.muteStateKey);
+
+
+/// Class containing methods that relate to an application bundle's audio (i.e. a collection of one or more AVAudioSession instances)
+API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0)) 
+@interface AVAudioApplication : NSObject
+
+/// Returns the singleton instance
+@property (class, readonly) AVAudioApplication *sharedInstance NS_SWIFT_NAME(shared);
+
+/// @see `sharedInstance`
+- (instancetype)init NS_UNAVAILABLE;
+
+/// @brief Set the muted/unmuted state of the application's audio input. When set true, inputs
+/// (microphone etc.) of all audio clients relating to this application will have their samples zeroed out.
+/// @note - this is per-application input muting and doesn't affect the hardware mute state.
+- (BOOL)setInputMuted:(BOOL)muted error:(NSError**)outError API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0));
+/// Get the input muted state - return value is boolean 0 for unmuted or value 1 for muted (input samples zeroed out)
+@property(readonly, nonatomic, getter=isInputMuted) BOOL inputMuted API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0));
+
+
+/*!
+	@brief  Provide a block that implements your app's input (microphone) muting logic (macOS only). The block will be called
+			whenever the input mute state changes, either due to changing the `AVAudioApplication.inputMuted` property on
+			this API, or due to a Bluetooth audio accessory gesture (certain AirPods / Beats headphones) changing the mute state.
+
+	@param	inputMuteHandler block that will be called upon every input mute state change. If the boolean `inputShouldBeMuted`
+			is true, your block should mute all input/microphone samples until the next time the handler is called. Your block should return
+			a value of YES if successful, or in exceptional cases return a NO value if the mute action was unsuccessful.
+			Since the input mute handling logic should happen in a single place, subsequent calls to this method will overwrite any previously
+			registered block with the one provided. A nil value may be provided to cancel the block being called, e.g. at the end of the call lifecycle.
+
+	@note   This is available on macOS only - for all other platforms input muting will be handled internally. It is recommended only to
+			perform your input muting logic within this block, and to perform your UI updates for input mute state changes within the handler
+			for AVAudioApplicationInputMuteStateChangeNotification.
+ */
+- (BOOL)setInputMuteStateChangeHandler:(BOOL (^_Nullable)(BOOL inputShouldBeMuted))inputMuteHandler error:(NSError**)outError API_AVAILABLE(macos(14.0)) API_UNAVAILABLE(ios, watchos, tvos, macCatalyst);
+
+
+/// Returns an enum indicating whether the user has granted or denied permission to record, or has
+/// not been asked
+@property (readonly) AVAudioApplicationRecordPermission recordPermission API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0));
+
+/*!
+	@brief Checks to see if calling process has permission to record audio.
+
+	The 'response' block will be called immediately if permission has already been granted or
+	denied.  Otherwise, it presents a dialog to notify the user and allow them to choose, and calls
+	the block once the UI has been dismissed.  'granted' indicates whether permission has been
+	granted. Note that the block may be called in a different thread context.
+*/
++ (void)requestRecordPermissionWithCompletionHandler:(void (^)(BOOL granted))response API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0));
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#endif /* AVAudioApplication_h */
+#else
+#include <AudioSession/AVAudioApplication.h>
+#endif
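
The new AVAudioApplication class moves record-permission handling and input muting to the application level. Below is a minimal sketch of how a client might adopt it, based only on the declarations above; note it uses AVAudioApplicationMuteStateKey, the key constant as declared in this beta header.

```objc
#import <AVFAudio/AVFAudio.h>

static void AdoptAVAudioApplication(void) {
    // Ask for record permission up front; the block may run on a different thread.
    [AVAudioApplication requestRecordPermissionWithCompletionHandler:^(BOOL granted) {
        NSLog(@"Record permission granted: %d", granted);
    }];

    // Observe app-wide input mute changes. Per the header, the notification only
    // fires while a record session is active.
    [NSNotificationCenter.defaultCenter
        addObserverForName:AVAudioApplicationInputMuteStateChangeNotification
                    object:nil
                     queue:NSOperationQueue.mainQueue
                usingBlock:^(NSNotification *note) {
            NSNumber *muted = note.userInfo[AVAudioApplicationMuteStateKey];
            NSLog(@"Input muted: %@", muted.boolValue ? @"yes" : @"no");
        }];

    // Mute this app's input; samples are zeroed out, the hardware mute is untouched.
    NSError *error = nil;
    if (![AVAudioApplication.sharedInstance setInputMuted:YES error:&error])
        NSLog(@"setInputMuted failed: %@", error);
}
```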
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioEngine.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioEngine.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioEngine.h	2023-03-09 19:14:06
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioEngine.h	2023-05-19 20:45:51
@@ -321,7 +321,10 @@
 		Prepare the engine for starting.
 
 	This method preallocates many of the resources the engine requires in order to start.
-	It can be used to be able to start more responsively.
+    Use it to responsively start audio input or output.
+ 
+    On AVAudioSession supported platforms, this method may cause the audio session to be implicitly activated. Activating the audio session (implicitly or explicitly) may cause other audio sessions to be interrupted or ducked depending on the session's configuration. It is recommended to configure and activate the app's audio session before preparing the engine.
+    See https://developer.apple.com/library/archive/documentation/Audio/Conceptual/AudioSessionProgrammingGuide/Introduction/Introduction.html for details.
 */
 - (void)prepare;
 
@@ -343,6 +346,8 @@
 	3. The driver failed to start the hardware.
 
 	In manual rendering mode, prepares the engine to render when requested by the client.
+ 
+    On AVAudioSession supported platforms, this method may cause the audio session to be implicitly activated. It is recommended to configure and activate the app's audio session before starting the engine. For more information, see the `prepare` method above.
 */
 - (BOOL)startAndReturnError:(NSError **)outError;
 
@@ -452,10 +457,9 @@
 	the input node, or create a recording tap on it.
 
 	When the engine is rendering to/from an audio device, the AVAudioSession category and/or
-	availability of hardware determine whether an app can perform input (e.g. input hardware is
-	not available on tvos). Check for the input node's input format (i.e. hardware format) for
-	non-zero sample rate and channel count to see if input is enabled.
-	Trying to perform input through the input node when it is not enabled or available will 
+	availability of hardware determine whether an app can perform input. Check for the input node's
+    input format (i.e. hardware format) for non-zero sample rate and channel count to see if input is enabled.
+	Trying to perform input through the input node when it is not enabled or available will
 	cause the engine to throw an error (when possible) or an exception.
 
 	In manual rendering mode, the input node can be used to synchronously supply data to
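
The notes added above reduce to an ordering rule: configure and activate the app's AVAudioSession before calling `prepare` or `startAndReturnError:`, so that the implicit activation never happens on an unconfigured session. A hedged sketch follows; the play-and-record category is an illustrative choice, not something the header mandates.

```objc
#import <AVFAudio/AVFAudio.h>

static BOOL StartEngine(AVAudioEngine *engine, NSError **outError) {
    // Configure and activate the session first, so that prepare/start
    // never implicitly activates an unconfigured session.
    AVAudioSession *session = AVAudioSession.sharedInstance;
    if (![session setCategory:AVAudioSessionCategoryPlayAndRecord error:outError])
        return NO;
    if (![session setActive:YES error:outError])
        return NO;

    [engine prepare]; // preallocates resources for a responsive start
    return [engine startAndReturnError:outError];
}
```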
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFormat.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFormat.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFormat.h	2023-03-09 19:09:35
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioFormat.h	2023-05-19 21:00:36
@@ -7,7 +7,7 @@
 
 #import <AVFAudio/AVAudioChannelLayout.h>
 
-#if __has_include(<CoreMedia/CMFormatDescription.h>)
+#if __has_include(<CoreMedia/CMFormatDescription.h>) && !0
 #define AVAUDIOFORMAT_HAVE_CMFORMATDESCRIPTION 1
 #import <CoreMedia/CMFormatDescription.h>
 #endif
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioIONode.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioIONode.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioIONode.h	2023-03-09 23:53:50
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioIONode.h	2023-05-19 21:00:37
@@ -40,7 +40,50 @@
 */
 typedef const AudioBufferList * __nullable (^AVAudioIONodeInputBlock)(AVAudioFrameCount inNumberOfFrames) API_AVAILABLE(macos(10.13), ios(11.0), watchos(4.0), tvos(11.0));
 
+/*!
+	@enum       AVAudioVoiceProcessingSpeechActivityEvent
+	@abstract	Types of speech activity events.
+	@constant	AVAudioVoiceProcessingSpeechActivityStarted
+				Speech activity has started.
+	@constant	AVAudioVoiceProcessingSpeechActivityEnded
+				Speech activity has ended.
+*/
+typedef NS_ENUM(NSInteger, AVAudioVoiceProcessingSpeechActivityEvent)
+{
+	AVAudioVoiceProcessingSpeechActivityStarted = 0,
+	AVAudioVoiceProcessingSpeechActivityEnded = 1
+} API_AVAILABLE(macos(14.0), ios(17.0), tvos(17.0)) API_UNAVAILABLE(watchos);
 
+/*!
+	@enum	AVAudioVoiceProcessingOtherAudioDuckingLevel
+	@abstract Ducking level applied to other (i.e. non-voice) audio by AVAudio voice processing AU.
+	@discussion
+			DuckingLevelDefault = Default ducking level to other audio for typical voice chat.
+			DuckingLevelMin = minimum ducking to other audio.
+			DuckingLevelMid = medium ducking to other audio.
+			DuckingLevelMax = maximum ducking to other audio.
+*/
+typedef NS_ENUM(NSInteger, AVAudioVoiceProcessingOtherAudioDuckingLevel) {
+	AVAudioVoiceProcessingOtherAudioDuckingLevelDefault = 0,
+	AVAudioVoiceProcessingOtherAudioDuckingLevelMin = 10,
+	AVAudioVoiceProcessingOtherAudioDuckingLevelMid = 20,
+	AVAudioVoiceProcessingOtherAudioDuckingLevelMax = 30
+} API_AVAILABLE(macos(14.0), ios(17.0)) API_UNAVAILABLE(tvos, watchos) NS_SWIFT_NAME(AVAudioVoiceProcessingOtherAudioDuckingConfiguration.Level);
+
+/*!
+	@struct          AVAudioVoiceProcessingOtherAudioDuckingConfiguration
+	@abstract        The configuration of ducking other (i.e. non-voice) audio
+
+	@var             enableAdvancedDucking
+						 Enables advanced ducking which ducks other audio based on the presence of voice activity from local and/or remote chat participants.
+	@var             duckingLevel
+						 Ducking level of other audio
+*/
+typedef struct API_AVAILABLE(macos(14.0), ios(17.0)) API_UNAVAILABLE(tvos, watchos) AVAudioVoiceProcessingOtherAudioDuckingConfiguration {
+	BOOL enableAdvancedDucking;
+	AVAudioVoiceProcessingOtherAudioDuckingLevel duckingLevel;
+} AVAudioVoiceProcessingOtherAudioDuckingConfiguration API_AVAILABLE(macos(14.0), ios(17.0)) API_UNAVAILABLE(tvos, watchos);
+
 /*!	@class AVAudioIONode
 	@abstract 
 		Base class for a node that performs audio input or output in the engine.
@@ -157,7 +200,7 @@
 
 /*! @property voiceProcessingBypassed
     @abstract
-       Bypass all processing done by the voice processing unit.
+       Bypass all processing for microphone uplink done by the voice processing unit.
     @discussion
        Querying this property when voice processing is disabled will return false.
  */
@@ -165,7 +208,7 @@
 
 /*! @property voiceProcessingAGCEnabled
     @abstract
-        Enable automatic gain control on the processed microphone/uplink
+        Enable automatic gain control on the processed microphone uplink.
         signal. Enabled by default.
     @discussion
         Querying this property when voice processing is disabled will return false.
@@ -177,8 +220,32 @@
         Mutes the input of the voice processing unit.
     @discussion
         Querying this property when voice processing is disabled will return false.
-    */
+*/
 @property (nonatomic, getter=isVoiceProcessingInputMuted) BOOL voiceProcessingInputMuted API_AVAILABLE(macos(10.15), ios(13.0), watchos(6.0), tvos(13.0));
+
+/*! @method setMutedSpeechActivityEventListener
+	@abstract
+		Register a listener to be notified when speech activity event occurs while the input is muted.
+	@param listenerBlock
+		The block the engine will call when speech activity event occurs while the input is muted.
+		Passing nil will remove an already set block.
+	@return
+		YES for success
+	@discussion
+		Continuous presence or absence of speech activity during mute will not cause redundant notifications.
+		In order to use this API, the mute is expected to be implemented via the voiceProcessingInputMuted property.
+*/
+- (BOOL)setMutedSpeechActivityEventListener:(nullable void (^)(AVAudioVoiceProcessingSpeechActivityEvent event))listenerBlock API_AVAILABLE(macos(14.0), ios(17.0), tvos(17.0)) API_UNAVAILABLE(watchos);
+
+/*! @property voiceProcessingOtherAudioDuckingConfiguration
+	@abstract
+		The configuration of ducking other (i.e. non-voice) audio
+	@discussion
+		Configures the ducking of other (i.e. non-voice) audio, including advanced ducking enablement and ducking level.
+		In general, when other audio is played during voice chat, applying a higher level of ducking could increase the intelligibility of the voice chat.
+		If not set, the default ducking configuration is to disable advanced ducking, with a ducking level set to AVAudioVoiceProcessingOtherAudioDuckingLevelDefault.
+*/
+@property (nonatomic) AVAudioVoiceProcessingOtherAudioDuckingConfiguration voiceProcessingOtherAudioDuckingConfiguration API_AVAILABLE(macos(10.14), ios(17.0)) API_UNAVAILABLE(watchos, tvos);
 
 @end
 
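The ducking configuration and the muted-speech listener are marked API_UNAVAILABLE(watchos) even though they appear in this watchOS header, so the following sketch only applies on macOS 14 / iOS 17. It assumes voice processing has already been enabled on the input node (via `setVoiceProcessingEnabled:error:`, declared elsewhere in this header).

```objc
#import <AVFAudio/AVFAudio.h>

static void ConfigureVoiceProcessing(AVAudioInputNode *inputNode) {
    // Duck other (non-voice) audio more aggressively during voice chat.
    AVAudioVoiceProcessingOtherAudioDuckingConfiguration config = {
        .enableAdvancedDucking = YES,
        .duckingLevel = AVAudioVoiceProcessingOtherAudioDuckingLevelMax,
    };
    inputNode.voiceProcessingOtherAudioDuckingConfiguration = config;

    // Mute through the voice processing unit, then get notified if the user
    // speaks while muted (e.g. to show a "you are muted" banner).
    inputNode.voiceProcessingInputMuted = YES;
    BOOL ok = [inputNode setMutedSpeechActivityEventListener:
        ^(AVAudioVoiceProcessingSpeechActivityEvent event) {
            if (event == AVAudioVoiceProcessingSpeechActivityStarted)
                NSLog(@"Speech detected while muted");
        }];
    if (!ok)
        NSLog(@"Could not register muted speech activity listener");
}
```
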
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioRecorder.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioRecorder.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioRecorder.h	2023-03-09 19:22:06
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioRecorder.h	2023-05-19 21:00:36
@@ -3,7 +3,7 @@
 	
 	Framework:  AVFoundation
 
-	Copyright 2008-2016 Apple Inc. All rights reserved.
+	Copyright 2008-2023 Apple Inc. All rights reserved.
 */
 
 #import <AVFAudio/AVAudioFormat.h>
@@ -15,95 +15,225 @@
 @protocol AVAudioRecorderDelegate;
 @class NSURL, NSError;
 
-
-API_AVAILABLE(macos(10.7), ios(3.0), watchos(4.0)) API_UNAVAILABLE(tvos)
+/*!
+    @class AVAudioRecorder
+    @abstract An object that records audio data to a file.
+ */
+API_AVAILABLE(macos(10.7), ios(3.0), watchos(4.0), tvos(17.0))
 @interface AVAudioRecorder : NSObject {
 @private
     void *_impl;
 }
 
-
-/* The file type to create can be set through the corresponding settings key. If not set, it will be inferred from the file extension. Will overwrite a file at the specified url if a file exists. */
+/*!
+    @method initWithURL:settings:error:
+    @abstract Init the AudioRecorder with a specified url and settings.
+    @discussion The file type to create can be set through the corresponding settings key. If not set, it will be inferred from the file extension. Will overwrite a file at the specified url if a file exists.
+ */
 - (nullable instancetype)initWithURL:(NSURL *)url settings:(NSDictionary<NSString *, id> *)settings error:(NSError **)outError;
 
-/* The file type to create can be set through the corresponding settings key. If not set, it will be inferred from the file extension. Will overwrite a file at the specified url if a file exists. */
-- (nullable instancetype)initWithURL:(NSURL *)url format:(AVAudioFormat *)format error:(NSError **)outError API_AVAILABLE(macos(10.12), ios(10.0), watchos(4.0)) API_UNAVAILABLE(tvos);
+/*!
+    @method initWithURL:format:error:
+    @abstract Init the AudioRecorder with a specified url and format.
+    @discussion The file type to create can be set through the corresponding settings key. If not set, it will be inferred from the file extension. Will overwrite a file at the specified url if a file exists.
+ */
+- (nullable instancetype)initWithURL:(NSURL *)url format:(AVAudioFormat *)format error:(NSError **)outError API_AVAILABLE(macos(10.12), ios(10.0), watchos(4.0), tvos(17.0));
 
 /* transport control */
 /* methods that return BOOL return YES on success and NO on failure. */
-- (BOOL)prepareToRecord; /* creates the file and gets ready to record. happens automatically on record. */
-- (BOOL)record; /* start or resume recording to file. */
-- (BOOL)recordAtTime:(NSTimeInterval)time API_AVAILABLE(macos(10.9), ios(6.0), watchos(4.0)) API_UNAVAILABLE(tvos); /* start recording at specified time in the future. time is an absolute time based on and greater than deviceCurrentTime. */
-- (BOOL)recordForDuration:(NSTimeInterval) duration; /* record a file of a specified duration. the recorder will stop when it has recorded this length of audio */
-- (BOOL)recordAtTime:(NSTimeInterval)time forDuration:(NSTimeInterval) duration API_AVAILABLE(macos(10.9), ios(6.0), watchos(4.0)) API_UNAVAILABLE(tvos); /* record a file of a specified duration starting at specified time. time is an absolute time based on and greater than deviceCurrentTime. */
-- (void)pause; /* pause recording */
-- (void)stop; /* stops recording. closes the file. */
 
-- (BOOL)deleteRecording; /* delete the recorded file. recorder must be stopped. returns NO on failure. */
+/*!
+    @method prepareToRecord
+    @abstract Creates the output file and gets ready to record.
+    @discussion This method is called automatically on record. Returns YES on success and NO on failure.
+ */
+- (BOOL)prepareToRecord;
 
+/*!
+    @method record
+    @abstract Start or resume recording to file.
+    @discussion Returns YES on success and NO on failure.
+ */
+- (BOOL)record;
+
+/*!
+    @method recordAtTime:
+    @abstract Start recording at specified time in the future.
+    @discussion Time is an absolute time based on and greater than deviceCurrentTime. Returns YES on success and NO on failure.
+ */
+- (BOOL)recordAtTime:(NSTimeInterval)time API_AVAILABLE(macos(10.9), ios(6.0), watchos(4.0), tvos(17.0));
+
+/*!
+    @method recordForDuration:
+    @abstract Record for a specified duration.
+    @discussion The recorder will stop when it has recorded this length of audio. Returns YES on success and NO on failure.
+ */
+- (BOOL)recordForDuration:(NSTimeInterval) duration;
+
+/*!
+    @method recordAtTime:forDuration:
+    @abstract Record for a specified duration at a specified time in the future.
+    @discussion Time is an absolute time based on and greater than deviceCurrentTime. Returns YES on success and NO on failure.
+ */
+- (BOOL)recordAtTime:(NSTimeInterval)time forDuration:(NSTimeInterval) duration API_AVAILABLE(macos(10.9), ios(6.0), watchos(4.0), tvos(17.0));
+
+/*!
+    @method pause
+    @abstract Pause recording.
+ */
+- (void)pause;
+
+/*!
+    @method stop
+    @abstract Stop recording.
+    @discussion This method also closes the output file.
+ */
+- (void)stop;
+
+/*!
+    @method deleteRecording
+    @abstract Delete the recorded file.
+    @discussion AudioRecorder must be stopped. Returns YES on success and NO on failure.
+ */
+- (BOOL)deleteRecording;
+
 /* properties */
 
-@property(readonly, getter=isRecording) BOOL recording; /* is it recording or not? */
+/*!
+    @property recording
+    @abstract Returns YES if the AudioRecorder is currently recording.
+ */
+@property(readonly, getter=isRecording) BOOL recording;
 
-@property(readonly) NSURL *url; /* URL of the recorded file */
+/*!
+    @property url
+    @abstract URL of the recorded file.
+ */
+@property(readonly) NSURL *url;
 
-/* these settings are fully valid only when prepareToRecord has been called */
+/*!
+    @property settings
+    @abstract A dictionary of settings for the AudioRecorder.
+    @discussion These settings are fully valid only when prepareToRecord has been called. For supported key-value pairs, see https://developer.apple.com/documentation/avfaudio/avaudiorecorder/1388386-initwithurl?language=objc
+ */
 @property(readonly) NSDictionary<NSString *, id> *settings;
 
-/* this object is fully valid only when prepareToRecord has been called */
-@property(readonly) AVAudioFormat *format API_AVAILABLE(macos(10.12), ios(10.0), watchos(4.0)) API_UNAVAILABLE(tvos);
+/*!
+    @property format
+    @abstract The audio format of the AudioRecorder.
+    @discussion This property is fully valid only when prepareToRecord has been called.
+ */
+@property(readonly) AVAudioFormat *format API_AVAILABLE(macos(10.12), ios(10.0), watchos(4.0), tvos(17.0));
 
-/* the delegate will be sent messages from the AVAudioRecorderDelegate protocol */ 
+/*!
+    @property delegate
+    @abstract A delegate object to the AudioRecorder that conforms to the AVAudioRecorderDelegate protocol.
+ */
 @property(weak, nullable) id<AVAudioRecorderDelegate> delegate;
 
-/* get the current time of the recording - only valid while recording */
+/*!
+    @property currentTime
+    @abstract Get the current time of the recording.
+    @discussion This property is only valid while recording.
+ */
 @property(readonly) NSTimeInterval currentTime;
-/* get the device current time - always valid */
-@property(readonly) NSTimeInterval deviceCurrentTime API_AVAILABLE(macos(10.9), ios(6.0), watchos(4.0)) API_UNAVAILABLE(tvos);
 
+/*!
+    @property deviceCurrentTime
+    @abstract Get the device current time.
+    @discussion This property is always valid.
+ */
+@property(readonly) NSTimeInterval deviceCurrentTime API_AVAILABLE(macos(10.9), ios(6.0), watchos(4.0), tvos(17.0));
+
 /* metering */
 
-@property(getter=isMeteringEnabled) BOOL meteringEnabled; /* turns level metering on or off. default is off. */
+/*!
+    @property meteringEnabled
+    @abstract Turns level metering on or off.
+    @discussion Default is off.
+ */
+@property(getter=isMeteringEnabled) BOOL meteringEnabled;
 
-- (void)updateMeters; /* call to refresh meter values */
+/*!
+    @method updateMeters
+    @abstract Call this method to refresh meter values.
+ */
+- (void)updateMeters;
 
-- (float)peakPowerForChannel:(NSUInteger)channelNumber; /* returns peak power in decibels for a given channel */
-- (float)averagePowerForChannel:(NSUInteger)channelNumber; /* returns average power in decibels for a given channel */
+/*!
+    @method peakPowerForChannel:
+    @abstract Returns peak power in decibels for a given channel.
+ */
+- (float)peakPowerForChannel:(NSUInteger)channelNumber;
 
-/* The channels property lets you assign the output to record specific channels as described by AVAudioSession's channels property */
-/* This property is nil valued until set. */
-/* The array must have the same number of channels as returned by the numberOfChannels property. */
-@property(nonatomic, copy, nullable) NSArray<AVAudioSessionChannelDescription *> *channelAssignments API_AVAILABLE(ios(7.0), watchos(4.0)) API_UNAVAILABLE(macos, tvos) ; /* Array of AVAudioSessionChannelDescription objects */
+/*!
+    @method averagePowerForChannel:
+    @abstract Returns average power in decibels for a given channel.
+ */
+- (float)averagePowerForChannel:(NSUInteger)channelNumber;
 
+/*!
+    @property channelAssignments
+    @abstract Array of AVAudioSessionChannelDescription objects
+    @discussion The channels property lets you assign the output to record specific channels as described by AVAudioSessionPortDescription's channels property. This property is nil valued until set. The array must have the same number of channels as returned by the numberOfChannels property.
+ */
+@property(nonatomic, copy, nullable) NSArray<AVAudioSessionChannelDescription *> *channelAssignments API_AVAILABLE(ios(7.0), watchos(4.0), tvos(17.0)) API_UNAVAILABLE(macos) ;
+
 @end
 
-
-/* A protocol for delegates of AVAudioRecorder */
-API_AVAILABLE(macos(10.7), ios(3.0), watchos(4.0)) API_UNAVAILABLE(tvos)
+/*!
+    @protocol AVAudioRecorderDelegate
+    @abstract A protocol for delegates of AVAudioRecorder.
+ */
+API_AVAILABLE(macos(10.7), ios(3.0), watchos(4.0), tvos(17.0))
 @protocol AVAudioRecorderDelegate <NSObject>
 @optional 
 
-/* audioRecorderDidFinishRecording:successfully: is called when a recording has been finished or stopped. This method is NOT called if the recorder is stopped due to an interruption. */
+/*!
+    @method audioRecorderDidFinishRecording:successfully:
+    @abstract This callback method is called when a recording has been finished or stopped.
+    @discussion This method is NOT called if the recorder is stopped due to an interruption.
+ */
 - (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag;
 
-/* if an error occurs while encoding it will be reported to the delegate. */
+/*!
+    @method audioRecorderEncodeErrorDidOccur:error:
+    @abstract This callback method is called when an error occurs while encoding.
+    @discussion If an error occurs while encoding it will be reported to the delegate.
+ */
 - (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder error:(NSError * __nullable)error;
 
 #if TARGET_OS_IPHONE
 
 /* AVAudioRecorder INTERRUPTION NOTIFICATIONS ARE DEPRECATED - Use AVAudioSession instead. */
 
-/* audioRecorderBeginInterruption: is called when the audio session has been interrupted while the recorder was recording. The recorded file will be closed. */
-- (void)audioRecorderBeginInterruption:(AVAudioRecorder *)recorder NS_DEPRECATED_IOS(2_2, 8_0);
+/*!
+    @method audioRecorderBeginInterruption:
+    @abstract audioRecorderBeginInterruption: is called when the audio session has been interrupted while the recorder was recording. The recorded file will be closed.
+    @discussion Deprecated - use AVAudioSession instead.
+ */
+- (void)audioRecorderBeginInterruption:(AVAudioRecorder *)recorder API_UNAVAILABLE(tvos) API_DEPRECATED("Deprecated - use AVAudioSession instead", ios(2.2, 8.0));
 
-/* audioRecorderEndInterruption:withOptions: is called when the audio session interruption has ended and this recorder had been interrupted while recording. */
-/* Currently the only flag is AVAudioSessionInterruptionFlags_ShouldResume. */
-- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder withOptions:(NSUInteger)flags NS_DEPRECATED_IOS(6_0, 8_0);
+/*!
+    @method audioRecorderEndInterruption:withOptions:
+    @abstract audioRecorderEndInterruption:withOptions: is called when the audio session interruption has ended and this recorder had been interrupted while recording.
+    @discussion Currently the only flag is AVAudioSessionInterruptionFlags_ShouldResume.  Deprecated - use AVAudioSession instead.
+ */
+- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder withOptions:(NSUInteger)flags API_UNAVAILABLE(tvos) API_DEPRECATED("Deprecated - use AVAudioSession instead", ios(6.0, 8.0));
 
-- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder withFlags:(NSUInteger)flags NS_DEPRECATED_IOS(4_0, 6_0);
+/*!
+    @method audioRecorderEndInterruption:withFlags:
+    @abstract audioRecorderEndInterruption:withFlags: is called when the audio session interruption has ended and this recorder had been interrupted while recording.
+    @discussion Deprecated - use AVAudioSession instead.
+ */
+- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder withFlags:(NSUInteger)flags API_UNAVAILABLE(tvos) API_DEPRECATED("Deprecated - use AVAudioSession instead", ios(4.0, 6.0));
 
-/* audioRecorderEndInterruption: is called when the preferred method, audioRecorderEndInterruption:withFlags:, is not implemented. */
-- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder NS_DEPRECATED_IOS(2_2, 6_0);
+/*!
+    @method audioRecorderEndInterruption:
+    @abstract audioRecorderEndInterruption: is called when the preferred method, audioRecorderEndInterruption:withFlags:, is not implemented.
+    @discussion Deprecated - use AVAudioSession instead.
+ */
+- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder API_UNAVAILABLE(tvos) API_DEPRECATED("Deprecated - use AVAudioSession instead", ios(2.2, 6.0));
 
 #endif // TARGET_OS_IPHONE
 
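The AVAudioRecorder changes are mostly documentation: inline comments become HeaderDoc blocks, and availability is extended to tvOS 17. For reference, here is the documented flow, sketched with common settings keys from AVAudioSettings.h (not part of this header):

```objc
#import <AVFAudio/AVFAudio.h>

static AVAudioRecorder * _Nullable MakeRecorder(NSURL *url, NSError **outError) {
    // The file type can be set via the settings; otherwise it is inferred from
    // the URL's extension (e.g. .m4a). An existing file at the URL is overwritten.
    NSDictionary<NSString *, id> *settings = @{
        AVFormatIDKey: @(kAudioFormatMPEG4AAC),
        AVSampleRateKey: @44100.0,
        AVNumberOfChannelsKey: @1,
    };
    AVAudioRecorder *recorder = [[AVAudioRecorder alloc] initWithURL:url
                                                            settings:settings
                                                               error:outError];
    [recorder prepareToRecord]; // optional; record calls it implicitly
    return recorder;
}
```
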
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSequencer.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSequencer.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSequencer.h	2023-03-09 23:50:42
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSequencer.h	2023-05-19 21:00:36
@@ -56,6 +56,7 @@
 	return r;
 }
 
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
 typedef NSString *AVAudioSequencerInfoDictionaryKey NS_TYPED_ENUM;
 
 AVAS_EXPORT
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h	2023-03-09 19:09:36
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h	2023-05-26 21:31:29
@@ -2,7 +2,7 @@
 /*!
 	@file		AVAudioSession.h
 	@framework	AudioSession.framework
-	@copyright	(c) 2009-2020 Apple Inc. All rights reserved.
+	@copyright	(c) 2009-2023 Apple Inc. All rights reserved.
 */
 
 #ifndef AudioSession_AVAudioSession_h
@@ -116,9 +116,8 @@
 
 /// Returns an enum indicating whether the user has granted or denied permission to record, or has
 /// not been asked
-@property (readonly) AVAudioSessionRecordPermission recordPermission API_AVAILABLE(ios(8.0), watchos(4.0)) API_UNAVAILABLE(macos, tvos);
+@property (readonly) AVAudioSessionRecordPermission recordPermission API_DEPRECATED("Please use AVAudioApplication recordPermission", ios(8.0, 17.0), watchos(4.0, 10.0)) API_UNAVAILABLE(macos, tvos);
 
-
 /*!
  	@brief Checks to see if calling process has permission to record audio.
  
@@ -127,7 +126,7 @@
 	the block once the UI has been dismissed.  'granted' indicates whether permission has been
 	granted. Note that the block may be called in a different thread context.
 */
-- (void)requestRecordPermission:(void (^)(BOOL granted))response API_AVAILABLE(ios(7.0), watchos(4.0)) API_UNAVAILABLE(macos, tvos);
+- (void)requestRecordPermission:(void (^)(BOOL granted))response API_DEPRECATED("Please use AVAudioApplication requestRecordPermissionWithCompletionHandler", ios(7.0, 17.0), watchos(4.0, 10.0)) API_UNAVAILABLE(macos, tvos);
 
 /*!
     @brief Use this method to temporarily override the output to built-in speaker.
@@ -283,7 +282,7 @@
 /// input gain, so check this value before attempting to set input gain.
 @property (readonly, getter=isInputGainSettable) BOOL inputGainSettable API_AVAILABLE(ios(6.0), tvos(9.0)) API_UNAVAILABLE(watchos, macos);
 
-/// True if input hardware is available.
+/// True if input hardware is available. Key-value observable.
 @property (readonly, getter=isInputAvailable) BOOL inputAvailable API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /*!
@@ -420,11 +419,128 @@
 
 @property(readonly) BOOL supportsMultichannelContent API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos);
 
+/*!
+    @brief Use this method to opt in or opt out of interruption on route disconnect policy.
+ 
+    As described in the Audio Session Programming Guide, most media playback apps are expected
+    to pause playback if the route change reason is AVAudioSessionRouteChangeReasonOldDeviceUnavailable.
+ 
+    Starting in iOS 17, by default Now Playing sessions will be interrupted if they are active
+    when a route change occurs because of a disconnect event. All other sessions will not be
+    interrupted due to a disconnect event.
+*/
+- (BOOL) setPrefersInterruptionOnRouteDisconnect:(BOOL)inValue error:(NSError **)outError API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0)) API_UNAVAILABLE(macos);
+
+/// Indicates if session will be interrupted on route disconnect.
+@property(readonly) BOOL prefersInterruptionOnRouteDisconnect API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0)) API_UNAVAILABLE(macos);
+
 @end // interface for AVAudioSession (RoutingConfiguration)
 
+#if defined(TARGET_OS_XR) && TARGET_OS_XR
+/*!
+ The perceived "size" or "immersivity" of the sound. Use Small for least
+ immersive and Large for most immersive.
+ */
+typedef NS_ENUM(NSInteger, AVAudioSessionSoundStageSize) {
+    /// The audio session determines its own sound stage size based on
+    /// a handful of factors
+    AVAudioSessionSoundStageSizeAutomatic = 0,
+    
+    /// A smaller, front-focused sound stage
+    AVAudioSessionSoundStageSizeSmall     = 1,
+    
+    /// A medium-immersive sound stage
+    AVAudioSessionSoundStageSizeMedium    = 2,
+    
+    /// A fully-immersive sound stage
+    AVAudioSessionSoundStageSizeLarge     = 3,
+} NS_SWIFT_NAME(AVAudioSession.SoundStageSize);
 
+/*!
+ When the intended spatial experience is HeadTracked, the anchoring strategy
+ provides additional information about the reference point for spatialization.
+ */
+typedef NS_ENUM(NSInteger, AVAudioSessionAnchoringStrategy) {
+    /// The audio session determines its own anchoring strategy based on
+    /// a handful of factors
+    AVAudioSessionAnchoringStrategyAutomatic   = 0,
+    
+    /// The session is anchored to the developer-provided scene
+    /// identifier (i.e. UIScene.session.persistentIdentifier)
+    AVAudioSessionAnchoringStrategyScene       = 1,
+    
+    /// The session is anchored to the user's concept of "front"
+    /// which the user can move with an intentional gesture.
+    AVAudioSessionAnchoringStrategyFront       = 2
+} NS_REFINED_FOR_SWIFT;
 
+typedef NS_ENUM(NSInteger, AVAudioSessionSpatialExperience) {
+    /// A fully head-tracked spatial experience parameterized by
+    /// a sound stage size and anchoring strategy
+    AVAudioSessionSpatialExperienceHeadTracked = 0,
+    
+    /// An unanchored, non-head-tracked spatial experience parameterized
+    /// by a sound stage size
+    AVAudioSessionSpatialExperienceFixed       = 1,
+    
+    /// An experience that bypasses any system-provided spatialization and
+    /// instead mixes the application's sound straight to the output
+    AVAudioSessionSpatialExperienceBypassed  = 2,
+} NS_REFINED_FOR_SWIFT;
 
+typedef NSString * const AVAudioSessionSpatialExperienceOption NS_TYPED_ENUM NS_REFINED_FOR_SWIFT;
+
+/// Associated value is NSNumber with AVAudioSessionSoundStageSize. Only used if
+/// SpatialExperience is HeadTracked or Fixed. If not provided for
+/// those SpatialExperiences, the default will be
+/// AVAudioSessionSoundStageSizeAutomatic.
+OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionSoundStageSize API_AVAILABLE(xros(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+/// Associated value is NSNumber with AVAudioSessionAnchoringStrategy. Only used if
+/// SpatialExperience is HeadTracked. If not provided for a head-tracked
+/// spatial experience, the default will be
+/// AVAudioSessionAnchoringStrategyAutomatic.
+OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionAnchoringStrategy API_AVAILABLE(xros(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+/// Associated value is NSString from UIScene.session.persistentIdentifier. Only
+/// used if SpatialExperience is HeadTracked and AnchoringStrategy is
+/// Scene. If not provided for a scene-anchored spatial experience, the
+/// session will fail to set the intended spatial experience and
+/// return an error.
+OS_EXPORT AVAudioSessionSpatialExperienceOption AVAudioSessionSpatialExperienceOptionSceneIdentifier API_AVAILABLE(xros(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+
+@interface AVAudioSession (SpatialPreference)
+
+
+/// Configure the developer's intended spatial experience for this audio session.
+- (BOOL)setIntendedSpatialExperience:(AVAudioSessionSpatialExperience)intendedSpatialExperience
+                             options:(nullable NSDictionary<AVAudioSessionSpatialExperienceOption, id>*)options
+                               error:(NSError**)error NS_REFINED_FOR_SWIFT API_AVAILABLE(xros(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+/// Get the developer's intended spatial experience
+@property (readonly, nonatomic) AVAudioSessionSpatialExperience intendedSpatialExperience NS_REFINED_FOR_SWIFT API_AVAILABLE(xros(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+/// Get the developer's intended spatial experience options. May be nil if the
+/// experience is NonSpatial.
+@property (readonly, nonatomic, nullable) NSDictionary<AVAudioSessionSpatialExperienceOption, id>* intendedSpatialExperienceOptions NS_REFINED_FOR_SWIFT API_AVAILABLE(xros(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+@end // AVAudioSession (SpatialPreference)
+
+@interface AVAudioSession (NowPlayingCandidacy)
+
+/// Get/set whether this session is a now-playing candidate. Now playing
+/// candidacy must be exclusive for an audio session to be eligible - that is,
+/// if multiple audio sessions from the same application are now-playing
+/// candidates, none of them are eligible.
+@property(readonly) BOOL isNowPlayingCandidate API_AVAILABLE(xros(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+- (BOOL)setIsNowPlayingCandidate:(BOOL)inValue error:(NSError **)outError API_AVAILABLE(xros(1.0)) API_UNAVAILABLE(ios, watchos, tvos, macos);
+
+@end // AVAudioSession (NowPlayingCandidacy)
+
+#endif // TARGET_OS_XR
+
+
 #pragma mark-- Names for NSNotifications --
 
 /*!
@@ -437,7 +553,7 @@
     In the case of a begin interruption notification, the reason for the interruption can be found
     within the info dictionary under the key AVAudioSessionInterruptionReasonKey.
 */
-OS_EXPORT NSNotificationName const  AVAudioSessionInterruptionNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSNotificationName const  AVAudioSessionInterruptionNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /*!
 	@brief	Notification sent to registered listeners when an audio route change has occurred.
@@ -445,7 +561,7 @@
 	Check the notification's userInfo dictionary for the route change reason and for a description
 	of the previous audio route.
 */
-OS_EXPORT NSNotificationName const  AVAudioSessionRouteChangeNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSNotificationName const  AVAudioSessionRouteChangeNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /*!
 	@brief	Notification sent to registered listeners if the media server is killed.
@@ -453,7 +569,7 @@
 	In the event that the server is killed, take appropriate steps to handle requests that come in
 	before the server resets.  See Technical Q&A QA1749.
 */
-OS_EXPORT NSNotificationName const  AVAudioSessionMediaServicesWereLostNotification API_AVAILABLE(ios(7.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSNotificationName const  AVAudioSessionMediaServicesWereLostNotification API_AVAILABLE(ios(7.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /*!
 	@brief	Notification sent to registered listeners when the media server restarts.
@@ -461,7 +577,7 @@
 	In the event that the server restarts, take appropriate steps to re-initialize any audio objects
 	used by your application.  See Technical Q&A QA1749.
 */
-OS_EXPORT NSNotificationName const  AVAudioSessionMediaServicesWereResetNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSNotificationName const  AVAudioSessionMediaServicesWereResetNotification API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /*!
 	@brief	Notification sent to registered listeners when they are in the foreground with an active
@@ -472,7 +588,7 @@
 	is secondary to the functionality of the application. For more information, see the related
 	property secondaryAudioShouldBeSilencedHint.
 */
-OS_EXPORT NSNotificationName const  AVAudioSessionSilenceSecondaryAudioHintNotification API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSNotificationName const  AVAudioSessionSilenceSecondaryAudioHintNotification API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /*!
     @brief  Notification sent to registered listeners when spatial playback capabilities are changed due to a
@@ -496,10 +612,10 @@
 
 /// keys for AVAudioSessionInterruptionNotification
 /// Value is an NSNumber representing an AVAudioSessionInterruptionType
-OS_EXPORT NSString *const AVAudioSessionInterruptionTypeKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSString *const AVAudioSessionInterruptionTypeKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /// Only present for end interruption events.  Value is of type AVAudioSessionInterruptionOptions.
-OS_EXPORT NSString *const AVAudioSessionInterruptionOptionKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSString *const AVAudioSessionInterruptionOptionKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /// Only present in begin interruption events. Value is of type AVAudioSessionInterruptionReason.
 OS_EXPORT NSString *const AVAudioSessionInterruptionReasonKey API_AVAILABLE(ios(14.5), watchos(7.3)) API_UNAVAILABLE(tvos, macos);
@@ -521,13 +637,13 @@
 
 /// keys for AVAudioSessionRouteChangeNotification
 /// value is an NSNumber representing an AVAudioSessionRouteChangeReason
-OS_EXPORT NSString *const AVAudioSessionRouteChangeReasonKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSString *const AVAudioSessionRouteChangeReasonKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 /// value is AVAudioSessionRouteDescription *
-OS_EXPORT NSString *const AVAudioSessionRouteChangePreviousRouteKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSString *const AVAudioSessionRouteChangePreviousRouteKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /// keys for AVAudioSessionSilenceSecondaryAudioHintNotification
 /// value is an NSNumber representing an AVAudioSessionSilenceSecondaryAudioHintType
-OS_EXPORT NSString *const AVAudioSessionSilenceSecondaryAudioHintTypeKey API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0));
+OS_EXPORT NSString *const AVAudioSessionSilenceSecondaryAudioHintTypeKey API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 NS_ASSUME_NONNULL_END
 
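Putting the session additions together: interruption-on-route-disconnect is a per-session preference, and the matching interruption reason (AVAudioSessionInterruptionReasonRouteDisconnected, added in AVAudioSessionTypes.h below) arrives in the interruption notification's userInfo. A sketch of a client that opts in and distinguishes that case:

```objc
#import <AVFAudio/AVFAudio.h>

static void ObserveRouteDisconnectInterruptions(void) {
    // Opt in: interrupt this session when its route is disconnected.
    NSError *error = nil;
    if (![AVAudioSession.sharedInstance setPrefersInterruptionOnRouteDisconnect:YES
                                                                          error:&error])
        NSLog(@"Could not set preference: %@", error);

    [NSNotificationCenter.defaultCenter
        addObserverForName:AVAudioSessionInterruptionNotification
                    object:nil
                     queue:NSOperationQueue.mainQueue
                usingBlock:^(NSNotification *note) {
            NSNumber *reason = note.userInfo[AVAudioSessionInterruptionReasonKey];
            if (reason.unsignedIntegerValue == AVAudioSessionInterruptionReasonRouteDisconnected)
                NSLog(@"Interrupted because the route was disconnected");
        }];
}
```
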
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionTypes.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionTypes.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionTypes.h	2023-03-09 19:17:20
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionTypes.h	2023-05-26 21:30:38
@@ -2,7 +2,7 @@
 /*!
 	@file		AVAudioSessionTypes.h
 	@framework	AudioSession.framework
-	@copyright	(c) 2009-2020 Apple Inc. All rights reserved.
+	@copyright	(c) 2009-2023 Apple Inc. All rights reserved.
 */
 
 #ifndef AudioSession_AVAudioSessionTypes_h
@@ -19,6 +19,8 @@
 typedef NSString *AVAudioSessionPort NS_STRING_ENUM;
 
 /* input port types */
+/// Continuity microphone for Apple TV.
+OS_EXPORT AVAudioSessionPort const AVAudioSessionPortContinuityMicrophone API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0)) API_UNAVAILABLE(macos);
 /// Line level input on a dock connector
 OS_EXPORT AVAudioSessionPort const AVAudioSessionPortLineIn      		API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 /// Built-in microphone on an iOS device
@@ -105,7 +107,7 @@
 
 /*! Use this category when using a hardware codec or signal processor while
  not playing or recording audio. */
-OS_EXPORT AVAudioSessionCategory const AVAudioSessionCategoryAudioProcessing API_DEPRECATED("No longer supported", ios(3.0, 10.0)) API_UNAVAILABLE(watchos, tvos) API_UNAVAILABLE(macos);
+OS_EXPORT AVAudioSessionCategory const AVAudioSessionCategoryAudioProcessing API_DEPRECATED("No longer supported", ios(3.0, 10.0)) API_UNAVAILABLE(watchos, tvos, macos);
 
 /*! Use this category to customize the usage of available audio accessories and built-in audio hardware.
  For example, this category provides an application with the ability to use an available USB output
@@ -150,7 +152,8 @@
 OS_EXPORT AVAudioSessionMode const AVAudioSessionModeVideoRecording API_AVAILABLE(ios(5.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /*! Appropriate for applications that wish to minimize the effect of system-supplied signal
- processing for input and/or output audio signals. */
+ processing for input and/or output audio signals.
+ This mode disables some dynamics processing on input and output resulting in a lower output playback level. */
 OS_EXPORT AVAudioSessionMode const AVAudioSessionModeMeasurement API_AVAILABLE(ios(5.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /*! Engages appropriate output signal processing for movie playback scenarios.  Currently
@@ -412,12 +415,19 @@
     @var   AVAudioSessionInterruptionReasonBuiltInMicMuted
         The audio session was interrupted due to the built-in mic being muted e.g. due to an iPad's Smart Folio being closed.
  
+    @var   AVAudioSessionInterruptionReasonRouteDisconnected
+        The audio session was interrupted due to the route getting disconnected.
  */
 typedef NS_ENUM(NSUInteger, AVAudioSessionInterruptionReason) {
     AVAudioSessionInterruptionReasonDefault         = 0,
     AVAudioSessionInterruptionReasonAppWasSuspended API_DEPRECATED("wasSuspended reason no longer present", ios(14.5, 16.0)) = 1,
     AVAudioSessionInterruptionReasonBuiltInMicMuted = 2,
-
+#if defined(TARGET_OS_XR) && TARGET_OS_XR
+    /// The audio session was interrupted because its UIScene was backgrounded.
+    AVAudioSessionInterruptionReasonSceneWasBackgrounded = 3,
+#endif // TARGET_OS_XR
+    /// The audio session was interrupted because the route was disconnected.
+    AVAudioSessionInterruptionReasonRouteDisconnected API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0)) API_UNAVAILABLE(macos) = 4
 } NS_SWIFT_NAME(AVAudioSession.InterruptionReason);
 
 ///  options for use when calling setActive:withOptions:error:
@@ -591,9 +601,9 @@
 	Introduced: ios(8.0), watchos(4.0)
 */
 typedef NS_ENUM(NSUInteger, AVAudioSessionRecordPermission) {
-	AVAudioSessionRecordPermissionUndetermined = 'undt',
-	AVAudioSessionRecordPermissionDenied = 'deny',
-	AVAudioSessionRecordPermissionGranted = 'grnt'
+	AVAudioSessionRecordPermissionUndetermined API_DEPRECATED_WITH_REPLACEMENT("AVAudioApplicationRecordPermissionUndetermined", ios(8.0, 17.0), watchos(4.0, 10.0)) API_UNAVAILABLE(macos, tvos) = 'undt',
+	AVAudioSessionRecordPermissionDenied API_DEPRECATED_WITH_REPLACEMENT("AVAudioApplicationRecordPermissionDenied", ios(8.0, 17.0), watchos(4.0, 10.0)) API_UNAVAILABLE(macos, tvos) = 'deny',
+	AVAudioSessionRecordPermissionGranted API_DEPRECATED_WITH_REPLACEMENT("AVAudioApplicationRecordPermissionGranted", ios(8.0, 17.0), watchos(4.0, 10.0)) API_UNAVAILABLE(macos, tvos) = 'grnt'
 };
 
 #endif // AudioSession_AVAudioSessionTypes_h
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h	2023-03-04 17:33:27
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h	2023-05-19 20:45:50
@@ -47,9 +47,7 @@
 #if !0
 #import <AVFAudio/AVSpeechSynthesis.h>
 #import <AVFAudio/AVSpeechSynthesisProvider.h>
-#endif
-
-#if TARGET_OS_IPHONE && !0
+#import <AVFAudio/AVAudioApplication.h>
 #import <AVFAudio/AVAudioSession.h>
 #import <AVFAudio/AVAudioSessionRoute.h>
 #import <AVFAudio/AVAudioSessionTypes.h>
diff -ruN /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesis.h /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesis.h
--- /Applications/Xcode_14.3.1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesis.h	2023-03-09 19:14:09
+++ /Applications/Xcode_15.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesis.h	2023-05-19 20:52:42
@@ -37,7 +37,8 @@
     AVSpeechSynthesisMarkerMarkPhoneme,
     AVSpeechSynthesisMarkerMarkWord,
     AVSpeechSynthesisMarkerMarkSentence,
-    AVSpeechSynthesisMarkerMarkParagraph
+    AVSpeechSynthesisMarkerMarkParagraph,
+    AVSpeechSynthesisMarkerMarkBookmark
 } NS_ENUM_AVAILABLE(13_0, 16_0) NS_SWIFT_NAME(AVSpeechSynthesisMarker.Mark);
 
 
@@ -55,7 +56,46 @@
 typedef void (^AVSpeechSynthesizerBufferCallback)(AVAudioBuffer *buffer) NS_SWIFT_NAME(AVSpeechSynthesizer.BufferCallback);
 typedef void (^AVSpeechSynthesizerMarkerCallback)(NSArray<AVSpeechSynthesisMarker *> *markers) NS_SWIFT_NAME(AVSpeechSynthesizer.MarkerCallback) API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0));
 
+typedef NS_ENUM(NSUInteger, AVSpeechSynthesisPersonalVoiceAuthorizationStatus) {
+    /**
+     The app's authorization status has not yet been determined.
+
+     When your app's status is not determined, calling the requestAuthorization: method prompts the user to grant or deny authorization.
+     */
+    AVSpeechSynthesisPersonalVoiceAuthorizationStatusNotDetermined,
+    /**
+     The user denied your app's request to use personal voices.
+     */
+    AVSpeechSynthesisPersonalVoiceAuthorizationStatusDenied,
+    /**
+     Personal voices are unsupported on this device.
+     */
+    AVSpeechSynthesisPersonalVoiceAuthorizationStatusUnsupported,
+    /**
+     The user granted your app's request to use personal voices.
+     */
+    AVSpeechSynthesisPersonalVoiceAuthorizationStatusAuthorized,
+} NS_SWIFT_NAME(AVSpeechSynthesizer.PersonalVoiceAuthorizationStatus) API_AVAILABLE(ios(17.0), tvos(17.0), watchos(10.0), macos(14.0));
+
+typedef NS_OPTIONS(NSUInteger, AVSpeechSynthesisVoiceTraits)
+{
+    AVSpeechSynthesisVoiceTraitNone             = 0,
+    /**
+     The voice is generally for novelty purposes, for example a character's voice in a game.
+     */
+    AVSpeechSynthesisVoiceTraitIsNoveltyVoice   = 1 << 0,
+    /**
+     The voice was generated by, and belongs to, the user. Voices with this trait will only be available when AVSpeechSynthesizer.personalVoiceAuthorizationStatus is AVSpeechSynthesisPersonalVoiceAuthorizationStatusAuthorized.
+     */
+    AVSpeechSynthesisVoiceTraitIsPersonalVoice  = 1 << 1,
+} NS_SWIFT_NAME(AVSpeechSynthesisVoice.Traits) API_AVAILABLE(ios(17.0), tvos(17.0), watchos(10.0), macos(14.0));
+
+/**
+ Posted when available voices for speech synthesis on the system have changed. For example, if new 3rd party voices are available through a downloaded app, or if a new personal voice is available and the app is authorized to access personal voices.
+ */
+NS_SWIFT_NAME(AVSpeechSynthesizer.availableVoicesDidChangeNotification)
+extern NSNotificationName AVSpeechSynthesisAvailableVoicesDidChangeNotification API_AVAILABLE(ios(17.0), tvos(17.0), watchos(10.0), macos(14.0));
+
 @protocol AVSpeechSynthesizerDelegate;
 
 /*!
@@ -109,6 +149,8 @@
 // The AVAudioCommonFormat and interleaved properties can be determined by properties within the settings dictionary.
 @property(nonatomic, readonly) NSDictionary<NSString *, id> *audioFileSettings API_AVAILABLE(ios(13.0), watchos(6.0), tvos(13.0), macos(10.15));
 
+@property(nonatomic, readonly) AVSpeechSynthesisVoiceTraits voiceTraits API_AVAILABLE(ios(17.0), tvos(17.0), watchos(10.0), macos(14.0));
+
 @end
 
 /*!
@@ -221,6 +263,26 @@
 // If there's no active call, setting this property has no effect.
 @property(nonatomic, assign) BOOL mixToTelephonyUplink API_AVAILABLE(ios(13.0), watchos(6.0)) API_UNAVAILABLE(tvos, macos);
 
+/**
+ Asks the user to allow your app to use personal voices for speech synthesis
+ 
+ Call this method before performing any other tasks associated with speech synthesis using personal voices. This method executes asynchronously, returning shortly after you call it. At some point later, the system calls the provided handler block with the results.
+ 
+ When your app's authorization status is PersonalVoiceAuthorizationStatus.notDetermined, this method causes the system to prompt the user to grant or deny permission for your app to use personal voices. The user's response is saved so that future calls to this method do not prompt the user again.
+ */
++ (void)requestPersonalVoiceAuthorizationWithCompletionHandler:(void(^)(AVSpeechSynthesisPersonalVoiceAuthorizationStatus status))handler API_AVAILABLE(ios(17.0), tvos(17.0), watchos(10.0), macos(14.0));
+
+/**
+ Returns your app's current authorization to use personal voices.
+
+ @discussion
+ The user can reject your app's request to use personal voices, but your request can also be denied if personal voices are not supported on the device. The user can also change your app's authorization status at any time from the Settings app.
+ 
+ @return
+ The app's current authorization status value. For a list of values, see AVSpeechSynthesisPersonalVoiceAuthorizationStatus.
+ */
+@property (class, nonatomic, readonly) AVSpeechSynthesisPersonalVoiceAuthorizationStatus personalVoiceAuthorizationStatus API_AVAILABLE(ios(17.0), tvos(17.0), watchos(10.0), macos(14.0));
+
 @end
 
 /*!
@@ -238,6 +300,8 @@
 - (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didCancelSpeechUtterance:(AVSpeechUtterance *)utterance API_AVAILABLE(ios(7.0), watchos(1.0), tvos(7.0), macos(10.14));
 
 - (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer willSpeakRangeOfSpeechString:(NSRange)characterRange utterance:(AVSpeechUtterance *)utterance API_AVAILABLE(ios(7.0), watchos(1.0), tvos(7.0), macos(10.14));
+
+- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer willSpeakMarker:(AVSpeechSynthesisMarker *)marker utterance:(AVSpeechUtterance *)utterance API_AVAILABLE(ios(17.0), watchos(10.0), tvos(17.0), macos(14.0)) NS_AVAILABLE(14_0, 17_0);
 @end
 
 #pragma mark - AVSpeechSynthesisMarker
@@ -253,8 +317,16 @@
 /// The location and length of the pertaining speech request's SSML text. This marker applies to the range of characters represented by the NSString.
 @property (nonatomic, assign) NSRange textRange;
 
+@property (nonatomic, copy) NSString *bookmarkName NS_AVAILABLE(14_0, 17_0);
+@property (nonatomic, copy) NSString *phoneme NS_AVAILABLE(14_0, 17_0);
+
 - (instancetype)initWithMarkerType:(AVSpeechSynthesisMarkerMark)type forTextRange:(NSRange)range atByteSampleOffset:(NSUInteger)byteSampleOffset;
 
+- (instancetype)initWithWordRange:(NSRange)range atByteSampleOffset:(NSInteger)byteSampleOffset NS_AVAILABLE(14_0, 17_0);
+- (instancetype)initWithSentenceRange:(NSRange)range atByteSampleOffset:(NSInteger)byteSampleOffset NS_AVAILABLE(14_0, 17_0);
+- (instancetype)initWithParagraphRange:(NSRange)range atByteSampleOffset:(NSInteger)byteSampleOffset NS_AVAILABLE(14_0, 17_0);
+- (instancetype)initWithPhonemeString:(NSString *)phoneme atByteSampleOffset:(NSInteger) byteSampleOffset NS_AVAILABLE(14_0, 17_0);
+- (instancetype)initWithBookmarkName:(NSString *)mark atByteSampleOffset:(NSInteger) byteSampleOffset NS_AVAILABLE(14_0, 17_0);
 @end
 
 NS_ASSUME_NONNULL_END
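
The personal-voice additions follow the usual authorization pattern: request access, then filter the available voices by the new trait. A sketch using the declarations above plus the pre-existing +[AVSpeechSynthesisVoice speechVoices] (not part of this diff):

```objc
#import <AVFAudio/AVFAudio.h>

static void LogPersonalVoicesIfAuthorized(void) {
    [AVSpeechSynthesizer requestPersonalVoiceAuthorizationWithCompletionHandler:
        ^(AVSpeechSynthesisPersonalVoiceAuthorizationStatus status) {
            if (status != AVSpeechSynthesisPersonalVoiceAuthorizationStatusAuthorized)
                return;
            // Personal voices are only listed once the app is authorized.
            for (AVSpeechSynthesisVoice *voice in AVSpeechSynthesisVoice.speechVoices) {
                if (voice.voiceTraits & AVSpeechSynthesisVoiceTraitIsPersonalVoice)
                    NSLog(@"Personal voice available: %@", voice.name);
            }
        }];
}
```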