
AVFAudio tvOS xcode14.0 beta1


# AVFAudio.framework

## mandel

diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioEngine.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioEngine.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioEngine.h	2022-02-23 10:57:55.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioEngine.h	2022-05-31 15:04:01.000000000 -0400
@@ -706,7 +706,39 @@
 	Any client installed block on the source node's audio unit `AUMIDIOutputEventBlock`
 	will be overwritten when making the MIDI connection.
  */
-- (void)connectMIDI:(AVAudioNode *)sourceNode to:(AVAudioNode *)destinationNode format:(AVAudioFormat * __nullable)format block:(AUMIDIOutputEventBlock __nullable)tapBlock API_AVAILABLE(macos(10.14), ios(12.0), watchos(5.0), tvos(12.0));
+- (void)connectMIDI:(AVAudioNode *)sourceNode to:(AVAudioNode *)destinationNode format:(AVAudioFormat * __nullable)format block:(AUMIDIOutputEventBlock __nullable)tapBlock API_DEPRECATED_WITH_REPLACEMENT("connectMIDI:to:format:eventListBlock:", macos(10.14, 13.0), ios(13.0, 16.0), watchos(5.0, 9.0), tvos(12.0, 16.0));
+
+/*! @method connectMIDI:to:format:eventListBlock:
+    @abstract
+        Establish a MIDI only connection between two nodes.
+    @param sourceNode
+        The source node.
+    @param destinationNode
+        The destination node.
+    @param format
+        If non-nil, the format of the source node's output bus is set to this format.
+        In all cases, the format of the source nodes' output bus has to match with the
+        destination nodes' output bus format.
+        Although the output bus of the source is not in use, the format needs to be set
+        in order to be able to use the sample rate for MIDI event timing calculations.
+    @param tapBlock
+        This block is called from the source node's `AUMIDIOutputEventListBlock`
+        on the realtime thread. The host can tap the MIDI data of the source node through
+        this block.
+
+    Use this method to establish a MIDI only connection between a source node and a
+    destination node that has MIDI input capability.
+
+    The source node can only be a AVAudioUnit node of type `kAudioUnitType_MIDIProcessor`.
+    The destination node types can be `kAudioUnitType_MusicDevice`,
+    `kAudioUnitType_MusicEffect` or `kAudioUnitType_MIDIProcessor`.
+
+    Note that any pre-existing MIDI connection involving the destination will be broken.
+
+    Any client installed block on the source node's audio unit `AUMIDIOutputEventListBlock`
+    will be overwritten when making the MIDI connection.
+ */
+- (void)connectMIDI:(AVAudioNode *)sourceNode to:(AVAudioNode *)destinationNode format:(AVAudioFormat * __nullable)format eventListBlock:(AUMIDIEventListBlock __nullable)tapBlock API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
 
 /*! @method connectMIDI:toNodes:format:block:
     @abstract
@@ -742,7 +774,43 @@
 	Any client installed block on the source node's audio unit `AUMIDIOutputEventBlock`
 	will be overwritten when making the MIDI connection.
  */
-- (void)connectMIDI:(AVAudioNode *)sourceNode toNodes:(NSArray<AVAudioNode *> *)destinationNodes format:(AVAudioFormat * __nullable)format block:(AUMIDIOutputEventBlock __nullable)tapBlock API_AVAILABLE(macos(10.14), ios(12.0), watchos(5.0), tvos(12.0));
+- (void)connectMIDI:(AVAudioNode *)sourceNode toNodes:(NSArray<AVAudioNode *> *)destinationNodes format:(AVAudioFormat * __nullable)format block:(AUMIDIOutputEventBlock __nullable)tapBlock API_DEPRECATED_WITH_REPLACEMENT("connectMIDI:toNodes:format:eventListBlock:", macos(10.14, 13.0), ios(13.0, 16.0), watchos(5.0, 9.0), tvos(12.0, 16.0));
+
+/*! @method connectMIDI:toNodes:format:eventListBlock:
+    @abstract
+        Establish a MIDI only connection between a source node and multiple destination nodes.
+    @param sourceNode
+        The source node.
+    @param destinationNodes
+        An array of AVAudioNodes specifying destination nodes.
+    @param format
+        If non-nil, the format of the source node's output bus is set to this format.
+        In all cases, the format of the source nodes' output bus has to match with the
+        destination nodes' output bus format.
+        Although the output bus of the source is not in use, the format needs to be set
+        in order to be able to use the sample rate for MIDI event timing calculations.
+    @param tapBlock
+        This block is called from the source node's `AUMIDIOutputEventListBlock`
+        on the realtime thread. The host can tap the MIDI data of the source node through
+        this block.
+
+    Use this method to establish a MIDI only connection between a source node and
+    multiple destination nodes.
+
+    The source node can only be a AVAudioUnit node of type `kAudioUnitType_MIDIProcessor`.
+    The destination node types can be `kAudioUnitType_MusicDevice`,
+    `kAudioUnitType_MusicEffect` or `kAudioUnitType_MIDIProcessor`.
+
+    MIDI connections made using this method are either one-to-one (when a single
+    destination connection is specified) or one-to-many (when multiple connections are
+    specified), but never many-to-one.
+
+    Note that any pre-existing connection involving the destination will be broken.
+
+    Any client installed block on the source node's audio unit `AUMIDIOutputEventListBlock`
+    will be overwritten when making the MIDI connection.
+ */
+- (void)connectMIDI:(AVAudioNode *)sourceNode toNodes:(NSArray<AVAudioNode *> *)destinationNodes format:(AVAudioFormat * __nullable)format eventListBlock:(AUMIDIEventListBlock __nullable)tapBlock API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
 
 /*! @method disconnectMIDI:from:
     @abstract
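
As a quick illustration (not part of the diff), here is a hedged Objective-C sketch of how the new `connectMIDI:to:format:eventListBlock:` overload declared above might be used. The engine, the `midiProcessorUnit`/`samplerUnit` nodes, and the helper name are assumptions for the example; the block signature follows `AUMIDIEventListBlock` from AudioToolbox.

```objc
#import <AVFAudio/AVFAudio.h>

// Sketch only: assumes `midiProcessorUnit` (kAudioUnitType_MIDIProcessor) and
// `samplerUnit` (kAudioUnitType_MusicDevice) have already been instantiated and
// attached to `engine`, with compatible formats.
static void ConnectWithEventList(AVAudioEngine *engine,
                                 AVAudioUnit *midiProcessorUnit,
                                 AVAudioUnit *samplerUnit)
{
    if (@available(tvOS 16.0, *)) {
        [engine connectMIDI:midiProcessorUnit
                         to:samplerUnit
                     format:nil
             eventListBlock:^OSStatus(AUEventSampleTime eventSampleTime,
                                      uint8_t cable,
                                      const struct MIDIEventList *eventList) {
                 // Called on the realtime thread: tap the source node's MIDI data here.
                 (void)eventSampleTime; (void)cable; (void)eventList;
                 return noErr;
             }];
    } else {
        // Pre-tvOS 16: the byte-stream based connection this API deprecates.
        [engine connectMIDI:midiProcessorUnit to:samplerUnit format:nil block:nil];
    }
}
```
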
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSequencer.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSequencer.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSequencer.h	2022-02-23 07:14:24.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSequencer.h	2022-05-31 15:04:34.000000000 -0400
@@ -1,34 +1,27 @@
 /*
 	File:		AVAudioSequencer.h
-	Framework:	AVFoundation
+	Framework:	AVFAudio
 
 	Copyright (c) 2015 Apple Inc. All Rights Reserved.
 */
 
 #import <Foundation/Foundation.h>
+#import <AVFAudio/AVAudioTypes.h>
 
 #if __has_include(<CoreMIDI/MIDIServices.h>)
 	#import <CoreMIDI/MIDIServices.h>
 #endif
 
+#define AVAS_EXPORT __attribute__((visibility("default"))) extern
+
 NS_ASSUME_NONNULL_BEGIN
 
 @class AVAudioUnit;
 @class AVAudioTime;
 @class AVAudioEngine;
 @class AVMusicTrack;
-@class AVMusicTrackEventIterator;
 @class AVAudioSequencer;
 
-/*!	@typedef AVMusicTimeStamp
-	@abstract A fractional number of beats
-	
-	@discussion
-		This is used for all sequencer timeline-related methods.  The relationship between this
-		value and time in seconds is determined by the sequence's tempo.
-*/
-typedef Float64 AVMusicTimeStamp;
-
 /*! @typedef AVMusicSequenceLoadOptions
 	@abstract Determines whether data on different MIDI channels is mapped to multiple tracks, or
 		if the tracks are preserved as-is.
@@ -44,8 +37,8 @@
 		API_AVAILABLE(macos(10.11), ios(9.0), watchos(2.0), tvos(9.0))
 */
 typedef NS_OPTIONS(NSUInteger, AVMusicSequenceLoadOptions) {
-	AVMusicSequenceLoadSMF_PreserveTracks		= 0,				// 0x00
-	AVMusicSequenceLoadSMF_ChannelsToTracks		= (1UL << 0)		// 0x01
+	AVMusicSequenceLoadSMF_PreserveTracks		= 0,
+	AVMusicSequenceLoadSMF_ChannelsToTracks		= (1UL << 0)
 };
 
 /*! @typedef AVBeatRange
@@ -63,6 +56,96 @@
 	return r;
 }
 
+typedef NSString *AVAudioSequencerInfoDictionaryKey NS_TYPED_ENUM;
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyAlbum
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyApproximateDurationInSeconds
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyArtist
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyChannelLayout
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyComments
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyComposer
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyCopyright
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyEncodingApplication
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyGenre
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyISRC
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyKeySignature
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyLyricist
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyNominalBitRate
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyRecordedDate
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeySourceBitDepth
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeySourceEncoder
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeySubTitle
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyTempo
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyTimeSignature
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyTitle
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyTrackNumber
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+AVAS_EXPORT
+AVAudioSequencerInfoDictionaryKey AVAudioSequencerInfoDictionaryKeyYear
+				API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
 /*! @class AVAudioSequencer
 	@abstract A collection of MIDI events organized into AVMusicTracks, plus a player to play back the events.
 */
@@ -94,6 +177,10 @@
 		determines how the file's contents are mapped to tracks inside the sequence
 	@param outError
         on exit, if an error occurs, a description of the error
+	@discussion
+		Loading a MIDI file that was previously saved via this system will restore the complete state
+		of the sequence, including muting, loop points and enablement, etc. of all tracks.  It will also
+		restore all non-MIDI AVMusicEvent types which had been added to the sequence's tracks.
 */
 - (BOOL)loadFromURL:(NSURL *)fileURL options:(AVMusicSequenceLoadOptions)options error:(NSError **)outError;
 
@@ -105,11 +192,15 @@
 		determines how the contents are mapped to tracks inside the sequence
 	@param outError
         on exit, if an error occurs, a description of the error
+	@discussion
+		Loading a MIDI file that was previously saved via this system will restore the complete state
+		of the sequence, including muting, loop points and enablement, etc. of all tracks.  It will also
+		restore all non-MIDI AVMusicEvent types which had been added to the sequence's tracks.
 */
 - (BOOL)loadFromData:(NSData *)data options:(AVMusicSequenceLoadOptions)options error:(NSError **)outError;
 
 /*! @method writeToURL:SMPTEResolution:replaceExisting:error:
-	@abstract Create and write a MIDI file from the events in the sequence
+	@abstract Create and write a MIDI file containing the events and complete state of the sequence
 	@param fileURL
 		the path for the file to be created
 	@param resolution
@@ -121,8 +212,11 @@
 	@param outError
         on exit, if an error occurs, a description of the error
 	@discussion
-		Only MIDI events are written when writing to the MIDI file. MIDI files are normally beat
-		based, but can also have a SMPTE (or real-time rather than beat time) representation.
+		A MIDI file saved via this method will contain not only the complete MIDI content of the sequence,
+		but also the state of all tracks, including muting, loop points and enablement, etc.  It will also
+		contain all non-MIDI AVMusicEvent types which had been added to the sequence's track.
+ 
+		MIDI files are normally beat based, but can also have a SMPTE (or real-time rather than beat time) representation.
 		The relationship between "tick" and quarter note for saving to Standard MIDI File
 		- pass in zero to use default - this will be the value that is currently set on the tempo track
 */
@@ -137,44 +231,93 @@
 - (NSData *)dataWithSMPTEResolution:(NSInteger)SMPTEResolution error:(NSError **)outError;
 
 /*!	@method secondsForBeats:
-	@abstract Get the time in seconds for the given beat position (timestamp) in the track
+	@abstract Get the time in seconds for the given beat position (timestamp) in the AVMusicTrack
 */
 - (NSTimeInterval)secondsForBeats:(AVMusicTimeStamp)beats;
 
 /*!	@method beatsForSeconds:
-	@abstract Get the beat position (timestamp) for the given time in the track
+	@abstract Get the beat position (timestamp) for the given time in the AVMusicTrack
 */
 - (AVMusicTimeStamp)beatsForSeconds:(NSTimeInterval)seconds;
 
+/*!	@method reverseEvents:
+	@abstract Reverse the order of all events in all AVMusicTracks, including the tempo track
+*/
+- (void)reverseEvents API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!	@method createAndAppendTrack:
+	@abstract Create a new AVMusicTrack and append it to the AVMusicSequencer's list
+*/
+- (AVMusicTrack *)createAndAppendTrack API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!	@method removeTrack:
+	@abstract Remove the given AVMusicTrack from the AVMusicSequencer.
+	@discussion This does not destroy the AVMusicTrack because it may be re-used.
+*/
+- (BOOL)removeTrack:(AVMusicTrack *)track API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!	@typedef AVAudioSequencerUserCallback
+	@abstract
+		A block which is called asynchronously during playback whenever an AVMusicUserEvent is
+		encountered (see AVMusicUserEvent).
+	@param track
+		The track which contains the AVMusicUserEvent.
+	@param userData
+		The raw data that was used to initialize the AVMusicUserEvent.
+	@param timeStamp
+		The beat location at which the event was found.  This will necessarily be in the past due
+		to the asynchronous nature of the callback.
+	@discussion
+		This callback is delivered on an internal queue and is asynchronous to the rendering thread.
+ 
+		The returned 'userData' will be unique to each AVMusicUserEvent instance.
+ */
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+typedef void (^AVAudioSequencerUserCallback)(AVMusicTrack *track, NSData *userData, AVMusicTimeStamp timeStamp);
+
+/*!	@method setUserCallback:
+	@abstract
+		Add a block which will be called each time the AVAudioSequencer encounters an AVMusicUserEvent during playback.
+	@discussion
+		The same callback is called for events which occur on any track in the sequencer.
+ 
+		Set the block to nil to disable it.
+*/
+- (void)setUserCallback:(AVAudioSequencerUserCallback _Nullable)userCallback API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
 /* properties */
 
 /*!	@property tracks
-	@abstract An NSArray containing all the tracks in the sequence
+	@abstract An NSArray containing all the AVMusicTracks in the sequence
 	@discussion
-		Track indices count from 0, and do not include the tempo track.
+		This list will not include the tempo track.
 */
 @property (nonatomic, readonly) NSArray<AVMusicTrack *> *tracks;
 
 /*!	@property tempoTrack
 	@abstract The tempo track
 	 @discussion
-		 Each sequence has a single tempo track. All tempo events are placed into this track (as well
-		 as other appropriate events (for instance, the time signature from a MIDI file). The tempo
-		 track can be edited and iterated upon as any other track. Non-tempo events in a tempo track
-		 are ignored.
+		Each AVMusicSequence has a single tempo track.
+ 
+		All tempo events read from external MIDI files are placed into this track (as well as other
+		appropriate events (e.g., the time signature meta event from the file).
+ 
+		The tempo track can be edited and iterated upon as any other track.
+ 
+		Non-tempo-related events will generate exceptions if added.
 */
 @property (nonatomic, readonly) AVMusicTrack *tempoTrack;
 
 /*!	@property userInfo
 	@abstract A dictionary containing meta-data derived from a sequence
 	@discussion
-		The dictionary can contain one or more of the kAFInfoDictionary_* keys
-		specified in <AudioToolbox/AudioFile.h>
+		The dictionary can contain one or more of the values accessible via the AVAudioSequencerInfoDictionaryKeys.
 */
 @property (nonatomic, readonly) NSDictionary<NSString *, id> *userInfo;
 
 @end
 
+API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos)
 @interface AVAudioSequencer(AVAudioSequencer_Player)
 
 /*! @property currentPositionInSeconds
@@ -258,10 +401,22 @@
 
 @end
 
+/*!
+	@define AVMusicTimeStampEndOfTrack
+	@abstract	A timestamp used to access all events in a AVMusicTrack via an AVBeatRange.
+	@discussion Pass this value as the length of an AVBeatRange to indicate an end time beyond the last
+				event in the track.  In this way, it's possible to specify an AVBeatRange which
+				includes all events starting at some particular time up to and including the last event.
+*/
+#define AVMusicTimeStampEndOfTrack			DBL_MAX
 
 /*! @class AVMusicTrack
-	@abstract A collection of music events which will be sent to a given destination, and which can be 
+	@abstract	A collection of music events which will be sent to a given destination, and which can be
 				offset, muted, etc. independently of events in other tracks.
+	@discussion
+				AVMusicTrack is not a container of AVMusicEvents - it will not hold references to
+				AVMusicEvents that are added, so an application should maintain its own if it is
+				desired.
 */
 API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos)
 @interface AVMusicTrack : NSObject {
@@ -378,4 +533,114 @@
 
 @end
 
+@class AVMusicEvent;
+
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMusicTrack(AVMusicTrackEditor)
+
+/*!	@property usesAutomatedParameters
+	@abstract	Indicates whether the track is an automation track.
+	@discussion
+				If set to YES, this can be used to contain parameter automation events, exclusively.
+				Adding any other event types will generate exceptions.
+ 
+				If a track already contains non-parameter events, setting this to YES will
+				generate an exception.
+ */
+@property (readwrite) BOOL usesAutomatedParameters;
+
+/*!	@method	addEvent:atBeat
+	@abstract	Adds an AVMusicEvent's contents to a track at the specified AVMusicTimeStamp.
+	@param		event			the event to be added
+	@param		beat			the AVMusicTimeStamp
+	@discussion
+				Because event contents are copied into the track, the same event may be added multiple
+				times at different timestamps.
+ 
+				There are restrictions on which AVMusicEvent subclasses may be added to different tracks:
+ 
+				- Only AVExtendedTempoEvents and AVMIDIMetaEvents with certain AVMIDIMetaEventTypes
+				  can be added to an AVMusicSequence's tempo track (see AVMIDIMetaEvent).
+ 
+				- AVParameterEvents can only be added to automation tracks (see AVParameterEvent).
+ 
+				- All other event subclasses cannot be added to tempo or automation tracks.
+*/
+
+- (void)addEvent:(AVMusicEvent *)event atBeat:(AVMusicTimeStamp)beat;
+
+/*!	@method moveEventsInRange:byAmount
+	@abstract	Shift the beat location of all events in the given beat range by the amount specified.
+	@param		range			the range of beats.  Must be a valid AVBeatRange.
+	@param		beatAmount		the amount in beats to shift each event.  The amount may be positive or negative.
+ */
+- (void)moveEventsInRange:(AVBeatRange)range byAmount:(AVMusicTimeStamp)beatAmount;
+
+/*!	@method clearEventsInRange:
+	@abstract	Removes all events in the given beat range, erasing that portion of the AVMusicTrack.
+	@param		range			the range of beats.  Must be a valid AVBeatRange.
+	@discussion	All events outside of the specified range are left unmodified.
+ */
+- (void)clearEventsInRange:(AVBeatRange)range;
+
+/*!	@method cutEventsInRange:
+	@abstract	Removes all events in the given beat range, splicing out that portion of the AVMusicTrack.
+	@param		range			the range of beats.  Must be a valid AVBeatRange.
+	@discussion	All events past the end of the specified range will be shifted backward by the duration of the range.
+ */
+- (void)cutEventsInRange:(AVBeatRange)range;
+
+/*!	@method copyEventsInRange:fromTrack:insertAtBeat
+	@abstract	Copies all events in the given beat range from the specified AVMusicTrack,
+				splicing them into the current AVMusicTrack.
+	@param		range			the range of beats.  Must be a valid AVBeatRange.
+	@param		sourceTrack		the AVMusicTrack to copy the events from.
+	@param		insertStartBeat	the start beat at which the copied events should be spliced in.
+	@discussion	All events originally at or past insertStartBeat will be shifted forward by the duration
+				of the copied-in range.
+ */
+- (void)copyEventsInRange:(AVBeatRange)range fromTrack:(AVMusicTrack *)sourceTrack insertAtBeat:(AVMusicTimeStamp)insertStartBeat;
+
+/*!	@method copyAndMergeEventsInRange:fromTrack:mergeAtBeat
+	@abstract	Copies all events in the given beat range from the specified AVMusicTrack,
+				merging them into the current AVMusicTrack.
+	@param		range			the range of beats.  Must be a valid AVBeatRange.
+	@param		sourceTrack		the AVMusicTrack to copy the events from.
+	@param		insertStartBeat	the start beat at which the copied events should be merged.
+	@discussion	All events originally at or past mergeStartBeat will be left unmodified.
+ 
+				Copying events from track to track follows the same type-exclusion rules as adding
+				events:  an operation which violates those rules will generate an exception.
+ */
+- (void)copyAndMergeEventsInRange:(AVBeatRange)range fromTrack:(AVMusicTrack *)sourceTrack mergeAtBeat:(AVMusicTimeStamp)mergeStartBeat;
+
+/*!	@typedef AVMusicEventEnumerationBlock
+	@abstract	The block type used to enumerate and optionally remove AVMusicEvents when using
+				`AVMusicTrack(enumerateEventsInRange:usingBlock:)`
+	@param		event			the AVMusicEvent returned by this enumeration block call.  If this
+								event is modified by the block, the corresponding track event will be changed.
+	@param		timeStamp		the beat position of this event in the AVMusicTrack.  If the block
+								sets *timeStamp to a new value, the corresponding event's beat position
+								in the track will be updated.
+	@param		removeEvent		If the block sets *removeEvent to YES, the current event will be
+								removed from the track.
+*/
+typedef void (^AVMusicEventEnumerationBlock)(AVMusicEvent *event, AVMusicTimeStamp *timeStamp, BOOL *removeEvent);
+
+/*!	@method enumerateEventsInRange:usingBlock:
+	@abstract	Iterates through the AVMusicEvents within the AVMusicTrack whose timestamps fit within the range,
+				calling the block for each.
+	@param		block			the AVMusicEventEnumerationBlock to call for each event.
+	@discussion	Each event returned via the block should be examined using `NSObject(isKindOfClass:)`
+				to determine its subclass and then cast and accessed/edited accordingly.
+ 
+				The iteration may continue after removing an event.
+ 
+				The event objects returned via the block will not be the same instances
+				which were added to the AVMusicTrack, though their contents will be identical.
+ */
+- (void)enumerateEventsInRange:(AVBeatRange)range usingBlock:(NS_NOESCAPE AVMusicEventEnumerationBlock)block;
+
+@end
+
 NS_ASSUME_NONNULL_END
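
A hedged sketch of the new AVAudioSequencer surface (track creation/removal, the user callback, and `reverseEvents`). The engine argument, the helper name, and the logging are illustrative assumptions; the selectors are the ones declared in the diff above.

```objc
#import <AVFAudio/AVFAudio.h>

// Sketch only: `engine` is assumed to be an already configured AVAudioEngine;
// error handling is elided for brevity.
static void SequencerEditingExample(AVAudioEngine *engine)
{
    AVAudioSequencer *sequencer = [[AVAudioSequencer alloc] initWithAudioEngine:engine];

    if (@available(tvOS 16.0, *)) {
        // New in this SDK: tracks can now be created and removed directly.
        AVMusicTrack *track = [sequencer createAndAppendTrack];

        // Called asynchronously whenever an AVMusicUserEvent is reached during playback.
        [sequencer setUserCallback:^(AVMusicTrack *callbackTrack,
                                     NSData *userData,
                                     AVMusicTimeStamp timeStamp) {
            NSLog(@"user event of %lu bytes at beat %f",
                  (unsigned long)userData.length, timeStamp);
        }];

        // Reverse every event in every track, including the tempo track.
        [sequencer reverseEvents];

        // The track is not destroyed on removal and may be re-used later.
        [sequencer removeTrack:track];
    }
}
```
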
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioTypes.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioTypes.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioTypes.h	2022-02-12 10:30:58.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioTypes.h	2022-05-21 06:28:07.000000000 -0400
@@ -52,6 +52,14 @@
 */
 typedef NSUInteger AVAudioNodeBus;
 
+/*!	@typedef AVMusicTimeStamp
+	@abstract The time position in beats of playback and events in the AVAudioSequencer and its components.
+	@discussion
+		AVMusicTimeStamp allows the position and duration of events and actions in the AVAudioSequencer to
+		function independently from the tempo of the sequence being played.  At the default tempo of 120.0,
+		a time stamp of 1.0 represents 0.5 seconds of time.
+*/
+typedef double AVMusicTimeStamp;
 
 
 /*=============================================================================*/
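
The "at 120 BPM one beat is 0.5 seconds" relationship in the new AVMusicTimeStamp discussion can be checked with the existing conversion helpers. A minimal sketch, assuming a sequencer whose tempo track still has the default tempo (the helper name is illustrative):

```objc
#import <AVFAudio/AVFAudio.h>

// Sketch: at the default tempo of 120 beats per minute, one AVMusicTimeStamp
// beat corresponds to 0.5 seconds of clock time.
static void TimeStampExample(AVAudioSequencer *sequencer)
{
    NSTimeInterval seconds = [sequencer secondsForBeats:1.0];  // expected 0.5 at 120 BPM
    AVMusicTimeStamp beats = [sequencer beatsForSeconds:1.0];  // expected 2.0 at 120 BPM
    NSLog(@"1 beat = %f s, 1 s = %f beats", seconds, beats);
}
```
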
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitComponent.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitComponent.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitComponent.h	2022-02-23 07:14:25.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitComponent.h	2022-05-31 14:52:35.000000000 -0400
@@ -30,6 +30,10 @@
 // Standard Audio Unit Manufacturers
 extern NSString * const AVAudioUnitManufacturerNameApple	API_AVAILABLE(macos(10.10), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
 
+#if !TARGET_OS_OSX
+@class UIImage;
+#endif
+
 #pragma mark AVAudioUnitComponent
 
 /*!
@@ -129,22 +133,24 @@
 */
 @property (nonatomic, readonly, nullable) NSURL		*iconURL API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, watchos, tvos);
 
-#if TARGET_OS_OSX
 /*! @property icon
 	@abstract An icon representing the component.
-    @discussion
-        For a component originating in an app extension, the returned icon will be that of the
-        application containing the extension.
-        
-        For components loaded from bundles, the icon will be that of the bundle.
-*/
+	@discussion
+		For a component originating in an app extension, the returned icon will be that of the
+		application containing the extension.
+
+		For components loaded from bundles, the icon will be that of the bundle.
+ */
+#if TARGET_OS_OSX
 @property (nonatomic, readonly, nullable) NSImage *icon API_AVAILABLE(macos(10.11)) API_UNAVAILABLE(ios, watchos, tvos);
+#else
+@property (nonatomic, readonly, nullable) UIImage *icon API_AVAILABLE(ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos) API_UNAVAILABLE(macos);
 #endif
 
 /*! @property passesAUVal
 	@abstract YES if the AudioComponent has passed the AU validation tests, otherwise NO
 */
-@property (nonatomic, readonly) BOOL		passesAUVal API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, watchos, tvos);
+@property (nonatomic, readonly) BOOL		passesAUVal API_AVAILABLE(macos(10.10), ios(16.0)) API_UNAVAILABLE(watchos, tvos);
 
 /*! @property hasCustomView
 	@abstract YES if the AudioComponent provides custom view, otherwise NO
@@ -155,7 +161,7 @@
 	@abstract A NSDictionary that contains information describing the capabilities of the AudioComponent.
 	The specific information depends on the type and the keys are defined in AudioUnitProperties.h
 */
-@property (nonatomic, readonly) NSDictionary<NSString *, id>		*configurationDictionary API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, watchos, tvos);
+@property (nonatomic, readonly) NSDictionary<NSString *, id>		*configurationDictionary API_AVAILABLE(macos(10.10), ios(16.0)) API_UNAVAILABLE(watchos, tvos);
 
 /*! @method supportsNumberInputChannels:outputChannels:
 	@abstract returns YES if the AudioComponent supports the input/output channel configuration
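
For the AVAudioUnitComponent change, the `icon` property is now typed as UIImage on iOS/tvOS 16 (while `passesAUVal` and `configurationDictionary` remain macOS/iOS only in this diff). A hedged sketch of reading it on tvOS; the component query and helper name are assumptions for the example:

```objc
#import <AVFAudio/AVFAudio.h>
#import <AudioToolbox/AudioToolbox.h>
#import <UIKit/UIKit.h>

// Sketch only: enumerates registered music-device components and reads the
// `icon` property, surfaced as a UIImage on iOS/tvOS 16.
static void LogComponentIcons(void)
{
    AudioComponentDescription desc = {0};
    desc.componentType = kAudioUnitType_MusicDevice;

    NSArray<AVAudioUnitComponent *> *components =
        [[AVAudioUnitComponentManager sharedAudioUnitComponentManager]
            componentsMatchingDescription:desc];

    for (AVAudioUnitComponent *component in components) {
        if (@available(tvOS 16.0, *)) {
            UIImage *icon = component.icon;   // nil if the component provides none
            NSLog(@"%@ has icon: %@", component.name, icon ? @"yes" : @"no");
        }
    }
}
```
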
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitMIDIInstrument.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitMIDIInstrument.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitMIDIInstrument.h	2022-02-23 07:14:25.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioUnitMIDIInstrument.h	2022-05-31 15:04:01.000000000 -0400
@@ -178,6 +178,15 @@
  */
 - (void)sendMIDISysExEvent:(NSData *)midiData;
 
+#if AVAUDIOUNIT_HAVE_AUDIOUNIT
+/*! @method sendMIDIEventList:eventList:
+ @abstract sends a MIDI event list to the instrument.
+ @param eventList
+	the MIDIEventList
+  */
+- (void)sendMIDIEventList:(const struct MIDIEventList *)eventList API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+#endif
+
 @end
 
 NS_ASSUME_NONNULL_END
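
A hedged sketch of the new `sendMIDIEventList:` method, using CoreMIDI's `MIDIEventListInit`/`MIDIEventListAdd` to build a single MIDI 1.0 note-on as a Universal MIDI Packet. The instrument argument, helper name, and the specific note/velocity are assumptions for the example:

```objc
#import <AVFAudio/AVFAudio.h>
#import <CoreMIDI/CoreMIDI.h>

// Sketch only: builds one MIDI 1.0 channel-voice note-on packet and hands it to
// the instrument via the new -sendMIDIEventList:.
static void SendNoteOn(AVAudioUnitMIDIInstrument *instrument)
{
    if (@available(tvOS 16.0, *)) {
        MIDIEventList eventList;
        MIDIEventPacket *packet = MIDIEventListInit(&eventList, kMIDIProtocol_1_0);

        // 0x20903C64: group 0, note-on, channel 0, note 60 (middle C), velocity 100.
        const UInt32 noteOn = 0x20903C64;
        packet = MIDIEventListAdd(&eventList, sizeof(eventList), packet,
                                  0 /* now */, 1, &noteOn);

        [instrument sendMIDIEventList:&eventList];
    }
}
```
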
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.apinotes /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.apinotes
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.apinotes	2022-02-12 11:05:35.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.apinotes	2022-05-21 07:22:39.000000000 -0400
@@ -23,6 +23,20 @@
   - Selector: 'setCategory:mode:routeSharingPolicy:options:error:'
     SwiftName: setCategory(_:mode:policy:options:)
     MethodKind: Instance
+- Name: AVMusicTrack
+  Methods:
+  - Selector: 'addEvent:atBeat:'
+    SwiftName: addEvent(_:at:)
+    MethodKind: Instance
+  - Selector: 'copyEventsInRange:fromTrack:insertAtBeat:'
+    SwiftName: copyEvents(in:from:insertAt:)
+    MethodKind: Instance
+  - Selector: 'copyAndMergeEventsInRange:fromTrack:mergeAtBeat:'
+    SwiftName: copyAndMergeEvents(in:from:mergeAt:)
+    MethodKind: Instance
+  - Selector: 'moveEventsInRange:byAmount:'
+    SwiftName: moveEvents(in:by:)
+    MethodKind: Instance
 
 Tags:
 - Name: AVAudioSessionCategoryOptions
@@ -47,8 +61,14 @@
   SwiftName: AVAudioSession.SetActiveOptions
 - Name: AVAudioSessionSilenceSecondaryAudioHintType
   SwiftName: AVAudioSession.SilenceSecondaryAudioHintType
+- Name: AVMIDIControlChangeMessageType
+  SwiftName: AVMIDIControlChangeEvent.MessageType
+- Name: AVMIDIMetaEventType
+  SwiftName: AVMIDIMetaEvent.EventType
 
 Typedefs:
+- Name: AVAudioSequencerInfoDictionaryKey
+  SwiftName: AVAudioSequencer.InfoDictionaryKey
 - Name: AVAudioSessionCategory
   SwiftName: AVAudioSession.Category
 - Name: AVAudioSessionLocation
@@ -73,6 +93,8 @@
   SwiftName: AVAudioSession.silenceSecondaryAudioHintNotification
 - Name: AVAudioSessionMediaServicesWereResetNotification
   SwiftName: AVAudioSession.mediaServicesWereResetNotification
+- Name: AVExtendedNoteOnEventDefaultInstrument
+  SwiftName: AVExtendedNoteOnEvent.defaultInstrument
 
 SwiftVersions:
 - Version: 3
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h	2022-02-12 08:04:03.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h	2022-05-21 06:53:06.000000000 -0400
@@ -42,9 +42,11 @@
 #import <AVFAudio/AVAudioUnitTimePitch.h>
 #import <AVFAudio/AVAudioUnitVarispeed.h>
 #import <AVFAudio/AVMIDIPlayer.h>
+#import <AVFAudio/AVMusicEvents.h>
 
 #if !0
 #import <AVFAudio/AVSpeechSynthesis.h>
+#import <AVFAudio/AVSpeechSynthesisProvider.h>
 #endif
 
 #if TARGET_OS_IPHONE && !0
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVMusicEvents.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVMusicEvents.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVMusicEvents.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVMusicEvents.h	2022-05-31 15:04:35.000000000 -0400
@@ -0,0 +1,612 @@
+/*==================================================================================================
+ File:       AVMusicEvents.h
+ 
+ Contains:   API for events associated with an AVMusicTrack and AVAudioSequencer
+ 
+ Copyright:  (c) 2021 by Apple, Inc., all rights reserved.
+ 
+ Bugs?:      For bug reports, consult the following page on
+ the World Wide Web:
+ 
+ http://developer.apple.com/bugreporter/
+ 
+ ==================================================================================================*/
+
+#import <Foundation/Foundation.h>
+#import <AVFAudio/AVAudioTypes.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*!	@class AVMusicEvent
+	@abstract
+		The base class for all events associated with an AVMusicTrack.
+	@discussion
+		This class is provided to allow enumeration of the heterogenous events contained within an AVMusicTrack.
+ */
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMusicEvent : NSObject
+@end
+
+/*!	@class AVMIDINoteEvent
+	@abstract
+		The event class representing MIDI note-on/off messages.
+	@param channel
+		The MIDI channel for the note.  Range: 0-15.
+	@param key
+		The MIDI key number for the note.  Range: 0-127.
+	@param velocity
+		The MIDI velocity for the note.  Range: 0-127 (see discussion).
+	@param duration
+		The duration of this note event in AVMusicTimeStamp beats.  Range: Any non-negative number.
+	@discussion
+		The AVAudioSequencer will automatically send a MIDI note-off after the note duration has passed.
+		To send an explicit note-off event, create an AVMIDINoteEvent with its velocity set to zero.
+ */
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDINoteEvent : AVMusicEvent
+
+/*!	@method initWithChannel:key:velocity:duration
+	@abstract
+		Initialize the event with a MIDI channel, key number, velocity and duration.
+	@param channel
+		The MIDI channel.  Range: 0-15.
+	@param key
+		The MIDI key number.  Range: 0-127.
+	@param velocity
+		The MIDI velocity.  Range: 0-127 with zero indicating a note-off event.
+	@param duration
+		The duration in beats for this note.  Range: Any non-negative number.
+ */
+- (instancetype)initWithChannel:(UInt32)channel key:(UInt32)keyNum velocity:(UInt32)velocity duration:(AVMusicTimeStamp)duration;
+
+/*!	@property channel
+		The MIDI channel for the event.  Range: 0-15.
+ */
+@property (readwrite) UInt32 channel;
+
+/*!	@property key
+		The MIDI key number for the event.  Range: 0-127.
+ */
+@property (readwrite) UInt32 key;
+
+/*!	@property velocity
+		The MIDI velocity for the event.  Range: 0-127.
+ */
+@property (readwrite) UInt32 velocity;
+
+/*!	@property duration
+		The duration of the event in AVMusicTimeStamp beats.  Range: Any non-negative number.
+ */
+@property (readwrite) AVMusicTimeStamp duration;
+
+@end
+
+/*!	@class AVMIDIChannelEvent
+	@abstract
+		The event base class for all MIDI messages which operate on a single MIDI channel.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDIChannelEvent : AVMusicEvent
+
+/*!	@property channel
+		The MIDI channel for the event.  Range: 0-15.
+ */
+@property (readwrite)UInt32 channel;
+
+@end
+
+/*! @enum AVMIDIControlChangeMessageType
+	@abstract
+		Types of MIDI control change events.  See the General MIDI Specification for details.
+ */
+typedef NS_ENUM(NSInteger, AVMIDIControlChangeMessageType)
+{
+	AVMIDIControlChangeMessageTypeBankSelect			= 0,
+	AVMIDIControlChangeMessageTypeModWheel			= 1,
+	AVMIDIControlChangeMessageTypeBreath				= 2,
+	AVMIDIControlChangeMessageTypeFoot				= 4,
+	AVMIDIControlChangeMessageTypePortamentoTime		= 5,
+	AVMIDIControlChangeMessageTypeDataEntry			= 6,
+	AVMIDIControlChangeMessageTypeVolume				= 7,
+	AVMIDIControlChangeMessageTypeBalance				= 8,
+	AVMIDIControlChangeMessageTypePan					= 10,
+	AVMIDIControlChangeMessageTypeExpression			= 11,
+	
+	// these events have value (0-63) == off, (64-127) == on
+	AVMIDIControlChangeMessageTypeSustain				= 64,
+	AVMIDIControlChangeMessageTypePortamento			= 65,
+	AVMIDIControlChangeMessageTypeSostenuto			= 66,
+	AVMIDIControlChangeMessageTypeSoft				= 67,
+	AVMIDIControlChangeMessageTypeLegatoPedal			= 68,
+	AVMIDIControlChangeMessageTypeHold2Pedal			= 69,
+
+	AVMIDIControlChangeMessageTypeFilterResonance		= 71,
+	AVMIDIControlChangeMessageTypeReleaseTime			= 72,
+	AVMIDIControlChangeMessageTypeAttackTime			= 73,
+	AVMIDIControlChangeMessageTypeBrightness			= 74,
+	AVMIDIControlChangeMessageTypeDecayTime			= 75,
+	AVMIDIControlChangeMessageTypeVibratoRate			= 76,
+	AVMIDIControlChangeMessageTypeVibratoDepth		= 77,
+	AVMIDIControlChangeMessageTypeVibratoDelay		= 78,
+	
+	AVMIDIControlChangeMessageTypeReverbLevel			= 91,
+	AVMIDIControlChangeMessageTypeChorusLevel			= 93,
+	AVMIDIControlChangeMessageTypeRPN_LSB				= 100,
+	AVMIDIControlChangeMessageTypeRPN_MSB				= 101,
+	AVMIDIControlChangeMessageTypeAllSoundOff			= 120,
+	AVMIDIControlChangeMessageTypeResetAllControllers	= 121,
+	AVMIDIControlChangeMessageTypeAllNotesOff			= 123,
+	AVMIDIControlChangeMessageTypeOmniModeOff			= 124,
+	AVMIDIControlChangeMessageTypeOmniModeOn			= 125,
+	AVMIDIControlChangeMessageTypeMonoModeOn			= 126,
+	AVMIDIControlChangeMessageTypeMonoModeOff			= 127
+};
+
+/*!	@class AVMIDIControlChangeEvent
+	@abstract
+		The event class representing MIDI control change messages.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDIControlChangeEvent : AVMIDIChannelEvent
+
+/*!	@method initWithChannel:messageType:value
+	@abstract
+		Initialize the event with a channel, a control change type, and a control value.
+	@param channel
+		The MIDI channel for the control change.  Range: 0-15.
+	@param messageType
+		The AVMIDIControlChangeMessageType indicating which MIDI control change message to send.
+	@param value
+		The value for this control change.  Range: Depends on the type (see the General MIDI specification).
+ */
+- (instancetype)initWithChannel:(UInt32)channel messageType:(AVMIDIControlChangeMessageType)messageType value:(UInt32)value;
+
+/*!	@property messageType
+		The type of control change message, specified as an AVMIDIControlChangeMessageType.
+ */
+@property (readonly)AVMIDIControlChangeMessageType messageType;
+
+/*!	@property value
+		The value of the control change event.  The range of this value depends on the type (see the General MIDI specification).
+ */
+@property (readonly)UInt32 value;
+
+@end
+
+/*!	@class AVMIDIPolyPressureEvent
+	@abstract
+		The event class representing MIDI "poly" or "key" pressure messages.
+ */
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDIPolyPressureEvent : AVMIDIChannelEvent
+
+/*!	@method initWithChannel:key:pressure
+	@abstract
+		Initialize the event with a channel, a MIDI key number, and a key pressure value.
+	@param channel
+		The MIDI channel for the message.  Range: 0-15.
+	@param key
+		The MIDI key number to which the pressure should be applied.
+	@param pressure
+		The poly pressure value.
+*/
+- (instancetype)initWithChannel:(UInt32)channel key:(UInt32)key pressure:(UInt32)pressure;
+
+/*!	@property key
+		The MIDI key number.
+ */
+@property (readwrite)UInt32 key;
+
+/*!	@property pressure
+		The poly pressure value for the requested key.
+ */
+@property (readwrite)UInt32 pressure;
+
+@end
+
+/*!	@class AVMIDIProgramChangeEvent
+	@abstract
+		The event class representing MIDI program or patch change messages.
+	@discussion
+		The effect of these messages will depend on the containing AVMusicTrack's destinationAudioUnit.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDIProgramChangeEvent : AVMIDIChannelEvent
+
+/*!	@method initWithChannel:programNumber:
+	@abstract
+		Initialize the event with a channel and a program number.
+	@param channel
+		The MIDI channel for the message.  Range: 0-15.
+	@param programNumber
+		The program number to be sent.  Range: 0-127.
+	@discussion
+		Per the General MIDI specification, the actual instrument that is chosen will depend on optional
+		AVMIDIControlChangeMessageTypeBankSelect events sent prior to this program change.
+*/
+- (instancetype)initWithChannel:(UInt32)channel programNumber:(UInt32)programNumber;
+
+/*!	@property programNumber
+		The MIDI program number.  Range: 0-127.
+ */
+@property (readwrite)UInt32 programNumber;
+
+@end
+
+/*!	@class AVMIDIChannelPressureEvent
+	@abstract
+		The event class representing MIDI channel pressure messages.
+	@discussion
+		The effect of these messages will depend on the containing AVMusicTrack's destinationAudioUnit
+		and the capabilities of the destination's currently-loaded instrument.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDIChannelPressureEvent : AVMIDIChannelEvent
+
+/*!	@method initWithChannel:pressure:
+	@abstract
+		Initialize the event with a channel and a pressure value.
+	@param channel
+		The MIDI channel for the message.  Range: 0-15.
+	@param pressure
+		The MIDI channel pressure.  Range: 0-127.
+ */
+- (instancetype)initWithChannel:(UInt32)channel pressure:(UInt32)pressure;
+
+/*!	@property pressure
+		The MIDI channel pressure.
+ */
+@property (readwrite)UInt32 pressure;
+
+@end
+
+/*!	@class AVMIDIPitchBendEvent
+	@abstract
+		The event class representing MIDI pitch bend messages.
+	@discussion
+		The effect of these messages will depend on the AVMusicTrack's destinationAudioUnit
+		and the capabilities of the destination's currently-loaded instrument.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDIPitchBendEvent : AVMIDIChannelEvent
+
+/*!	@method initWithChannel:value:
+	@abstract
+		Initialize the event with a channel and a pitch bend value.
+	@param channel
+		The MIDI channel for the message.  Range: 0-15.
+	@param value
+		The pitch bend value.  Range: 0-16383 (midpoint 8192).
+ */
+- (instancetype)initWithChannel:(UInt32)channel value:(UInt32)value;
+
+/*!	@property value
+		The value of the pitch bend event.  Range: 0-16383 (midpoint 8192).
+ */
+@property (readwrite)UInt32 value;
+
+@end
+
+/*!	@class AVMIDISysexEvent
+	@abstract
+		The event class representing MIDI system exclusive messages.
+	@discussion
+		The size and contents of an AVMIDISysexEvent cannot be modified once created.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDISysexEvent : AVMusicEvent
+
+/*!	@method initWithData:
+	@abstract
+		Initialize the event with an NSData.
+	@param data
+		An NSData object containing the raw contents of the system exclusive event.
+ */
+- (instancetype)initWithData:(NSData *)data;
+
+/*!	@property sizeInBytes
+		The size of the raw data associated with this system exclusive event.
+ */
+@property (readonly)UInt32 sizeInBytes;
+
+@end
+
+/*!	@enum AVMIDIMetaEventType
+	@abstract
+		Constants which indicate which type of MIDI Meta-Event to create.
+ */
+typedef NS_ENUM(NSInteger, AVMIDIMetaEventType)
+{
+	AVMIDIMetaEventTypeSequenceNumber		= 0x00,
+	AVMIDIMetaEventTypeText					= 0x01,
+	AVMIDIMetaEventTypeCopyright			= 0x02,
+	AVMIDIMetaEventTypeTrackName			= 0x03,
+	AVMIDIMetaEventTypeInstrument			= 0x04,
+	AVMIDIMetaEventTypeLyric				= 0x05,
+	AVMIDIMetaEventTypeMarker				= 0x06,
+	AVMIDIMetaEventTypeCuePoint				= 0x07,
+	AVMIDIMetaEventTypeMidiChannel			= 0x20,
+	AVMIDIMetaEventTypeMidiPort				= 0x21,
+	AVMIDIMetaEventTypeEndOfTrack			= 0x2f,
+	AVMIDIMetaEventTypeTempo				= 0x51,
+	AVMIDIMetaEventTypeSmpteOffset			= 0x54,
+	AVMIDIMetaEventTypeTimeSignature		= 0x58,
+	AVMIDIMetaEventTypeKeySignature			= 0x59,
+	AVMIDIMetaEventTypeProprietaryEvent		= 0x7f
+};
+
+/*!	@class AVMIDIMetaEvent
+	@abstract
+		The event class representing MIDI Meta-Event messages.
+	@discussion
+		The size and contents of an AVMIDIMetaEvent cannot be modified once created.
+ 
+		Events with AVMIDIMetaEventType AVMIDIMetaEventTypeTempo, AVMIDIMetaEventTypeSmpteOffset,
+		or AVMIDIMetaEventTypeTimeSignature can only be added to a sequence's tempo track.
+ 
+		The class does not verify that the content matches the MIDI specification.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMIDIMetaEvent : AVMusicEvent
+
+/*!	@method initWithType:data
+	@abstract
+		Initialize the event with a MIDI Meta-Event type and an NSData.
+	@param type
+		A AVMIDIMetaEventType indicating which type of Meta-Event.
+	@param data
+		An NSData object containing the raw contents of the Meta-Event.
+ */
+- (instancetype)initWithType:(AVMIDIMetaEventType)type data:(NSData *)data;
+
+/*!	@property type
+		The type of Meta-Event, specified as an AVMIDIMetaEventType.
+ */
+@property (readonly) AVMIDIMetaEventType type;
+
+@end
+
+/*!	@class AVMusicUserEvent
+	@abstract
+		The event class representing custom user messages.
+	@discussion
+		When a scheduled AVMusicUserEvent is reached during playback of a AVMusicTrack, the track's
+		user callback block will be called if it has been set.  The event's NSData will be provided as
+		an argument to that block.
+		The size and contents of an AVMusicUserEvent cannot be modified once created.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVMusicUserEvent : AVMusicEvent
+
+/*!	@method initWithData:
+	@abstract
+		Initialize the event with an NSData.
+	@param data
+		An NSData object containing the contents to be returned via the AVMusicTrack's user callback.
+ */
+- (instancetype)initWithData:(NSData *)data;
+
+/*!	@property sizeInBytes
+		The size of the data associated with this user event.
+ */
+@property (readonly)UInt32 sizeInBytes;
+
+@end
+
+/*! @constant AVExtendedNoteOnEventDefaultInstrument
+	@abstract
+		A constant representing the default instrument ID to use for an AVExtendedNoteOnEvent.  This indicates to the
+		system to use the instrument currently loaded on the channel referenced by the groupID.  This is the only
+		supported value at this time.
+ */
+
+extern UInt32 const AVExtendedNoteOnEventDefaultInstrument API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!	@class AVExtendedNoteOnEvent
+	@abstract
+		The event class representing a custom extension of a MIDI note-on.
+	@discussion
+		Using an AVExtendedNoteOnEvent allows an application to trigger a specialized note-on event on one of several
+		Apple audio units which support it.  The floating point note and velocity numbers allow optional fractional control
+		of the note's run-time properties which are modulated by those inputs.  In addition, it supports the possibility
+		of an audio unit with more than the standard 16 MIDI channels.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVExtendedNoteOnEvent : AVMusicEvent
+
+/*!	@method initWithMIDINote:velocity:groupID:duration
+	@abstract
+		Initialize the event with a midi note, velocity, group ID, and a duration.
+	@param midiNote
+		The MIDI note number represented as a floating point.  Range: Destination-dependent, usually 0.0 - 127.0.
+	@param velocity
+		The MIDI velocity represented as a floating point.  Range: Destination-dependent, usually 0.0 - 127.0.
+	@param groupID
+		An index indicating the AudioUnitElement within the Group Scope which should handle this event (see AudioUnitElement).
+		This normally maps to a channel within the audio unit.
+		Range: normally between 0 and 15, but may be higher if the AVMusicTrack's destinationAudioUnit supports more channels.
+	@param duration
+		The duration of this event in AVMusicTimeStamp beats.  Range:  Any nonnegative number.
+ */
+- (instancetype)initWithMIDINote:(float)midiNote
+						velocity:(float)velocity
+						 groupID:(UInt32)groupID
+						duration:(AVMusicTimeStamp)duration;
+
+/*!	@method initWithMIDINote:velocity:instrumentID:groupID:duration
+	@abstract
+		Initialize the event with a midi note, velocity, instrument and group ID, and a duration.
+	@discussion
+		This initializer is identical to initWithMIDINote:velocity:groupID:duration with the addition of
+		an instrumentID parameter which will allow for the possibility of an externally-created custom instrument.
+		If this initializer is used, instrumentID should be set to AVExtendedNoteOnEventDefaultInstrument for now.
+ */
+
+- (instancetype)initWithMIDINote:(float)midiNote
+						velocity:(float)velocity
+					instrumentID:(UInt32)instrumentID
+						 groupID:(UInt32)groupID
+						duration:(AVMusicTimeStamp)duration;
+
+/*!	@property midiNote
+		The MIDI note number represented as a floating point.  If the instrument within the AVMusicTrack's
+		destinationAudioUnit supports fractional values, this may be used to generate arbitrary
+		macro- and micro-tunings.  Range: Destination-dependent, usually 0.0 - 127.0.
+ */
+@property (readwrite)float midiNote;
+
+/*!	@property velocity
+		The MIDI velocity represented as a floating point.  If the instrument within the AVMusicTrack's
+		destinationAudioUnit supports fractional values, this may be used to generate very precise changes
+		in gain, etc.  Range: Destination-dependent, usually 0.0 - 127.0.
+ */
+@property (readwrite)float velocity;
+
+/*!	@property instrumentID
+		This should be set to AVExtendedNoteOnEventDefaultInstrument.
+ */
+@property (readwrite)UInt32 instrumentID;
+
+/*!	@property groupID
+		This represents the audio unit channel (i.e., Group Scope) which should handle this event.
+		Range: normally between 0 and 15, but may be higher if the AVMusicTrack's destinationAudioUnit
+		supports more channels.
+ */
+@property (readwrite)UInt32 groupID;
+
+/*!	@property duration
+		The duration of this event in AVMusicTimeStamp beats.  Range:  Any nonnegative number.
+ */
+@property (readwrite)AVMusicTimeStamp duration;
+
+@end
+
+/*!	@class AVParameterEvent
+	@abstract
+		The event class representing a parameter set/change event on the AVMusicTrack's destinationAudioUnit.
+	@discussion
+		AVParameterEvents make it possible to schedule and/or automate parameter changes on the audio unit
+		that has been configured as the destination for the AVMusicTrack containing this event.
+ 
+		When the track is played as part of a sequence, the destination audio unit will receive set-parameter
+		messages whose values change smoothly along a linear ramp between each event's beat location.
+ 
+		If an AVParameterEvent is added to an empty, non-automation track, the track becomes an automation track.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVParameterEvent : AVMusicEvent
+
+/*!	@method initWithParameterID:scope:element:value
+	@abstract
+		Initialize the event with the parameter ID, scope, element, and value for the parameter to be set.
+	@param parameterID
+		The ID of the parameter (see AudioUnitParameterID).
+	@param scope
+		The audio unit scope for the parameter (see AudioUnitScope).
+	@param element
+		The element index within the scope (see AudioUnitElement).
+	@param value
+		The value of the parameter to be set.  Range:  Dependent on parameter.
+*/
+- (instancetype) initWithParameterID:(UInt32)parameterID
+							   scope:(UInt32)scope
+							 element:(UInt32)element
+							   value:(float)value;
+
+
+/*!	@property parameterID
+		The ID of the parameter (see AudioUnitParameterID).
+ */
+@property (readwrite)UInt32 parameterID;
+
+/*!	@property scope
+		The audio unit scope for the parameter (see AudioUnitScope).
+ */
+@property (readwrite)UInt32 scope;
+
+/*!	@property element
+		The element index within the scope (see AudioUnitElement).
+ */
+@property (readwrite)UInt32 element;
+
+/*!	@property value
+		The value of the parameter to be set.  Range:  Dependent on parameter.
+*/
+@property (readwrite)float value;
+
+@end
+
+/*!	@class AVAUPresetEvent
+	@abstract
+		The event class representing a preset load and change on the AVMusicTrack's destinationAudioUnit.
+	@discussion
+		AVAUPresetEvents make it possible to schedule and/or automate preset changes on the audio unit
+		that has been configured as the destination for the AVMusicTrack containing this event.
+*/
+
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVAUPresetEvent : AVMusicEvent
+
+/*!	@method initWithScope:element:dictionary
+	@abstract
+		Initialize the event with the scope, element, and dictionary for the preset.
+	@param scope
+		The audio unit scope for the preset (see AudioUnitScope).  This should always be set to Global.
+	@param element
+		The element index within the scope (see AudioUnitElement).  This should usually be set to 0.
+	@param presetDictionary
+		An NSDictionary containing the preset.  The audio unit will expect this to be a dictionary
+		structured as an appropriate audio unit preset.
+	@discussion
+		The dictionary passed to this initializer will be copied and is not editable once the event is
+		created.
+		
+*/
+- (instancetype) initWithScope:(UInt32)scope
+					   element:(UInt32)element
+				  dictionary:(NSDictionary *)presetDictionary;
+
+/*!	@property scope
+		The audio unit scope for the preset (see AudioUnitScope).  This should always be set to Global.
+ */
+@property (readwrite)UInt32 scope;
+
+/*!	@property element
+		The element index within the scope (see AudioUnitElement).  This should usually be set to 0.
+ */
+@property (readwrite)UInt32 element;
+
+/*!	@property presetDictionary
+		An NSDictionary containing the preset.
+ */
+@property (readonly,copy)NSDictionary *presetDictionary;
+
+@end
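
A hedged sketch of scheduling an AVAUPresetEvent. The preset dictionary is supplied by the caller (for example, captured from the destination audio unit), and `addEvent:atBeat:` is again assumed from elsewhere in these headers.

```objc
#import <AVFAudio/AVFAudio.h>
#import <AudioToolbox/AudioToolbox.h>

// Sketch: schedule a preset change on the track's destinationAudioUnit.
// The preset dictionary would normally be captured from the audio unit
// (for example, a saved .aupreset); here it is passed in by the caller.
// AVMusicTrack's addEvent:atBeat: is assumed from elsewhere in these headers.
static void SchedulePresetChange(AVMusicTrack *track, NSDictionary *preset)
{
    AVAUPresetEvent *presetEvent =
        [[AVAUPresetEvent alloc] initWithScope:kAudioUnitScope_Global
                                       element:0
                                    dictionary:preset];
    [track addEvent:presetEvent atBeat:8.0];
}
```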
+
+/*!	@class AVExtendedTempoEvent
+	@abstract
+		The event class representing a tempo change to a specific beats-per-minute value.
+	@discussion
+		This event provides a way to specify a tempo change that is less cumbersome than using
+		tempo meta-events.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos)
+@interface AVExtendedTempoEvent : AVMusicEvent
+
+/*!	@method initWithTempo:
+	@abstract
+		Initialize the event with the given tempo.
+	@param tempo
+		The new tempo in beats-per-minute.  Range:  Any positive value.
+		The new tempo will begin at the timestamp for this event.
+ */
+- (instancetype)initWithTempo:(double)tempo;
+
+/*!	@property tempo
+		The new tempo in beats-per-minute.  Range:  Any positive value.
+ */
+@property (readwrite)double tempo;
+
+@end
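
A hedged sketch of an AVExtendedTempoEvent tempo change, with the same `addEvent:atBeat:` assumption as above.

```objc
#import <AVFAudio/AVFAudio.h>

// Sketch: switch the sequence to 140 beats-per-minute starting at beat 16.
// AVMusicTrack's addEvent:atBeat: is assumed from elsewhere in these headers;
// tempo events would normally be placed on the sequencer's tempo track.
static void ScheduleTempoChange(AVMusicTrack *tempoTrack)
{
    AVExtendedTempoEvent *tempoEvent =
        [[AVExtendedTempoEvent alloc] initWithTempo:140.0];
    [tempoTrack addEvent:tempoEvent atBeat:16.0];
}
```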
+
+NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesis.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesis.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesis.h	2022-02-23 07:16:32.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesis.h	2022-05-31 15:02:47.000000000 -0400
@@ -10,7 +10,7 @@
 
 #ifdef __OBJC2__
 
-@class AVAudioSession, AVAudioSessionChannelDescription, AVAudioBuffer;
+@class AVAudioSession, AVAudioSessionChannelDescription, AVAudioBuffer, AVSpeechSynthesisMarker;
 
 NS_ASSUME_NONNULL_BEGIN
 
@@ -30,6 +30,16 @@
     AVSpeechSynthesisVoiceGenderFemale
 } NS_ENUM_AVAILABLE(10_15, 13_0);
 
+/*! @brief   Markers used in the output event callback. Used for providing metadata on synthesized audio.
+ */
+typedef NS_ENUM(NSInteger, AVSpeechSynthesisMarkerMark) {
+    AVSpeechSynthesisMarkerMarkPhoneme,
+    AVSpeechSynthesisMarkerMarkWord,
+    AVSpeechSynthesisMarkerMarkSentence,
+    AVSpeechSynthesisMarkerMarkParagraph
+} NS_ENUM_AVAILABLE(13_0, 16_0) NS_SWIFT_NAME(AVSpeechSynthesisMarker.Mark);
+
+
 extern const float AVSpeechUtteranceMinimumSpeechRate API_AVAILABLE(ios(7.0), watchos(1.0), tvos(7.0), macos(10.14));
 extern const float AVSpeechUtteranceMaximumSpeechRate API_AVAILABLE(ios(7.0), watchos(1.0), tvos(7.0), macos(10.14));
 extern const float AVSpeechUtteranceDefaultSpeechRate API_AVAILABLE(ios(7.0), watchos(1.0), tvos(7.0), macos(10.14));
@@ -42,6 +52,7 @@
 extern NSString *const AVSpeechSynthesisIPANotationAttribute API_AVAILABLE(ios(10.0), watchos(3.0), tvos(10.0), macos(10.14));
 
 typedef void (^AVSpeechSynthesizerBufferCallback)(AVAudioBuffer *buffer) NS_SWIFT_NAME(AVSpeechSynthesizer.BufferCallback);
+typedef void (^AVSpeechSynthesizerMarkerCallback)(NSArray<AVSpeechSynthesisMarker *> *markers) NS_SWIFT_NAME(AVSpeechSynthesizer.MarkerCallback) API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0));
 
 
 @protocol AVSpeechSynthesizerDelegate;
@@ -113,8 +124,28 @@
 + (instancetype)speechUtteranceWithString:(NSString *)string;
 + (instancetype)speechUtteranceWithAttributedString:(NSAttributedString *)string API_AVAILABLE(ios(10.0), watchos(3.0), tvos(10.0), macos(10.14));
 
+/*!
+ A speech utterance that expects markup written using the Speech Synthesis Markup Language (SSML) standard.
+ Returns nil if invalid SSML is passed in.
+ */
++ (nullable instancetype)speechUtteranceWithSSMLRepresentation:(NSString *)string API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0));
+
 - (instancetype)initWithString:(NSString *)string;
 - (instancetype)initWithAttributedString:(NSAttributedString *)string API_AVAILABLE(ios(10.0), watchos(3.0), tvos(10.0), macos(10.14));
+/*!
+ @abstract
+ A speech utterance that expects markup written using the Speech Synthesis Markup Language (SSML)  standard.
+ 
+ @discussion
+ Uses SSML markup to add attributes. If using SSML to request voices that fall under certain attributes, a single
+ utterance may be split into multiple parts, each sent to the appropriate synthesizer. If no voice matches the properties,
+ the voice in the @c voice property of the utterance will be used. If no @c voice is specified, the system's default
+ will be used. @c AVSpeechUtterance properties that affect the prosody of a voice, such as @c rate and
+ @c pitchMultiplier, will not apply to an utterance that uses an SSML representation.
+ 
+ Returns nil if invalid SSML is passed in.
+*/
+- (nullable instancetype)initWithSSMLRepresentation:(NSString *)string API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0));
 
 /* If no voice is specified, the system's default will be used. */
 @property(nonatomic, retain, nullable) AVSpeechSynthesisVoice *voice;
@@ -161,6 +192,11 @@
 // Use this method to receive audio buffers that can be used to store or further process synthesized speech.
 // The dictionary provided by -[AVSpeechSynthesisVoice audioFileSettings] can be used to create an AVAudioFile.
 - (void)writeUtterance:(AVSpeechUtterance *)utterance toBufferCallback:(AVSpeechSynthesizerBufferCallback)bufferCallback API_AVAILABLE(ios(13.0), watchos(6.0), tvos(13.0), macos(10.15)) ;
+/*!
+ Use this method to receive audio buffers and associated metadata that can be used to store or further process synthesized speech.
+ The dictionary provided by -[AVSpeechSynthesisVoice audioFileSettings] can be used to create an AVAudioFile.
+ */
+- (void)writeUtterance:(AVSpeechUtterance *)utterance toBufferCallback:(AVSpeechSynthesizerBufferCallback)bufferCallback toMarkerCallback:(AVSpeechSynthesizerMarkerCallback)markerCallback API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0)) ;
 
 /* These methods will operate on the speech utterance that is speaking. Returns YES if it succeeds, NO for failure. */
 
@@ -203,6 +239,22 @@
 - (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer willSpeakRangeOfSpeechString:(NSRange)characterRange utterance:(AVSpeechUtterance *)utterance API_AVAILABLE(ios(7.0), watchos(1.0), tvos(7.0), macos(10.14));
 @end
 
+#pragma mark - AVSpeechSynthesisMarker
+
+API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0)) 
+@interface AVSpeechSynthesisMarker : NSObject <NSSecureCoding, NSCopying>
+
+@property (nonatomic, assign) AVSpeechSynthesisMarkerMark mark;
+
+/// Byte offset into the associated audio buffer
+@property (nonatomic, assign) NSUInteger byteSampleOffset;
+
+/// The location and length, within the originating speech request's SSML text, of the characters to which this marker applies.
+@property (nonatomic, assign) NSRange textRange;
+
+- (instancetype)initWithMarkerType:(AVSpeechSynthesisMarkerMark)type forTextRange:(NSRange)range atByteSampleOffset:(NSUInteger)byteSampleOffset;
+
+@end
 
 NS_ASSUME_NONNULL_END
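
Taken together, the AVSpeechSynthesis.h additions above (SSML utterances, the marker callback, and AVSpeechSynthesisMarker) might be exercised as in the following sketch. The SSML string and the callback bodies are placeholders.

```objc
#import <AVFAudio/AVFAudio.h>

// Sketch: synthesize an SSML utterance offline and observe the marker
// metadata delivered alongside the audio buffers.
static void RenderSSML(void)
{
    NSString *ssml = @"<speak>Hello, <break time=\"300ms\"/> world.</speak>";
    AVSpeechUtterance *utterance = [AVSpeechUtterance speechUtteranceWithSSMLRepresentation:ssml];
    if (utterance == nil) {
        return; // invalid SSML
    }

    AVSpeechSynthesizer *synthesizer = [[AVSpeechSynthesizer alloc] init];
    [synthesizer writeUtterance:utterance
               toBufferCallback:^(AVAudioBuffer *buffer) {
                   // Append the buffer to an AVAudioFile created with
                   // -[AVSpeechSynthesisVoice audioFileSettings], or process it here.
               }
               toMarkerCallback:^(NSArray<AVSpeechSynthesisMarker *> *markers) {
                   for (AVSpeechSynthesisMarker *marker in markers) {
                       NSLog(@"mark %ld at byte offset %lu covers SSML range %@",
                             (long)marker.mark,
                             (unsigned long)marker.byteSampleOffset,
                             NSStringFromRange(marker.textRange));
                   }
               }];
}
```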
 
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesisProvider.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesisProvider.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesisProvider.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVSpeechSynthesisProvider.h	2022-06-03 18:09:44.000000000 -0400
@@ -0,0 +1,169 @@
+/*
+File:  AVSpeechSynthesisProvider.h
+
+Framework:  AVFoundation
+
+Copyright 2022 Apple Inc. All rights reserved.
+*/
+
+#import <TargetConditionals.h>
+
+#ifndef AVSpeeechSynthesisProvider_h
+#define AVSpeeechSynthesisProvider_h
+
+#import <AVFAudio/AVSpeechSynthesis.h>
+
+#if __has_include(<AudioToolbox/AUAudioUnit.h>)
+#import <AudioToolbox/AUAudioUnit.h>
+#endif
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class AVSpeechSynthesisProviderRequest, AVSpeechSynthesisMarker;
+
+#pragma mark - AVSpeechSynthesisProviderVoice -
+
+/*! @brief  The representation of a provided voice that is available for speech synthesis.
+    @discussion
+        @c AVSpeechSynthesisProviderVoice is distinct from @c AVSpeechSynthesisVoice, in that it is a voice provided to the system by an @c AVSpeechSynthesisProviderAudioUnit.
+ 
+        An @c AVSpeechSynthesisProviderVoice will surface as an @c AVSpeechSynthesisVoice when using @c AVSpeechSynthesisVoice.speechVoices(). The quality will always be listed as @c .enhanced
+ */
+API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0)) 
+@interface AVSpeechSynthesisProviderVoice : NSObject <NSSecureCoding, NSCopying>
+
+/*! @brief  The localized name of the voice
+ */
+@property (nonatomic, readonly) NSString *name;
+
+/*! @brief  A unique identifier for the voice
+    @discussion
+        The recommended format is reverse domain notation.
+        Behavior is undefined if identifiers are not unique for all voices within a given extension.
+ */
+@property (nonatomic, readonly) NSString *identifier;
+
+/*! @brief  A set of BCP 47 codes identifying the languages this synthesizer is primarily used for.
+    @discussion
+        These languages are what a user would expect a synthesizer to fully support and be primarily used for.
+ */
+@property (nonatomic, readonly) NSArray<NSString *> *primaryLanguages;
+
+/*! @brief  A superset of BCP 47 codes identifying the voice’s supported languages.
+    @discussion
+        These languages are what a user would expect a voice to be able to speak, such that if the voice is given a multi-lingual phrase it can speak the entire phrase without needing to switch voices. For example, a zh-CN voice could have @c ["zh-CN"] as its @c primaryLanguages but @c ["zh-CN","en-US"] in @c supportedLanguages, indicating that if it received "你好 means Hello", it would be able to speak the entire phrase.
+ */
+@property (nonatomic, readonly) NSArray<NSString *> *supportedLanguages;
+
+/*! @brief  The size of the voice (optional)
+    @discussion
+       This is the reported size of the voice package on disk, in bytes. Defaults to 0.
+*/
+@property (nonatomic, readwrite) int64_t voiceSize;
+
+/*! @brief  The voice version (optional)
+    @discussion
+        This is an optional property for bookkeeping. This value does not affect system behavior.
+ */
+@property (nonatomic, strong) NSString *version;
+
+/*! @brief  The gender of the voice (optional)
+ */
+@property (nonatomic, readwrite) AVSpeechSynthesisVoiceGender gender;
+
+/*! @brief  The age of the voice in years (optional)
+    @discussion
+       This is an optional property that indicates the age of this voice, to be treated as a personality trait. Defaults to 0.
+*/
+@property (nonatomic, assign) NSInteger age;
+
+
+- (instancetype)initWithName:(NSString *)name identifier:(NSString *)identifier primaryLanguages:(NSArray<NSString *> *)primaryLanguages supportedLanguages:(NSArray<NSString *> *)supportedLanguages;
+
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)new NS_UNAVAILABLE;
+
+/*! @brief  A call that indicates that a new voice or set of voices is available, or no longer available, for system use.
+    @discussion
+       Call this method to indicate to the system that there has been a change in the availability of the voices your application is providing to the system.
+*/
++ (void)updateSpeechVoices;
+
+@end
+
+
+#pragma mark - AVSpeechSynthesisProviderRequest -
+
+/*!
+    An @c AVSpeechSynthesisProviderRequest gets delivered to an @c AVSpeechSynthesisProviderAudioUnit in order to synthesize audio.
+    This is distinct from an @c AVSpeechUtterance, which is a generic utterance to be spoken.
+ */
+API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0)) 
+@interface AVSpeechSynthesisProviderRequest : NSObject <NSSecureCoding, NSCopying>
+
+/*!
+    @abstract The SSML representation of the text to be synthesized with the corresponding speech synthesis attributes for customization of pitch, rate, intonation, and more.
+    @see https://www.w3.org/TR/speech-synthesis11/
+ */
+@property (nonatomic, readonly) NSString *ssmlRepresentation;
+
+/*! @abstract The voice to be used in this speech request
+ */
+@property (nonatomic, readonly) AVSpeechSynthesisProviderVoice *voice;
+
+- (instancetype)initWithSSMLRepresentation:(NSString *)text voice:(AVSpeechSynthesisProviderVoice *)voice;
+
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+#pragma mark - AVSpeechSynthesisProviderAudioUnit -
+
+/*! @brief  An Audio Unit dedicated to speech synthesizer tasks
+    @discussion
+        An @c AVSpeechSynthesisProviderAudioUnit is dedicated to producing audio buffers that contain synthesized speech, as well as markers that provide metadata on those audio buffers. The text to be synthesized is delivered as an @c AVSpeechSynthesisProviderRequest
+ */
+
+#if __has_include(<AudioToolbox/AUAudioUnit.h>)
+API_AVAILABLE(ios(16.0), macos(13.0), tvos(16.0)) API_UNAVAILABLE(watchos) 
+@interface AVSpeechSynthesisProviderAudioUnit : AUAudioUnit
+
+/*! @brief A block that delivers information relevant to the generation of speech synthesis.
+    @param markers An array of speech synthesis markers providing metadata for the synthesized audio
+    @param speechRequest The speech request associated with the markers
+*/
+typedef void(^AVSpeechSynthesisProviderOutputBlock)(NSArray<AVSpeechSynthesisMarker *> *markers, AVSpeechSynthesisProviderRequest *speechRequest) API_AVAILABLE(ios(16.0), macos(13.0), watchos(9.0), tvos(16.0)) ;
+
+/*! @brief  Returns the voices this audio unit has available and ready for synthesis.
+    @discussion
+        This property should return the voices that are ready to synthesize and that a user can select from (usually through Settings).
+        Required for speech synthesizer audio unit extensions. An audio unit with a dynamic list of voices can override this property's getter to perform a more complex fetch.
+*/
+@property (nonatomic, strong) NSArray<AVSpeechSynthesisProviderVoice *> *speechVoices;
+
+/*! @brief A block property set by the host and called by the audio unit to supply metadata for a speech request.
+    @discussion
+        A synthesizer should call this block when it has produced data relevant to the audio buffers it is sending back to the host. In some cases speech output may be delayed until these markers are delivered. For example, word highlighting depends on marker data from synthesizers in order to properly time which words are highlighted. Many speech synthesizers generate this information on the fly, while synthesizing the audio. The array of markers can reference future audio buffers that have yet to be delivered.
+ 
+        There may be cases in which marker data is not fully known until further audio processing is done. In these cases, and in other cases where marker data has changed, calling this block with marker data that covers previously delivered audio buffer ranges will replace that range's marker data, as the earlier data is considered stale.
+ */
+@property (nonatomic, copy, nullable) AVSpeechSynthesisProviderOutputBlock speechSynthesisOutputMetadataBlock;
+
+
+/*! @brief  Sends a new speech request to be synthesized
+    @discussion
+        Sends a new speech request to the synthesizer to render. When the synthesizer audio unit is finished generating audio buffers for the speech request, it should indicate this within its internal render block, @c AUInternalRenderBlock, specifically through the @c AudioUnitRenderActionFlags flag @c kAudioOfflineUnitRenderAction_Complete.
+ */
+- (void)synthesizeSpeechRequest:(AVSpeechSynthesisProviderRequest *)speechRequest;
+
+/*! @brief  Informs the audio unit that the speech request job should be discarded.
+ */
+- (void)cancelSpeechRequest;
+
+@end
+#endif
+
+NS_ASSUME_NONNULL_END
+
+#endif /* AVSpeeechSynthesisProvider_h */
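
To illustrate the provider API above, here is a hedged sketch of how an extension might describe a voice and signal a change in voice availability. The voice name, identifier, and languages are hypothetical; a real extension would return such voices from its AVSpeechSynthesisProviderAudioUnit's speechVoices property.

```objc
#import <AVFAudio/AVFAudio.h>

// Sketch: describe one provider voice and notify the system that the set of
// available voices has changed. All concrete values here are placeholders.
static NSArray<AVSpeechSynthesisProviderVoice *> *ExampleProviderVoices(void)
{
    AVSpeechSynthesisProviderVoice *voice =
        [[AVSpeechSynthesisProviderVoice alloc] initWithName:@"Aurora"
                                                  identifier:@"com.example.synth.aurora"
                                            primaryLanguages:@[ @"en-US" ]
                                          supportedLanguages:@[ @"en-US", @"en-GB" ]];
    voice.voiceSize = 48 * 1024 * 1024;   // reported size on disk, in bytes (optional)
    voice.gender = AVSpeechSynthesisVoiceGenderFemale;

    // Tell the system the set of provider voices has changed, for example
    // after a new voice finishes downloading.
    [AVSpeechSynthesisProviderVoice updateSpeechVoices];

    return @[ voice ];
}
```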