-
Notifications
You must be signed in to change notification settings - Fork 511
AudioToolbox macOS xcode13.0 rc
Alex Soto edited this page Sep 14, 2021
·
1 revision
# AudioToolbox.framework
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h 2021-08-07 08:53:08.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h 2021-08-03 21:48:36.000000000 -0400
@@ -13,7 +13,6 @@
#import <AudioToolbox/AUParameters.h>
#import <Foundation/NSExtensionRequestHandling.h>
-#import <CoreMIDI/MIDIServices.h>
#if !TARGET_OS_IPHONE
typedef UInt32 AUAudioObjectID; // AudioObjectID
@@ -50,7 +49,7 @@
/*! @var AUEventSampleTimeImmediate
@brief A special value of AUEventSampleTime indicating "immediately."
@discussion
- Callers of AUScheduleParameterBlock, AUMIDIEventListBlock, and AUScheduleMIDIEventBlock can pass
+ Callers of AUScheduleParameterBlock and AUScheduleMIDIEventBlock can pass
AUEventSampleTimeImmediate to indicate that the event should be rendered as soon as
possible, in the next cycle. A caller may also add a small (less than 4096) sample frame
offset to this constant. The base AUAudioUnit implementation translates this constant to a
@@ -246,7 +245,7 @@
@param enabled
YES if the profile was enabled, NO if the profile was disabled.
*/
-typedef void (^AUMIDICIProfileChangedBlock)(uint8_t cable, MIDIChannelNumber channel, MIDICIProfile *profile, BOOL enabled) API_AVAILABLE(macos(10.14), ios(12.0)) __WATCHOS_PROHIBITED __TVOS_PROHIBITED;
+typedef void (^AUMIDICIProfileChangedBlock)(uint8_t cable, MIDIChannelNumber channel, MIDICIProfile *profile, BOOL enabled);
/*! @enum AUHostTransportStateFlags
@brief Flags describing the host's transport state.
@@ -482,8 +481,8 @@
/*! @property scheduleParameterBlock
@brief Block which hosts use to schedule parameters.
@discussion
- As with renderBlock, a host should fetch and cache this block before calling
- allocateRenderResources, if it intends to schedule parameters.
+ As with renderBlock, a host should fetch and cache this block before beginning to render,
+ if it intends to schedule parameters.
The block is safe to call from any thread context, including realtime audio render
threads.
@@ -593,56 +592,24 @@
/*! @property scheduleMIDIEventBlock
@brief Block used to schedule MIDI events.
@discussion
- As with renderBlock, a host should fetch and cache this block before calling
- allocateRenderResources if it intends to schedule MIDI events.
+ As with renderBlock, a host should fetch and cache this block before beginning to render,
+ if it intends to schedule MIDI events.
This is implemented in the base class. It is nil when musicDeviceOrEffect is NO.
- Subclasses should not override. When hosts schedule events via this block, they are
- sent to the Audio Unit via the list of AURenderEvents delivered to
+ Subclassers should not override. When hosts schedule events via this block, they are
+ delivered to the audio unit via the list of AURenderEvents delivered to
internalRenderBlock.
-
- All events sent via this block will be delivered to the internalRenderBlock in the MIDI
- protocol returned by the AudioUnitMIDIProtocol property. For example, if AudioUnitMIDIProtocol
- returns kMIDIProtocol_2_0, incoming events will be translated to MIDI 2.0 if necessary.
- If AudioUnitMIDIProtocol is not set, events will be delivered as legacy MIDI.
This bridged to the v2 API MusicDeviceMIDIEvent.
*/
@property (NS_NONATOMIC_IOSONLY, readonly, nullable) AUScheduleMIDIEventBlock scheduleMIDIEventBlock;
-/*! @property scheduleMIDIEventListBlock
- @brief Block used to schedule MIDIEventLists.
- @discussion
- As with renderBlock, a host should fetch and cache this block before calling
- allocateRenderResources, if it intends to schedule MIDI events.
-
- When scheduling events during the render cycle (e.g. via a render observer) eventSampleTime can be
- AUEventSampleTimeImmediate plus an optional buffer offset, in which case the event is
- scheduled at the provided offset position within the current render cycle.
-
- This is implemented in the base class. It is nil when musicDeviceOrEffect is NO.
-
- Subclassers should not override. When hosts schedule events via this block, they are
- delivered to the Audio Unit via the list of AURenderEvents delivered to
- internalRenderBlock.
-
- All events sent via this block will be delivered to the internalRenderBlock in the MIDI protocol returned by
- the AudioUnitMIDIProtocol property. For example, if this block is called with MIDI-1.0 events but
- AudioUnitMIDIProtocol returns kMIDIProtocol_2_0, incoming events will be translated to MIDI 2.0.
- If AudioUnitMIDIProtocol is not set, events will be delivered as legacy MIDI.
-
- Note: This block should be preferred over scheduleMIDIEventBlock going forward.
-
- This bridged to the v2 API MusicDeviceMIDIEventList.
-*/
-@property (NS_NONATOMIC_IOSONLY, readonly, nullable) AUMIDIEventListBlock scheduleMIDIEventListBlock API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
-
/*! @property MIDIOutputNames
@brief Count, and names of, a plug-in's MIDI outputs.
@discussion
A plug-in may override this method to inform hosts about its MIDI outputs. The size of the
- array is the number of outputs the Audio Unit supports. Each item in the array is the name
+ array is the number of outputs the audio unit supports. Each item in the array is the name
of the MIDI output at that index.
This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputCallbackInfo.
@@ -664,71 +631,21 @@
// These properties and methods are generally optional.
/*! @property MIDIOutputEventBlock
- @brief Block used by the host to access the MIDI output generated by an Audio Unit.
+ @brief Block used by the host to access the MIDI output generated by an audio unit.
@discussion
The host can set this block and the plug-in can call it in its renderBlock to provide to the
host the MIDI data associated with the current render cycle.
-
- All events sent via this block will be delivered to the host in the MIDI protocol returned by
- the hostMIDIProtocol property. For example, if hostMIDIProtocol is set to
- kMIDIProtocol_2_0, incoming events will be translated to MIDI 2.0. If hostMIDIProtocol
- is not set, events will be delivered as legacy MIDI.
- Note: AUMIDIEventListBlock should be preferred over this block going forward.
-
- This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputCallback.
- */
-@property (NS_NONATOMIC_IOSONLY, copy, nullable) AUMIDIOutputEventBlock MIDIOutputEventBlock API_AVAILABLE(macos(10.13), ios(11.0), watchos(4.0), tvos(11.0));
-
-/*! @property MIDIOutputEventListBlock
- @brief Block used by the host to access the MIDIEventList output generated by an Audio Unit.
- @discussion
- The host can set this block and the plug-in can call it in its renderBlock to provide to the
- host the MIDIEventList data associated with the current render cycle.
-
- All events sent via this block will be delivered to the host in the MIDI protocol returned by
- the hostMIDIProtocol property. For example, if hostMIDIProtocol is set to
- kMIDIProtocol_2_0, incoming events will be translated to MIDI 2.0. If hostMIDIProtocol
- is not set, events will be delivered as legacy MIDI.
-
- Note: This block should be preferred over MIDIOutputEventBlock going forward.
-
- Host should setup in the following order:
- - Set hostMIDIProtocol
- - Set MIDIOutputEventBlock
- - Call allocateRenderResourcesAndReturnError
-
- This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputEventListCallback.
+ This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputCallback.
*/
-@property (NS_NONATOMIC_IOSONLY, copy, nullable) AUMIDIEventListBlock MIDIOutputEventListBlock API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
-
-/*! @property AudioUnitMIDIProtocol
- @brief The MIDI protocol used by the Audio Unit for receiving MIDIEventList data.
- @discussion
- All translatable messages will be converted (if necessary) to this protocol prior to delivery
- to the Audio Unit.
-
- This is bridged to the v2 API property kAudioUnitProperty_AudioUnitMIDIProtocol.
-*/
-@property (NS_NONATOMIC_IOSONLY, readonly) MIDIProtocolID AudioUnitMIDIProtocol API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
-
-/*! @property hostMIDIProtocol
- @brief The MIDI protocol to be used by the host for receiving MIDIEventList data.
- @discussion
- Hosts should set this property to the protocol they wish to receive MIDIEventList data
- from the Audio Unit. This should be set prior to initialization, all translatable messages
- will be converted (if necessary) to this property's protocol prior to delivery to the host.
-
- This is bridged to the v2 API property kAudioUnitProperty_HostMIDIProtocol.
-*/
-@property (NS_NONATOMIC_IOSONLY) MIDIProtocolID hostMIDIProtocol API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
+@property (NS_NONATOMIC_IOSONLY, copy, nullable) AUMIDIOutputEventBlock MIDIOutputEventBlock API_AVAILABLE(macos(10.13), ios(11.0), watchos(4.0), tvos(11.0));
/*! @property fullState
- @brief A persistable snapshot of the Audio Unit's properties and parameters, suitable for
+ @brief A persistable snapshot of the audio unit's properties and parameters, suitable for
saving as a user preset.
@discussion
- Hosts may use this property to save and restore the state of an Audio Unit being used in a
- user preset or document. The Audio Unit should not persist transitory properties such as
+ Hosts may use this property to save and restore the state of an audio unit being used in a
+ user preset or document. The audio unit should not persist transitory properties such as
stream formats, but should save and restore all parameters and custom properties.
The base class implementation of this property saves the values of all parameters
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h 2021-08-07 05:20:03.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h 2021-08-03 21:48:35.000000000 -0400
@@ -116,8 +116,7 @@
AURenderEventParameter = 1,
AURenderEventParameterRamp = 2,
AURenderEventMIDI = 8,
- AURenderEventMIDISysEx = 9,
- AURenderEventMIDIEventList = 10
+ AURenderEventMIDISysEx = 9
};
#pragma pack(4)
@@ -157,17 +156,6 @@
uint8_t data[3]; //!< The bytes of the MIDI event. Running status will not be used.
} AUMIDIEvent;
-/// Describes a single scheduled MIDIEventList.
-typedef struct AUMIDIEventList {
- union AURenderEvent *__nullable next; //!< The next event in a linked list of events.
- AUEventSampleTime eventSampleTime; //!< The sample time at which the event is scheduled to occur.
- AURenderEventType eventType; //!< AURenderEventMIDI or AURenderEventMIDISysEx.
- uint8_t reserved; //!< Must be 0.
- uint8_t cable; //!< The virtual cable number.
- MIDIEventList eventList; //!< A structure containing UMP packets.
-} AUMIDIEventList;
-
-
/*! @brief A union of the various specific render event types.
@discussion
Determine which variant to use via head.eventType. AURenderEventParameter and
@@ -178,7 +166,6 @@
AURenderEventHeader head;
AUParameterEvent parameter;
AUMIDIEvent MIDI;
- AUMIDIEventList MIDIEventsList;
} AURenderEvent;
#pragma pack()
@@ -242,9 +229,7 @@
If the plug-in produces more MIDI output data than the default size of the allocated buffer,
then the plug-in can provide this property to increase the size of this buffer.
- The value represents the number of 3-byte Legacy MIDI messages that fit into the buffer or
- a single MIDIEventList containing 1 MIDIEventPacket of 2 words when using MIDI 2.0 (MIDIEventList based API's).
-
+ The value represents the number of 3-byte MIDI 1.0 messages that fit into the buffer.
This property is set to the default value by the framework.
In case of kAudioUnitErr_MIDIOutputBufferFull errors caused by producing too much MIDI
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h 2021-08-09 03:21:09.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h 2021-08-03 21:48:36.000000000 -0400
@@ -365,8 +365,8 @@
merges the two inputs to the single output
@constant kAudioUnitSubType_NewTimePitch
- An audio unit that provides good quality time stretching and pitch shifting.
- It is computationally less expensive than kAudioUnitSubType_TimePitch.
+ An audio unit that provides good quality time stretching and pitch shifting
+ while still being very fast.
@constant kAudioUnitSubType_AUiPodTimeOther
An audio unit that provides time domain time stretching.
@@ -384,19 +384,20 @@
kAudioUnitSubType_RoundTripAAC = 'raac',
};
-#if !TARGET_OS_WATCH
+#if !TARGET_OS_IPHONE
/*!
- @enum Apple converter audio unit sub types (macOS and iOS only)
+ @enum Apple converter audio unit sub types (macOS only)
@constant kAudioUnitSubType_TimePitch
- An audio unit that provides high quality time stretching and pitch shifting.
+ An audio unit that can be used to have independent control of both playback
+ rate and pitch. It provides a generic view, so can be used in both a UI and
+ programmatic context. It also comes in an Offline version so can be used to
+ process audio files.
*/
CF_ENUM(UInt32) {
kAudioUnitSubType_TimePitch = 'tmpt'
};
-#endif //!TARGET_OS_WATCH
-
-#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST
+#elif !TARGET_OS_MACCATALYST
/*!
@enum Apple converter audio unit sub types (iOS only)
@constant kAudioUnitSubType_AUiPodTime
@@ -1102,43 +1103,23 @@
/*!
@constant kAudioComponentInstanceInvalidationNotification
- @abstract Notification generated when the connection to an audio unit extension process
- is invalidated.
+ @abstract Notification generated when an audio unit extension process exits abnormally.
@discussion
Register for this notification name with `[NSNotificationCenter defaultCenter]` or
- `CFNotificationCenterGetLocalCenter()`. The "object" refers to an AUAudioUnit instance
- to be observed, or can be nil to observe all instances.
-
- This notification can happen for several reasons, for instance the connection being
- invalidated or the process abnormally ending. There can be multiple notifications for
- the same event (i.e. a terminated process will also invalidate the connection).
-
- The notification's userInfo dictionary may contain the following keys, depending on
- the reason for the invalidation and the platform in which it's running:
-
- @"audioUnit", a NSValue whose pointerValue is the AudioUnit or
- AudioComponentInstance which is wrapping the AUAudioUnit communicating with
- the extension process. (This may be null if there is no such component instance.).
- For example:
+ `CFNotificationCenterGetLocalCenter()`. The "object" refers to an AUAudioUnit instance to
+ be observed, or can be nil to observe all instances. The notification's userInfo
+ dictionary contains a key, "audioUnit", an NSValue whose pointerValue is the
+ AudioUnit or AudioComponentInstance which is wrapping the AUAudioUnit communicating with the
+ extension process. (This may be null if there is no such component instance.) For example:
```
- [[NSNotificationCenter defaultCenter]
- addObserverForName:(NSString *)kAudioComponentInstanceInvalidationNotification
- object:nil queue:nil usingBlock:^(NSNotification *note) {
+ [[NSNotificationCenter defaultCenter] addObserverForName:(NSString *)kAudioComponentInstanceInvalidationNotification object:nil queue:nil usingBlock:^(NSNotification *note) {
AUAudioUnit *auAudioUnit = (AUAudioUnit *)note.object;
NSValue *val = note.userInfo[@"audioUnit"];
AudioUnit audioUnit = (AudioUnit)val.pointerValue;
- NSLog(@"Received kAudioComponentInstanceInvalidationNotification: auAudioUnit %@, audioUnit %p",
- auAudioUnit, audioUnit);
+ NSLog(@"Received kAudioComponentInstanceInvalidationNotification: auAudioUnit %@, audioUnit %p", auAudioUnit, audioUnit);
}];
```
-
- @"Service PID", a NSNumber with the process ID for the service.
- @"Host PID", a NSNumber with the process ID for the host.
- @"Executable Path", a NSString with the path for the executable that may be responsible
- for the abnormal exit.
- @"Descriptions" a NSArray of NSValues representing byte encoded
- AudioComponentDescriptions that may be responsible for the abnormal exit.
*/
extern const CFStringRef kAudioComponentInstanceInvalidationNotification
API_AVAILABLE(macos(10.11), ios(9.0), watchos(2.0), tvos(9.0));
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h 2021-08-09 03:21:09.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h 2021-08-03 21:48:36.000000000 -0400
@@ -712,9 +712,7 @@
converter's output format. On exit, the number of packets of converted
data that were written to outOutputData.
@param outOutputData
- The converted output data is written to this buffer. On entry, the buffers'
- mDataByteSize fields (which must all be the same) reflect buffer capacity.
- On exit, mDataByteSize is set to the number of bytes written.
+ The converted output data is written to this buffer.
@param outPacketDescription
If non-null, and the converter's output uses packet descriptions, then
packet descriptions are written to this array. It must point to a memory
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioHardwareService.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioHardwareService.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioHardwareService.h 2021-08-07 05:20:03.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioHardwareService.h 2021-08-03 21:48:36.000000000 -0400
@@ -46,16 +46,16 @@
clients can be informed when the service has been reset for some reason.
When a reset happens, any state the client has with AHS, such as cached data
or added listeners, must be re-established by the client.
- @constant kAudioHardwareServiceDeviceProperty_VirtualMainVolume
+ @constant kAudioHardwareServiceDeviceProperty_VirtualMasterVolume
A Float32 that represents the value of the volume control. The range is
between 0.0 and 1.0 (inclusive). This actual volume controls this property
manipulates depends on what the device provides. If the device has a true
- main volume control, this property directly controls that. If the device
+ master volume control, this property directly controls that. If the device
has individual channel volume controls, this property will apply to those
identified by the device's preferred multi-channel layout (or preferred
stereo pair if the device is stereo only). Note that this control maintains
the relative balance between all the channels it affects.
- @constant kAudioHardwareServiceDeviceProperty_VirtualMainBalance
+ @constant kAudioHardwareServiceDeviceProperty_VirtualMasterBalance
A Float32 that represents the value of the stereo balance control. The range
is from 0.0 (all power to the left) to 1.0 (all power to the right) with
the value of 0.5 signifying that the channels have equal power. This control
@@ -66,12 +66,8 @@
CF_ENUM(AudioObjectPropertySelector)
{
kAudioHardwareServiceProperty_ServiceRestarted = 'srst',
-
- kAudioHardwareServiceDeviceProperty_VirtualMainVolume = 'vmvc',
- kAudioHardwareServiceDeviceProperty_VirtualMasterVolume API_DEPRECATED_WITH_REPLACEMENT("kAudioHardwareServiceDeviceProperty_VirtualMainVolume", macos(10.5, 10.5)) API_UNAVAILABLE(ios, watchos, tvos) = kAudioHardwareServiceDeviceProperty_VirtualMainVolume,
-
- kAudioHardwareServiceDeviceProperty_VirtualMainBalance = 'vmbc',
- kAudioHardwareServiceDeviceProperty_VirtualMasterBalance API_DEPRECATED_WITH_REPLACEMENT("kAudioHardwareServiceDeviceProperty_VirtualMainBalance", macos(10.5, 10.5)) API_UNAVAILABLE(ios, watchos, tvos) = kAudioHardwareServiceDeviceProperty_VirtualMainBalance,
+ kAudioHardwareServiceDeviceProperty_VirtualMasterVolume = 'vmvc',
+ kAudioHardwareServiceDeviceProperty_VirtualMasterBalance = 'vmbc'
};
//==================================================================================================
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioSession.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioSession.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioSession.h 2021-08-07 05:20:04.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioSession.h 2021-08-03 21:48:36.000000000 -0400
@@ -174,7 +174,7 @@
kAudioSessionOverrideAudioRoute_None = 0,
kAudioSessionOverrideAudioRoute_Speaker = 'spkr'
};
-
+
//==================================================================================================
#pragma mark AudioSession reason codes for route change
@@ -375,7 +375,7 @@
The actual IO buffer duration may be different
@constant kAudioSessionProperty_AudioCategory
A UInt32 value indicating the audio category for the AudioSession (see constants above).
- @constant kAudioSessionProperty_AudioRouteChange
+ @constant kAudioSessionProperty_AudioRouteChange
The value for this property is ONLY provided with the property changed callback. You
cannot get the value of this property (or set it).
The property changed callback provides a CFDictionaryRef with keyed values:
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h 2021-08-09 03:24:07.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h 2021-08-03 21:48:35.000000000 -0400
@@ -330,8 +330,10 @@
// Parameters for the AUTimePitch, AUTimePitch (offline), AUPitch units
CF_ENUM(AudioUnitParameterID) {
kTimePitchParam_Rate = 0,
+#if !TARGET_OS_IPHONE
kTimePitchParam_Pitch = 1,
kTimePitchParam_EffectBlend = 2 // only for the AUPitch unit
+#endif
};
// Parameters for AUNewTimePitch
@@ -473,9 +475,7 @@
kDynamicsProcessorParam_ReleaseTime = 5,
// Global, dB, -40->40, 0
- kDynamicsProcessorParam_OverallGain = 6,
-
- kDynamicsProcessorParam_MasterGain API_DEPRECATED_WITH_REPLACEMENT("kDynamicsProcessorParam_OverallGain", ios(2.0, 2.0), watchos(2.0, 2.0), tvos(9.0, 9.0) ,macos(10.5,10.5)) = kDynamicsProcessorParam_OverallGain,
+ kDynamicsProcessorParam_MasterGain = 6,
// Global, dB, read-only parameter
kDynamicsProcessorParam_CompressionAmount = 1000,
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h 2021-08-07 08:53:08.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h 2021-08-03 21:48:35.000000000 -0400
@@ -406,6 +406,7 @@
@constant kAudioUnitProperty_ParameterIDName
Scope: any
+ Element: AudioUnitParameterID of the parameter being queried
Value Type: AudioUnitParameterIDName
Access: read
@@ -602,16 +603,16 @@
Access: read
Used to determine how many MIDI output streams the audio unit can generate (and the name for
- each of these outputs). Each MIDI output is a complete MIDI or MIDIEventList data stream, such as embodied
- by a MIDIEndpointRef in CoreMIDI.
+ each of these outputs). Each MIDI output is a complete MIDI data stream, such as embodied by a
+ MIDIEndpointRef in CoreMIDI.
The host can retrieve an array of CFStringRefs published by the audio unit, where :
- - the size of the array is the number of MIDI Outputs the Audio Unit supports
+ - the size of the array is the number of MIDI Outputs the audio unit supports
- each item in the array is the name for that output at that index
- The host owns this array and its elements and should release them when it is finished.
+ The host should release the array when it is finished with it.
- Once the host has determined that the Audio Unit supports this feature, it can then provide a
+ Once the host has determined that the audio unit supports this feature, it can then provide a
callback, through which the audio unit can send the MIDI data.
See the documentation for the kAudioUnitProperty_MIDIOutputCallback property.
@@ -775,79 +776,13 @@
interact with the AudioUnit through this block; it is for the exclusive use
of the OS.
- @constant kAudioUnitProperty_LastRenderSampleTime
- Scope: Global
- Value Type: Float64
- Access: read-only
-
- The absolute sample frame time of the most recent render timestamp.
-
@constant kAudioUnitProperty_LoadedOutOfProcess
Scope: Global
Value Type: UInt32
Access: read-only
- Indicates whether an Audio Unit is loaded out-of-process, which might happen
+ Indicates whether an audio unit is loaded out-of-process, which might happen
at the request of the host or when loading in-process is not possible.
-
- @constant kAudioUnitProperty_MIDIOutputEventListCallback
- Scope: Global
- Value Type: block: void (^)(AUEventSampleTime, const struct MIDIEventList *)
- Access: write
-
- The host sets this property on the Audio Unit with the callback set appropriately.
-
- Operational Parameters:
- In the render call, just as is the expected usage of the AUHostCallbacks, the audio unit can
- call the provided callback to provide MIDIEventList data to the host that it will associate with the
- current AudioUnitRender.
-
- The Audio Unit in the callback provides:
- - the AUEventSampleTime that was provided to the audio unit for this particular call of
- AudioUnitRender
- - a MIDIEventList containing MIDI data. The time stamp values contained within the
- MIDIEventPacket in this list are **sample offsets*** from the AudioTimeStamp provided.
- This allows MIDI data to be time-stamped with a sample offset that is directly associated
- with the audio data it is generating in the current call to the AudioUnitRender function
-
- Host should setup in the following order:
- - Set host desired MIDIProtocolID
- - Set kAudioUnitProperty_MIDIOutputEventListCallback
- - Initialize the Audio Unit
-
- Note: kAudioUnitProperty_HostMIDIProtocol can not be changed while the Audio Unit is initialized.
-
- There is no implied or expected association between the number (or position) of an audio unit's
- audio or MIDI outputs.
-
- Compare to property kAudioUnitProperty_MIDIOutputCallback.
-
- @constant kAudioUnitProperty_AudioUnitMIDIProtocol
- Scope: Global
- Value Type: SInt32
- Access: read
-
- A signed 32-bit integer representing the audio unit's MIDI protocol. This should be one of the
- values in the MIDIProtocolID enum (see <CoreMIDI/MIDIServices.h>)..
-
- The framework will convert all incoming MIDI data to the protocol set in this property, the host can query
- this property to detect the audio unit's current MIDI protocol.
-
- Note: This property should not be changed after the audio has been initialized.
-
- @constant kAudioUnitProperty_HostMIDIProtocol
- Scope: Global
- Value Type: SInt32
- Access: write
-
- A signed 32-bit integer representing the hosts MIDI protocol. This should be set to one of the values
- in the MIDIProtocolID enum (see <CoreMIDI/MIDIServices.h>).
-
- Hosts should set this property to the protocol that MIDI data is desired to be delivered in. The framework will
- convert all MIDI data sent to the host to the protocol value set in this property, an audio unit can query
- this property to detect the hosts MIDI protocol.
-
- Note: This property should not be changed after the audio unit has been initialized.
*/
CF_ENUM(AudioUnitPropertyID)
{
@@ -864,7 +799,7 @@
kAudioUnitProperty_SupportedNumChannels = 13,
kAudioUnitProperty_MaximumFramesPerSlice = 14,
kAudioUnitProperty_ParameterValueStrings = 16,
- kAudioUnitProperty_AudioChannelLayout = 19,
+ kAudioUnitProperty_AudioChannelLayout = 19,
kAudioUnitProperty_TailTime = 20,
kAudioUnitProperty_BypassEffect = 21,
kAudioUnitProperty_LastRenderError = 22,
@@ -882,7 +817,7 @@
kAudioUnitProperty_FrequencyResponse = 52,
kAudioUnitProperty_ParameterHistoryInfo = 53,
kAudioUnitProperty_NickName = 54,
- kAudioUnitProperty_OfflineRender = 37,
+ kAudioUnitProperty_OfflineRender = 37,
kAudioUnitProperty_ParameterIDName = 34,
kAudioUnitProperty_ParameterStringFromValue = 33,
kAudioUnitProperty_ParameterClumpName = 35,
@@ -896,7 +831,6 @@
kAudioUnitProperty_RenderContextObserver
__SWIFT_UNAVAILABLE_MSG("Swift is not supported for use with audio realtime threads")
= 60,
- kAudioUnitProperty_LastRenderSampleTime = 61,
kAudioUnitProperty_LoadedOutOfProcess = 62,
#if !TARGET_OS_IPHONE
kAudioUnitProperty_FastDispatch = 5,
@@ -909,11 +843,6 @@
kAudioUnitProperty_MIDIOutputCallbackInfo = 47,
kAudioUnitProperty_MIDIOutputCallback = 48,
-
- kAudioUnitProperty_MIDIOutputEventListCallback = 63,
-
- kAudioUnitProperty_AudioUnitMIDIProtocol = 64,
- kAudioUnitProperty_HostMIDIProtocol = 65
};
#if AU_SUPPORT_INTERAPP_AUDIO
@@ -1359,34 +1288,6 @@
typedef void (^AURenderContextObserver)(const AudioUnitRenderContext *context)
__SWIFT_UNAVAILABLE_MSG("Swift is not supported for use with audio realtime threads");
-/*!
- @struct MIDIEventList
- @abstract Forward declaration of MIDIEventList found in <CoreMIDI/MIDIServices.h>
-*/
-typedef struct MIDIEventList MIDIEventList;
-
-/*! @typedef AUEventSampleTime
- @brief Expresses time as a sample count.
- @discussion
- Sample times are normally positive, but hosts can propagate HAL sample times through audio
- units, and HAL sample times can be small negative numbers.
-*/
-typedef int64_t AUEventSampleTime;
-
-/*!
- @typedef AUMIDIEventListBlock
- @abstract A block used by an audio unit to send or receive MIDIEventList data.
- @param eventSampleTime
- The time in samples at which the MIDI events are to occur.
- @param cable
- The virtual cable number associated with this MIDI data.
- @param eventList
- One full MIDI, partial MIDI SysEx, or a full SysEx UMP message.
-*/
-typedef OSStatus (^ AUMIDIEventListBlock)(AUEventSampleTime eventSampleTime,
- uint8_t cable,
- const struct MIDIEventList * eventList);
-
//=====================================================================================================================
#pragma mark - Parameter Definitions
@@ -1440,7 +1341,7 @@
octaves in relative pitch where a value of 1 is equal to 1200 cents
@constant kAudioUnitParameterUnit_BPM
beats per minute, ie tempo
- @constant kAudioUnitParameterUnit_Beats
+ @constant kAudioUnitParameterUnit_Beats
time relative to tempo, i.e., 1.0 at 120 BPM would equal 1/2 a second
@constant kAudioUnitParameterUnit_Milliseconds
parameter is expressed in milliseconds
@@ -1448,8 +1349,6 @@
for compression, expansion ratio, etc.
@constant kAudioUnitParameterUnit_CustomUnit
this is the parameter unit type for parameters that present a custom unit name
- @constant kAudioUnitParameterUnit_MIDI2Controller
- a generic MIDI 2.0 controller value with 32-bit range
*/
typedef CF_ENUM(UInt32, AudioUnitParameterUnit)
{
@@ -1479,8 +1378,7 @@
kAudioUnitParameterUnit_Beats = 23,
kAudioUnitParameterUnit_Milliseconds = 24,
kAudioUnitParameterUnit_Ratio = 25,
- kAudioUnitParameterUnit_CustomUnit = 26,
- kAudioUnitParameterUnit_MIDI2Controller = 27
+ kAudioUnitParameterUnit_CustomUnit = 26
};
/*!
@@ -2544,8 +2442,7 @@
Value Type: UInt32
Access: read/write
Bypass all processing done by the voice processing unit. When set to 0
- (default), the processing is activated otherwise it is disabled. Voice Isolation
- and Wide Spectrum take priority over Bypass.
+ (default), the processing is activated otherwise it is disabled.
@constant kAUVoiceIOProperty_VoiceProcessingEnableAGC
@discussion Scope: Global
@@ -2564,37 +2461,10 @@
CF_ENUM(AudioUnitPropertyID) {
kAUVoiceIOProperty_BypassVoiceProcessing = 2100,
kAUVoiceIOProperty_VoiceProcessingEnableAGC = 2101,
- kAUVoiceIOProperty_MuteOutput = 2104
-};
-
-/*!
- @enum Speech activity event described by AUVoiceIO
-*/
-typedef CF_ENUM(UInt32, AUVoiceIOSpeechActivityEvent)
-{
- kAUVoiceIOSpeechActivityHasStarted = 0,
- kAUVoiceIOSpeechActivityHasEnded = 1
+ kAUVoiceIOProperty_MuteOutput = 2104
+
};
-/*!
- @typedef AUVoiceIOMutedSpeechActivityEventListener
- @abstract Block called to receive speech activity event while the client is muted.
-*/
-typedef void (^AUVoiceIOMutedSpeechActivityEventListener)(AUVoiceIOSpeechActivityEvent event);
-
-/*!
-
- @constant kAUVoiceIOProperty_MutedSpeechActivityEventListener
- @discussion Scope: Global
- Value Type: AUVoiceIOMutedSpeechActivityEventListener
- Access: write only
- Register a listener to be notified when speech activity event occurs while the client is muted.
- Continuous presence of or lack of speech activity during mute will not cause redundant notification.
- In order to use this API, it's expected to implement the mute via the kAUVoiceIOProperty_MuteOutput.
- */
-CF_ENUM(AudioUnitPropertyID) {
- kAUVoiceIOProperty_MutedSpeechActivityEventListener = 2106
-} API_AVAILABLE(ios(15.0)) API_UNAVAILABLE(macos, watchos, tvos);
#pragma mark - AUVoiceProcessing unit deprecated properties
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CAFFile.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CAFFile.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CAFFile.h 2021-08-01 06:51:11.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CAFFile.h 2021-03-16 04:44:09.000000000 -0400
@@ -11,12 +11,9 @@
#include <CoreAudioTypes/CoreAudioTypes.h>
-#if TARGET_OS_WIN32
-#define ATTRIBUTE_PACKED
-#pragma pack(push, 1)
-#else
+
#define ATTRIBUTE_PACKED __attribute__((__packed__))
-#endif
+
// In a CAF File all of these types' byte order is big endian.
// When reading or writing these values the program will need to flip byte order to native endian
@@ -357,9 +354,7 @@
} ATTRIBUTE_PACKED;
typedef struct CAFUMIDChunk CAFUMIDChunk;
-#if TARGET_OS_WIN32
-#pragma pack(pop)
-#endif
+
////////////////////////////////////////////////////////////////////////////////////////////////
#endif // AudioToolbox_CAFFile_h
diff -ruN /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h
--- /Applications/Xcode_13.0.0-beta5.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h 2021-08-07 05:20:04.000000000 -0400
+++ /Applications/Xcode_13.0.0-rc.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h 2021-08-03 21:48:36.000000000 -0400
@@ -171,12 +171,6 @@
*/
typedef AudioComponentInstance MusicDeviceComponent;
-/*!
- @struct MIDIEventList
- @abstract Forward declaration of MIDIEventList found in <CoreMIDI/MIDIServices.h>
-*/
-typedef struct MIDIEventList MIDIEventList;
-
//=====================================================================================================================
#pragma mark -
#pragma mark Functions
@@ -187,8 +181,7 @@
@discussion This is the API used to send MIDI channel messages to an audio unit. The status and data parameters
are used exactly as described by the MIDI specification, including the combination of channel and
- command in the status byte. All events sent via MusicDeviceMIDIEventList will be delivered to the
- audio unit in the MIDI protocol returned by kAudioUnitProperty_AudioUnitMIDIProtocol.
+ command in the status byte.
@param inUnit
The audio unit
@@ -236,38 +229,6 @@
const UInt8 * inData,
UInt32 inLength) API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
-/*!
- @function MusicDeviceMIDIEventList
- @abstract Used to send MIDI messages to an audio unit
-
- @discussion This API is suitable for sending Universal MIDI Packet (UMP) MIDI messages to an audio unit. The message must be
- a full non-SysEx event, a partial SysEx event, or a complete SysEx event. Running status is not allowed. MIDI 1.0 in
- universal packets (MIDI-1UP) and MIDI 2.0 messages are allowed. All events sent via MusicDeviceMIDIEventList will
- be delivered to the audio unit in the MIDI protocol returned by kAudioUnitProperty_AudioUnitMIDIProtocol.
-
- This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputCallback.
-
- @param inUnit
- The audio unit
- @param inOffsetSampleFrame
- If you are scheduling the MIDIEventList from the audio unit's render thread, then you can supply a
- sample offset that the audio unit may apply within its next audio unit render.
- This allows you to schedule to the sample, the time when a MIDI command is applied and is particularly
- important when starting new notes. If you are not scheduling in the audio unit's render thread,
- then you should set this value to 0
-
- inOffsetSampleFrame should serve as the base offset for each packet's timestamp i.e.
- sampleOffset = inOffsetSampleFrame + evtList.packet[0].timeStamp
-
- @param evtList
- The MIDIEventList to be sent
-
- @result noErr, or an audio unit error code
-*/
-extern OSStatus
-MusicDeviceMIDIEventList( MusicDeviceComponent inUnit,
- UInt32 inOffsetSampleFrame,
- const struct MIDIEventList * evtList) API_AVAILABLE(macos(12), ios(15.0), tvos(15.0));
/*!
@function MusicDeviceStartNote
@@ -352,7 +313,6 @@
@constant kMusicDeviceReleaseInstrumentSelect
@constant kMusicDeviceStartNoteSelect
@constant kMusicDeviceStopNoteSelect
- @constant kMusicDeviceMIDIEventListSelect
*/
enum {
kMusicDeviceRange = 0x0100,
@@ -361,8 +321,7 @@
kMusicDevicePrepareInstrumentSelect = 0x0103,
kMusicDeviceReleaseInstrumentSelect = 0x0104,
kMusicDeviceStartNoteSelect = 0x0105,
- kMusicDeviceStopNoteSelect = 0x0106,
- kMusicDeviceMIDIEventListSelect = 0x0107
+ kMusicDeviceStopNoteSelect = 0x0106
};
//=====================================================================================================================
- README
- xcode13.0 Binding Status
- xcode13.1 Binding Status
- xcode13.2 Binding Status
- xcode13.3 Binding Status
- xcode13.4 Binding Status
- xcode14.0 Binding Status
- xcode14.1 Binding Status
- xcode14.2 Binding Status
- xcode14.3 Binding Status
- xcode15.0 Binding Status
- xcode15.1 Binding Status
- xcode15.3 Binding Status
- xcode15.4 Binding Status
- xcode16.0 Binding Status
- xcode16.1 Binding Status