VideoToolbox macOS xcode16.3 b2


# VideoToolbox.framework

```diff
diff -ruN /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h
--- /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h	2025-02-08 04:03:22
+++ /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h	2025-02-22 12:57:34
@@ -87,21 +87,17 @@
 
 
 /*!
-	@method		processWithCommandBuffer:parameters:completionHandler
+	@method		processWithCommandBuffer:parameters
 	@abstract	This API provides a Metal API friendly version of processWithParameters.
-	@discussion	This function allows clients to add the effect to an existing Metal command buffer. This can be used by clients that have an existing Metal pipeline and want to add this effect to it. Note: this function will wait until all previously inserted tasks in the command buffer finished before running. Tasks inserted after the processWithCommandBuffer will run after the effect is applied. Note also that this is an asynchronous call
+	@discussion	This function allows clients to add the effect to an existing Metal command buffer. This can be used by clients that have an existing Metal pipeline and want to add this effect to it. Note: this function will wait until all previously inserted tasks in the command buffer finished before running. Tasks inserted after the processWithCommandBuffer will run after the effect is applied.  Processing does not happen until the commandBuffer is executed.
 	@param		commandBuffer
 		An existing Metal command buffer where the frame processing will be inserted.
 	@param		parameters
 		A VTFrameProcessorParameters based object to specify additional frame based parameters to be used during processing. it needs to match the configuration type used during start session.
-	@param		completionHandler
-		This completion handler will be called when frame processing in competed.  The completion handler will receive the same parameters object that was provided tot he original call, as well as an NSError which will contain an error code if processing was not successful.
 */
 
 - (void) processWithCommandBuffer:(id <MTLCommandBuffer>) commandBuffer
-                       parameters:(id<VTFrameProcessorParameters>)parameters
-                completionHandler:(void (^)(id<VTFrameProcessorParameters> , NSError * _Nullable) )completionHandler
-                    NS_SWIFT_NAME(process(commandBuffer:parameters:completionHandler:));
+                       parameters:(id<VTFrameProcessorParameters>)parameters;
 
 
 
diff -ruN /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h
--- /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h	2025-02-08 04:03:22
+++ /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h	2025-02-22 12:57:34
@@ -26,8 +26,8 @@
 
 /*!
 	@method    initWithBuffer
-	@abstract  initialize class with a CVPixelBufferRef and a presentation time. Buffer is retained.  Returns NULL if no CVPixelBuffer is provided or CVPixelBuffer is not IOSurface backed.
-	@param     buffer The CVPixelBufferRef that this VTFrameProcessorFrame will wrap.  Must be non-NULL and IOSurface backed.
+	@abstract  initialize class with a CVPixelBufferRef and a presentation time. Buffer is retained.  Returns nil if no CVPixelBuffer is provided or CVPixelBuffer is not IOSurface backed.
+	@param     buffer The CVPixelBufferRef that this VTFrameProcessorFrame will wrap.  Must not be nil and must be IOSurface backed.
 	@param     presentationTimeStamp   The presentation timestamp of the buffer.
 */
 - (nullable instancetype)initWithBuffer:(CVPixelBufferRef)buffer
@@ -64,9 +64,9 @@
 
 /*!
 	@method    initWithForwardFlow
-	@abstract  initialize class with forward and backward optical flow CVPixelBufferRefs. Instances retain the buffers backing them. Returns NULL if a NULL CVPixelBuffer is provided or CVPixelBuffers are not IOSurface backed.
-	@param     forwardFlow CVPixelBufferRef that contains forward optical flow. Must be non-NULL and IOSurface backed.
-	@param     backwardFlow CVPixelBufferRef that contains backward optical flow. Must be non-NULL and IOSurface backed.
+	@abstract  initialize class with forward and backward optical flow CVPixelBufferRefs. Instances retain the buffers backing them. Returns nil if a nil CVPixelBuffer is provided or CVPixelBuffers are not IOSurface backed.
+	@param     forwardFlow CVPixelBufferRef that contains forward optical flow. Must not be nil and must be IOSurface backed.
+	@param     backwardFlow CVPixelBufferRef that contains backward optical flow. Must not be nil and must be IOSurface backed.
 */
 - (nullable instancetype)initWithForwardFlow:(CVPixelBufferRef)forwardFlow
                        backwardFlow:(CVPixelBufferRef)backwardFlow;
diff -ruN /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h
--- /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h	2025-02-08 04:03:22
+++ /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h	2025-02-22 12:57:34
@@ -43,8 +43,9 @@
  */
 API_AVAILABLE(macos(15.4)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos)
 typedef NS_ENUM(NSInteger, VTFrameRateConversionParametersSubmissionMode) {
-	VTFrameRateConversionParametersSubmissionModeRandom             = 1,    // Frames are submitted in non-sequential order
-	VTFrameRateConversionParametersSubmissionModeSequential         = 2,    // Frames are submitted sequentially following presentation time order
+	VTFrameRateConversionParametersSubmissionModeRandom							= 1,    // Frames are submitted in non-sequential order
+	VTFrameRateConversionParametersSubmissionModeSequential						= 2,    // Frames are submitted sequentially following presentation time order
+	VTFrameRateConversionParametersSubmissionModeSequentialReferencesUnchanged	= 3,    // Frames are being submitted sequentially.  This processing request uses the same source and next reference frames as the previous submission.
 } NS_SWIFT_NAME(VTFrameRateConversionParameters.SubmissionMode);
 
 
@@ -66,6 +67,8 @@
 /*!
  @abstract Creates a new VTFrameRateConversionConfiguration with specified flow width and height.
 
+ @discussion init will return nil if dimensions are out of range or revision is unsupported.
+
  @param frameWidth    Width of source frame in pixels. Maximum value is 8192 for macOS, and 4096 for iOS.
 
  @param frameHeight   Height of source frame in pixels. Maximum value is 4320 for macOS, and 2160 for iOS.
@@ -77,11 +80,11 @@
  @param revision The specific algorithm or configuration revision that is to be used to perform the request.
  
 */
-- (instancetype)initWithFrameWidth:(NSInteger)frameWidth
-					   frameHeight:(NSInteger)frameHeight
-				usePrecomputedFlow:(BOOL)usePrecomputedFlow
-			 qualityPrioritization:(VTFrameRateConversionConfigurationQualityPrioritization)qualityPrioritization
-						  revision:(VTFrameRateConversionConfigurationRevision)revision;
+- (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
+								frameHeight:(NSInteger)frameHeight
+						 usePrecomputedFlow:(BOOL)usePrecomputedFlow
+					  qualityPrioritization:(VTFrameRateConversionConfigurationQualityPrioritization)qualityPrioritization
+								   revision:(VTFrameRateConversionConfigurationRevision)revision;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
@@ -133,20 +136,19 @@
  * @property frameSupportedPixelFormats
  * @abstract list of source frame supported pixel formats for current configuration
  */
-
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats;
 
 /**
  * @property sourcePixelBufferAttributes
  * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source frames and reference frames.
 */
-@property (nonatomic, readonly) NSDictionary * sourcePixelBufferAttributes;
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
 /**
  * @property destinationPixelBufferAttributes
  * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames.
 */
-@property (nonatomic, readonly) NSDictionary * destinationPixelBufferAttributes;
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
 /*!
 	@property processorSupported
@@ -171,10 +173,12 @@
 
 /*!
  @abstract Creates a new VTFrameRateConversionParameters .
+ 
+ @discussion init will return nil if sourceFrame or nextFrame is nil, if sourceFrame and reference frames don't have the same pixelformat, or if interpolationPhase array count does not match destinationFrames array count.
 
  @param sourceFrame Current source frame. Must be non nil.
   
- @param nextFrame Next source frame in presentation time order. For the last frame this can be set to nil.
+ @param nextFrame Next source frame in presentation time order.  Must be non nil.
   
  @param opticalFlow Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow with next frame. Only needed if optical flow is pre-computed. For the first frame this will always be nil.
  
@@ -184,12 +188,12 @@
  
  @param destinationFrames Caller-allocated NSArray of VTFrameProcessorFrame that contains  pixel buffers that will receive the results.  Must contain the same number of elements as interpolationPhase NSArray.
 */
-- (instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
-                           nextFrame:(VTFrameProcessorFrame * _Nullable)nextFrame
-						 opticalFlow:(VTFrameProcessorOpticalFlow * _Nullable)opticalFlow
-				  interpolationPhase:(NSArray<NSNumber *> *) interpolationPhase
-					  submissionMode:(VTFrameRateConversionParametersSubmissionMode)submissionMode
-				   destinationFrames:(NSArray<VTFrameProcessorFrame *> *)destinationFrame NS_REFINED_FOR_SWIFT;
+- (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
+									nextFrame:(VTFrameProcessorFrame *)nextFrame
+								  opticalFlow:(VTFrameProcessorOpticalFlow * _Nullable)opticalFlow
+						   interpolationPhase:(NSArray<NSNumber *> *) interpolationPhase
+							   submissionMode:(VTFrameRateConversionParametersSubmissionMode)submissionMode
+							destinationFrames:(NSArray<VTFrameProcessorFrame *> *)destinationFrame NS_REFINED_FOR_SWIFT;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
diff -ruN /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h
--- /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h	2025-02-08 04:03:22
+++ /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h	2025-02-22 12:57:34
@@ -63,6 +63,8 @@
 /*!
  @abstract Creates a new VTMotionBlurConfiguration with specified flow width and height.
 
+ @discussion init will return nil if dimensions are out of range or revision is unsupported.
+
  @param frameWidth    Width of source frame in pixels. Maximum value is 8192 for macOS, and 4096 for iOS.
 
  @param frameHeight   Height of source frame in pixels. Maximum value is 4320 for macOS, and 2160 for iOS.
@@ -74,11 +76,11 @@
  @param revision The specific algorithm or configuration revision that is to be used to perform the request.
  
 */
-- (instancetype)initWithFrameWidth:(NSInteger)frameWidth
-					   frameHeight:(NSInteger)frameHeight
-				usePrecomputedFlow:(BOOL)usePrecomputedFlow
-			 qualityPrioritization:(VTMotionBlurConfigurationQualityPrioritization)qualityPrioritization
-						  revision:(VTMotionBlurConfigurationRevision)revision;
+- (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
+								frameHeight:(NSInteger)frameHeight
+						 usePrecomputedFlow:(BOOL)usePrecomputedFlow
+					  qualityPrioritization:(VTMotionBlurConfigurationQualityPrioritization)qualityPrioritization
+								   revision:(VTMotionBlurConfigurationRevision)revision;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
@@ -130,20 +132,19 @@
  * @property frameSupportedPixelFormats
  * @abstract list of source frame supported pixel formats for current configuration
  */
-
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats;
 
 /**
  * @property sourcePixelBufferAttributes
  * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source frames and reference frames.
 */
-@property (nonatomic, readonly) NSDictionary * sourcePixelBufferAttributes;
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
 /**
  * @property destinationPixelBufferAttributes
  * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames.
 */
-@property (nonatomic, readonly) NSDictionary * destinationPixelBufferAttributes;
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
 
 /*!
@@ -170,6 +171,8 @@
 /*!
  @abstract Creates a new VTMotionBlurParameters .
 
+ @discussion init will return nil if sourceFrame or destinationFrame is nil, sourceFrame and reference frames  are different pixelFormats, or motionBlurStrength is out of range.
+
  @param sourceFrame Current source frame. Must be non nil.
   
  @param nextFrame Next source frame in presentation time order. For the last frame this can be set to nil.
@@ -186,14 +189,14 @@
  
  @param destinationFrame User allocated pixel buffer that will receive the results.
 */
-- (instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
-                           nextFrame:(VTFrameProcessorFrame * _Nullable)nextFrame
-                       previousFrame:(VTFrameProcessorFrame * _Nullable)previousFrame
-					 nextOpticalFlow:(VTFrameProcessorOpticalFlow * _Nullable)nextOpticalFlow
-				 previousOpticalFlow:(VTFrameProcessorOpticalFlow * _Nullable)previousOpticalFlow
-				  motionBlurStrength:(NSInteger)motionBlurStrength
-					  submissionMode:(VTMotionBlurParametersSubmissionMode)submissionMode
-					destinationFrame:(VTFrameProcessorFrame *)destinationFrame;
+- (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
+									nextFrame:(VTFrameProcessorFrame * _Nullable)nextFrame
+								previousFrame:(VTFrameProcessorFrame * _Nullable)previousFrame
+							  nextOpticalFlow:(VTFrameProcessorOpticalFlow * _Nullable)nextOpticalFlow
+						  previousOpticalFlow:(VTFrameProcessorOpticalFlow * _Nullable)previousOpticalFlow
+						   motionBlurStrength:(NSInteger)motionBlurStrength
+							   submissionMode:(VTMotionBlurParametersSubmissionMode)submissionMode
+							 destinationFrame:(VTFrameProcessorFrame *)destinationFrame;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
diff -ruN /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h
--- /Applications/Xcode_16.3.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h	2025-02-08 04:03:22
+++ /Applications/Xcode_16.3.0-beta2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h	2025-02-22 12:57:34
@@ -63,6 +63,8 @@
 #pragma mark --- init function(s).
 /*!
  @abstract Creates a new VTOpticalFlowConfiguration with specified flow width and height.
+ 
+ @discussion init will return nil if dimensions are out of range or revision is unsupported.
 
  @param frameWidth    Width of source frame in pixels. Maximum value is 8192 for macOS, and 4096 for iOS.
 
@@ -75,10 +77,10 @@
  @param revision The specific algorithm or configuration revision that is to be used to perform the request.
  
 */
-- (instancetype)initWithFrameWidth:(NSInteger)frameWidth
-					   frameHeight:(NSInteger)frameHeight
-			 qualityPrioritization:(VTOpticalFlowConfigurationQualityPrioritization)qualityPrioritization
-						  revision:(VTOpticalFlowConfigurationRevision)revision;
+- (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
+								frameHeight:(NSInteger)frameHeight
+					  qualityPrioritization:(VTOpticalFlowConfigurationQualityPrioritization)qualityPrioritization
+								   revision:(VTOpticalFlowConfigurationRevision)revision;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
@@ -124,21 +126,19 @@
  * @property frameSupportedPixelFormats
  * @abstract list of source frame supported pixel formats for current configuration
  */
-
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats;
 
-
 /**
  * @property sourcePixelBufferAttributes
  * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source frames and reference frames.
 */
-@property (nonatomic, readonly) NSDictionary * sourcePixelBufferAttributes;
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
 /**
  * @property destinationPixelBufferAttributes
  * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as OpticalFlow buffers
 */
-@property (nonatomic, readonly) NSDictionary * destinationPixelBufferAttributes;
+@property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
 /*!
 	@property processorSupported
@@ -164,6 +164,8 @@
 /*!
  @abstract Creates a new VTOpticalFlowParameters .
 
+ @discussion init will return nil if sourceFrame or nextFrame is nil, or sourceFrame and nextFrame are different pixelFormats..
+
  @param sourceFrame Current source frame. Must be non nil.
   
  @param nextFrame Next source frame in presentation time order.
@@ -172,10 +174,10 @@
  
  @param destinationOpticalFlow User allocated VTFrameProcessorMutableOpticalFlow that will receive the results.
 */
-- (instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
-                           nextFrame:(VTFrameProcessorFrame *)nextFrame
-					  submissionMode:(VTOpticalFlowParametersSubmissionMode)submissionMode
-			  destinationOpticalFlow:(VTFrameProcessorOpticalFlow *) destinationOpticalFlow;
+- (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
+									nextFrame:(VTFrameProcessorFrame *)nextFrame
+							   submissionMode:(VTOpticalFlowParametersSubmissionMode)submissionMode
+					   destinationOpticalFlow:(VTFrameProcessorOpticalFlow *) destinationOpticalFlow;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
```
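In practice, the headline change in this diff is that `processWithCommandBuffer:parameters:` no longer takes a completion handler; per the new discussion text, processing happens when the command buffer executes, so completion is observed on the Metal command buffer itself. The sketch below is illustrative only and not from the SDK: it assumes a `VTFrameProcessor` whose session was already started with a configuration matching `frameParams` (the session methods are declared elsewhere in VTFrameProcessor.h and are unchanged here), and `frameParams` stands in for any `VTFrameProcessorParameters` conformer, e.g. `VTMotionBlurParameters`.

```objc
#import <Foundation/Foundation.h>
#import <Metal/Metal.h>
#import <VideoToolbox/VideoToolbox.h>

// Illustrative sketch: enqueue the effect with the beta 2 signature and watch
// for completion on the command buffer instead of a VideoToolbox callback.
static void EnqueueEffect(VTFrameProcessor *processor,
                          id<MTLCommandBuffer> commandBuffer,
                          id<VTFrameProcessorParameters> frameParams)
{
    // Inserts the effect into the existing command buffer. Per the updated
    // header comment, nothing is processed until the command buffer executes.
    [processor processWithCommandBuffer:commandBuffer parameters:frameParams];

    // Replaces the removed completionHandler: parameter; MTLCommandBuffer
    // already provides a completion callback and an error property.
    [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> completed) {
        if (completed.error != nil) {
            NSLog(@"Frame processing failed: %@", completed.error);
        }
    }];

    [commandBuffer commit];
}
```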
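The configuration initializers in this diff (`VTFrameRateConversionConfiguration`, `VTMotionBlurConfiguration`, `VTOpticalFlowConfiguration`) are now `nullable` and documented to return nil for out-of-range dimensions or an unsupported revision, so callers need a nil check. A minimal sketch for the motion-blur case; the quality and revision constants spelled out below are assumed names for enum cases declared elsewhere in the header, not shown in this diff.

```objc
#import <VideoToolbox/VideoToolbox.h>

// Sketch: the beta 2 initializer can return nil, so the result must be checked.
// The qualityPrioritization/revision constants are assumed names (not in this diff).
static VTMotionBlurConfiguration * _Nullable MakeMotionBlurConfiguration(void)
{
    VTMotionBlurConfiguration *config =
        [[VTMotionBlurConfiguration alloc] initWithFrameWidth:1920
                                                  frameHeight:1080
                                           usePrecomputedFlow:NO
                                        qualityPrioritization:VTMotionBlurConfigurationQualityPrioritizationNormal
                                                     revision:VTMotionBlurConfigurationRevision1];
    if (config == nil) {
        // Dimensions out of range or revision unsupported (see @discussion above).
        return nil;
    }

    // The attribute dictionaries are now typed as NSDictionary<NSString *, id>
    // and marked NS_SWIFT_SENDABLE; consult them when allocating pixel buffers.
    NSDictionary<NSString *, id> *sourceAttributes = config.sourcePixelBufferAttributes;
    (void)sourceAttributes;
    return config;
}
```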
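`VTFrameRateConversionParameters` tightened in two ways: its initializer is now nullable, and `nextFrame` may no longer be nil. A hedged sketch of the stricter contract; the pixel buffers and timestamps are caller-supplied placeholders, and the frame initializer is assumed to carry the full `initWithBuffer:presentationTimeStamp:` selector that the `@param` list in VTFrameProcessorFrame.h describes.

```objc
#import <CoreMedia/CoreMedia.h>
#import <VideoToolbox/VideoToolbox.h>

// Sketch: sourceFrame and nextFrame must both be non-nil, and the initializer
// returns nil on mismatched pixel formats or mismatched array counts.
static VTFrameRateConversionParameters * _Nullable
MakeConversionParameters(CVPixelBufferRef sourceBuffer,      // IOSurface-backed
                         CVPixelBufferRef nextBuffer,        // IOSurface-backed
                         CVPixelBufferRef destinationBuffer) // IOSurface-backed
{
    // Placeholder timestamps for a 30 fps source with one interpolated frame.
    VTFrameProcessorFrame *source =
        [[VTFrameProcessorFrame alloc] initWithBuffer:sourceBuffer
                                presentationTimeStamp:CMTimeMake(0, 30)];
    VTFrameProcessorFrame *next =
        [[VTFrameProcessorFrame alloc] initWithBuffer:nextBuffer
                                presentationTimeStamp:CMTimeMake(1, 30)];
    VTFrameProcessorFrame *destination =
        [[VTFrameProcessorFrame alloc] initWithBuffer:destinationBuffer
                                presentationTimeStamp:CMTimeMake(1, 60)];
    if (source == nil || next == nil || destination == nil) {
        return nil; // nil or non-IOSurface-backed pixel buffer
    }

    // One interpolationPhase entry per destination frame; counts must match.
    return [[VTFrameRateConversionParameters alloc]
                initWithSourceFrame:source
                          nextFrame:next   // must not be nil as of beta 2
                        opticalFlow:nil
                 interpolationPhase:@[ @0.5f ]
                     submissionMode:VTFrameRateConversionParametersSubmissionModeSequential
                  destinationFrames:@[ destination ]];
}
```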