Skip to content

Instantly share code, notes, and snippets.

@g-l-i-t-c-h-o-r-s-e
Last active November 30, 2025 22:31
Show Gist options
  • Select an option

  • Save g-l-i-t-c-h-o-r-s-e/af43b44ad0d825e16c55291019e0af6f to your computer and use it in GitHub Desktop.

Select an option

Save g-l-i-t-c-h-o-r-s-e/af43b44ad0d825e16c55291019e0af6f to your computer and use it in GitHub Desktop.
AVFoundation Video Input Quartz Composer Plugin with Device Resolution/Framerate Support, Autozoom Toggle, Etc
// AVFVideoInputPlugIn.m
// Quartz Composer AVFoundation video input with selectable resolution / fps,
// plus basic camera controls for focus / exposure / white balance modes.
//
// Inputs:
// - inputModeIndex (Index): capture format / fps mode
// - inputDisableAutoFocus (Boolean): YES = force focus locked
// - inputLockExposure (Boolean): YES = lock exposure
// - inputLockWhiteBalance (Boolean): YES = lock white balance
//
// Outputs:
// - outputImage (Image): live video frames
// - outputStatus (String): description / errors
//
// Notes:
// - This version only uses APIs that are available on macOS 10.13.
// - There is no generic zoom or manual lens position control on macOS here.
// - "Color correction" is limited to auto-vs-locked white balance; for
// creative grading, use QC/Core Image filters downstream.
#import <Quartz/Quartz.h>
#import <AVFoundation/AVFoundation.h>
#import <float.h>
// Hardcoded logging toggle: set to YES to enable NSLog debug output
static const BOOL kAVFVideoInputEnableLogging = NO;
#define AVF_LOG(fmt, ...) \
do { if (kAVFVideoInputEnableLogging) NSLog(@"[AVFVideoInput] " fmt, ##__VA_ARGS__); } while (0)
// -----------------------------------------------------------------------------
// Helpers for describing formats / ranges (for logging)
// -----------------------------------------------------------------------------
// Human-readable description of a frame-rate range, e.g. "[24.000000–30.000000] fps".
// Nil-safe: returns a placeholder for a nil range.
static NSString *AVFDescribeRange(AVFrameRateRange *range) {
    if (range == nil) {
        return @"<nil range>";
    }
    return [NSString stringWithFormat:@"[%.6f–%.6f] fps",
                                      range.minFrameRate, range.maxFrameRate];
}
// Human-readable description of a capture format: pixel dimensions followed
// by every supported frame-rate range. Nil-safe.
static NSString *AVFDescribeFormat(AVCaptureDeviceFormat *fmt) {
    if (fmt == nil) {
        return @"<nil format>";
    }
    CMVideoDimensions dims =
        CMVideoFormatDescriptionGetDimensions(fmt.formatDescription);
    NSMutableString *out = [NSMutableString stringWithFormat:@"%dx%d",
                                                             dims.width, dims.height];
    for (AVFrameRateRange *range in fmt.videoSupportedFrameRateRanges) {
        [out appendFormat:@" %@", AVFDescribeRange(range)];
    }
    return out;
}
// -----------------------------------------------------------------------------
// Capture mode table
// Each entry is an NSDictionary with keys:
// deviceID, deviceName, width, height, minFPS, maxFPS, modeFPS, summary
// -----------------------------------------------------------------------------
// Builds (once per process, via dispatch_once) the flat list of selectable
// capture modes: one entry per (device, format, frame-rate range) triple.
// Each entry is a dictionary with keys:
//   deviceID, deviceName, width, height, minFPS, maxFPS, modeFPS, summary
// NOTE(review): the list is cached forever, so cameras hot-plugged after the
// first call will not appear until the host process restarts.
static NSArray *AVFVideoInputCaptureModes(void) {
    static NSArray *sModes = nil;
    static dispatch_once_t sOnce;
    dispatch_once(&sOnce, ^{
        NSMutableArray *entries = [NSMutableArray array];
        for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
            AVF_LOG(@"Enumerating device: %@", device.localizedName);
            for (AVCaptureDeviceFormat *format in device.formats) {
                CMVideoDimensions dims =
                    CMVideoFormatDescriptionGetDimensions(format.formatDescription);
                for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
                    double lo = range.minFrameRate;
                    double hi = range.maxFrameRate;
                    // The user is choosing the top of the range; fixed-rate
                    // ranges have lo == hi.
                    double modeFPS = (hi > 0.0) ? hi : lo;
                    NSString *summary =
                        [NSString stringWithFormat:@"%@ – %dx%d @ %.3f fps",
                                                   device.localizedName ?: @"(Unknown)",
                                                   dims.width, dims.height,
                                                   modeFPS];
                    [entries addObject:@{
                        @"deviceID" : device.uniqueID ?: @"",
                        @"deviceName" : device.localizedName ?: @"(Unknown)",
                        @"width" : @(dims.width),
                        @"height" : @(dims.height),
                        @"minFPS" : @(lo),
                        @"maxFPS" : @(hi),
                        @"modeFPS" : @(modeFPS),
                        @"summary" : summary
                    }];
                }
            }
        }
        // Stable, menu-friendly ordering: device name (case-insensitive),
        // then width, height, and mode fps, all ascending.
        [entries sortUsingComparator:^NSComparisonResult(NSDictionary *a, NSDictionary *b) {
            NSComparisonResult r =
                [a[@"deviceName"] caseInsensitiveCompare:b[@"deviceName"]];
            if (r != NSOrderedSame) return r;
            for (NSString *key in @[ @"width", @"height", @"modeFPS" ]) {
                r = [(NSNumber *)a[key] compare:(NSNumber *)b[key]];
                if (r != NSOrderedSame) return r;
            }
            return NSOrderedSame;
        }];
        sModes = [entries copy];
        AVF_LOG(@"Built %lu capture modes", (unsigned long)[sModes count]);
    });
    return sModes;
}
// Returns the mode dictionary at idx, or nil when idx is out of bounds.
static NSDictionary *AVFModeAtIndex(NSUInteger idx) {
    NSArray *modes = AVFVideoInputCaptureModes();
    return (idx < [modes count]) ? modes[idx] : nil;
}
// -----------------------------------------------------------------------------
// QCPlugIn subclass
// -----------------------------------------------------------------------------
// QCPlugIn that exposes an AVFoundation capture session as a QC image
// provider. Frames arrive on a private serial queue via the sample-buffer
// delegate and are handed to -execute:... through _latestBuffer.
@interface AVFVideoInputPlugIn : QCPlugIn <AVCaptureVideoDataOutputSampleBufferDelegate>
{
@private
AVCaptureSession *_session;              // current session; nil when stopped
AVCaptureDevice *_device;                // device backing _session
AVCaptureDeviceInput *_input;            // input attached to _session
AVCaptureVideoDataOutput *_videoOutput;  // ARGB frame tap feeding the delegate
dispatch_queue_t _captureQueue;          // serial queue for delegate callbacks
NSLock *_bufferLock;                     // guards _latestBuffer
CVPixelBufferRef _latestBuffer; // guarded by _bufferLock
NSUInteger _currentModeIndex;            // mode the session was built for (NSNotFound = none)
NSString *_status;                       // last status/error text for outputStatus
BOOL _needsSessionRebuild;               // forces a session (re)build on next execute
}
// Index into AVFVideoInputCaptureModes() selecting device/resolution/fps.
@property (assign) NSUInteger inputModeIndex;
// New control properties (macOS-safe)
@property (assign) BOOL inputDisableAutoFocus; // YES = focus locked
@property (assign) BOOL inputLockExposure; // YES = exposure locked
@property (assign) BOOL inputLockWhiteBalance; // YES = white balance locked
// Live video frames wrapped as a QC image provider (nil when no frame).
@property (assign) id<QCPlugInOutputImageProvider> outputImage;
// Human-readable description of the active mode, or an error message.
@property (copy) NSString *outputStatus;
@end
@implementation AVFVideoInputPlugIn
// All port properties are @dynamic: Quartz Composer synthesizes their
// accessors at runtime from the port declarations.
@dynamic inputModeIndex;
@dynamic inputDisableAutoFocus;
@dynamic inputLockExposure;
@dynamic inputLockWhiteBalance;
@dynamic outputImage;
@dynamic outputStatus;
// Name and description shown in the Quartz Composer patch library.
+ (NSDictionary *)attributes {
    NSDictionary *info = @{
        QCPlugInAttributeNameKey : @"AVF Video Input",
        QCPlugInAttributeDescriptionKey : @"AVFoundation camera/virtual camera input with selectable resolution, framerate, and basic focus/exposure/white-balance controls."
    };
    return info;
}
// Provider mode: QC pulls image data from this patch on demand.
+ (QCPlugInExecutionMode)executionMode {
return kQCPlugInExecutionModeProvider;
}
// Idle time mode: QC re-executes when inputs change rather than every frame
// of composition time.
+ (QCPlugInTimeMode)timeMode {
return kQCPlugInTimeModeIdle;
}
// Port metadata: a menu-backed index port for the capture mode, three
// boolean camera-control ports, and display names for the two outputs.
+ (NSDictionary *)attributesForPropertyPortWithKey:(NSString *)key {
    if ([key isEqualToString:@"inputModeIndex"]) {
        NSArray *modes = AVFVideoInputCaptureModes();
        NSUInteger count = [modes count];
        // Menu labels come from each mode's "summary" string.
        NSMutableArray *labels = [NSMutableArray arrayWithCapacity:count];
        for (NSDictionary *mode in modes) {
            [labels addObject:(mode[@"summary"] ?: @"(mode)")];
        }
        NSUInteger maxIndex = (count > 0) ? (count - 1) : 0;
        return @{
            QCPortAttributeNameKey : @"Capture Mode",
            QCPortAttributeMinimumValueKey : @0U,
            QCPortAttributeMaximumValueKey : @(maxIndex),
            QCPortAttributeDefaultValueKey : @0U,
            QCPortAttributeTypeKey : QCPortTypeIndex,
            QCPortAttributeMenuItemsKey : labels
        };
    }
    // The three control inputs share identical shape; only the name differs.
    NSDictionary *booleanPortNames = @{
        @"inputDisableAutoFocus" : @"Disable Auto Focus",
        @"inputLockExposure" : @"Lock Exposure",
        @"inputLockWhiteBalance" : @"Lock White Balance"
    };
    NSString *booleanName = booleanPortNames[key];
    if (booleanName != nil) {
        return @{
            QCPortAttributeNameKey : booleanName,
            QCPortAttributeDefaultValueKey : @NO,
            QCPortAttributeTypeKey : QCPortTypeBoolean
        };
    }
    if ([key isEqualToString:@"outputImage"]) {
        return @{ QCPortAttributeNameKey : @"Image" };
    }
    if ([key isEqualToString:@"outputStatus"]) {
        return @{ QCPortAttributeNameKey : @"Status" };
    }
    return nil;
}
// Designated initializer: prepares the frame lock and a dedicated serial
// capture queue; the actual session is built lazily in -execute:.
- (id)init {
    self = [super init];
    if (self != nil) {
        _bufferLock = [[NSLock alloc] init];
        _latestBuffer = NULL;
        _currentModeIndex = NSNotFound;
        _needsSessionRebuild = YES;
        // Target the high-priority global queue to reduce frame drops
        // when the system is under load.
        _captureQueue = dispatch_queue_create("com.yourdomain.AVFVideoInput.capture",
                                              DISPATCH_QUEUE_SERIAL);
        dispatch_set_target_queue(_captureQueue,
                                  dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    }
    return self;
}
// Non-ARC cleanup: stop the session and release the retained CVPixelBuffer
// (CF types are not managed by ARC).
- (void)dealloc {
[self _tearDownSession];
// _tearDownSession already clears _latestBuffer, but a late in-flight
// delegate callback could have stored one more frame; release it here.
[_bufferLock lock];
if (_latestBuffer) {
CVPixelBufferRelease(_latestBuffer);
_latestBuffer = NULL;
}
[_bufferLock unlock];
}
// -------------------------------------------------------------------------
// Session setup / teardown
// -------------------------------------------------------------------------
// Stops and releases the capture session and drops the cached frame.
// Safe to call when no session exists. Resets _currentModeIndex so the next
// -execute: performs a full rebuild.
- (void)_tearDownSession {
    if (_session) {
        AVF_LOG(@"Tearing down session");
        [_session stopRunning];
        if (_videoOutput) {
            // Detach ourselves as delegate BEFORE removing the output so no
            // further -captureOutput:... callbacks can repopulate
            // _latestBuffer while (or after) we tear down.
            [_videoOutput setSampleBufferDelegate:nil queue:NULL];
            [_session removeOutput:_videoOutput];
        }
        if (_input) {
            [_session removeInput:_input];
        }
        _videoOutput = nil;
        _input = nil;
        _session = nil;
        _device = nil;
    }
    // Release the last delivered frame (CF object, not ARC-managed).
    [_bufferLock lock];
    if (_latestBuffer) {
        CVPixelBufferRelease(_latestBuffer);
        _latestBuffer = NULL;
    }
    [_bufferLock unlock];
    _currentModeIndex = NSNotFound;
}
// Apply focus / exposure / white balance modes to a *locked* device.
- (void)_applyDeviceControlsLocked:(AVCaptureDevice *)device {
if (!device) return;
// ---- Focus: auto vs locked ----
if (self.inputDisableAutoFocus) {
// Lock focus if supported
if ([device isFocusModeSupported:AVCaptureFocusModeLocked] &&
device.focusMode != AVCaptureFocusModeLocked)
{
AVF_LOG(@"Setting focusMode = Locked");
device.focusMode = AVCaptureFocusModeLocked;
}
} else {
// Prefer continuous AF if available
if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] &&
device.focusMode != AVCaptureFocusModeContinuousAutoFocus)
{
AVF_LOG(@"Setting focusMode = ContinuousAutoFocus");
device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
}
else if ([device isFocusModeSupported:AVCaptureFocusModeAutoFocus] &&
device.focusMode != AVCaptureFocusModeAutoFocus)
{
AVF_LOG(@"Setting focusMode = AutoFocus");
device.focusMode = AVCaptureFocusModeAutoFocus;
}
}
// ---- Exposure: continuous auto vs locked ----
if (self.inputLockExposure) {
if ([device isExposureModeSupported:AVCaptureExposureModeLocked] &&
device.exposureMode != AVCaptureExposureModeLocked)
{
AVF_LOG(@"Setting exposureMode = Locked");
device.exposureMode = AVCaptureExposureModeLocked;
}
} else {
if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] &&
device.exposureMode != AVCaptureExposureModeContinuousAutoExposure)
{
AVF_LOG(@"Setting exposureMode = ContinuousAutoExposure");
device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}
else if ([device isExposureModeSupported:AVCaptureExposureModeAutoExpose] &&
device.exposureMode != AVCaptureExposureModeAutoExpose)
{
AVF_LOG(@"Setting exposureMode = AutoExpose");
device.exposureMode = AVCaptureExposureModeAutoExpose;
}
}
// ---- White balance: continuous auto vs locked ----
if (self.inputLockWhiteBalance) {
if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked] &&
device.whiteBalanceMode != AVCaptureWhiteBalanceModeLocked)
{
AVF_LOG(@"Setting whiteBalanceMode = Locked");
device.whiteBalanceMode = AVCaptureWhiteBalanceModeLocked;
}
} else {
if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance] &&
device.whiteBalanceMode != AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance)
{
AVF_LOG(@"Setting whiteBalanceMode = ContinuousAutoWhiteBalance");
device.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
}
else if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance] &&
device.whiteBalanceMode != AVCaptureWhiteBalanceModeAutoWhiteBalance)
{
AVF_LOG(@"Setting whiteBalanceMode = AutoWhiteBalance");
device.whiteBalanceMode = AVCaptureWhiteBalanceModeAutoWhiteBalance;
}
}
}
// Update controls while session is running (called from execute).
- (void)_updateDeviceControls {
AVCaptureDevice *device = _device;
if (!device) return;
NSError *err = nil;
if (![device lockForConfiguration:&err]) {
AVF_LOG(@"lockForConfiguration for controls failed: %@", err);
return;
}
[self _applyDeviceControlsLocked:device];
[device unlockForConfiguration];
}
// Finds the device format whose dimensions equal (width, height) and whose
// frame-rate range contains desiredFPS (with 0.1 fps slack), preferring the
// range whose maximum rate is closest to desiredFPS. Both out-params are set
// to nil when nothing matches.
- (void)_selectFormatOnDevice:(AVCaptureDevice *)device
                  targetWidth:(int)width
                       height:(int)height
                   desiredFPS:(double)desiredFPS
                    outFormat:(AVCaptureDeviceFormat * __strong *)outFormat
                     outRange:(AVFrameRateRange * __strong *)outRange
{
    AVCaptureDeviceFormat *chosenFormat = nil;
    AVFrameRateRange *chosenRange = nil;
    double chosenDiff = DBL_MAX;
    for (AVCaptureDeviceFormat *candidate in device.formats) {
        CMVideoDimensions dims =
            CMVideoFormatDescriptionGetDimensions(candidate.formatDescription);
        if (dims.width != width || dims.height != height) {
            continue;
        }
        for (AVFrameRateRange *range in candidate.videoSupportedFrameRateRanges) {
            // Skip ranges that cannot actually deliver desiredFPS.
            BOOL covers = (desiredFPS >= range.minFrameRate - 0.1) &&
                          (desiredFPS <= range.maxFrameRate + 0.1);
            if (!covers) {
                continue;
            }
            // Rank by distance between the range's top rate ("mode fps")
            // and the requested rate; first-seen wins ties.
            double diff = fabs(range.maxFrameRate - desiredFPS);
            if (diff < chosenDiff) {
                chosenDiff = diff;
                chosenFormat = candidate;
                chosenRange = range;
            }
        }
    }
    if (chosenFormat) {
        AVF_LOG(@"[selectFormat] Chosen format: %@, range %@",
                AVFDescribeFormat(chosenFormat), AVFDescribeRange(chosenRange));
    } else {
        AVF_LOG(@"[selectFormat] No exact format for %dx%d @ %.3f fps, will fall back.",
                width, height, desiredFPS);
    }
    if (outFormat) *outFormat = chosenFormat;
    if (outRange) *outRange = chosenRange;
}
// Tears down any existing session and builds a new one for the mode-table
// entry at modeIndex: resolves the device, locks it for configuration,
// applies the requested format and frame duration, attaches an ARGB8
// video-data output, and starts the session. The device stays locked across
// -startRunning so the session cannot revert the chosen activeFormat.
// Returns YES on success; on failure _status describes the error and the
// device lock has been released.
//
// Fix: the "Desired format" log previously passed AVFDescribeRange twice,
// printing the range in the fps slot; it now logs the requested fps.
- (BOOL)_buildSessionForModeIndex:(NSUInteger)modeIndex
                          context:(id<QCPlugInContext>)context
{
    NSArray *modes = AVFVideoInputCaptureModes();
    if (modeIndex >= [modes count]) {
        _status = @"Invalid mode index";
        AVF_LOG(@"Invalid mode index %lu (max %lu)",
                (unsigned long)modeIndex, (unsigned long)([modes count] ? [modes count]-1 : 0));
        return NO;
    }
    NSDictionary *mode = modes[modeIndex];
    NSString *deviceID = mode[@"deviceID"];
    NSString *deviceName = mode[@"deviceName"];
    int width = [mode[@"width"] intValue];
    int height = [mode[@"height"] intValue];
    // Use the "mode fps" (highest supported fps of that range) as the target.
    double desiredFPS = [mode[@"modeFPS"] doubleValue];
    AVF_LOG(@"Configuring mode index %lu: %@ – %dx%d @ %.3f fps",
            (unsigned long)modeIndex, deviceName, width, height, desiredFPS);
    [self _tearDownSession];
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:deviceID];
    if (!device) {
        _status = @"Device not found";
        AVF_LOG(@"Device not found for uniqueID %@", deviceID);
        return NO;
    }
    AVF_LOG(@"Device before config: %@", device.localizedName);
    AVF_LOG(@"Active format BEFORE: %@", AVFDescribeFormat(device.activeFormat));
    NSError *lockError = nil;
    if (![device lockForConfiguration:&lockError]) {
        _status = [NSString stringWithFormat:@"lockForConfiguration failed: %@",
                   lockError.localizedDescription ?: @"unknown error"];
        AVF_LOG(@"lockForConfiguration failed: %@", lockError);
        return NO;
    }
    AVCaptureDeviceFormat *selectedFormat = nil;
    AVFrameRateRange *selectedRange = nil;
    [self _selectFormatOnDevice:device
                    targetWidth:width
                         height:height
                     desiredFPS:desiredFPS
                      outFormat:&selectedFormat
                       outRange:&selectedRange];
    if (!selectedFormat || !selectedRange) {
        // No exact match: keep the device's current format and use its first
        // advertised range so the frame-duration setters below stay valid.
        AVF_LOG(@"No matching format; leaving activeFormat unchanged");
        selectedFormat = device.activeFormat;
        NSArray<AVFrameRateRange *> *ranges = device.activeFormat.videoSupportedFrameRateRanges;
        if ([ranges count] > 0) {
            selectedRange = [ranges objectAtIndex:0];
        } else {
            [device unlockForConfiguration];
            _status = @"Active format has no frame rate ranges";
            AVF_LOG(@"Active format has no frame rate ranges");
            return NO;
        }
    }
    device.activeFormat = selectedFormat;
    // IMPORTANT: Use a duration that AVFoundation itself reports as valid.
    // minFrameDuration corresponds to the highest supported fps in this range.
    CMTime frameDuration = selectedRange.minFrameDuration;
    device.activeVideoMinFrameDuration = frameDuration;
    device.activeVideoMaxFrameDuration = frameDuration;
    AVF_LOG(@"Desired format: %dx%d @ %.3f fps (range %@)",
            width, height, desiredFPS, AVFDescribeRange(selectedRange));
    AVF_LOG(@"Selected device format: %@", AVFDescribeFormat(selectedFormat));
    AVF_LOG(@"Active format AFTER (pre-start): %@", AVFDescribeFormat(device.activeFormat));
    // Apply initial focus/exposure/WB modes while still locked
    [self _applyDeviceControlsLocked:device];
    // Build session / input / output WHILE device is locked.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    NSError *inputError = nil;
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:device error:&inputError];
    if (!input) {
        _status = [NSString stringWithFormat:@"DeviceInput error: %@",
                   inputError.localizedDescription ?: @"unknown"];
        AVF_LOG(@"DeviceInput error: %@", inputError);
        [device unlockForConfiguration];
        return NO;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    } else {
        _status = @"Cannot add input to session";
        AVF_LOG(@"Cannot add input to session");
        [device unlockForConfiguration];
        return NO;
    }
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Drop late frames rather than letting latency build up.
    videoOutput.alwaysDiscardsLateVideoFrames = YES;
    // ARGB8 – same as earlier working plugin
    videoOutput.videoSettings = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB)
    };
    [videoOutput setSampleBufferDelegate:self queue:_captureQueue];
    if ([session canAddOutput:videoOutput]) {
        [session addOutput:videoOutput];
    } else {
        _status = @"Cannot add output to session";
        AVF_LOG(@"Cannot add output to session");
        [device unlockForConfiguration];
        return NO;
    }
    // Pin the connection's frame duration too, where supported.
    AVCaptureConnection *conn = [[videoOutput connections] firstObject];
    if (conn) {
        if (conn.isVideoMinFrameDurationSupported)
            conn.videoMinFrameDuration = frameDuration;
        if (conn.isVideoMaxFrameDurationSupported)
            conn.videoMaxFrameDuration = frameDuration;
    }
    AVF_LOG(@"Starting session…");
    [session startRunning];
    AVF_LOG(@"Active format FINAL after startRunning: %@",
            AVFDescribeFormat(device.activeFormat));
    [device unlockForConfiguration];
    _session = session;
    _device = device;
    _input = input;
    _videoOutput = videoOutput;
    _currentModeIndex = modeIndex;
    _needsSessionRebuild = NO;
    NSString *summary = mode[@"summary"];
    _status = summary ?: [NSString stringWithFormat:@"%@ – %dx%d @ %.3f fps",
                          deviceName, width, height, desiredFPS];
    return YES;
}
// -------------------------------------------------------------------------
// QC lifecycle
// -------------------------------------------------------------------------
// Composition start: defer session construction to the first -execute:,
// which knows the selected mode input value.
- (BOOL)startExecution:(id<QCPlugInContext>)context {
AVF_LOG(@"startExecution");
_needsSessionRebuild = YES;
return YES;
}
// Composition stop: release the camera and all cached frames.
- (void)stopExecution:(id<QCPlugInContext>)context {
AVF_LOG(@"stopExecution");
[self _tearDownSession];
}
// -------------------------------------------------------------------------
// Capture delegate
// -------------------------------------------------------------------------
// Sample-buffer delegate, invoked on _captureQueue (serial). Retains the
// newest frame and swaps it into _latestBuffer under _bufferLock;
// -execute: consumes it from there.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (frame == NULL) {
        return;
    }
    CVPixelBufferRetain(frame);
    [_bufferLock lock];
    CVPixelBufferRef previous = _latestBuffer;
    _latestBuffer = (CVPixelBufferRef)frame;
    [_bufferLock unlock];
    if (previous) {
        CVPixelBufferRelease(previous);
    }
    // Log only the first few frames. The capture queue is serial, so the
    // static counter needs no extra synchronization.
    static int sFrameCount = 0;
    sFrameCount++;
    if (kAVFVideoInputEnableLogging && sFrameCount <= 10) {
        AVF_LOG(@"captureOutput frame %d: %zux%zu", sFrameCount,
                CVPixelBufferGetWidth(frame), CVPixelBufferGetHeight(frame));
    }
}
// Release callback invoked by QC when it is done with the buffer.
// Balances the CVPixelBufferLockBaseAddress + CVPixelBufferRetain performed
// in -execute:... before the image provider was created.
static void AVFVideoInputBufferRelease(const void *address, void *ctx) {
CVPixelBufferRef buffer = (CVPixelBufferRef)ctx;
if (buffer) {
CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferRelease(buffer);
}
}
// -------------------------------------------------------------------------
// QC execute
// -------------------------------------------------------------------------
// Per-frame entry point. (Re)builds the session when needed, re-applies the
// camera-control inputs, and wraps the most recent capture frame in a QC
// image provider. Always returns YES; errors are reported via outputStatus
// rather than failing the patch.
- (BOOL)execute:(id<QCPlugInContext>)context
atTime:(NSTimeInterval)time
withArguments:(NSDictionary *)arguments
{
NSUInteger desiredMode = self.inputModeIndex;
// Rebuild when forced (startExecution), when no session exists yet, or
// when the user picked a different capture mode.
if (_needsSessionRebuild || _session == nil || _currentModeIndex != desiredMode) {
if (![self _buildSessionForModeIndex:desiredMode context:context]) {
self.outputImage = nil;
self.outputStatus = _status ?: @"Error";
return YES;
}
}
// Update camera controls every frame so QC inputs can animate.
[self _updateDeviceControls];
// Take our own reference to the newest frame; the capture delegate may
// swap _latestBuffer at any moment.
CVPixelBufferRef buffer = NULL;
[_bufferLock lock];
if (_latestBuffer) {
buffer = _latestBuffer;
CVPixelBufferRetain(buffer);
}
[_bufferLock unlock];
if (!buffer) {
self.outputImage = nil;
self.outputStatus = _status ?: @"No frame";
return YES;
}
// Lock the base address for the provider's lifetime; the release callback
// (AVFVideoInputBufferRelease) unlocks and releases when QC is done.
CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
size_t width = CVPixelBufferGetWidth(buffer);
size_t height = CVPixelBufferGetHeight(buffer);
size_t rowBytes = CVPixelBufferGetBytesPerRow(buffer);
void *baseAddr = CVPixelBufferGetBaseAddress(buffer);
CGColorSpaceRef cs = [context colorSpace];
id provider = [context
outputImageProviderFromBufferWithPixelFormat:QCPlugInPixelFormatARGB8
pixelsWide:width
pixelsHigh:height
baseAddress:baseAddr
bytesPerRow:rowBytes
releaseCallback:AVFVideoInputBufferRelease
releaseContext:(void *)buffer
colorSpace:cs
shouldColorMatch:NO]; // disable color matching to reduce CPU
if (!provider) {
AVF_LOG(@"Failed to create image provider for %zux%zu buffer", width, height);
// Provider creation failed: undo the lock/retain ourselves, since the
// release callback will never run.
CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferRelease(buffer);
self.outputImage = nil;
self.outputStatus = @"Failed to create image";
return YES;
}
self.outputImage = provider;
self.outputStatus = _status ?: @"OK";
return YES;
}
@end
#!/usr/bin/env bash
# Builds the AVFVideoInput Quartz Composer plug-in as an i386+x86_64 universal
# bundle using an old Xcode toolchain (default: Xcode 9.4.1 with the 10.13
# SDK, the last combination that can target 32-bit QC hosts) and installs it
# into the per-user QC plug-in directory.
set -euo pipefail
# --- config (you may override via env) ---
NAME="${NAME:-AVFVideoInput}"
CLASS="${CLASS:-AVFVideoInputPlugIn}"
SRC="${SRC:-${CLASS}.m}" # allow override via env
PLUG="$NAME.plugin"
OUT="$(pwd)/build-manual"
INST="$HOME/Library/Graphics/Quartz Composer Plug-Ins"
XCODE_APP="${XCODE_APP:-/Applications/Xcode_9.4.1.app}"
DEV="$XCODE_APP/Contents/Developer"
SDKDIR="$DEV/Platforms/MacOSX.platform/Developer/SDKs"
# Prefer 10.13 SDK for i386; allow override via $SDK
SDK="${SDK:-}"
if [[ -z "${SDK}" ]]; then
if [[ -d "$SDKDIR/MacOSX10.13.sdk" ]]; then
SDK="$SDKDIR/MacOSX10.13.sdk"
else
# Fall back to whatever SDK the active toolchain reports.
SDK="$(xcrun --sdk macosx --show-sdk-path 2>/dev/null || true)"
fi
fi
# Preflight checks: toolchain, source file, and SDK must all exist.
[[ -d "$DEV" ]] || { echo "Xcode not found: $XCODE_APP"; exit 1; }
[[ -f "$SRC" ]] || { echo "Source not found: $SRC"; exit 1; }
[[ -n "${SDK:-}" && -d "$SDK" ]] || { echo "macOS SDK not found. Looked for 10.13 at: $SDKDIR/MacOSX10.13.sdk"; exit 1; }
export DEVELOPER_DIR="$DEV"
echo "Using SDK: $SDK"
mkdir -p "$OUT/i386" "$OUT/x86_64" "$OUT/universal/$PLUG/Contents/MacOS"
# Shared compiler flags: ARC bundle targeting macOS 10.9+.
COMMON_CFLAGS=(
-bundle -fobjc-arc -fobjc-link-runtime
-isysroot "$SDK"
-mmacosx-version-min=10.9
-I .
)
COMMON_LIBS=(
-framework Foundation
-framework Quartz
-framework OpenGL
-framework AVFoundation
-framework CoreMedia
-framework CoreVideo
)
echo "Compiling i386…"
clang -arch i386 "${COMMON_CFLAGS[@]}" "$SRC" "${COMMON_LIBS[@]}" -o "$OUT/i386/$NAME"
echo "Compiling x86_64…"
clang -arch x86_64 "${COMMON_CFLAGS[@]}" "$SRC" "${COMMON_LIBS[@]}" -o "$OUT/x86_64/$NAME"
echo "Creating universal binary…"
lipo -create "$OUT/i386/$NAME" "$OUT/x86_64/$NAME" -output "$OUT/universal/$PLUG/Contents/MacOS/$NAME"
# Info.plist (expand variables)
cat >"$OUT/universal/$PLUG/Contents/Info.plist" <<PLIST
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0"><dict>
<key>CFBundleDevelopmentRegion</key> <string>English</string>
<key>CFBundleExecutable</key> <string>${NAME}</string>
<key>CFBundleIdentifier</key> <string>com.yourdomain.${NAME}</string>
<key>CFBundleInfoDictionaryVersion</key> <string>6.0</string>
<key>CFBundleName</key> <string>${NAME}</string>
<key>CFBundlePackageType</key> <string>BNDL</string>
<key>CFBundleShortVersionString</key> <string>1.0</string>
<key>CFBundleSupportedPlatforms</key> <array><string>MacOSX</string></array>
<key>CFBundleVersion</key> <string>1</string>
<key>QCPlugInClasses</key>
<array>
<string>${CLASS}</string>
</array>
<key>NSPrincipalClass</key> <string>QCPlugIn</string>
</dict></plist>
PLIST
echo "Signing…"
# Ad-hoc signature; signing is best-effort, failure is non-fatal by design.
codesign --force -s - "$OUT/universal/$PLUG" >/dev/null || true
echo "Installing to: $INST"
mkdir -p "$INST"
rsync -a "$OUT/universal/$PLUG" "$INST/"
echo "Installed: $INST/$PLUG"
echo "Restart Quartz Composer to load the new plug-in."
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment