Last active
November 25, 2025 02:48
-
-
Save g-l-i-t-c-h-o-r-s-e/537353cf1e39315defef9c32b23a0c40 to your computer and use it in GitHub Desktop.
Export GL Scene as Video with FFmpeg in Quartz Composer
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
# Build script: compiles the FFExportScene Quartz Composer consumer plug-in
# against MacPorts FFmpeg, embeds the dylibs, and installs the .plugin bundle.
set -euo pipefail
shopt -s nullglob   # unmatched globs expand to nothing (relied on by loops below)
# --- config (SCENE CONSUMER PLUGIN) ---
NAME="FFExportScene"
CLASS="FFExportScenePlugIn"
SRC="${SRC:-${CLASS}.m}"              # source file; overridable via env
PLUG="$NAME.plugin"
OUT="$(pwd)/build-manual-scene"       # scratch build tree (wiped each run)
INST="$HOME/Library/Graphics/Quartz Composer Plug-Ins"
XCODE_APP="${XCODE_APP:-/Applications/Xcode_9.4.1.app}"
DEV="$XCODE_APP/Contents/Developer"
SDKDIR="$DEV/Platforms/MacOSX.platform/Developer/SDKs"
SDK="${SDK:-}"
# Prefer a 10.14/10.13 SDK inside the chosen Xcode; fall back to xcrun's default.
if [[ -z "${SDK}" ]]; then
  if [[ -d "$SDKDIR/MacOSX10.14.sdk" ]]; then SDK="$SDKDIR/MacOSX10.14.sdk"
  elif [[ -d "$SDKDIR/MacOSX10.13.sdk" ]]; then SDK="$SDKDIR/MacOSX10.13.sdk"
  else SDK="$(xcrun --sdk macosx --show-sdk-path 2>/dev/null || true)"
  fi
fi
# Sanity checks: toolchain, source file, SDK.
[[ -d "$DEV" ]] || { echo "Xcode not found: $XCODE_APP"; exit 1; }
[[ -f "$SRC" ]] || { echo "Source not found: $SRC"; exit 1; }
[[ -n "$SDK" && -d "$SDK" ]] || { echo "macOS SDK not found."; exit 1; }
export DEVELOPER_DIR="$DEV"
# --- FFmpeg via MacPorts pkg-config ---
PKGCFG="/opt/local/bin/pkg-config"
[[ -x "$PKGCFG" ]] || { echo "pkg-config not found at $PKGCFG (install via MacPorts)"; exit 1; }
PKG_LIBS=(libavformat libavcodec libavutil libswscale)
CFLAGS_FFMPEG="$("$PKGCFG" --cflags "${PKG_LIBS[@]}")"
LIBS_FFMPEG="$("$PKGCFG" --libs "${PKG_LIBS[@]}")"
echo "Using SDK: $SDK"
# Fresh build tree: per-arch output plus the final bundle layout.
rm -rf "$OUT"
mkdir -p "$OUT/x86_64" "$OUT/universal/$PLUG/Contents/MacOS" "$OUT/universal/$PLUG/Contents/Frameworks"
FRAMEWORKS="$OUT/universal/$PLUG/Contents/Frameworks"
# Remove any previously installed copy so QC doesn't load a stale bundle.
if [[ -d "$INST/$PLUG" ]]; then
  echo "Removing installed $INST/$PLUG"
  rm -rf "$INST/$PLUG"
fi
# Compiler/linker inputs shared by all architecture builds.
COMMON_CFLAGS=(
  -bundle -fobjc-arc -fobjc-link-runtime
  -isysroot "$SDK"
  -mmacosx-version-min=10.9
  -I .
  -I /opt/local/include
)
COMMON_LIBS=(
  -framework Foundation
  -framework Quartz
  -framework OpenGL
  -framework CoreGraphics
  -framework AudioToolbox
)
echo "Compiling x86_64 (FFmpeg scene export)…"
# NOTE: $CFLAGS_FFMPEG / $LIBS_FFMPEG are intentionally unquoted so the
# pkg-config output word-splits into individual compiler flags.
clang -arch x86_64 \
  "${COMMON_CFLAGS[@]}" \
  $CFLAGS_FFMPEG \
  "$SRC" \
  "${COMMON_LIBS[@]}" \
  $LIBS_FFMPEG \
  -o "$OUT/x86_64/$NAME"
# Layout bundle
cp -a "$OUT/x86_64/$NAME" "$OUT/universal/$PLUG/Contents/MacOS/$NAME"
# Info.plist (NOTE: separate identifier just for the scene plug-in)
# QCPlugInClasses tells Quartz Composer which principal classes to load.
cat >"$OUT/universal/$PLUG/Contents/Info.plist" <<PLIST
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0"><dict>
<key>CFBundleDevelopmentRegion</key> <string>English</string>
<key>CFBundleExecutable</key> <string>${NAME}</string>
<key>CFBundleIdentifier</key> <string>com.yourdomain.${NAME}</string>
<key>CFBundleInfoDictionaryVersion</key> <string>6.0</string>
<key>CFBundleName</key> <string>${NAME}</string>
<key>CFBundlePackageType</key> <string>BNDL</string>
<key>CFBundleShortVersionString</key> <string>1.0</string>
<key>CFBundleSupportedPlatforms</key> <array><string>MacOSX</string></array>
<key>CFBundleVersion</key> <string>1</string>
<key>QCPlugInClasses</key>
<array>
<string>${CLASS}</string>
</array>
<key>NSPrincipalClass</key> <string>QCPlugIn</string>
</dict></plist>
PLIST
# --- helpers for embedding FFmpeg dylibs from /opt/local/lib ---
# Given a fully-versioned dylib name (libfoo.58.76.100.dylib), create the
# short-versioned sibling symlink (libfoo.58.dylib) in $FRAMEWORKS if absent.
mk_short_symlink_if_needed() {
  local base="$1" short
  [[ "$base" =~ ^(lib[^.]+)\.([0-9]+)\.[0-9.]+\.dylib$ ]] || return 0
  short="${BASH_REMATCH[1]}.${BASH_REMATCH[2]}.dylib"
  [[ -e "$FRAMEWORKS/$short" ]] && return 0
  ( cd "$FRAMEWORKS" && ln -s "$base" "$short" )
}
# Print the /opt/local/lib install-name paths referenced by a Mach-O file.
list_opt_local_deps() {
  otool -L "$1" | awk '{ if ($1 ~ /^\/opt\/local\/lib\//) print $1 }'
}
# Copy one /opt/local/lib dylib into the bundle's Frameworks dir (once),
# retarget its install name to @loader_path, then recursively pull in and
# rewrite its own /opt/local dependencies.
copy_and_rewrite() {
  local src="$1"; [[ "$src" == /opt/local/lib/* ]] || return 0
  local base dest; base="$(basename "$src")"; dest="$FRAMEWORKS/$base"
  if [[ ! -f "$dest" ]]; then
    echo "  → Copy $base"
    # -L dereferences symlinks so the real file is embedded, not a link.
    rsync -aL "$src" "$dest"
    chmod u+w "$dest"
    install_name_tool -id "@loader_path/$base" "$dest"
    mk_short_symlink_if_needed "$base"
    # Recurse into this library's own MacPorts deps and point them at the
    # embedded copies (all dylibs sit side by side, hence @loader_path).
    while IFS= read -r dep; do
      local depbase; depbase="$(basename "$dep")"
      copy_and_rewrite "$dep"
      install_name_tool -change "$dep" "@loader_path/$depbase" "$dest"
    done < <(list_opt_local_deps "$dest")
  fi
}
# Seed Frameworks with the FFmpeg dylibs the freshly linked binary references
# directly (libavformat/avcodec/avutil/swscale only; transitive deps follow
# via copy_and_rewrite's recursion).
seed_from_otool() {
  local bin="$1"
  while IFS= read -r path; do copy_and_rewrite "$path"; done < <(
    otool -L "$bin" | awk '$1 ~ /^\/opt\/local\/lib\/lib(avformat|avcodec|avutil|swscale).*\.dylib$/ {print $1}'
  )
}
# Fallback seeding when otool found nothing: locate each pkg-config module's
# dylib under its reported libdir (versioned name preferred) and embed it.
seed_from_pkgconfig() {
  for pc in "${PKG_LIBS[@]}"; do
    local libdir; libdir="$("$PKGCFG" --variable=libdir "$pc" 2>/dev/null || echo /opt/local/lib)"
    local cand
    # First pattern is a glob (nullglob drops it when unmatched); second is literal.
    for cand in "$libdir/${pc}".*.dylib "$libdir/${pc}.dylib"; do
      [[ -f "$cand" ]] && { copy_and_rewrite "$cand"; break; }
    done
  done
}
# Closure pass: newly copied dylibs may themselves reference /opt/local libs;
# keep copying/rewriting until every embedded dylib resolves via @loader_path.
final_full_sweep() {
  for lib in "$FRAMEWORKS"/*.dylib; do
    while IFS= read -r dep; do
      local depbase; depbase="$(basename "$dep")"
      copy_and_rewrite "$dep"
      install_name_tool -change "$dep" "@loader_path/$depbase" "$lib"
    done < <(list_opt_local_deps "$lib")
  done
}
| echo "Embedding FFmpeg dylibs…" | |
| BIN="$OUT/universal/$PLUG/Contents/MacOS/$NAME" | |
| seed_from_otool "$BIN" | |
| if ! compgen -G "$FRAMEWORKS/*.dylib" >/dev/null; then | |
| seed_from_pkgconfig | |
| fi | |
| while IFS= read -r dep; do | |
| base="$(basename "$dep")" | |
| if [[ ! -e "$FRAMEWORKS/$base" ]]; then | |
| stem="${base%.dylib}" | |
| stem="${stem%.*}" | |
| match=( "$FRAMEWORKS/$stem".*.dylib ) | |
| if [[ -e "${match[0]}" ]]; then | |
| mk_short_symlink_if_needed "$(basename "${match[0]}")" | |
| else | |
| copy_and_rewrite "$dep" | |
| fi | |
| fi | |
| install_name_tool -change "$dep" "@loader_path/../Frameworks/$base" "$BIN" | |
| done < <(list_opt_local_deps "$BIN") | |
| final_full_sweep | |
| echo "Codesigning bundled libs…" | |
| for lib in "$FRAMEWORKS"/*.dylib; do | |
| codesign --force -s - "$lib" >/dev/null || true | |
| done | |
| codesign --force -s - "$OUT/universal/$PLUG" >/dev/null || true | |
| echo "Installing to: $INST" | |
| mkdir -p "$INST" | |
| rsync -a "$OUT/universal/$PLUG" "$INST/" | |
| echo "Verifying install…" | |
| IBIN="$INST/$PLUG/Contents/MacOS/$NAME" | |
| leaks=0 | |
| if otool -L "$IBIN" | awk '$1 ~ /^\/opt\/local\/lib\//' | grep -q .; then | |
| echo "❌ main binary still references /opt/local/lib:" | |
| otool -L "$IBIN" | awk '$1 ~ /^\/opt\/local\/lib\// {print " " $1}' | |
| leaks=1 | |
| fi | |
| for lib in "$INST/$PLUG/Contents/Frameworks/"*.dylib; do | |
| if otool -L "$lib" | awk '$1 ~ /^\/opt\/local\/lib\//' | grep -q .; then | |
| echo "❌ $(basename "$lib") still references /opt/local/lib:" | |
| otool -L "$lib" | awk '$1 ~ /^\/opt\/local\/lib\// {print " " $1}' | |
| leaks=1 | |
| fi | |
| done | |
| if [[ $leaks -ne 0 ]]; then | |
| echo "Fixup failed; see above offending paths." | |
| exit 1 | |
| fi | |
| echo "Installed: $INST/$PLUG" | |
| echo "Embedded libs:" | |
| ls -1 "$INST/$PLUG/Contents/Frameworks" || true | |
| echo "Relaunch Quartz Composer and look for 'FFExport Scene (x86_64)'." |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| // FFExportScenePlugIn.m — FFmpeg OpenGL scene exporter (CONSUMER) for Quartz Composer (Mojave/ARC, 64-bit) | |
| // VERSION: Direct glReadPixels from QC's OpenGL context, SyphonServer-style viewport usage, | |
| // with selectable PTS: either frame-count-based (offline-style) or QC-time-based. | |
| // Vertical flip is done on the encode queue, not on the QC thread. | |
| // | |
| // Place this patch in the top layer; it captures the rendered OpenGL scene below it. | |
| // | |
| // Inputs: | |
| // Output Path (string) | |
| // • If this is a directory (e.g. "/Users/blah/Desktop/" or "/Users/blah/Desktop"), | |
| // the filename is auto-filled as "$DATE-$TIME.mp4", e.g. "20251124-153012.mp4". | |
| // • If this is a filename containing "%", e.g. "/Users/blah/Desktop/filename%.mp4", | |
| // "%" is replaced with "$DATE-$TIME". | |
| // Record (bool toggle; start/stop & finalize) | |
| // Pause (bool toggle; pause encoding, keep file open) | |
| // Duration (sec) (number; 0 = unlimited; based on encoded frames / FPS) | |
| // FPS (number; default 30) | |
| // Limit to FPS (bool; when ON, capture at most FPS frames/sec; uses QC time for gating) | |
| // Codec Options (string; e.g. "-c:v libx264 -g 120 -bf 3 -s 1280x720 -preset veryfast -crf 18 -pix_fmt yuv444p") | |
| // Use QC Time PTS (bool; OFF = frame count PTS, ON = QC time based PTS) | |
| // Play Done Sound (bool; when ON, play system alert sound when render finishes) | |
| // | |
| // Notes: | |
| // • Uses QC's OpenGL context directly (like SyphonServer's "OpenGL Scene" mode). | |
| // • Scene size comes from the current GL viewport. | |
| // • Each frame (when capturing): | |
| // - QC renders the scene into its back buffer. | |
| // - We glReadPixels() the viewport into a scratch buffer. | |
| // - We memcpy() that into a heap buffer and push it to the encode queue. | |
| // - On the encode queue we flip vertically and feed the data to FFmpeg. | |
| // • PTS mode: | |
| // - Frame-count mode (default): pts = 0,1,2,... with time_base = 1/FPS. | |
| // Playback always at requested FPS; real-time duration may differ if QC is slow. | |
| // - QC-time mode: pts ≈ (time - start) * FPS, with time_base = 1/FPS. | |
| // Playback follows QC's time progression (real-time style). | |
| // | |
| // Link with: | |
| // -framework Foundation -framework Quartz -framework OpenGL -framework CoreGraphics -framework AudioToolbox | |
| // FFmpeg 4.4.x: avformat,avcodec,avutil,swscale | |
| #import <Quartz/Quartz.h> | |
| #import <CoreGraphics/CoreGraphics.h> | |
| #import <OpenGL/OpenGL.h> | |
| #import <OpenGL/gl.h> | |
| #import <OpenGL/CGLMacro.h> | |
| #import <AudioToolbox/AudioToolbox.h> | |
| #include <math.h> | |
| #include <string.h> | |
| #include <stdatomic.h> | |
| #ifdef __cplusplus | |
| extern "C" { | |
| #endif | |
| #include <libavformat/avformat.h> | |
| #include <libavcodec/avcodec.h> | |
| #include <libavutil/avutil.h> | |
| #include <libavutil/opt.h> | |
| #include <libavutil/imgutils.h> | |
| #include <libavutil/dict.h> | |
| #include <libavutil/pixdesc.h> | |
| #include <libswscale/swscale.h> | |
| #ifdef __cplusplus | |
| } | |
| #endif | |
/// Clamp v into [lo, hi] (callers must ensure lo <= hi).
static inline double _clamp(double v, double lo, double hi)
{
    if (v < lo) return lo;
    if (v > hi) return hi;
    return v;
}
// Consumer patch: captures the GL scene rendered below it and encodes it to a
// movie with FFmpeg. Ports are @dynamic QCPlugIn properties — hence the
// QC-conventional (assign) attribute, even on the object-typed ports.
@interface FFExportScenePlugIn : QCPlugIn
@property(assign) NSString *inputOutputPath;   // file path or directory (see header notes)
@property(assign) BOOL inputRecord;            // toggle: start/stop & finalize
@property(assign) BOOL inputPause;             // pause encoding, keep file open
@property(assign) double inputDuration;       // seconds; 0 = unlimited
@property(assign) double inputFPS;            // target frames per second
@property(assign) BOOL inputLimitFPS;          // gate capture rate to FPS
@property(assign) NSString *inputCodecOptions; // ffmpeg-CLI-style option string
@property(assign) BOOL inputUseTimePTS; // toggle QC-time vs frame-count PTS
@property(assign) BOOL inputPlayDoneSound; // play notification when done
@end
// Sound ID of the most recently created named sound; disposed before each
// re-creation so repeated plays don't leak sound objects.
static SystemSoundID gCustomSoundID = 0;

/// Play the named alert sound, looking in /System/Library/Sounds first and
/// then the user's ~/Library/Sounds; falls back to the user's preferred
/// alert sound when no name is given or the file can't be found/loaded.
static void PlayNamedSystemSound(NSString *name)
{
    if (name.length == 0) {   // nil-safe: messaging nil yields 0
        AudioServicesPlayAlertSound(kSystemSoundID_UserPreferredAlert);
        return;
    }
    NSArray<NSString *> *candidates = @[
        [NSString stringWithFormat:@"/System/Library/Sounds/%@.aiff", name],
        [NSString stringWithFormat:@"%@/Library/Sounds/%@.aiff", NSHomeDirectory(), name],
    ];
    NSFileManager *fm = [NSFileManager defaultManager];
    NSString *path = nil;
    for (NSString *candidate in candidates) {
        if ([fm fileExistsAtPath:candidate]) {
            path = candidate;
            break;
        }
    }
    if (!path) {
        // Fall back to the user's alert if the named sound doesn't exist.
        AudioServicesPlayAlertSound(kSystemSoundID_UserPreferredAlert);
        return;
    }
    if (gCustomSoundID) {
        AudioServicesDisposeSystemSoundID(gCustomSoundID);
        gCustomSoundID = 0;
    }
    NSURL *url = [NSURL fileURLWithPath:path];
    if (AudioServicesCreateSystemSoundID((__bridge CFURLRef)url, &gCustomSoundID) == kAudioServicesNoError) {
        AudioServicesPlaySystemSound(gCustomSoundID);
    } else {
        AudioServicesPlayAlertSound(kSystemSoundID_UserPreferredAlert);
    }
}
@implementation FFExportScenePlugIn
{
    // FFmpeg state — created in _startEncoding…, destroyed in _cleanupFFmpeg.
    AVFormatContext *_fmt;
    AVStream *_vstream;          // owned by _fmt; never freed directly
    AVCodecContext *_venc;
    struct SwsContext *_sws;     // NULL when the direct-BGRA path is active
    AVFrame *_frame;             // single reusable frame in encoder pix_fmt
    int _width; // encoded width
    int _height; // encoded height
    int _srcWidth; // source (scene) width
    int _srcHeight; // source (scene) height
    AVRational _timeBase;        // always 1/FPS
    double _fps;
    int64_t _nextPTS; // last PTS (frame index or QC-time-derived ticks)
    int64_t _frameCount; // encoded frames (encoder thread)
    // Recording state
    BOOL _isRecording;
    BOOL _prevRecord;            // previous Record input, for edge detection
    NSTimeInterval _recordStartTime; // QC time at start (for QC-time PTS & gating)
    NSTimeInterval _lastTime; // last QC time (for FPS gating)
    double _durationLimit; // seconds (0 = unlimited)
    double _nextCaptureTime; // QC time of next allowed capture when Limit to FPS is ON
    // Capture scratch buffer (BGRA, bottom-up as glReadPixels gives it)
    uint8_t *_captureBuf;
    size_t _captureBufSize;
    CGColorSpaceRef _cs;
    // Async encoding — serial queue; FFmpeg calls after start happen there.
    dispatch_queue_t _encodeQueue;
    int64_t _scheduledFrames; // frames we’ve queued for encoding
    _Atomic int _inFlightFrames; // backlog of frames currently in the encode queue
    BOOL _directBGRAPath; // encoder pix_fmt == BGRA && no scale => bypass swscale
    BOOL _finalizing; // true while trailing/cleanup is running on encodeQueue
    // Mode toggles
    BOOL _useTimePTS; // copy of inputUseTimePTS, latched at start of recording
    BOOL _playDoneSound; // last value of inputPlayDoneSound
}
// Ports are @dynamic: Quartz Composer synthesizes the accessors at runtime.
@dynamic inputOutputPath, inputRecord, inputPause, inputDuration, inputFPS,
inputLimitFPS, inputCodecOptions, inputUseTimePTS, inputPlayDoneSound;
// Patch metadata shown in QC's Patch Creator and inspector.
+ (NSDictionary *)attributes
{
    NSString *patchName = @"FFExport Scene (x86_64)";
    NSString *patchDescription =
        @"FFmpeg-based exporter that captures the OpenGL scene below it.\n"
        @"Place this as the top layer. Capture is from QC's OpenGL viewport; PTS can be frame-count-based or QC-time-based.";
    return @{ QCPlugInAttributeNameKey: patchName,
              QCPlugInAttributeDescriptionKey: patchDescription };
}
// Port-description lookup table: key -> {name, type, default}.
// Built once; returns nil for unknown keys so QC falls back to its defaults.
+ (NSDictionary *)attributesForPropertyPortWithKey:(NSString *)key
{
    static NSDictionary<NSString *, NSDictionary *> *table = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        table = @{
            @"inputOutputPath":    @{ QCPortAttributeNameKey: @"Output Path",
                                      QCPortAttributeTypeKey: QCPortTypeString,
                                      QCPortAttributeDefaultValueKey: @"" },
            @"inputRecord":        @{ QCPortAttributeNameKey: @"Record",
                                      QCPortAttributeTypeKey: QCPortTypeBoolean,
                                      QCPortAttributeDefaultValueKey: @0.0 },
            @"inputPause":         @{ QCPortAttributeNameKey: @"Pause",
                                      QCPortAttributeTypeKey: QCPortTypeBoolean,
                                      QCPortAttributeDefaultValueKey: @0.0 },
            @"inputDuration":      @{ QCPortAttributeNameKey: @"Duration (sec)",
                                      QCPortAttributeTypeKey: QCPortTypeNumber,
                                      QCPortAttributeDefaultValueKey: @0.0 },
            @"inputFPS":           @{ QCPortAttributeNameKey: @"FPS",
                                      QCPortAttributeTypeKey: QCPortTypeNumber,
                                      QCPortAttributeDefaultValueKey: @30.0 },
            @"inputLimitFPS":      @{ QCPortAttributeNameKey: @"Limit to FPS",
                                      QCPortAttributeTypeKey: QCPortTypeBoolean,
                                      QCPortAttributeDefaultValueKey: @1.0 },
            @"inputCodecOptions":  @{ QCPortAttributeNameKey: @"Codec Options",
                                      QCPortAttributeTypeKey: QCPortTypeString,
                                      QCPortAttributeDefaultValueKey: @"" },
            // Default OFF: frame-count PTS.
            @"inputUseTimePTS":    @{ QCPortAttributeNameKey: @"Use QC Time PTS",
                                      QCPortAttributeTypeKey: QCPortTypeBoolean,
                                      QCPortAttributeDefaultValueKey: @0.0 },
            @"inputPlayDoneSound": @{ QCPortAttributeNameKey: @"Play Done Sound",
                                      QCPortAttributeTypeKey: QCPortTypeBoolean,
                                      QCPortAttributeDefaultValueKey: @0.0 },
        };
    });
    // Guard nil: subscripting with a nil key would throw, while the original
    // if-chain simply returned nil.
    return key ? table[key] : nil;
}
// Port ordering in the inspector (toggles grouped, codec options last).
+ (NSArray *)sortedPropertyPortKeys
{
    static NSArray *orderedKeys = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        orderedKeys = @[ @"inputOutputPath",
                         @"inputRecord",
                         @"inputPause",
                         @"inputDuration",
                         @"inputFPS",
                         @"inputLimitFPS",
                         @"inputUseTimePTS",
                         @"inputPlayDoneSound",
                         @"inputCodecOptions" ];
    });
    return orderedKeys;
}
// Consumer: QC invokes execution every frame so the scene can be read back.
+ (QCPlugInExecutionMode)executionMode
{
    return kQCPlugInExecutionModeConsumer;
}

// We still use Idle; QC time is used for gating and (optionally) PTS.
+ (QCPlugInTimeMode)timeMode
{
    return kQCPlugInTimeModeIdle;
}

+ (BOOL)allowsSubpatches
{
    return NO;
}
// --------------------------------------------------
// Lifecycle
// --------------------------------------------------
- (id)init
{
    self = [super init];
    if (!self) return nil;
#ifdef kCGColorSpaceSRGB
    _cs = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
#else
    _cs = CGColorSpaceCreateDeviceRGB();
#endif
    // +alloc zero-fills every ivar, so the pointer/scalar/flag members
    // (_fmt, _frame, _captureBuf, counters, booleans, …) are already
    // NULL/0/NO; only the non-zero defaults need explicit initialization.
    _fps = 30.0;
    _timeBase = (AVRational){1, 30};
    atomic_store(&_inFlightFrames, 0);
    // Serial queue: all FFmpeg work after start happens here, in order.
    _encodeQueue = dispatch_queue_create("com.yourdomain.FFExportScene.encode",
                                         DISPATCH_QUEUE_SERIAL);
    return self;
}
// Teardown: stop/finalize any in-flight recording, release the color space
// and the capture scratch buffer.
// NOTE(review): -_stopEncoding dispatches a block that captures self via ivar
// access; if that async path is ever reachable from dealloc the block would
// retain a deallocating object — verify recording is always stopped via
// stopExecution: before the last reference goes away.
- (void)dealloc
{
    [self _stopEncoding];
    if (_cs) { CFRelease(_cs); _cs = NULL; }
    if (_captureBuf) { free(_captureBuf); _captureBuf = NULL; _captureBufSize = 0; }
}
// Composition started: silence everything but FFmpeg errors.
- (BOOL)startExecution:(id<QCPlugInContext>)context
{
    (void)context;
    av_log_set_level(AV_LOG_ERROR);
    return YES;
}

// Composition stopped: finalize any recording still in flight.
- (void)stopExecution:(id<QCPlugInContext>)context
{
    (void)context;
    [self _stopEncoding];
}
// --------------------------------------------------
// FFmpeg helpers
// --------------------------------------------------

// Free all FFmpeg state (encoder, muxer, scaler, frame) and reset flags.
// Safe to call repeatedly; every member is checked before being freed.
// Must not run while the encode queue still has frames in flight.
- (void)_cleanupFFmpeg
{
    if (_venc) {
        avcodec_free_context(&_venc);  // also NULLs _venc; explicit reset kept
        _venc = NULL;
    }
    if (_fmt) {
        // Close the output file only when this muxer owns an AVIO handle.
        if (!(_fmt->oformat->flags & AVFMT_NOFILE) && _fmt->pb) {
            avio_closep(&_fmt->pb);
        }
        avformat_free_context(_fmt);
        _fmt = NULL;
    }
    if (_sws) {
        sws_freeContext(_sws);
        _sws = NULL;
    }
    if (_frame) {
        av_frame_free(&_frame);
        _frame = NULL;
    }
    // _vstream is owned by _fmt and was released with it above.
    _vstream = NULL;
    _directBGRAPath = NO;
}
/// Parse a "WxH" string (e.g. "1280x720") into *encW/*encH.
/// Leaves the outputs untouched on malformed input or non-positive dimensions.
static void _parse_resolution(NSString *val, int *encW, int *encH)
{
    if (val.length == 0) return;   // nil-safe: messaging nil yields 0
    NSArray<NSString *> *parts = [val componentsSeparatedByString:@"x"];
    if (parts.count != 2) return;
    int w = [parts[0] intValue];
    int h = [parts[1] intValue];
    if (w <= 0 || h <= 0) return;
    *encW = w;
    *encH = h;
}
// Parse an ffmpeg-CLI-style option string, e.g.
//   "-c:v libx264 -g 120 -bf 3 -s 1280x720 -preset veryfast -crf 18 -pix_fmt yuv444p"
// Structural options are returned through dedicated out-params and NOT placed
// in the dictionary: codec (-c:v/-codec:v), GOP (-g), B-frames (-bf),
// encode size (-s), pixel format (-pix_fmt/-pixel_format). Every other
// "-key value" pair goes into *outDict for avcodec_open2().
// A trailing ":v" stream qualifier is stripped, so "-crf:v 18" == "-crf 18".
// NOTE(review): valueless flags are not supported — a bare "-flag" followed
// by another "-opt" will consume "-opt" as its value; confirm acceptable.
- (void)_parseCodecOptionsString:(NSString *)opts
                       codecName:(NSString * __strong *)outCodecName
                          gopPtr:(int *)outGop
                           bfPtr:(int *)outBF
                        encWidth:(int *)outEncW
                       encHeight:(int *)outEncH
                          pixFmt:(enum AVPixelFormat *)outPixFmt
                    codecOptions:(AVDictionary **)outDict
{
    // Reset all out-params to "not specified".
    if (outCodecName) *outCodecName = nil;
    if (outGop) *outGop = -1;
    if (outBF) *outBF = -1;
    if (outPixFmt) *outPixFmt = AV_PIX_FMT_NONE;
    AVDictionary *d = NULL;
    // QC may hand us NSNull for an unconnected string port.
    if (!opts || (id)opts == [NSNull null] || [opts length] == 0) {
        if (outDict) *outDict = NULL;
        return;
    }
    // Tokenize on whitespace, dropping empties from runs of spaces.
    NSCharacterSet *ws = [NSCharacterSet whitespaceAndNewlineCharacterSet];
    NSArray<NSString*> *tokens = [opts componentsSeparatedByCharactersInSet:ws];
    NSMutableArray<NSString*> *clean = [NSMutableArray arrayWithCapacity:[tokens count]];
    for (NSString *t in tokens) {
        if ([t length] > 0) [clean addObject:t];
    }
    for (NSUInteger i = 0; i < [clean count]; ++i) {
        NSString *tok = clean[i];
        if (![tok hasPrefix:@"-"]) continue;  // stray value token; skip
        NSString *key = [tok substringFromIndex:1];
        NSString *val = (i + 1 < [clean count]) ? clean[i+1] : nil;
        // Normalize "key:v" -> "key" (video-stream qualifier).
        NSString *plainKey = key;
        if ([plainKey hasSuffix:@":v"]) {
            plainKey = [plainKey substringToIndex:plainKey.length - 2];
        }
        if (([key isEqualToString:@"c:v"] || [key isEqualToString:@"codec:v"]) && val) {
            if (outCodecName) *outCodecName = val;
            i++;
            continue;
        }
        if ([plainKey isEqualToString:@"g"] && val && outGop) {
            *outGop = [val intValue];
            i++;
            continue;
        }
        if ([plainKey isEqualToString:@"bf"] && val && outBF) {
            *outBF = [val intValue];
            i++;
            continue;
        }
        if ([plainKey isEqualToString:@"s"] && val && outEncW && outEncH) {
            _parse_resolution(val, outEncW, outEncH);
            i++;
            continue;
        }
        if (([plainKey isEqualToString:@"pix_fmt"] || [plainKey isEqualToString:@"pixel_format"]) && val && outPixFmt) {
            // Unknown pixel format names are silently ignored.
            enum AVPixelFormat pf = av_get_pix_fmt([val UTF8String]);
            if (pf != AV_PIX_FMT_NONE) {
                *outPixFmt = pf;
            }
            i++;
            continue;
        }
        // Anything else is passed straight through to the codec.
        if (val) {
            av_dict_set(&d, [plainKey UTF8String], [val UTF8String], 0);
            i++;
        }
    }
    if (outDict) *outDict = d;
}
// srcW/srcH = OpenGL viewport size; -s can override encode size, -pix_fmt overrides pixel format.
//
// Builds the complete FFmpeg pipeline for one recording: output context,
// encoder (named via -c:v, else H.264), stream, reusable AVFrame, and a
// swscale context unless the direct-BGRA fast path applies.
// Returns NO on failure; partially built state is reclaimed by the
// _cleanupFFmpeg call at the top of the next start (or by _stopEncoding).
- (BOOL)_startEncodingWithSourceWidth:(int)srcW
                         sourceHeight:(int)srcH
                                  fps:(double)fps
                                 path:(NSString *)path
                              options:(NSString *)optString
{
    // A previous stop may still be finalizing on the encode queue; starting
    // now would tear down state it is still using.
    if (_finalizing) {
        NSLog(@"[FFExportScene] Still finalizing previous recording; ignoring new start.");
        return NO;
    }
    [self _cleanupFFmpeg];
    if (srcW <= 0 || srcH <= 0) return NO;
    if (fps <= 0.0) fps = 30.0;
    _srcWidth = srcW;
    _srcHeight = srcH;
    _fps = fps;
    // Encode size defaults to the source size; "-s WxH" may override below.
    int encW = srcW;
    int encH = srcH;
    NSString *codecName = nil;
    int gopSize = -1;
    int maxBF = -1;
    AVDictionary *codecOpts = NULL;
    enum AVPixelFormat pixFmt = AV_PIX_FMT_NONE;
    [self _parseCodecOptionsString:optString
                         codecName:&codecName
                            gopPtr:&gopSize
                             bfPtr:&maxBF
                          encWidth:&encW
                         encHeight:&encH
                            pixFmt:&pixFmt
                      codecOptions:&codecOpts];
    if (encW <= 0 || encH <= 0) {
        encW = srcW;
        encH = srcH;
    }
    _width = encW;
    _height = encH;
    int fpsInt = (int)llround(fps);
    if (fpsInt < 1) fpsInt = 1;
    // Time base is always 1/FPS; we just change how we compute PTS.
    _timeBase = (AVRational){1, fpsInt};
    _nextPTS = 0;
    _frameCount = 0;
    _scheduledFrames = 0;
    atomic_store(&_inFlightFrames, 0);
    // Accept "file://..." URLs as well as plain paths.
    NSString *realPath = path;
    if ([realPath hasPrefix:@"file://"]) {
        realPath = [[NSURL URLWithString:realPath] path];
    }
    const char *filename = [realPath fileSystemRepresentation];
    // Guess the container from the filename; fall back to MP4.
    avformat_alloc_output_context2(&_fmt, NULL, NULL, filename);
    if (!_fmt) {
        avformat_alloc_output_context2(&_fmt, NULL, "mp4", filename);
    }
    if (!_fmt) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;   // normalized from FALSE for consistency with the file
    }
    AVOutputFormat *ofmt = _fmt->oformat;
    // Encoder: explicit -c:v name if given and found, else default H.264.
    const AVCodec *codec = NULL;
    if (codecName && [codecName length] > 0) {
        codec = avcodec_find_encoder_by_name([codecName UTF8String]);
    }
    if (!codec) {
        codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    }
    if (!codec) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;
    }
    // No -pix_fmt given: take the codec's preferred format, else YUV420P.
    if (pixFmt == AV_PIX_FMT_NONE) {
        if (codec->pix_fmts) {
            pixFmt = codec->pix_fmts[0];
        } else {
            pixFmt = AV_PIX_FMT_YUV420P;
        }
    }
    _vstream = avformat_new_stream(_fmt, codec);
    if (!_vstream) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;
    }
    _vstream->id = _fmt->nb_streams - 1;
    _venc = avcodec_alloc_context3(codec);
    if (!_venc) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;
    }
    _venc->codec_id = codec->id;
    _venc->width = encW;
    _venc->height = encH;
    _venc->pix_fmt = pixFmt;
    _venc->time_base = _timeBase;
    _vstream->time_base = _timeBase;
    _venc->framerate = (AVRational){ fpsInt, 1 };
    _vstream->avg_frame_rate = (AVRational){ fpsInt, 1 };
    _vstream->r_frame_rate = (AVRational){ fpsInt, 1 };
    _venc->gop_size = (gopSize > 0 ? gopSize : fpsInt);  // default: 1 keyframe/sec
    _venc->max_b_frames = (maxBF >= 0 ? maxBF : 2);
    _venc->bit_rate = 8 * 1000 * 1000;                   // 8 Mbit/s default
    if (ofmt->flags & AVFMT_GLOBALHEADER) {
        _venc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }
    // Planar YUV targets are tagged BT.709 limited range; everything else
    // (e.g. RGB-family formats) is tagged full range.
    if (_venc->pix_fmt == AV_PIX_FMT_YUV420P ||
        _venc->pix_fmt == AV_PIX_FMT_YUV422P ||
        _venc->pix_fmt == AV_PIX_FMT_YUV444P ||
        _venc->pix_fmt == AV_PIX_FMT_YUV420P10LE ||
        _venc->pix_fmt == AV_PIX_FMT_YUV444P10LE) {
        _venc->color_primaries = AVCOL_PRI_BT709;
        _venc->color_trc = AVCOL_TRC_BT709;
        _venc->colorspace = AVCOL_SPC_BT709;
        _venc->color_range = AVCOL_RANGE_MPEG;
    } else {
        _venc->color_range = AVCOL_RANGE_JPEG;
    }
    // Codecs with private options (x264 etc.) get gentle defaults unless the
    // user supplied their own.
    if (_venc->priv_data) {
        if (!av_dict_get(codecOpts, "preset", NULL, 0)) {
            av_dict_set(&codecOpts, "preset", "medium", 0);
        }
        if (!av_dict_get(codecOpts, "tune", NULL, 0)) {
            av_dict_set(&codecOpts, "tune", "animation", 0);
        }
    }
    if (avcodec_open2(_venc, codec, &codecOpts) < 0) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;
    }
    if (codecOpts) av_dict_free(&codecOpts);  // leftovers = unrecognized options
    if (avcodec_parameters_from_context(_vstream->codecpar, _venc) < 0) {
        return NO;
    }
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&_fmt->pb, filename, AVIO_FLAG_WRITE) < 0) {
            return NO;
        }
    }
    if (avformat_write_header(_fmt, NULL) < 0) {
        return NO;
    }
    // One reusable frame in the encoder's format, 32-byte aligned.
    _frame = av_frame_alloc();
    if (!_frame) return NO;
    _frame->format = _venc->pix_fmt;
    _frame->width = _venc->width;
    _frame->height = _venc->height;
    if (av_frame_get_buffer(_frame, 32) < 0) {
        return NO;
    }
    // Fast path: encoder consumes BGRA at capture size -> skip swscale.
    _directBGRAPath = (_venc->pix_fmt == AV_PIX_FMT_BGRA &&
                       _srcWidth == _width &&
                       _srcHeight == _height);
    if (!_directBGRAPath) {
        _sws = sws_getContext(_srcWidth, _srcHeight, AV_PIX_FMT_BGRA,
                              encW, encH, _venc->pix_fmt,
                              SWS_BICUBIC, NULL, NULL, NULL);
        if (!_sws) return NO;
    } else {
        _sws = NULL;
    }
    NSLog(@"[FFExportScene] Recording started: %s (%dx%d -> %dx%d @ %.3f fps, pix_fmt=%d, directBGRA=%d, useTimePTS=%d)",
          filename, _srcWidth, _srcHeight, encW, encH, _fps,
          (int)_venc->pix_fmt, (int)_directBGRAPath, (int)_useTimePTS);
    return YES;
}
// Called only on the encode queue.
// Convert one BGRA frame (stride = rowBytes) into the encoder's pixel format,
// stamp it with `pts` (units of _timeBase = 1/FPS), send it to the encoder,
// and mux out every packet the encoder currently has ready.
// Returns NO on any FFmpeg failure; the caller treats the frame as dropped.
- (BOOL)_encodeFrameWithBGRA_locked:(uint8_t *)src
                           rowBytes:(int)rowBytes
                                pts:(int64_t)pts
{
    if (!_fmt || !_venc || !_frame) return NO;
    // _frame's buffers may still be referenced by the encoder; get a private copy.
    if (av_frame_make_writable(_frame) < 0) return NO;
    if (_directBGRAPath) {
        // Fast path: encoder consumes BGRA at identical dimensions — plain
        // row-by-row copy, honoring both source and destination strides.
        uint8_t *dst = _frame->data[0];
        int dstRB = _frame->linesize[0];
        int copyRB = rowBytes;
        if (copyRB > dstRB) copyRB = dstRB;
        for (int y = 0; y < _srcHeight; ++y) {
            memcpy(dst + (size_t)y * dstRB, src + (size_t)y * rowBytes, (size_t)copyRB);
        }
    } else {
        // General path: swscale converts BGRA -> encoder pix_fmt and rescales.
        const uint8_t *srcSlice[4] = { src, NULL, NULL, NULL };
        int srcStride[4] = { rowBytes, 0, 0, 0 };
        if (!_sws) return NO;
        sws_scale(_sws, srcSlice, srcStride, 0, _srcHeight,
                  _frame->data, _frame->linesize);
    }
    _frame->pts = pts;
    int ret = avcodec_send_frame(_venc, _frame);
    if (ret < 0) return NO;
    AVPacket *pkt = av_packet_alloc();
    if (!pkt) return NO;
    // Drain all currently available packets (the encoder may buffer frames).
    for (;;) {
        ret = avcodec_receive_packet(_venc, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            break;   // no more output for now
        } else if (ret < 0) {
            av_packet_free(&pkt);
            return NO;
        }
        pkt->stream_index = _vstream->index;
        // Encoder and stream share the 1/FPS time base here, but rescale anyway.
        av_packet_rescale_ts(pkt, _venc->time_base, _vstream->time_base);
        pkt->duration = 1;   // one tick = one frame at 1/FPS
        ret = av_interleaved_write_frame(_fmt, pkt);
        av_packet_unref(pkt);
        if (ret < 0) {
            av_packet_free(&pkt);
            return NO;
        }
    }
    av_packet_free(&pkt);
    _frameCount++;
    return YES;
}
// Called on encode queue
// Drain the encoder at end of recording: send the NULL "flush" frame to enter
// draining mode, then write out every remaining buffered packet. Errors are
// swallowed — at this point the file is being finalized regardless.
- (void)_flushEncoder_locked
{
    if (!_fmt || !_venc) return;
    int ret = avcodec_send_frame(_venc, NULL);   // enter draining mode
    if (ret < 0) {
        return;
    }
    AVPacket *pkt = av_packet_alloc();
    if (!pkt) return;
    for (;;) {
        ret = avcodec_receive_packet(_venc, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) break;  // fully drained
        if (ret < 0) break;
        pkt->stream_index = _vstream->index;
        av_packet_rescale_ts(pkt, _venc->time_base, _vstream->time_base);
        pkt->duration = 1;
        av_interleaved_write_frame(_fmt, pkt);
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);
}
// Public stop: async — QC thread no longer blocks.
// Flush + trailer + cleanup are queued on the encode queue BEHIND any frames
// still waiting to be encoded; _finalizing guards against a new start racing
// the teardown.
// NOTE(review): the dispatched block captures self (via ivar access), which
// intentionally keeps the plug-in alive until finalize completes — but if
// _stopEncoding is ever entered from dealloc that capture would resurrect a
// deallocating object; verify stopExecution: always runs first.
- (void)_stopEncoding
{
    // Nothing was ever started; just clear the flag.
    if (!_fmt && !_venc) {
        _isRecording = NO;
        return;
    }
    BOOL wasRecording = _isRecording;
    _isRecording = NO;
    // Defensive synchronous path in case the queue is somehow missing.
    if (!_encodeQueue) {
        if (wasRecording && _fmt && _venc) {
            [self _flushEncoder_locked];
            av_write_trailer(_fmt);
        }
        [self _cleanupFFmpeg];
        _frameCount = 0;
        _scheduledFrames = 0;
        atomic_store(&_inFlightFrames, 0);
        return;
    }
    _finalizing = YES;
    int64_t totalFrames = _frameCount;
    BOOL playSound = _playDoneSound; // snapshot for this stop
    dispatch_async(_encodeQueue, ^{
        @autoreleasepool {
            if (wasRecording && _fmt && _venc) {
                [self _flushEncoder_locked];
                av_write_trailer(_fmt);   // finalize container (moov atom etc.)
            }
            NSLog(@"[FFExportScene] Recording stopped. Encoded frames: %lld", (long long)totalFrames);
            [self _cleanupFFmpeg];
            _frameCount = 0;
            _scheduledFrames = 0;
            atomic_store(&_inFlightFrames, 0);
            _finalizing = NO;
            if (playSound) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    // Play the user's preferred alert sound (independent of AppKit / NSBeep / NSApp)
                    // AudioServicesPlayAlertSound(kSystemSoundID_UserPreferredAlert);
                    PlayNamedSystemSound(@"Glass"); // or @"Basso", @"Funk", @"Ping", etc.
                    // Optional: also drop a notification entry (sound usually already played above)
                    NSUserNotification *note = [[NSUserNotification alloc] init];
                    note.title = @"FFExport Scene";
                    note.informativeText = @"Video export finished.";
                    note.soundName = nil; // sound already played
                    [[NSUserNotificationCenter defaultUserNotificationCenter] deliverNotification:note];
                });
            }
        }
    });
}
| // -------------------------------------------------- | |
| // Output path helper (date/time auto-filename) | |
| // -------------------------------------------------- | |
| - (NSString *)_dateTimeStampString | |
| { | |
| NSDateFormatter *df = [[NSDateFormatter alloc] init]; | |
| // $DATE-$TIME style, filesystem-safe (no ':' etc.) | |
| [df setDateFormat:@"yyyyMMdd-HHmmss"]; | |
| return [df stringFromDate:[NSDate date]]; | |
| } | |
// Resolve the raw output-path input into a concrete file path.
// Accepted forms:
//   * "file://..." URL            -> converted to its plain POSIX path
//   * "~/..."                     -> tilde expanded
//   * ".../name%.ext"             -> every '%' in the last component is
//                                    replaced with a date/time stamp
//   * a directory (trailing '/'   -> "<dir>/<stamp>.mp4" auto-filename
//     or no file extension)
//   * full path with extension    -> returned unchanged
// Returns @"" when no usable path was supplied.
- (NSString *)_resolvedOutputPathFromInputPath:(NSString *)rawPath
{
    if (!rawPath || (id)rawPath == [NSNull null] || rawPath.length == 0) {
        return @"";
    }

    NSString *candidate = rawPath;

    // Handle common "file://..." inputs.
    if ([candidate hasPrefix:@"file://"]) {
        NSURL *asURL = [NSURL URLWithString:candidate];
        if (asURL.path.length > 0) {
            candidate = asURL.path;
        }
    }

    // Expand tilde, e.g. "~/Desktop".
    candidate = [candidate stringByExpandingTildeInPath];

    BOOL hasTrailingSlash = [candidate hasSuffix:@"/"];
    NSString *stamp = [self _dateTimeStampString];
    NSString *leaf = [candidate lastPathComponent];

    // Placeholder form: "/users/blah/Desktop/filename%.mp4"
    // -> '%' replaced with the date/time stamp.
    if (leaf.length > 0 && [leaf containsString:@"%"]) {
        NSString *parent = [candidate stringByDeletingLastPathComponent];
        NSString *stampedLeaf = [leaf stringByReplacingOccurrencesOfString:@"%"
                                                                withString:stamp];
        return (parent.length > 0)
            ? [parent stringByAppendingPathComponent:stampedLeaf]
            : stampedLeaf; // bare relative filename
    }

    // Directory-style input: either an explicit trailing slash
    // ("/users/blah/Desktop/") or no extension at all ("/users/blah/Desktop").
    // Auto-generate "<stamp>.mp4" inside it.
    if (hasTrailingSlash || [leaf pathExtension].length == 0) {
        NSString *autoName = [NSString stringWithFormat:@"%@.mp4", stamp];
        return [candidate stringByAppendingPathComponent:autoName];
    }

    // A complete filename with an extension: leave it as-is.
    return candidate;
}
| // -------------------------------------------------- | |
| // Capture from QC's OpenGL context (viewport) | |
| // -------------------------------------------------- | |
| // | |
| // Capture the current OpenGL scene via glReadPixels into a scratch buffer, | |
| // then heap-copy that buffer and send it to the encode queue. | |
| // PTS is either frame-count-based or QC-time-based, depending on _useTimePTS. | |
| - (BOOL)_captureSceneAtTime:(NSTimeInterval)time | |
| context:(id<QCPlugInContext>)context | |
| { | |
| CGLContextObj cgl_ctx = [context CGLContextObj]; | |
| (void)cgl_ctx; | |
| GLint viewport[4] = {0,0,0,0}; | |
| glGetIntegerv(GL_VIEWPORT, viewport); | |
| int w = viewport[2]; | |
| int h = viewport[3]; | |
| if (w <= 0 || h <= 0) return NO; | |
| if (_srcWidth == 0 || _srcHeight == 0) { | |
| _srcWidth = w; | |
| _srcHeight = h; | |
| } | |
| // If viewport changes mid-record, you could handle it here; for now we require it to stay constant. | |
| if (w != _srcWidth || h != _srcHeight) { | |
| // NSLog(@"[FFExportScene] Viewport changed during recording (%dx%d -> %dx%d), ignoring frame.", _srcWidth, _srcHeight, w, h); | |
| return NO; | |
| } | |
| size_t rowBytes = (size_t)_srcWidth * 4; | |
| size_t needed = rowBytes * (size_t)_srcHeight; | |
| if (_captureBufSize < needed) { | |
| _captureBuf = (uint8_t *)realloc(_captureBuf, needed); | |
| _captureBufSize = needed; | |
| } | |
| glPixelStorei(GL_PACK_ALIGNMENT, 4); | |
| glReadPixels(viewport[0], viewport[1], | |
| _srcWidth, _srcHeight, | |
| GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, | |
| _captureBuf); | |
| // Heap copy for encode queue | |
| uint8_t *copy = (uint8_t *)malloc(needed); | |
| if (!copy) return NO; | |
| memcpy(copy, _captureBuf, needed); | |
| // Compute PTS according to mode | |
| int64_t pts; | |
| if (_useTimePTS) { | |
| // QC-time-based: map (time - start) * FPS to PTS, enforce monotonic increase | |
| double rel = time - _recordStartTime; | |
| if (rel < 0.0) rel = 0.0; | |
| double exactFrame = rel * _fps; // _fps > 0 by construction | |
| int64_t candidate = (int64_t)llround(exactFrame); | |
| if (candidate <= _nextPTS) { | |
| candidate = _nextPTS + 1; | |
| } | |
| pts = candidate; | |
| _nextPTS = candidate; | |
| } else { | |
| // Pure frame count | |
| pts = _nextPTS++; | |
| } | |
| _scheduledFrames++; | |
| atomic_fetch_add(&_inFlightFrames, 1); | |
| int64_t ptsCopy = pts; | |
| size_t rowBytesCopy = rowBytes; | |
| dispatch_async(_encodeQueue, ^{ | |
| @autoreleasepool { | |
| // Flip vertically on the encode queue (top-down for FFmpeg). | |
| uint8_t *buf = copy; | |
| size_t rb = rowBytesCopy; | |
| uint8_t *rowTmp = (uint8_t *)malloc(rb); | |
| if (rowTmp) { | |
| uint8_t *top = buf; | |
| uint8_t *bottom = buf + (size_t)(_srcHeight - 1) * rb; | |
| for (int y = 0; y < _srcHeight / 2; ++y) { | |
| memcpy(rowTmp, top, rb); | |
| memcpy(top, bottom, rb); | |
| memcpy(bottom, rowTmp, rb); | |
| top += rb; | |
| bottom -= rb; | |
| } | |
| free(rowTmp); | |
| } | |
| [self _encodeFrameWithBGRA_locked:buf rowBytes:(int)rb pts:ptsCopy]; | |
| free(buf); | |
| atomic_fetch_sub(&_inFlightFrames, 1); | |
| } | |
| }); | |
| return YES; | |
| } | |
| // -------------------------------------------------- | |
| // Execute (drive recording state machine) | |
| // -------------------------------------------------- | |
| - (BOOL)execute:(id<QCPlugInContext>)context | |
| atTime:(NSTimeInterval)time | |
| withArguments:(NSDictionary *)arguments | |
| { | |
| @autoreleasepool { | |
| (void)arguments; | |
| NSString *path = self.inputOutputPath; | |
| if (!path || (id)path == [NSNull null]) path = @""; | |
| double fpsVal = self.inputFPS; | |
| if (fpsVal <= 0.0) fpsVal = 30.0; | |
| fpsVal = _clamp(fpsVal, 1.0, 240.0); | |
| double durVal = self.inputDuration; | |
| if (durVal < 0.0) durVal = 0.0; | |
| NSString *codecOpts = self.inputCodecOptions; | |
| if (!codecOpts || (id)codecOpts == [NSNull null]) codecOpts = @""; | |
| BOOL recVal = self.inputRecord; | |
| BOOL pauseVal = self.inputPause; | |
| BOOL limitFPSVal = self.inputLimitFPS; | |
| BOOL useTimePTS = self.inputUseTimePTS; | |
| BOOL playDoneSound = self.inputPlayDoneSound; | |
| // Latch sound toggle each tick so _stopEncoding sees last user value | |
| _playDoneSound = playDoneSound; | |
| BOOL recEdgeOn = (recVal && !_prevRecord); | |
| BOOL recEdgeOff = (!recVal && _prevRecord); | |
| _prevRecord = recVal; | |
| // Start recording on Record rising edge | |
| if (recEdgeOn && !_isRecording && !_finalizing) { | |
| // Resolve directory-only paths and "%" placeholders to a concrete filename | |
| NSString *resolvedPath = [self _resolvedOutputPathFromInputPath:path]; | |
| if (resolvedPath.length == 0) { | |
| NSLog(@"[FFExportScene] No valid output path specified."); | |
| } else { | |
| CGLContextObj cgl_ctx = [context CGLContextObj]; | |
| (void)cgl_ctx; | |
| GLint viewport[4] = {0,0,0,0}; | |
| glGetIntegerv(GL_VIEWPORT, viewport); | |
| int w = viewport[2]; | |
| int h = viewport[3]; | |
| if (w > 0 && h > 0) { | |
| // Latch PTS mode at start | |
| _useTimePTS = useTimePTS; | |
| if ([self _startEncodingWithSourceWidth:w | |
| sourceHeight:h | |
| fps:fpsVal | |
| path:resolvedPath | |
| options:codecOpts]) { | |
| _isRecording = YES; | |
| _durationLimit = durVal; | |
| _recordStartTime = time; | |
| _lastTime = time; | |
| _nextCaptureTime = time; // for LimitFPS gating | |
| } else { | |
| NSLog(@"[FFExportScene] Failed to start encoding for path: %@", resolvedPath); | |
| } | |
| } else { | |
| NSLog(@"[FFExportScene] Viewport size is zero; cannot start recording."); | |
| } | |
| } | |
| } | |
| // Duration auto-stop (based on encoded timeline = scheduledFrames / FPS) | |
| if (_isRecording && _durationLimit > 0.0 && _fps > 0.0 && _scheduledFrames > 0) { | |
| double recordedSecs = (double)_scheduledFrames / _fps; | |
| if (recordedSecs >= _durationLimit) { | |
| [self _stopEncoding]; | |
| } | |
| } | |
| // Stop & finalize when Record is untoggled | |
| if (_isRecording && recEdgeOff) { | |
| [self _stopEncoding]; | |
| } | |
| // Capture frames while recording and NOT paused | |
| if (_isRecording) { | |
| double dt = time - _lastTime; | |
| if (dt < 0.0) dt = 0.0; | |
| _lastTime = time; | |
| if (!pauseVal) { | |
| double effFPS = (_fps > 0.0 ? _fps : fpsVal); | |
| if (effFPS <= 0.0) effFPS = 30.0; | |
| double frameInterval = 1.0 / effFPS; | |
| if (limitFPSVal) { | |
| if (_nextCaptureTime <= 0.0) { | |
| _nextCaptureTime = time; | |
| } | |
| if (time >= _nextCaptureTime) { | |
| (void)[self _captureSceneAtTime:time context:context]; | |
| _nextCaptureTime += frameInterval; | |
| if (_nextCaptureTime < time) { | |
| _nextCaptureTime = time + frameInterval; | |
| } | |
| } | |
| } else { | |
| // Unlimited mode: capture once per QC tick; PTS mode decides how it plays back. | |
| (void)[self _captureSceneAtTime:time context:context]; | |
| } | |
| } | |
| } | |
| return YES; | |
| } | |
| } | |
| @end |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment