diff --git a/Examples/Sources/SCRecorderViewController.m b/Examples/Sources/SCRecorderViewController.m index fd936523..613716b7 100644 --- a/Examples/Sources/SCRecorderViewController.m +++ b/Examples/Sources/SCRecorderViewController.m @@ -69,6 +69,8 @@ - (void)viewDidLoad { _recorder = [SCRecorder recorder]; _recorder.sessionPreset = AVCaptureSessionPreset1280x720; _recorder.maxRecordDuration = CMTimeMake(5, 1); + _recorder.videoConfiguration.maxFrameRate = 10; + _recorder.videoConfiguration.timeScale = 0.333; _recorder.delegate = self; _recorder.autoSetVideoOrientation = YES; diff --git a/Library/Sources/SCRecordSession.m b/Library/Sources/SCRecordSession.m index 2ae6c1af..b7cfede4 100644 --- a/Library/Sources/SCRecordSession.m +++ b/Library/Sources/SCRecordSession.m @@ -93,8 +93,10 @@ - (id)init { _timeOffset = kCMTimeZero; _lastTimeVideo = kCMTimeZero; _lastTimeAudio = kCMTimeZero; + _lastAppendedVideo = kCMTimeZero; _currentSegmentDuration = kCMTimeZero; _segmentsDuration = kCMTimeZero; + _sessionBegan = kCMTimeInvalid; _date = [NSDate date]; _recordSegmentsDirectory = SCRecordSessionTemporaryDirectory; _identifier = [NSString stringWithFormat:@"%ld", (long)[_date timeIntervalSince1970]]; @@ -612,6 +614,10 @@ - (BOOL)appendAudioSampleBuffer:(CMSampleBufferRef)audioSampleBuffer { if ([_audioInput isReadyForMoreMediaData]) { CMTime actualBufferTime = CMSampleBufferGetPresentationTimeStamp(audioSampleBuffer); + if (CMTIME_IS_INVALID(_sessionBegan)) { + _sessionBegan = actualBufferTime; + } + if (CMTIME_IS_INVALID(_timeOffset)) { _timeOffset = CMTimeSubtract(actualBufferTime, _currentSegmentDuration); } @@ -644,6 +650,23 @@ - (BOOL)appendVideoSampleBuffer:(CMSampleBufferRef)videoSampleBuffer { if ([_videoInput isReadyForMoreMediaData]) { CMTime actualBufferTime = CMSampleBufferGetPresentationTimeStamp(videoSampleBuffer); + if (CMTIME_IS_INVALID(_sessionBegan)) { + _sessionBegan = actualBufferTime; + } + + if (_videoConfiguration.maxFrameRate > 0) { + CMTime interval = 
CMTimeMake(1, _videoConfiguration.maxFrameRate); + + CMTime offset = CMTimeSubtract(actualBufferTime, _lastAppendedVideo); + if (CMTIME_COMPARE_INLINE(_lastAppendedVideo, ==, kCMTimeZero)) { + offset = CMTimeSubtract(actualBufferTime, _sessionBegan); + } + + if (CMTIME_COMPARE_INLINE(offset, <, interval)) { + return NO; + } + } + if (CMTIME_IS_INVALID(_timeOffset)) { _timeOffset = CMTimeSubtract(actualBufferTime, _currentSegmentDuration); // NSLog(@"Recomputed time offset to: %fs", CMTimeGetSeconds(_timeOffset)); @@ -662,8 +685,7 @@ - (BOOL)appendVideoSampleBuffer:(CMSampleBufferRef)videoSampleBuffer { } } - CMTime bufferTimestamp = CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(videoSampleBuffer), _timeOffset); - + CMTime bufferTimestamp = CMTimeSubtract(actualBufferTime, _timeOffset); if (_videoPixelBufferAdaptor != nil) { CIImage *image = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(videoSampleBuffer)]; @@ -693,6 +715,7 @@ - (BOOL)appendVideoSampleBuffer:(CMSampleBufferRef)videoSampleBuffer { CFRelease(adjustedBuffer); } + _lastAppendedVideo = actualBufferTime; _lastTimeVideo = actualBufferTime; _currentSegmentDuration = bufferTimestamp; diff --git a/Library/Sources/SCRecordSession_Internal.h b/Library/Sources/SCRecordSession_Internal.h index d999185a..6025e443 100644 --- a/Library/Sources/SCRecordSession_Internal.h +++ b/Library/Sources/SCRecordSession_Internal.h @@ -23,6 +23,8 @@ CMTime _timeOffset; CMTime _lastTimeVideo; CMTime _lastTimeAudio; + CMTime _lastAppendedVideo; + CMTime _sessionBegan; SCVideoConfiguration *_videoConfiguration; SCAudioConfiguration *_audioConfiguration; diff --git a/Library/Sources/SCVideoConfiguration.h b/Library/Sources/SCVideoConfiguration.h index d05232c8..018b93d3 100644 --- a/Library/Sources/SCVideoConfiguration.h +++ b/Library/Sources/SCVideoConfiguration.h @@ -43,7 +43,7 @@ @property (copy, nonatomic) NSString *scalingMode; /** - The maximum framerate that this SCRecordSession should handle + The 
maximum input framerate that this SCRecordSession should handle If the camera appends too many frames, they will be dropped. If this property's value is 0, it will use the current video framerate from the camera.