@@ -180,10 +180,18 @@ @interface FLTCam : NSObject <FlutterTexture,
180180@property (strong , nonatomic ) AVCaptureVideoDataOutput *videoOutput;
181181@property (strong , nonatomic ) AVCaptureAudioDataOutput *audioOutput;
182182@property (assign , nonatomic ) BOOL isRecording;
183+ @property (assign , nonatomic ) BOOL isRecordingPaused;
184+ @property (assign , nonatomic ) BOOL videoIsDisconnected;
185+ @property (assign , nonatomic ) BOOL audioIsDisconnected;
183186@property (assign , nonatomic ) BOOL isAudioSetup;
184187@property (assign , nonatomic ) BOOL isStreamingImages;
185188@property (assign , nonatomic ) ResolutionPreset resolutionPreset;
189+ @property (assign , nonatomic ) CMTime lastVideoSampleTime;
190+ @property (assign , nonatomic ) CMTime lastAudioSampleTime;
191+ @property (assign , nonatomic ) CMTime videoTimeOffset;
192+ @property (assign , nonatomic ) CMTime audioTimeOffset;
186193@property (nonatomic ) CMMotionManager *motionManager;
194+ @property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
187195- (instancetype )initWithCameraName : (NSString *)cameraName
188196 resolutionPreset : (NSString *)resolutionPreset
189197 enableAudio : (BOOL )enableAudio
@@ -417,25 +425,89 @@ - (void)captureOutput:(AVCaptureOutput *)output
417425 CVPixelBufferUnlockBaseAddress (pixelBuffer, kCVPixelBufferLock_ReadOnly );
418426 }
419427 }
420- if (_isRecording) {
428+ if (_isRecording && !_isRecordingPaused ) {
421429 if (_videoWriter.status == AVAssetWriterStatusFailed) {
422430 _eventSink (@{
423431 @" event" : @" error" ,
424432 @" errorDescription" : [NSString stringWithFormat: @" %@ " , _videoWriter.error]
425433 });
426434 return ;
427435 }
428- CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp (sampleBuffer);
436+
437+ CFRetain (sampleBuffer);
438+ CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp (sampleBuffer);
439+
429440 if (_videoWriter.status != AVAssetWriterStatusWriting) {
430441 [_videoWriter startWriting ];
431- [_videoWriter startSessionAtSourceTime: lastSampleTime ];
442+ [_videoWriter startSessionAtSourceTime: currentSampleTime ];
432443 }
444+
433445 if (output == _captureVideoOutput) {
434- [self newVideoSample: sampleBuffer];
435- } else if (output == _audioOutput) {
446+ if (_videoIsDisconnected) {
447+ _videoIsDisconnected = NO ;
448+
449+ if (_videoTimeOffset.value == 0 ) {
450+ _videoTimeOffset = CMTimeSubtract (currentSampleTime, _lastVideoSampleTime);
451+ } else {
452+ CMTime offset = CMTimeSubtract (currentSampleTime, _lastVideoSampleTime);
453+ _videoTimeOffset = CMTimeAdd (_videoTimeOffset, offset);
454+ }
455+
456+ return ;
457+ }
458+
459+ _lastVideoSampleTime = currentSampleTime;
460+
461+ CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer (sampleBuffer);
462+ CMTime nextSampleTime = CMTimeSubtract (_lastVideoSampleTime, _videoTimeOffset);
463+ [_videoAdaptor appendPixelBuffer: nextBuffer withPresentationTime: nextSampleTime];
464+ } else {
465+ CMTime dur = CMSampleBufferGetDuration (sampleBuffer);
466+
467+ if (dur.value > 0 ) {
468+ currentSampleTime = CMTimeAdd (currentSampleTime, dur);
469+ }
470+
471+ if (_audioIsDisconnected) {
472+ _audioIsDisconnected = NO ;
473+
474+ if (_audioTimeOffset.value == 0 ) {
475+ _audioTimeOffset = CMTimeSubtract (currentSampleTime, _lastAudioSampleTime);
476+ } else {
477+ CMTime offset = CMTimeSubtract (currentSampleTime, _lastAudioSampleTime);
478+ _audioTimeOffset = CMTimeAdd (_audioTimeOffset, offset);
479+ }
480+
481+ return ;
482+ }
483+
484+ _lastAudioSampleTime = currentSampleTime;
485+
486+ if (_audioTimeOffset.value != 0 ) {
487+ CFRelease (sampleBuffer);
488+ sampleBuffer = [self adjustTime: sampleBuffer by: _audioTimeOffset];
489+ }
490+
436491 [self newAudioSample: sampleBuffer];
437492 }
493+
494+ CFRelease (sampleBuffer);
495+ }
496+ }
497+
/// Returns a copy of `sample` with every timing entry shifted earlier by
/// `offset`. Used by the capture callback to splice the paused interval out of
/// the audio track so its timestamps stay contiguous with the writer session.
///
/// Ownership: the returned buffer follows the Create rule (+1); the caller is
/// responsible for releasing it. On any CoreMedia/allocation failure the
/// original `sample` is retained and returned instead, so the caller's
/// balancing CFRelease remains correct either way.
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
  CMItemCount count = 0;
  // First call with 0 entries only queries how many timing infos exist.
  OSStatus status = CMSampleBufferGetSampleTimingInfoArray(sample, 0, NULL, &count);
  if (status != noErr || count <= 0) {
    CFRetain(sample);
    return sample;
  }

  CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
  if (pInfo == NULL) {
    CFRetain(sample);
    return sample;
  }

  status = CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
  if (status != noErr) {
    free(pInfo);
    CFRetain(sample);
    return sample;
  }

  // Shift both decode and presentation timestamps back by the accumulated
  // pause offset. CMTimeSubtract on an invalid (e.g. absent decode) timestamp
  // yields an invalid time, which the copy below accepts.
  for (CMItemCount i = 0; i < count; i++) {
    pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
    pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
  }

  // BUG FIX: `sout` was previously left uninitialized and the OSStatus was
  // ignored; if CMSampleBufferCreateCopyWithNewTiming failed, a garbage
  // pointer was returned and later CFRelease'd by the caller.
  CMSampleBufferRef sout = NULL;
  status = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sample, count, pInfo, &sout);
  free(pInfo);
  if (status != noErr || sout == NULL) {
    CFRetain(sample);
    return sample;
  }
  return sout;
}
440512
441513- (void )newVideoSample : (CMSampleBufferRef)sampleBuffer {
@@ -526,6 +598,11 @@ - (void)startVideoRecordingAtPath:(NSString *)path result:(FlutterResult)result
526598 return ;
527599 }
528600 _isRecording = YES ;
601+ _isRecordingPaused = NO ;
602+ _videoTimeOffset = CMTimeMake (0 , 1 );
603+ _audioTimeOffset = CMTimeMake (0 , 1 );
604+ _videoIsDisconnected = NO ;
605+ _audioIsDisconnected = NO ;
529606 result (nil );
530607 } else {
531608 _eventSink (@{@" event" : @" error" , @" errorDescription" : @" Video is already recording!" });
@@ -556,6 +633,16 @@ - (void)stopVideoRecordingWithResult:(FlutterResult)result {
556633 }
557634}
558635
/// Pauses an in-progress recording (invoked via the "pauseVideoRecording"
/// method-channel call). The capture session keeps delivering frames, but the
/// capture callback drops them while `_isRecordingPaused` is set. The two
/// disconnect flags tell the callback to fold the pause duration into the
/// per-track time offsets on the first video/audio sample after resume, so
/// the written file's timeline contains no gap.
/// NOTE(review): these flags are read from the capture-output queue; the
/// assignment order here is assumed significant for the concurrent reader —
/// confirm before reordering or restructuring.
- (void)pauseVideoRecording {
  _isRecordingPaused = YES;
  _videoIsDisconnected = YES;
  _audioIsDisconnected = YES;
}
641+
/// Resumes a paused recording (invoked via the "resumeVideoRecording"
/// method-channel call). Clearing the pause flag lets the capture callback
/// accept samples again; the disconnect flags set by -pauseVideoRecording are
/// still YES at this point, so the first accepted sample on each track
/// recomputes that track's time offset to hide the paused interval.
- (void)resumeVideoRecording {
  _isRecordingPaused = NO;
}
645+
559646- (void )startImageStreamWithMessenger : (NSObject <FlutterBinaryMessenger> *)messenger {
560647 if (!_isStreamingImages) {
561648 FlutterEventChannel *eventChannel =
@@ -608,6 +695,13 @@ - (BOOL)setupWriterForPath:(NSString *)path {
608695 nil ];
609696 _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeVideo
610697 outputSettings: videoSettings];
698+
699+ _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
700+ assetWriterInputPixelBufferAdaptorWithAssetWriterInput: _videoWriterInput
701+ sourcePixelBufferAttributes: @{
702+ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)
703+ }];
704+
611705 NSParameterAssert (_videoWriterInput);
612706 _videoWriterInput.expectsMediaDataInRealTime = YES ;
613707
@@ -777,6 +871,12 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
777871 } else if ([@" stopImageStream" isEqualToString: call.method]) {
778872 [_camera stopImageStream ];
779873 result (nil );
874+ } else if ([@" pauseVideoRecording" isEqualToString: call.method]) {
875+ [_camera pauseVideoRecording ];
876+ result (nil );
877+ } else if ([@" resumeVideoRecording" isEqualToString: call.method]) {
878+ [_camera resumeVideoRecording ];
879+ result (nil );
780880 } else {
781881 NSDictionary *argsMap = call.arguments ;
782882 NSUInteger textureId = ((NSNumber *)argsMap[@" textureId" ]).unsignedIntegerValue ;
0 commit comments