OLD | NEW |
(Empty) | |
| 1 /** |
| 2 * Copyright 2017 The WebRTC Project Authors. All rights reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #import "RTCFileVideoCapturer.h" |
| 12 |
| 13 #import "WebRTC/RTCLogging.h" |
| 14 |
@implementation RTCFileVideoCapturer {
  AVAssetReader *_reader;
  AVAssetReaderTrackOutput *_outTrack;
  BOOL _capturerStopped;
  CMTime _lastPresentationTime;
  dispatch_queue_t _frameQueue;
}

/** Starts asynchronously reading video frames from a bundled file and
 *  delivering them to the delegate, paced by each frame's presentation time.
 *
 *  @param nameOfFile File name including its extension (e.g. @"foo.mp4");
 *         the file must be present in the main bundle.
 */
- (void)startCapturingFromFileNamed:(NSString *)nameOfFile {
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    if (_reader && _reader.status == AVAssetReaderStatusReading) {
      // Fix: these three log strings were plain C string literals; RTCLog
      // expects an NSString format, so they must be @"..." literals.
      RTCLog(@"Capturer exists and reads another file. Start capture request failed.");
      return;
    }
    NSString *pathForFile = [self pathForFileName:nameOfFile];
    if (!pathForFile) {
      RTCLog(@"File %@ not found in bundle", nameOfFile);
      return;
    }

    // Reset the stop flag so the capturer can be restarted after stopCapture.
    _capturerStopped = NO;
    // kCMTimeZero is a valid zero time. CMTimeMake(0, 0) has an invalid
    // timescale, which made the first frame's pacing interval NaN.
    _lastPresentationTime = kCMTimeZero;

    NSURL *URLForFile = [NSURL fileURLWithPath:pathForFile];
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:URLForFile options:nil];

    NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    // Guard the track: initWithTrack: throws on nil.
    AVAssetTrack *videoTrack = allTracks.firstObject;
    if (!videoTrack) {
      RTCLog(@"File %@ contains no video track.", nameOfFile);
      return;
    }

    NSError *error = nil;
    _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    // Check the nil return, not the error pointer (Cocoa convention: the
    // error out-param is only meaningful on failure).
    if (!_reader) {
      RTCLog(@"File reader failed with error: %@", error);
      return;
    }

    NSDictionary *options = @{
      (NSString *)
      kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    _outTrack = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack
                                                 outputSettings:options];
    [_reader addOutput:_outTrack];

    [_reader startReading];
    RTCLog(@"File capturer started reading");
    [self readNextBuffer];
  });
}

/** Stops frame delivery. The reader is cancelled and released on the next
 *  read attempt.
 */
- (void)stopCapture {
  _capturerStopped = YES;
  RTCLog(@"File capturer stopped.");
}

#pragma mark - Private

/** Resolves a "name.extension" file name to its full path in the main bundle.
 *
 *  @param fileName File name with exactly one "." separating name and extension.
 *  @return The bundle path, or nil if the name is malformed or the file is
 *          not in the bundle.
 */
- (nullable NSString *)pathForFileName:(NSString *)fileName {
  NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
  if (nameComponents.count != 2) {
    return nil;
  }

  NSString *path =
      [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
  return path;
}

/** Lazily created serial queue on which the frame-pacing timers fire,
 *  targeted at the background global queue.
 */
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
  }
  return _frameQueue;
}

/** Pulls the next sample buffer from the reader and hands it to
 *  publishSampleBuffer:, skipping unusable buffers and tearing the reader
 *  down once reading stops or the capturer is stopped.
 */
- (void)readNextBuffer {
  if (_reader.status != AVAssetReaderStatusReading || _capturerStopped) {
    [_reader cancelReading];
    _reader = nil;
    return;
  }

  CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
  if (!sampleBuffer) {
    // nil means the reader finished or failed; the status check above
    // terminates the recursion on the next call.
    [self readNextBuffer];
    return;
  }
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    // Fix: copyNextSampleBuffer transfers ownership (Create/Copy rule); the
    // buffer must be released before skipping it, or it leaks.
    CFRelease(sampleBuffer);
    [self readNextBuffer];
    return;
  }

  [self publishSampleBuffer:sampleBuffer];
}

/** Delivers |sampleBuffer| to the delegate after waiting out the gap between
 *  its presentation time and the previous frame's, then schedules the next
 *  read. Takes ownership of |sampleBuffer| and releases it when done.
 */
- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
  CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  Float64 presentationDifference =
      CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
  _lastPresentationTime = presentationTime;
  // Fix: llround keeps double precision. lroundf rounded through float,
  // whose 24-bit mantissa cannot represent nanosecond-scale values.
  int64_t presentationDifferenceRound = llround(presentationDifference * NSEC_PER_SEC);

  __block dispatch_source_t timer = [self createStrictTimer];
  // Strict timer that will fire |presentationDifferenceRound| ns from now and never again.
  dispatch_source_set_timer(timer,
                            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
                            DISPATCH_TIME_FOREVER,
                            0);
  dispatch_source_set_event_handler(timer, ^{
    dispatch_source_cancel(timer);
    timer = nil;

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!pixelBuffer) {
      CFRelease(sampleBuffer);
      dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self readNextBuffer];
      });
      return;
    }

    NSTimeInterval timeStampSeconds = CACurrentMediaTime();
    // Fix: llround (not lroundf) — CACurrentMediaTime() * NSEC_PER_SEC is
    // ~1e13; rounding through float lost millisecond-level precision.
    int64_t timeStampNs = llround(timeStampSeconds * NSEC_PER_SEC);
    RTCVideoFrame *videoFrame =
        [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer rotation:0 timeStampNs:timeStampNs];
    CFRelease(sampleBuffer);

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
      [self readNextBuffer];
    });

    [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
  });
  dispatch_activate(timer);
}

/** Creates an inactive, strict (non-coalescing) one-shot timer source on the
 *  frame queue; the caller configures and activates it.
 */
- (dispatch_source_t)createStrictTimer {
  dispatch_source_t timer = dispatch_source_create(
      DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
  return timer;
}

- (void)dealloc {
  [self stopCapture];
}

@end
OLD | NEW |