Index: webrtc/sdk/objc/Framework/Classes/RTCFileVideoCapturer.m
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCFileVideoCapturer.m b/webrtc/sdk/objc/Framework/Classes/RTCFileVideoCapturer.m
new file mode 100644
index 0000000000000000000000000000000000000000..fe0d33745290e4ff3f84bebf21c01ab925e6bfc9
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCFileVideoCapturer.m
@@ -0,0 +1,131 @@
+/**
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCFileVideoCapturer.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <QuartzCore/QuartzCore.h>
+
+#import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCVideoFrame.h"
+
+@implementation RTCFileVideoCapturer {
+  AVAssetReader *_reader;
+  CMTime _lastPresentationTime;
+  dispatch_queue_t _frameQueue;
+  dispatch_semaphore_t _frameSemaphore;
+  BOOL _capturerStopped;
sakal
2017/05/24 08:34:09
I think this should be a property declared as atomic.
daniela-webrtc
2017/06/01 13:08:57
Not necessarily. The worst thing that can happen is…
+}
+
+- (void)startCapturingFromFileNamed:(NSString *)nameOfFile {
+  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+    if (_reader && _reader.status == AVAssetReaderStatusReading) {
+      RTCLog(@"Capturer exists and is already reading another file. Start capture request failed.");
magjed_webrtc
2017/05/29 13:14:25
nit: anoth6er
daniela-webrtc
2017/06/01 13:08:57
Done.
+      return;
+    }
+    NSString *pathForFile = [self pathForFileName:nameOfFile];
+    if (!pathForFile) {
+      RTCLog(@"File %@ not found in bundle.", nameOfFile);
+      return;
+    }
+
+    _lastPresentationTime = kCMTimeZero;  // CMTimeMake(0, 0) would be an invalid CMTime (timescale 0).
+    _frameSemaphore = dispatch_semaphore_create(0);
+
+    NSURL *URLForFile = [NSURL fileURLWithPath:pathForFile];
+    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:URLForFile options:nil];
+
+    NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
+    NSError *error = nil;
+    _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
+    if (error) {
+      RTCLog(@"File reader failed with error: %@", error);
+      return;
+    }
+
+    NSDictionary *options = @{
+      (NSString *)
+      kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
magjed_webrtc
2017/05/29 13:14:25
Is it possible to use FullRange?
daniela-webrtc
2017/06/01 13:08:57
Yes. I've changed it
+    };
+    AVAssetReaderTrackOutput *outTrack =
+        [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject
+                                         outputSettings:options];
+    [_reader addOutput:outTrack];
+
+    [_reader startReading];
+    RTCLog(@"File capturer started reading.");
+    while (_reader.status == AVAssetReaderStatusReading && !_capturerStopped) {
+      CMSampleBufferRef sampleBuffer = [outTrack copyNextSampleBuffer];
+      if (sampleBuffer) {
+        if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
+            !CMSampleBufferIsValid(sampleBuffer) || !CMSampleBufferDataIsReady(sampleBuffer)) {
+          CFRelease(sampleBuffer);  // copyNextSampleBuffer returns a +1 reference; avoid leaking it.
+          continue;
+        }
+
+        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+        if (pixelBuffer == nil) {
+          CFRelease(sampleBuffer);
+          continue;
+        }
+
+        CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+        Float64 presentationDifference =
+            CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
+        int64_t presentationDifferenceRound = lround(presentationDifference * NSEC_PER_SEC);
+        _lastPresentationTime = presentationTime;
+
+        // Dispatch with a delay so the frames play back in real time.
magjed_webrtc
2017/05/29 13:14:25
nit: Begin sentence with capital letter.
+        dispatch_after(
+            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound), [self frameQueue], ^{
+              int64_t timeStampNs = CACurrentMediaTime() * NSEC_PER_SEC;
+              RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
+                                                                            rotation:0
+                                                                         timeStampNs:timeStampNs];
+              CFRelease(sampleBuffer);
+
+              [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+              dispatch_semaphore_signal(_frameSemaphore);
magjed_webrtc
2017/05/29 13:14:25
I would like to only use one frame queue and avoid…
daniela-webrtc
2017/06/01 13:08:57
I've removed the semaphore and the blocking and no…
magjed_webrtc
2017/06/01 13:44:37
Sure, we can have a separate frameQueue to publish…
+            });
+
+        dispatch_semaphore_wait(_frameSemaphore, DISPATCH_TIME_FOREVER);
+      }
+    }
+    [_reader cancelReading];
+    _reader = nil;
+  });
+}
+
+- (void)stopCapture {
+  _capturerStopped = YES;
+  RTCLog(@"File capturer stopped.");
+}
+
+#pragma mark - Private
+
+- (nullable NSString *)pathForFileName:(NSString *)fileName {
+  NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
+  if (nameComponents.count != 2) {
+    return nil;
+  }
+
+  NSString *path =
+      [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
+  return path;
+}
+
+- (dispatch_queue_t)frameQueue {
+  if (!_frameQueue) {
+    _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL);
+    dispatch_set_target_queue(_frameQueue,
+                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
+  }
+  return _frameQueue;
+}
+
+- (void)dealloc {
+  [self stopCapture];
+}
+
+@end