Index: webrtc/sdk/objc/Framework/Classes/RTCFileVideoCapturer.m |
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCFileVideoCapturer.m b/webrtc/sdk/objc/Framework/Classes/RTCFileVideoCapturer.m |
new file mode 100644 |
index 0000000000000000000000000000000000000000..0a3114b138b52f40a8bc7cab46a475468de66a29 |
--- /dev/null |
+++ b/webrtc/sdk/objc/Framework/Classes/RTCFileVideoCapturer.m |
@@ -0,0 +1,128 @@ |
+/** |
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved. |
+ * |
+ * Use of this source code is governed by a BSD-style license |
+ * that can be found in the LICENSE file in the root of the source |
+ * tree. An additional intellectual property rights grant can be found |
+ * in the file PATENTS. All contributing project authors may |
+ * be found in the AUTHORS file in the root of the source tree. |
+ */ |
+ |
+#import "RTCFileVideoCapturer.h" |
+ |
+#import "WebRTC/RTCLogging.h" |
+ |
+@implementation RTCFileVideoCapturer { |
+  // Decodes frames out of the bundled video file. NOTE(review): written on the |
+  // capture dispatch block and read from callers of start/stop — confirm the |
+  // intended synchronization (see thread below). |
+  AVAssetReader *_reader; |
sakal
2017/05/19 11:53:16
This seems to be accessed from multiple threads. I
daniela-webrtc
2017/05/19 13:15:09
This kind of usage with dispatch queues should be
sakal_google.com
2017/05/19 13:20:04
The main thing I am worried about is calling start
|
+  // Presentation timestamp of the most recently read sample; used to pace |
+  // frame delivery to real time. |
+  CMTime _lastPresentationTime; |
+  // Serial queue frames are delivered to the delegate on (lazily created). |
+  dispatch_queue_t _frameQueue; |
+  // Throttles reading so at most one decoded frame is in flight at a time. |
+  dispatch_semaphore_t _frameSemaphore; |
+} |
+ |
+// Starts decoding frames from the named bundle file and delivering them to the |
+// delegate, paced by the file's presentation timestamps. No-op if a read is |
+// already in progress. |
+- (void)startCapturingFromFileNamed:(NSString *)nameOfFile { |
+  if (_reader && _reader.status == AVAssetReaderStatusReading) { |
+    RTCLog(@"Capturer exists and reads another file. Start capture request failed."); |
kthelgason
2017/05/19 12:18:47
typo nit: reads another file.
|
+    return; |
+  } |
+ |
+  // kCMTimeZero is a valid zero time. CMTimeMake(0, 0) has a zero timescale and is |
+  // therefore an invalid CMTime, which would make the first CMTimeGetSeconds() |
+  // difference below evaluate to NaN. |
+  _lastPresentationTime = kCMTimeZero; |
+  _frameSemaphore = dispatch_semaphore_create(0); |
+ |
+  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ |
+    NSString *pathForFile = [self pathForFileName:nameOfFile]; |
+    if (!pathForFile) { |
+      RTCLog(@"File %@ not found in bundle", nameOfFile); |
+      return; |
+    } |
+ |
+    NSURL *URLForFile = [NSURL fileURLWithPath:pathForFile]; |
+    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:URLForFile options:nil]; |
+ |
+    NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo]; |
+    NSError *error = nil; |
+    _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error]; |
+    if (error) { |
+      RTCLog(@"File reader failed with error: %@", error); |
+      return; |
+    } |
+ |
+    // Decode to NV12, the pixel format the capture pipeline expects. |
+    NSDictionary *options = @{ |
+      (NSString *) |
+      kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) |
+    }; |
+    AVAssetReaderTrackOutput *outTrack = |
+        [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject |
+                                         outputSettings:options]; |
+    [_reader addOutput:outTrack]; |
+ |
+    [_reader startReading]; |
+    while (_reader.status == AVAssetReaderStatusReading) { |
+      CMSampleBufferRef sampleBuffer = [outTrack copyNextSampleBuffer]; |
+      if (sampleBuffer) { |
+        if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || |
+            !CMSampleBufferIsValid(sampleBuffer) || !CMSampleBufferDataIsReady(sampleBuffer)) { |
+          // copyNextSampleBuffer follows the CF Create/Copy rule; release before |
+          // skipping so the buffer is not leaked. |
+          CFRelease(sampleBuffer); |
+          continue; |
+        } |
+ |
+        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); |
+        if (pixelBuffer == nil) { |
+          // Same ownership rule applies on this skip path. |
+          CFRelease(sampleBuffer); |
+          continue; |
+        } |
+ |
+        CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); |
+        Float64 presentationDifference = |
+            CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime)); |
+        int64_t presentationDifferenceRound = lroundf(presentationDifference * NSEC_PER_SEC); |
+        _lastPresentationTime = presentationTime; |
+ |
+        // dispatch with delay, we want to achieve a real time play. |
+        dispatch_after( |
+            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound), [self frameQueue], ^{ |
+              int64_t timeStampNs = CACurrentMediaTime() * NSEC_PER_SEC; |
+              RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer |
+                                                                            rotation:0 |
+                                                                         timeStampNs:timeStampNs]; |
+              // Releasing the sample buffer also covers pixelBuffer: |
+              // CMSampleBufferGetImageBuffer returns a non-owning reference. |
+              CFRelease(sampleBuffer); |
kthelgason
2017/05/19 12:18:47
no need to release the pixel buffer?
daniela-webrtc
2017/05/19 13:15:09
No. The documentation for CMSampleBufferGetImageBu
|
+ |
+              [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; |
+              dispatch_semaphore_signal(_frameSemaphore); |
+            }); |
+ |
+        // Block until the frame above is delivered so at most one decoded frame |
+        // is in flight at a time. |
+        dispatch_semaphore_wait(_frameSemaphore, DISPATCH_TIME_FOREVER); |
+      } |
+    } |
+  }); |
+} |
+ |
+- (void)stopCapture { |
+  // Cancelling is safe mid-read; messaging a nil _reader is a no-op. |
+  [_reader cancelReading]; |
+  _reader = nil; |
+} |
+ |
+#pragma mark - Private |
+ |
+// Resolves a bundled resource name (e.g. @"foo.mp4") to an absolute path, or nil |
+// if the file is not present in the main bundle. |
+- (nullable NSString *)pathForFileName:(NSString *)fileName { |
+  // Split on the final extension with the NSString path APIs so names that |
+  // contain extra dots (e.g. @"capture.v2.mp4") still resolve correctly; the |
+  // previous componentsSeparatedByString:@"." approach rejected such names. |
+  NSString *path = |
+      [[NSBundle mainBundle] pathForResource:fileName.stringByDeletingPathExtension |
+                                      ofType:fileName.pathExtension]; |
+  return path; |
+} |
+ |
+// Lazily creates the serial queue frames are delivered on. NOTE(review): the |
+// lazy init is not synchronized — confirm this is only ever reached from the |
+// single capture dispatch block. |
+- (dispatch_queue_t)frameQueue { |
+  if (!_frameQueue) { |
+    _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL); |
+    // Frame delivery is background-priority work. |
+    dispatch_set_target_queue(_frameQueue, |
+                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0)); |
+  } |
+  return _frameQueue; |
+} |
+ |
+// Fixed reviewer-flagged typo: "dealoc" is never invoked by the runtime, so the |
+// reader would never be cancelled on destruction. |
+- (void)dealloc { |
sakal
2017/05/19 11:53:16
typo
|
+  [self stopCapture]; |
+} |
+ |
+@end |