OLD | NEW |
---|---|
(Empty) | |
/**
 * Copyright 2017 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
10 | |
11 #import "RTCFileVideoCapturer.h" | |
12 | |
13 #import "WebRTC/RTCLogging.h" | |
14 | |
@implementation RTCFileVideoCapturer {
  // NOTE(review): _reader is assigned on a background global queue but read
  // from startCapturingFromFileNamed:/stopCapture on the caller's thread with
  // no synchronization — confirm this class is only driven from one thread.
  AVAssetReader *_reader;
  CMTime _lastPresentationTime;          // PTS of the previously emitted sample; paces delivery.
  dispatch_queue_t _frameQueue;          // Serial queue on which frames reach the delegate.
  dispatch_semaphore_t _frameSemaphore;  // Blocks the reader loop until the current frame is out.
}

// Reads video samples from a file in the main bundle and forwards each one to
// the delegate as an RTCVideoFrame, delaying each dispatch by the difference
// between consecutive presentation timestamps to approximate real-time play.
- (void)startCapturingFromFileNamed:(NSString *)nameOfFile {
  if (_reader && _reader.status == AVAssetReaderStatusReading) {
    // Fix: corrected typo ("anoter" -> "another") and use an NSString literal;
    // the %@-style specifiers below show RTCLog expects an NSString format.
    RTCLog(@"Capturer exists and reads another file. Start capture request failed.");
    return;
  }

  // NOTE(review): CMTimeMake(0, 0) has a zero timescale and is an invalid
  // CMTime, so the first frame's computed delay is undefined — consider
  // kCMTimeZero instead. Left unchanged pending confirmation of intent.
  _lastPresentationTime = CMTimeMake(0, 0);
  _frameSemaphore = dispatch_semaphore_create(0);

  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    NSString *pathForFile = [self pathForFileName:nameOfFile];
    if (!pathForFile) {
      RTCLog(@"File %@ not found in bundle", nameOfFile);
      return;
    }

    NSURL *URLForFile = [NSURL fileURLWithPath:pathForFile];
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:URLForFile options:nil];

    NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    if (error) {
      RTCLog(@"File reader failed with error: %@", error);
      return;
    }

    // Decode to NV12 (bi-planar 4:2:0, video range), the pixel format the
    // capture pipeline consumes.
    NSDictionary *options = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
          @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
    };
    AVAssetReaderTrackOutput *outTrack =
        [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject
                                         outputSettings:options];
    [_reader addOutput:outTrack];

    [_reader startReading];
    while (_reader.status == AVAssetReaderStatusReading) {
      // copyNextSampleBuffer follows the CF Create rule: we own the returned
      // buffer and must CFRelease it on every exit path.
      CMSampleBufferRef sampleBuffer = [outTrack copyNextSampleBuffer];
      if (sampleBuffer) {
        if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
            !CMSampleBufferIsValid(sampleBuffer) || !CMSampleBufferDataIsReady(sampleBuffer)) {
          CFRelease(sampleBuffer);  // Fix: buffer was leaked when skipped here.
          continue;
        }

        // Get rule: the image buffer is owned by the sample buffer; we must
        // not release it, and it stays valid while sampleBuffer is retained.
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (pixelBuffer == nil) {
          CFRelease(sampleBuffer);  // Fix: buffer was leaked when skipped here.
          continue;
        }

        CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        Float64 presentationDifference =
            CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
        int64_t presentationDifferenceRound = lroundf(presentationDifference * NSEC_PER_SEC);
        _lastPresentationTime = presentationTime;

        // Dispatch with a delay so frames are delivered at roughly the
        // cadence they were recorded with (real-time playback).
        dispatch_after(
            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound), [self frameQueue], ^{
              int64_t timeStampNs = CACurrentMediaTime() * NSEC_PER_SEC;
              RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
                                                                            rotation:0
                                                                         timeStampNs:timeStampNs];
              // Release only after the frame is built: the sample buffer keeps
              // pixelBuffer alive until this point.
              CFRelease(sampleBuffer);

              [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
              dispatch_semaphore_signal(_frameSemaphore);
            });

        // Block the reader until the frame above has been delivered, so at
        // most one sample is in flight at a time.
        dispatch_semaphore_wait(_frameSemaphore, DISPATCH_TIME_FOREVER);
      }
    }
  });
}

// Stops an in-progress capture. Safe to call when no capture is running
// (messaging nil _reader is a no-op).
- (void)stopCapture {
  [_reader cancelReading];
  _reader = nil;
}

#pragma mark - Private

// Resolves "name.ext" to its full path in the main bundle. Returns nil when
// fileName is not exactly <resource>.<extension> or the resource is absent.
- (nullable NSString *)pathForFileName:(NSString *)fileName {
  NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
  if (nameComponents.count != 2) {
    return nil;
  }

  NSString *path =
      [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
  return path;
}

// Lazily creates the serial frame-delivery queue, targeted at the background
// priority global queue.
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(_frameQueue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
  }
  return _frameQueue;
}

// Fix: the method was misspelled "dealoc", so the runtime never invoked it and
// a capture in progress was never cancelled on destruction. Under ARC, no
// [super dealloc] call is made.
- (void)dealloc {
  [self stopCapture];
}

@end
OLD | NEW |