OLD | NEW |
---|---|
(Empty) | |
/**
 * Copyright 2017 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
10 | |
11 #import "RTCFileVideoCapturer.h" | |
12 | |
13 #import "WebRTC/RTCLogging.h" | |
14 | |
@implementation RTCFileVideoCapturer {
  AVAssetReader *_reader;             // Reads frames out of the bundled video file.
  AVAssetReaderTrackOutput *_outTrack;  // Video-track output attached to _reader.
  BOOL _capturerStopped;              // Set by -stopCapture; read by the frame loop.
  CMTime _lastPresentationTime;       // Presentation time of the previously published frame.
  dispatch_queue_t _frameQueue;       // Lazily-created serial queue for frame timers.
}
22 | |
/// Starts reading frames from the named bundle resource on a background queue.
/// No-op (with a log) if a capture session is already reading, or if the file
/// cannot be found in the main bundle.
- (void)startCapturingFromFileNamed:(NSString *)nameOfFile {
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    if (_reader && _reader.status == AVAssetReaderStatusReading) {
      // Fixed typo ("anoth6er") and use NSString literals: RTCLog takes an
      // NSString format, as every other call site in this file does.
      RTCLog(@"Capturer exists and reads another file. Start capture request failed.");
      return;
    }
    NSString *pathForFile = [self pathForFileName:nameOfFile];
    if (!pathForFile) {
      RTCLog(@"File %@ not found in bundle", nameOfFile);
      return;
    }

    // Allow restarting after a previous -stopCapture; otherwise the stale flag
    // would make -readNextBuffer bail out immediately.
    _capturerStopped = NO;
    // kCMTimeZero, not CMTimeMake(0, 0): a zero timescale is an invalid
    // CMTime, and CMTimeGetSeconds() on arithmetic with it returns NaN for
    // the very first frame interval.
    _lastPresentationTime = kCMTimeZero;

    NSURL *URLForFile = [NSURL fileURLWithPath:pathForFile];
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:URLForFile options:nil];

    NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if (allTracks.count == 0) {
      // Guard: initWithTrack:nil below would otherwise be fed a nil track.
      RTCLog(@"File %@ contains no video tracks.", nameOfFile);
      return;
    }

    NSError *error = nil;
    _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    // Cocoa convention: failure is signaled by a nil return, not by the error
    // out-parameter, which may be written even on success.
    if (!_reader) {
      RTCLog(@"File reader failed with error: %@", error);
      return;
    }

    NSDictionary *options = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
          @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    _outTrack = [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject
                                                 outputSettings:options];
    [_reader addOutput:_outTrack];

    [_reader startReading];
    RTCLog(@"File capturer started reading");
    [self readNextBuffer];
  });
}
61 | |
/// Signals the read loop to stop. The reader itself is cancelled and released
/// on the next pass through -readNextBuffer, which checks this flag.
- (void)stopCapture {
  _capturerStopped = YES;
  RTCLog(@"File capturer stopped.");
}
66 | |
67 #pragma mark - Private | |
68 | |
/// Returns the main-bundle path for |fileName| ("name.ext"), or nil if the
/// name has no extension or the resource is not in the bundle.
///
/// Uses NSString's path APIs instead of splitting on "." so that filenames
/// containing extra dots (e.g. "clip.720p.mp4") resolve correctly; the old
/// componentsSeparatedByString: approach rejected anything without exactly
/// one dot.
- (nullable NSString *)pathForFileName:(NSString *)fileName {
  NSString *name = fileName.stringByDeletingPathExtension;
  NSString *extension = fileName.pathExtension;
  if (name.length == 0 || extension.length == 0) {
    return nil;
  }
  return [[NSBundle mainBundle] pathForResource:name ofType:extension];
}
79 | |
/// Lazily creates the serial queue on which frame timers fire. The queue is
/// retargeted at the background-priority global queue so frame delivery never
/// competes with higher-priority work.
- (dispatch_queue_t)frameQueue {
  if (_frameQueue == nil) {
    dispatch_queue_t queue =
        dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(queue,
                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
    _frameQueue = queue;
  }
  return _frameQueue;
}
88 | |
/// Pulls the next sample buffer from the track output and hands it to
/// -publishSampleBuffer:. Tears the reader down when reading has finished or
/// the capturer was stopped.
- (void)readNextBuffer {
  if (_reader.status != AVAssetReaderStatusReading || _capturerStopped) {
    [_reader cancelReading];
    _reader = nil;
    return;
  }

  CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
  if (!sampleBuffer) {
    // End of track or read failure; recurse so the status check above
    // performs the teardown (copyNextSampleBuffer returning NULL moves the
    // reader out of the Reading state, so this terminates).
    [self readNextBuffer];
    return;
  }
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    // copyNextSampleBuffer follows the Create/Copy rule, so we own this
    // buffer; release it before skipping (previously leaked on this path).
    CFRelease(sampleBuffer);
    [self readNextBuffer];
    return;
  }

  // Ownership of sampleBuffer transfers to -publishSampleBuffer:, which
  // releases it once the frame has been constructed.
  [self publishSampleBuffer:sampleBuffer];
}
109 | |
/// Schedules |sampleBuffer| (which this method takes ownership of and
/// releases) for delivery to the delegate after the inter-frame interval
/// derived from its presentation timestamp, then kicks off the next read.
- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
  CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  Float64 presentationDifference =
      CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
  _lastPresentationTime = presentationTime;
  // llround, not lroundf: the operand is a double (Float64), and the
  // nanosecond product exceeds the range of `long` on 32-bit platforms.
  int64_t presentationDifferenceRound = llround(presentationDifference * NSEC_PER_SEC);

  __block dispatch_source_t timer = [self createStrictTimer];
  // Strict timer that will fire |presentationDifferenceRound| ns from now and never again.
  dispatch_source_set_timer(timer,
                            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
                            DISPATCH_TIME_FOREVER,
                            0);
  dispatch_source_set_event_handler(timer, ^{
    dispatch_source_cancel(timer);
    timer = nil;

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!pixelBuffer) {
      CFRelease(sampleBuffer);
      dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self readNextBuffer];
      });
      // Must return: falling through would construct a frame from a nil
      // pixel buffer and release sampleBuffer a second time.
      return;
    }

    NSTimeInterval timeStampSeconds = CACurrentMediaTime();
    // Round instead of truncating when converting seconds (double) to ns.
    int64_t timeStampNs = llround(timeStampSeconds * NSEC_PER_SEC);
    RTCVideoFrame *videoFrame =
        [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer rotation:0 timeStampNs:timeStampNs];
    // The frame retains the pixel buffer, so the sample buffer can go now.
    CFRelease(sampleBuffer);

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
      [self readNextBuffer];
    });

    [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
  });
  dispatch_activate(timer);
}
149 | |
/// Creates an inactive one-shot timer source on the frame queue.
/// DISPATCH_TIMER_STRICT opts out of system timer coalescing so frames fire
/// as close to their presentation times as possible.
- (dispatch_source_t)createStrictTimer {
  return dispatch_source_create(
      DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
}
155 | |
- (void)dealloc {
  // Ensure the asynchronous read loop stops once the capturer goes away.
  [self stopCapture];
}
159 | |
160 @end | |
OLD | NEW |