Chromium Code Reviews

Side by Side Diff: webrtc/modules/video_processing/test/video_processing_unittest.cc

Issue 2278883002: Move MutableDataY{,U,V} methods to I420Buffer only. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Update android capture and decoder code. Created 4 years, 3 months ago
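For orientation before the diff: this CL removes the MutableDataY/U/V accessors from the buffer handle exposed by VideoFrame, so test code that used to memset the planes of a VideoFrame now allocates a writable I420Buffer, initializes it, and hands it to the VideoFrame constructor. A minimal sketch of that pattern follows, assuming only the APIs visible in the diff below (I420Buffer::Create, InitializeData, MutableDataY/U/V on I420Buffer, and the VideoFrame(buffer, timestamp, render_time_ms, rotation) constructor); the header paths are a guess for the WebRTC tree of this era and the helper names are illustrative, not part of the CL:

```cpp
// Illustrative sketch only; not part of this CL. Header paths are assumed
// for the 2016-era WebRTC tree.
#include "webrtc/common_video/include/video_frame_buffer.h"  // I420Buffer
#include "webrtc/video_frame.h"                              // VideoFrame

namespace {

// Allocate a zero-initialized I420 buffer. After this CL, all writable plane
// access goes through I420Buffer, not through the VideoFrame that owns it.
rtc::scoped_refptr<webrtc::I420Buffer> MakeBlankBuffer(int width, int height) {
  const int half_width = (width + 1) / 2;
  rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
      width, height, /*stride_y=*/width, /*stride_u=*/half_width,
      /*stride_v=*/half_width);
  buffer->InitializeData();  // Zero the planes so later reads are defined.
  return buffer;
}

// Wrap the buffer in a frame; the frame exposes the buffer read-only.
webrtc::VideoFrame MakeBlankFrame(int width, int height, int64_t render_ms) {
  return webrtc::VideoFrame(MakeBlankBuffer(width, height),
                            static_cast<uint32_t>(render_ms * 90),  // RTP ts.
                            render_ms, webrtc::kVideoRotation_0);
}

}  // namespace
```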
1 /* 1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #include "webrtc/modules/video_processing/test/video_processing_unittest.h" 11 #include "webrtc/modules/video_processing/test/video_processing_unittest.h"
12 12
13 #include <gflags/gflags.h> 13 #include <gflags/gflags.h>
14 14
15 #include <memory> 15 #include <memory>
16 #include <string> 16 #include <string>
17 17
18 #include "webrtc/base/keep_ref_until_done.h"
18 #include "webrtc/base/timeutils.h" 19 #include "webrtc/base/timeutils.h"
19 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" 20 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
20 #include "webrtc/test/testsupport/fileutils.h" 21 #include "webrtc/test/testsupport/fileutils.h"
21 22
22 namespace webrtc { 23 namespace webrtc {
23 24
24 namespace { 25 namespace {
25 26
26 // Define command line flag 'gen_files' (default value: false). 27 // Define command line flag 'gen_files' (default value: false).
27 DEFINE_bool(gen_files, false, "Output files for visual inspection."); 28 DEFINE_bool(gen_files, false, "Output files for visual inspection.");
28 29
29 } // namespace 30 } // namespace
30 31
31 static void PreprocessFrameAndVerify(const VideoFrame& source, 32 static void PreprocessFrameAndVerify(const VideoFrame& source,
32 int target_width, 33 int target_width,
33 int target_height, 34 int target_height,
34 VideoProcessing* vpm, 35 VideoProcessing* vpm,
35 const VideoFrame* out_frame); 36 const VideoFrame* out_frame);
36 static void CropFrame(const uint8_t* source_data, 37 rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
37 int source_width, 38 const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
38 int source_height, 39 int source_width,
39 int offset_x, 40 int source_height,
40 int offset_y, 41 int offset_x,
41 int cropped_width, 42 int offset_y,
42 int cropped_height, 43 int cropped_width,
43 VideoFrame* cropped_frame); 44 int cropped_height);
44 // The |source_data| is cropped and scaled to |target_width| x |target_height|, 45 // The |source_data| is cropped and scaled to |target_width| x |target_height|,
45 // and then scaled back to the expected cropped size. |expected_psnr| is used to 46 // and then scaled back to the expected cropped size. |expected_psnr| is used to
46 // verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR 47 // verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR
47 // verified under the same conditions. 48 // verified under the same conditions.
48 static void TestSize(const VideoFrame& source_frame, 49 static void TestSize(
49 const VideoFrame& cropped_source_frame, 50 const VideoFrame& source_frame,
50 int target_width, 51 const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
51 int target_height, 52 int target_width,
52 double expected_psnr, 53 int target_height,
53 VideoProcessing* vpm); 54 double expected_psnr,
55 VideoProcessing* vpm);
54 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source, 56 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
55 const VideoFrame& processed); 57 const VideoFrame& processed);
56 58
57 VideoProcessingTest::VideoProcessingTest() 59 VideoProcessingTest::VideoProcessingTest()
58 : vp_(NULL), 60 : vp_(NULL),
59 source_file_(NULL), 61 source_file_(NULL),
60 width_(352), 62 width_(352),
61 half_width_((width_ + 1) / 2), 63 half_width_((width_ + 1) / 2),
62 height_(288), 64 height_(288),
63 size_y_(width_ * height_), 65 size_y_(width_ * height_),
64 size_uv_(half_width_ * ((height_ + 1) / 2)), 66 size_uv_(half_width_ * ((height_ + 1) / 2)),
65 frame_length_(CalcBufferSize(kI420, width_, height_)) {} 67 frame_length_(CalcBufferSize(kI420, width_, height_)) {}
66 68
67 void VideoProcessingTest::SetUp() { 69 void VideoProcessingTest::SetUp() {
68 vp_ = VideoProcessing::Create(); 70 vp_ = VideoProcessing::Create();
69 ASSERT_TRUE(vp_ != NULL); 71 ASSERT_TRUE(vp_ != NULL);
70 72
71 video_frame_.CreateEmptyFrame(width_, height_, width_, 73 rtc::scoped_refptr<webrtc::I420Buffer> buffer =
72 half_width_, half_width_); 74 I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
75
73 // Clear video frame so DrMemory/Valgrind will allow reads of the buffer. 76 // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
74 memset(video_frame_.video_frame_buffer()->MutableDataY(), 0, 77 buffer->InitializeData();
75 video_frame_.allocated_size(kYPlane)); 78
76 memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
77 video_frame_.allocated_size(kUPlane));
78 memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
79 video_frame_.allocated_size(kVPlane));
80 const std::string video_file = 79 const std::string video_file =
81 webrtc::test::ResourcePath("foreman_cif", "yuv"); 80 webrtc::test::ResourcePath("foreman_cif", "yuv");
82 source_file_ = fopen(video_file.c_str(), "rb"); 81 source_file_ = fopen(video_file.c_str(), "rb");
83 ASSERT_TRUE(source_file_ != NULL) 82 ASSERT_TRUE(source_file_ != NULL)
84 << "Cannot read source file: " + video_file + "\n"; 83 << "Cannot read source file: " + video_file + "\n";
85 } 84 }
86 85
87 void VideoProcessingTest::TearDown() { 86 void VideoProcessingTest::TearDown() {
88 if (source_file_ != NULL) { 87 if (source_file_ != NULL) {
89 ASSERT_EQ(0, fclose(source_file_)); 88 ASSERT_EQ(0, fclose(source_file_));
(...skipping 12 matching lines...)
102 vp_->EnableTemporalDecimation(false); 101 vp_->EnableTemporalDecimation(false);
103 int resolution = 100; 102 int resolution = 100;
104 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); 103 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15));
105 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); 104 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30));
106 // Disable spatial sampling. 105 // Disable spatial sampling.
107 vp_->SetInputFrameResampleMode(kNoRescaling); 106 vp_->SetInputFrameResampleMode(kNoRescaling);
108 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); 107 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30));
109 VideoFrame* out_frame = NULL; 108 VideoFrame* out_frame = NULL;
110 // Set rescaling => output frame != NULL. 109 // Set rescaling => output frame != NULL.
111 vp_->SetInputFrameResampleMode(kFastRescaling); 110 vp_->SetInputFrameResampleMode(kFastRescaling);
112 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_, 111
113 out_frame); 112 rtc::scoped_refptr<webrtc::I420Buffer> buffer =
113 I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
114
115 // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
116 buffer->InitializeData();
117 VideoFrame video_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
118
119 PreprocessFrameAndVerify(video_frame, resolution, resolution, vp_, out_frame);
114 // No rescaling=> output frame = NULL. 120 // No rescaling=> output frame = NULL.
115 vp_->SetInputFrameResampleMode(kNoRescaling); 121 vp_->SetInputFrameResampleMode(kNoRescaling);
116 EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr); 122 EXPECT_TRUE(vp_->PreprocessFrame(video_frame) != nullptr);
117 } 123 }
118 124
119 #if defined(WEBRTC_IOS) 125 #if defined(WEBRTC_IOS)
120 TEST_F(VideoProcessingTest, DISABLED_Resampler) { 126 TEST_F(VideoProcessingTest, DISABLED_Resampler) {
121 #else 127 #else
122 TEST_F(VideoProcessingTest, Resampler) { 128 TEST_F(VideoProcessingTest, Resampler) {
123 #endif 129 #endif
124 enum { NumRuns = 1 }; 130 enum { NumRuns = 1 };
125 131
126 int64_t min_runtime = 0; 132 int64_t min_runtime = 0;
127 int64_t total_runtime = 0; 133 int64_t total_runtime = 0;
128 134
129 rewind(source_file_); 135 rewind(source_file_);
130 ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n"; 136 ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n";
131 137
132 // no temporal decimation 138 // no temporal decimation
133 vp_->EnableTemporalDecimation(false); 139 vp_->EnableTemporalDecimation(false);
134 140
135 // Reading test frame 141 // Reading test frame
136 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); 142 rtc::scoped_refptr<webrtc::I420Buffer> buffer =
137 ASSERT_EQ(frame_length_, 143 I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
138 fread(video_buffer.get(), 1, frame_length_, source_file_)); 144
139 // Using ConvertToI420 to add stride to the image. 145 ASSERT_EQ(static_cast<size_t>(size_y_),
140 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, 146 fread(buffer->MutableDataY(), 1, size_y_, source_file_));
141 0, kVideoRotation_0, &video_frame_)); 147 ASSERT_EQ(static_cast<size_t>(size_uv_),
142 // Cropped source frame that will contain the expected visible region. 148 fread(buffer->MutableDataU(), 1, size_uv_, source_file_));
143 VideoFrame cropped_source_frame; 149 ASSERT_EQ(static_cast<size_t>(size_uv_),
144 cropped_source_frame.CopyFrame(video_frame_); 150 fread(buffer->MutableDataV(), 1, size_uv_, source_file_));
145 151
146 for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) { 152 for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
147 // Initiate test timer. 153 // Initiate test timer.
148 const int64_t time_start = rtc::TimeNanos(); 154 const int64_t time_start = rtc::TimeNanos();
149 155
150 // Init the sourceFrame with a timestamp. 156 // Init the sourceFrame with a timestamp.
151 int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec; 157 int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
152 video_frame_.set_render_time_ms(time_start_ms); 158 VideoFrame video_frame(buffer, time_start_ms * 90, time_start_ms,
153 video_frame_.set_timestamp(time_start_ms * 90); 159 webrtc::kVideoRotation_0);
154 160
155 // Test scaling to different sizes: source is of |width|/|height| = 352/288. 161 // Test scaling to different sizes: source is of |width|/|height| = 352/288.
156 // Pure scaling: 162 // Pure scaling:
157 TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vp_); 163 TestSize(video_frame, buffer, width_ / 4, height_ / 4, 25.2, vp_);
158 TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vp_); 164 TestSize(video_frame, buffer, width_ / 2, height_ / 2, 28.1, vp_);
159 // No resampling: 165 // No resampling:
160 TestSize(video_frame_, video_frame_, width_, height_, -1, vp_); 166 TestSize(video_frame, buffer, width_, height_, -1, vp_);
161 TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vp_); 167 TestSize(video_frame, buffer, 2 * width_, 2 * height_, 32.2, vp_);
162 168
163 // Scaling and cropping. The cropped source frame is the largest center 169 // Scaling and cropping. The cropped source frame is the largest center
164 // aligned region that can be used from the source while preserving aspect 170 // aligned region that can be used from the source while preserving aspect
165 // ratio. 171 // ratio.
166 CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176, 172 TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 56, 352, 176),
167 &cropped_source_frame); 173 100, 50, 24.0, vp_);
168 TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vp_); 174 TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 30, 352, 225),
169 175 400, 256, 31.3, vp_);
170 CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225, 176 TestSize(video_frame, CropBuffer(buffer, width_, height_, 68, 0, 216, 288),
171 &cropped_source_frame); 177 480, 640, 32.15, vp_);
172 TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vp_); 178 TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 12, 352, 264),
173 179 960, 720, 32.2, vp_);
174 CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288, 180 TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 44, 352, 198),
175 &cropped_source_frame); 181 1280, 720, 32.15, vp_);
176 TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vp_);
177
178 CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
179 &cropped_source_frame);
180 TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vp_);
181
182 CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
183 &cropped_source_frame);
184 TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vp_);
185 182
186 // Upsampling to odd size. 183 // Upsampling to odd size.
187 CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233, 184 TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 26, 352, 233),
188 &cropped_source_frame); 185 501, 333, 32.05, vp_);
189 TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vp_);
190 // Downsample to odd size. 186 // Downsample to odd size.
191 CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219, 187 TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 34, 352, 219),
192 &cropped_source_frame); 188 281, 175, 29.3, vp_);
193 TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
194 189
195 // Stop timer. 190 // Stop timer.
196 const int64_t runtime = 191 const int64_t runtime =
197 (rtc::TimeNanos() - time_start) / rtc::kNumNanosecsPerMicrosec; 192 (rtc::TimeNanos() - time_start) / rtc::kNumNanosecsPerMicrosec;
198 if (runtime < min_runtime || run_idx == 0) { 193 if (runtime < min_runtime || run_idx == 0) {
199 min_runtime = runtime; 194 min_runtime = runtime;
200 } 195 }
201 total_runtime += runtime; 196 total_runtime += runtime;
202 } 197 }
203 198
(...skipping 18 matching lines...)
222 } 217 }
223 218
224 // Verify the resampled frame. 219 // Verify the resampled frame.
225 EXPECT_TRUE(out_frame != NULL); 220 EXPECT_TRUE(out_frame != NULL);
226 EXPECT_EQ(source.render_time_ms(), (out_frame)->render_time_ms()); 221 EXPECT_EQ(source.render_time_ms(), (out_frame)->render_time_ms());
227 EXPECT_EQ(source.timestamp(), (out_frame)->timestamp()); 222 EXPECT_EQ(source.timestamp(), (out_frame)->timestamp());
228 EXPECT_EQ(target_width, (out_frame)->width()); 223 EXPECT_EQ(target_width, (out_frame)->width());
229 EXPECT_EQ(target_height, (out_frame)->height()); 224 EXPECT_EQ(target_height, (out_frame)->height());
230 } 225 }
231 226
232 void CropFrame(const uint8_t* source_data, 227 rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
233 int source_width, 228 const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
234 int source_height, 229 int source_width,
235 int offset_x, 230 int source_height,
236 int offset_y, 231 int offset_x,
237 int cropped_width, 232 int offset_y,
238 int cropped_height, 233 int cropped_width,
239 VideoFrame* cropped_frame) { 234 int cropped_height) {
240 cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width, 235 // Force even.
241 (cropped_width + 1) / 2, 236 offset_x &= 1;
242 (cropped_width + 1) / 2); 237 offset_y &= 1;
243 EXPECT_EQ(0, 238
244 ConvertToI420(kI420, source_data, offset_x, offset_y, source_width, 239 size_t y_start = offset_x + offset_y * source_buffer->StrideY();
245 source_height, 0, kVideoRotation_0, cropped_frame)); 240 size_t u_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
241 size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
242
243 return rtc::scoped_refptr<VideoFrameBuffer>(
244 new rtc::RefCountedObject<WrappedI420Buffer>(
245 cropped_width, cropped_height, source_buffer->DataY() + y_start,
246 source_buffer->StrideY(), source_buffer->DataU() + u_start,
247 source_buffer->StrideU(), source_buffer->DataV() + v_start,
248 source_buffer->StrideV(), rtc::KeepRefUntilDone(source_buffer)));
246 } 249 }
247 250
248 void TestSize(const VideoFrame& source_frame, 251 void TestSize(const VideoFrame& source_frame,
249 const VideoFrame& cropped_source_frame, 252 const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
250 int target_width, 253 int target_width,
251 int target_height, 254 int target_height,
252 double expected_psnr, 255 double expected_psnr,
253 VideoProcessing* vpm) { 256 VideoProcessing* vpm) {
254 // Resample source_frame to out_frame. 257 // Resample source_frame to out_frame.
255 VideoFrame* out_frame = NULL; 258 VideoFrame* out_frame = NULL;
256 vpm->SetInputFrameResampleMode(kBox); 259 vpm->SetInputFrameResampleMode(kBox);
257 PreprocessFrameAndVerify(source_frame, target_width, target_height, vpm, 260 PreprocessFrameAndVerify(source_frame, target_width, target_height, vpm,
258 out_frame); 261 out_frame);
259 if (out_frame == NULL) 262 if (out_frame == NULL)
260 return; 263 return;
261 WriteProcessedFrameForVisualInspection(source_frame, *out_frame); 264 WriteProcessedFrameForVisualInspection(source_frame, *out_frame);
262 265
263 // Scale |resampled_source_frame| back to the source scale. 266 // Scale |resampled_source_frame| back to the source scale.
264 VideoFrame resampled_source_frame; 267 VideoFrame resampled_source_frame;
265 resampled_source_frame.CopyFrame(*out_frame); 268 resampled_source_frame.CopyFrame(*out_frame);
266 PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(), 269 PreprocessFrameAndVerify(resampled_source_frame,
267 cropped_source_frame.height(), vpm, out_frame); 270 cropped_source_buffer->width(),
271 cropped_source_buffer->height(), vpm, out_frame);
268 WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame); 272 WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
269 273
270 // Compute PSNR against the cropped source frame and check expectation. 274 // Compute PSNR against the cropped source frame and check expectation.
271 double psnr = I420PSNR(&cropped_source_frame, out_frame); 275 double psnr =
276 I420PSNR(cropped_source_buffer, out_frame->video_frame_buffer());
272 EXPECT_GT(psnr, expected_psnr); 277 EXPECT_GT(psnr, expected_psnr);
273 printf( 278 printf(
274 "PSNR: %f. PSNR is between source of size %d %d, and a modified " 279 "PSNR: %f. PSNR is between source of size %d %d, and a modified "
275 "source which is scaled down/up to: %d %d, and back to source size \n", 280 "source which is scaled down/up to: %d %d, and back to source size \n",
276 psnr, source_frame.width(), source_frame.height(), target_width, 281 psnr, source_frame.width(), source_frame.height(), target_width,
277 target_height); 282 target_height);
278 } 283 }
279 284
280 void WriteProcessedFrameForVisualInspection(const VideoFrame& source, 285 void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
281 const VideoFrame& processed) { 286 const VideoFrame& processed) {
282 // Skip if writing to files is not enabled. 287 // Skip if writing to files is not enabled.
283 if (!FLAGS_gen_files) 288 if (!FLAGS_gen_files)
284 return; 289 return;
285 // Write the processed frame to file for visual inspection. 290 // Write the processed frame to file for visual inspection.
286 std::ostringstream filename; 291 std::ostringstream filename;
287 filename << webrtc::test::OutputPath() << "Resampler_from_" << source.width() 292 filename << webrtc::test::OutputPath() << "Resampler_from_" << source.width()
288 << "x" << source.height() << "_to_" << processed.width() << "x" 293 << "x" << source.height() << "_to_" << processed.width() << "x"
289 << processed.height() << "_30Hz_P420.yuv"; 294 << processed.height() << "_30Hz_P420.yuv";
290 std::cout << "Watch " << filename.str() << " and verify that it is okay." 295 std::cout << "Watch " << filename.str() << " and verify that it is okay."
291 << std::endl; 296 << std::endl;
292 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); 297 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
293 if (PrintVideoFrame(processed, stand_alone_file) < 0) 298 if (PrintVideoFrame(processed, stand_alone_file) < 0)
294 std::cerr << "Failed to write: " << filename.str() << std::endl; 299 std::cerr << "Failed to write: " << filename.str() << std::endl;
295 if (stand_alone_file) 300 if (stand_alone_file)
296 fclose(stand_alone_file); 301 fclose(stand_alone_file);
297 } 302 }
298 303
299 } // namespace webrtc 304 } // namespace webrtc
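A note on the crop tuples passed to CropBuffer in the Resampler test above: each (offset_x, offset_y, cropped_width, cropped_height) is intended to be the largest center-aligned region of the 352x288 source that has the target's aspect ratio, as described by the "Scaling and cropping" comment in the test. A minimal sketch of that arithmetic; the helper below is illustrative and not part of the CL:

```cpp
// Compute the largest center-aligned crop of |src_w| x |src_h| that matches
// the aspect ratio of |dst_w| x |dst_h|. Illustrative helper only; the test
// above hard-codes the resulting values.
struct Crop { int offset_x, offset_y, width, height; };

Crop CenterCropForAspect(int src_w, int src_h, int dst_w, int dst_h) {
  int crop_w = src_w;
  int crop_h = src_h;
  if (src_w * dst_h > src_h * dst_w) {
    // Source is wider than the target aspect ratio: trim width.
    crop_w = src_h * dst_w / dst_h;
  } else {
    // Source is taller than the target aspect ratio: trim height.
    crop_h = src_w * dst_h / dst_w;
  }
  return {(src_w - crop_w) / 2, (src_h - crop_h) / 2, crop_w, crop_h};
}

// Example: a 100x50 target from the 352x288 source gives
// crop_h = 352 * 50 / 100 = 176 and offset_y = (288 - 176) / 2 = 56,
// matching CropBuffer(buffer, 352, 288, 0, 56, 352, 176) in the test.
// Likewise, a 480x640 target gives crop_w = 288 * 480 / 640 = 216 and
// offset_x = (352 - 216) / 2 = 68, matching the (68, 0, 216, 288) call.
```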
