Chromium Code Reviews

Unified Diff: webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc

Issue 1952443002: [H264][Simulcast] Implement the simulcast logic for h264 encoder
Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Fix a few issues with the patch (created 4 years, 7 months ago)
 /*
  * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  *
  */

 #include "webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h"

+#include <algorithm>
 #include <limits>

+#include "webrtc/common_video/libyuv/include/scaler.h"
 #include "third_party/openh264/src/codec/api/svc/codec_api.h"
 #include "third_party/openh264/src/codec/api/svc/codec_app_def.h"
 #include "third_party/openh264/src/codec/api/svc/codec_def.h"

 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/video_frame.h"

 namespace webrtc {

 namespace {

 const bool kOpenH264EncoderDetailedLogging = false;

 // Used by histograms. Values of entries should not be changed.
 enum H264EncoderImplEvent {
   kH264EncoderEventInit = 0,
   kH264EncoderEventError = 1,
   kH264EncoderEventMax = 16,
 };

 int NumberOfThreads(int width, int height, int number_of_cores) {
   // TODO(hbos): In Chromium, multiple threads do not work with sandbox on Mac,
   // see crbug.com/583348. Until further investigated, only use one thread.
   // if (width * height >= 1920 * 1080 && number_of_cores > 8) {
   //   return 8;  // 8 threads for 1080p on high perf machines.
   // } else if (width * height > 1280 * 960 && number_of_cores >= 6) {
   //   return 3;  // 3 threads for 1080p.
   // } else if (width * height > 640 * 480 && number_of_cores >= 3) {
   //   return 2;  // 2 threads for qHD/HD.
   // } else {
   //   return 1;  // 1 thread for VGA or less.
   // }
   return 1;
 }

52 55
56 std::vector<int> GetStreamBitratesKbps(const VideoCodec& codec,
57 int bitrate_to_allocate_kbps) {
58 if (codec.numberOfSimulcastStreams <= 1) {
59 return std::vector<int>(1, bitrate_to_allocate_kbps);
60 }
61
62 std::vector<int> bitrates_kbps(codec.numberOfSimulcastStreams);
63 // Allocate min -> target bitrates as long as we have bitrate to spend.
64 size_t last_active_stream = 0;
65 for (size_t i = 0; i < static_cast<size_t>(codec.numberOfSimulcastStreams) &&
66 bitrate_to_allocate_kbps >=
67 static_cast<int>(codec.simulcastStream[i].minBitrate);
68 ++i) {
69 last_active_stream = i;
70 int allocated_bitrate_kbps =
71 std::min(static_cast<int>(codec.simulcastStream[i].targetBitrate),
72 bitrate_to_allocate_kbps);
73 bitrates_kbps[i] = allocated_bitrate_kbps;
74 bitrate_to_allocate_kbps -= allocated_bitrate_kbps;
75 }
76
77 // Spend additional bits on the highest-quality active layer, up to max
78 // bitrate.
79 // TODO(pbos): Consider spending additional bits on last_active_stream-1 down
80 // to 0 and not just the top layer when we have additional bitrate to spend.
81 int allocated_bitrate_kbps = std::min(
82 static_cast<int>(codec.simulcastStream[last_active_stream].maxBitrate -
83 bitrates_kbps[last_active_stream]),
84 bitrate_to_allocate_kbps);
85 bitrates_kbps[last_active_stream] += allocated_bitrate_kbps;
86 bitrate_to_allocate_kbps -= allocated_bitrate_kbps;
87
88 // Make sure we can always send something. Suspending below min bitrate is
89 // controlled outside the codec implementation and is not overriden by this.
90 if (bitrates_kbps[0] < static_cast<int>(codec.simulcastStream[0].minBitrate))
91 bitrates_kbps[0] = static_cast<int>(codec.simulcastStream[0].minBitrate);
92
93 return bitrates_kbps;
94 }
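
The allocation above is two-pass: layers are filled from min toward target bitrate in ascending order, then whatever is left goes to the highest layer that received anything, capped at its max. A small standalone sketch (plain C++; the three-layer kbps figures are hypothetical, not taken from the patch) that mirrors the logic:

#include <algorithm>
#include <cstdio>
#include <vector>

// Simplified stand-in for the simulcastStream entries; all values in kbps.
struct Layer { int min_kbps; int target_kbps; int max_kbps; };

int main() {
  std::vector<Layer> layers = {{50, 150, 200},      // e.g. 320x180
                               {150, 500, 700},     // e.g. 640x360
                               {600, 1700, 2500}};  // e.g. 1280x720
  int remaining_kbps = 3000;
  std::vector<int> alloc(layers.size(), 0);
  size_t last_active = 0;
  // Pass 1: min -> target, lowest layer first, while bitrate remains.
  for (size_t i = 0; i < layers.size() && remaining_kbps >= layers[i].min_kbps;
       ++i) {
    last_active = i;
    alloc[i] = std::min(layers[i].target_kbps, remaining_kbps);
    remaining_kbps -= alloc[i];
  }
  // Pass 2: leftover bits go to the highest active layer, up to its max.
  int extra = std::min(layers[last_active].max_kbps - alloc[last_active],
                       remaining_kbps);
  alloc[last_active] += extra;
  for (size_t i = 0; i < alloc.size(); ++i)
    std::printf("layer %zu: %d kbps\n", i, alloc[i]);
  // Prints 150, 500, 2350: pass 1 fills all targets (150 + 500 + 1700),
  // pass 2 adds the remaining 650 kbps to the top layer.
  return 0;
}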
+
+uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec) {
+  uint32_t bitrate_sum = 0;
+  for (int i = 0; i < streams; ++i) {
+    bitrate_sum += codec.simulcastStream[i].maxBitrate;
+  }
+  return bitrate_sum;
+}
+
+int NumberOfStreams(const VideoCodec& codec) {
+  int streams =
+      codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
+  uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
+  if (simulcast_max_bitrate == 0) {
+    streams = 1;
+  }
+  return streams;
+}
+
+bool ValidSimulcastResolutions(const VideoCodec& codec, int num_streams) {
+  if (codec.width != codec.simulcastStream[num_streams - 1].width ||
+      codec.height != codec.simulcastStream[num_streams - 1].height) {
+    return false;
+  }
+  for (int i = 0; i < num_streams; ++i) {
+    if (codec.width * codec.simulcastStream[i].height !=
+        codec.height * codec.simulcastStream[i].width) {
+      return false;
+    }
+  }
+  return true;
+}
+
 }  // namespace

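ValidSimulcastResolutions() above requires the top simulcast layer to match the codec's full resolution and every layer to keep its aspect ratio. The check cross-multiplies (W * h_i == H * w_i) so it stays in integer arithmetic. A tiny standalone illustration with hypothetical resolutions:

#include <cstdio>

// W:H is the full codec resolution; w:h is one simulcast layer.
// Cross-multiplication avoids floating-point ratio comparisons.
static bool SameAspect(int W, int H, int w, int h) {
  return W * h == H * w;
}

int main() {
  std::printf("%d\n", SameAspect(1280, 720, 640, 360));  // 1: same 16:9 ratio
  std::printf("%d\n", SameAspect(1280, 720, 640, 350));  // 0: ratio mismatch
  return 0;
}
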
 static FrameType EVideoFrameType_to_FrameType(EVideoFrameType type) {
   switch (type) {
     case videoFrameTypeInvalid:
       return kEmptyFrame;
     case videoFrameTypeIDR:
       return kVideoFrameKey;
     case videoFrameTypeSkip:
     case videoFrameTypeI:
(...skipping 78 matching lines...)
     }
     // Copy the entire layer's data (including start codes).
     memcpy(encoded_image->_buffer + encoded_image->_length,
            layerInfo.pBsBuf,
            layer_len);
     encoded_image->_length += layer_len;
   }
 }

 H264EncoderImpl::H264EncoderImpl()
-    : openh264_encoder_(nullptr),
-      encoded_image_callback_(nullptr),
+    : encoded_image_callback_(nullptr),
       has_reported_init_(false),
-      has_reported_error_(false) {
+      has_reported_error_(false),
+      key_frame_request_(kMaxSimulcastStreams, false) {
+  encoded_images_.reserve(kMaxSimulcastStreams);
+  encoded_image_buffers_.reserve(kMaxSimulcastStreams);
+  send_streams_.reserve(kMaxSimulcastStreams);
+  encoders_.reserve(kMaxSimulcastStreams);
+  scaled_input_frames_.reserve(kMaxSimulcastStreams);
 }

 H264EncoderImpl::~H264EncoderImpl() {
   Release();
 }

+void H264EncoderImpl::SetStreamState(bool send_stream, int stream_idx) {
+  if (send_stream && !send_streams_[stream_idx]) {
+    // Need a key frame if we have not sent this stream before.
+    key_frame_request_[stream_idx] = true;
+  }
+  send_streams_[stream_idx] = send_stream;
+}
+
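SetStreamState() arms a key-frame request whenever a stream flips from disabled to enabled, so the first frame a receiver sees on a newly activated layer is independently decodable. A standalone model of that transition rule (hypothetical two-stream setup, mirroring the member vectors above):

#include <cstdio>
#include <vector>

struct StreamState {
  std::vector<bool> send_streams{false, false};
  std::vector<bool> key_frame_request{false, false};
  void Set(bool send_stream, int idx) {
    if (send_stream && !send_streams[idx]) {
      // Off -> on transition: the next encoded frame must be a key frame.
      key_frame_request[idx] = true;
    }
    send_streams[idx] = send_stream;
  }
};

int main() {
  StreamState s;
  s.Set(true, 0);   // first activation of stream 0: requests a key frame
  s.Set(true, 0);   // already active: no new request
  s.Set(false, 1);  // staying disabled: no request
  std::printf("%d %d\n", static_cast<int>(s.key_frame_request[0]),
              static_cast<int>(s.key_frame_request[1]));  // prints "1 0"
  return 0;
}
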
 int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings,
                                     int32_t number_of_cores,
                                     size_t /*max_payload_size*/) {
   ReportInit();
   if (!codec_settings ||
       codec_settings->codecType != kVideoCodecH264) {
     ReportError();
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (codec_settings->maxFramerate == 0) {
     ReportError();
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (codec_settings->width < 1 || codec_settings->height < 1) {
     ReportError();
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }

   int32_t release_ret = Release();
   if (release_ret != WEBRTC_VIDEO_CODEC_OK) {
     ReportError();
     return release_ret;
   }
-  RTC_DCHECK(!openh264_encoder_);

-  // Create encoder.
-  if (WelsCreateSVCEncoder(&openh264_encoder_) != 0) {
-    // Failed to create encoder.
-    LOG(LS_ERROR) << "Failed to create OpenH264 encoder";
-    RTC_DCHECK(!openh264_encoder_);
-    ReportError();
-    return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-  RTC_DCHECK(openh264_encoder_);
-  if (kOpenH264EncoderDetailedLogging) {
-    int trace_level = WELS_LOG_DETAIL;
-    openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL,
-                                 &trace_level);
-  }
-  // else WELS_LOG_DEFAULT is used by default.
+  int number_of_streams = NumberOfStreams(*codec_settings);
+  bool doing_simulcast = (number_of_streams > 1);
+
+  if (doing_simulcast &&
+      !ValidSimulcastResolutions(*codec_settings, number_of_streams)) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // Code expects simulcastStream resolutions to be correct, make sure they are
+  // filled even when there are no simulcast layers.
+  if (codec_settings->numberOfSimulcastStreams == 0) {
+    codec_settings_.simulcastStream[0].width = codec_settings->width;
+    codec_settings_.simulcastStream[0].height = codec_settings->height;
+  }
+
+  encoded_images_.resize(number_of_streams);
+  encoded_image_buffers_.resize(number_of_streams);
+  encoders_.resize(number_of_streams);
+  scaled_input_frames_.resize(number_of_streams);
+  key_frame_request_.resize(number_of_streams);
+  std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);

   codec_settings_ = *codec_settings;
   if (codec_settings_.targetBitrate == 0)
     codec_settings_.targetBitrate = codec_settings_.startBitrate;

   // Initialization parameters.
   // There are two ways to initialize. There is SEncParamBase (cleared with
   // memset(&p, 0, sizeof(SEncParamBase)) used in Initialize, and SEncParamExt
   // which is a superset of SEncParamBase (cleared with GetDefaultParams) used
   // in InitializeExt.
-  SEncParamExt init_params;
-  openh264_encoder_->GetDefaultParams(&init_params);
-  if (codec_settings_.mode == kRealtimeVideo) {
-    init_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
-  } else if (codec_settings_.mode == kScreensharing) {
-    init_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
-  } else {
-    ReportError();
-    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
-  }
-  init_params.iPicWidth = codec_settings_.width;
-  init_params.iPicHeight = codec_settings_.height;
-  // |init_params| uses bit/s, |codec_settings_| uses kbit/s.
-  init_params.iTargetBitrate = codec_settings_.targetBitrate * 1000;
-  init_params.iMaxBitrate = codec_settings_.maxBitrate * 1000;
-  // Rate Control mode
-  init_params.iRCMode = RC_BITRATE_MODE;
-  init_params.fMaxFrameRate = static_cast<float>(codec_settings_.maxFramerate);
-
-  // The following parameters are extension parameters (they're in SEncParamExt,
-  // not in SEncParamBase).
-  init_params.bEnableFrameSkip =
-      codec_settings_.codecSpecific.H264.frameDroppingOn;
-  // |uiIntraPeriod| - multiple of GOP size
-  // |keyFrameInterval| - number of frames
-  init_params.uiIntraPeriod =
-      codec_settings_.codecSpecific.H264.keyFrameInterval;
-  init_params.uiMaxNalSize = 0;
-  // Threading model: use auto.
-  //  0: auto (dynamic imp. internal encoder)
-  //  1: single thread (default value)
-  // >1: number of threads
-  init_params.iMultipleThreadIdc = NumberOfThreads(init_params.iPicWidth,
-                                                   init_params.iPicHeight,
-                                                   number_of_cores);
-  // The base spatial layer 0 is the only one we use.
-  init_params.sSpatialLayers[0].iVideoWidth = init_params.iPicWidth;
-  init_params.sSpatialLayers[0].iVideoHeight = init_params.iPicHeight;
-  init_params.sSpatialLayers[0].fFrameRate = init_params.fMaxFrameRate;
-  init_params.sSpatialLayers[0].iSpatialBitrate = init_params.iTargetBitrate;
-  init_params.sSpatialLayers[0].iMaxSpatialBitrate = init_params.iMaxBitrate;
-  // Slice num according to number of threads.
-  init_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_AUTO_SLICE;
-
-  // Initialize.
-  if (openh264_encoder_->InitializeExt(&init_params) != 0) {
-    LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder";
-    Release();
-    ReportError();
-    return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-  int video_format = EVideoFormatType::videoFormatI420;
-  openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT,
-                               &video_format);
-
-  // Initialize encoded image. Default buffer size: size of unencoded data.
-  encoded_image_._size = CalcBufferSize(
-      kI420, codec_settings_.width, codec_settings_.height);
-  encoded_image_._buffer = new uint8_t[encoded_image_._size];
-  encoded_image_buffer_.reset(encoded_image_._buffer);
-  encoded_image_._completeFrame = true;
-  encoded_image_._encodedWidth = 0;
-  encoded_image_._encodedHeight = 0;
-  encoded_image_._length = 0;
+
+  for (int i = 0; i < number_of_streams; ++i) {
+    // Create encoder.
+    if (WelsCreateSVCEncoder(&encoders_[i]) != 0) {
+      // Failed to create encoder.
+      LOG(LS_ERROR) << "Failed to create OpenH264 encoder";
+      RTC_DCHECK(!encoders_[i]);
+      ReportError();
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    RTC_DCHECK(encoders_[i]);
+    if (kOpenH264EncoderDetailedLogging) {
+      int trace_level = WELS_LOG_DETAIL;
+      encoders_[i]->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level);
+    }
+    SEncParamExt init_params;
+    memset(&init_params, 0, sizeof(SEncParamExt));
+    encoders_[i]->GetDefaultParams(&init_params);
+    if (codec_settings_.mode == kRealtimeVideo) {
+      init_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
+    } else if (codec_settings_.mode == kScreensharing) {
+      init_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
+    } else {
+      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    init_params.iPicWidth = codec_settings_.simulcastStream[i].width;
+    init_params.iPicHeight = codec_settings_.simulcastStream[i].height;
+    // |init_params| uses bit/s, |codec_settings_| uses kbit/s.
+    init_params.iTargetBitrate = codec_settings_.startBitrate * 1000;
+    init_params.iMaxBitrate = codec_settings_.maxBitrate * 1000;
+    // Rate Control mode
+    init_params.iRCMode = RC_BITRATE_MODE;
+    init_params.fMaxFrameRate =
+        static_cast<float>(codec_settings_.maxFramerate);
+
+    // The following parameters are extension parameters
+    // (they're in SEncParamExt, not in SEncParamBase).
+    init_params.bEnableFrameSkip =
+        codec_settings_.codecSpecific.H264.frameDroppingOn;
+    // |uiIntraPeriod| - multiple of GOP size
+    // |keyFrameInterval| - number of frames
+    init_params.uiIntraPeriod =
+        codec_settings_.codecSpecific.H264.keyFrameInterval;
+    init_params.uiMaxNalSize = 0;
+    init_params.iComplexityMode = ECOMPLEXITY_MODE::LOW_COMPLEXITY;
+
+    // Threading model: use auto.
+    //  0: auto (dynamic imp. internal encoder)
+    //  1: single thread (default value)
+    // >1: number of threads
+    init_params.iMultipleThreadIdc = NumberOfThreads(
+        init_params.iPicWidth, init_params.iPicHeight, number_of_cores);
+    // The base spatial layer 0 is the only one we use.
+    init_params.sSpatialLayers[0].iVideoWidth = init_params.iPicWidth;
+    init_params.sSpatialLayers[0].iVideoHeight = init_params.iPicHeight;
+    init_params.sSpatialLayers[0].fFrameRate = init_params.fMaxFrameRate;
+    init_params.sSpatialLayers[0].iSpatialBitrate = init_params.iTargetBitrate;
+    init_params.sSpatialLayers[0].iMaxSpatialBitrate = init_params.iMaxBitrate;
+
+    // Slice num according to number of threads.
+    init_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_AUTO_SLICE;
+    // Initialize.
+    if (encoders_[i]->InitializeExt(&init_params) != 0) {
+      LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder";
+      Release();
+      ReportError();
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    int video_format = EVideoFormatType::videoFormatI420;
+    encoders_[i]->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format);
+    // Initialize encoded image. Default buffer size: size of unencoded data.
+    // allocate memory for encoded image
+    if (encoded_images_[i]._buffer != NULL) {
+      delete[] encoded_images_[i]._buffer;
+    }
+    encoded_images_[i]._size =
+        CalcBufferSize(kI420, codec_settings->simulcastStream[i].width,
+                       codec_settings->simulcastStream[i].height);
+    encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
+    encoded_image_buffers_[i].reset(encoded_images_[i]._buffer);
+    encoded_images_[i]._completeFrame = true;
+    encoded_images_[i]._encodedWidth = 0;
+    encoded_images_[i]._encodedHeight = 0;
+    encoded_images_[i]._length = 0;
+
+    // Initialize scaled input frames.
+    scaled_input_frames_[i] = *new VideoFrame();
+    scaled_input_frames_[i].CreateEmptyFrame(
+        codec_settings->simulcastStream[i].width,
+        codec_settings->simulcastStream[i].height,
+        CalculateYStrideSize(codec_settings->simulcastStream[i].width,
+                             codec_settings->simulcastStream[i].height),
+        CalculateUVStrideSize(codec_settings->simulcastStream[i].width,
+                              codec_settings->simulcastStream[i].height),
+        CalculateUVStrideSize(codec_settings->simulcastStream[i].width,
+                              codec_settings->simulcastStream[i].height));
+  }
   return WEBRTC_VIDEO_CODEC_OK;
 }
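
CreateEmptyFrame() above is fed by CalculateYStrideSize() and CalculateUVStrideSize(), whose definitions live elsewhere in the patch and are not visible in this file's diff. For I420 the conventional minimal strides would be the full width for the luma plane and half the width, rounded up, for each chroma plane; a sketch under that assumption:

// Assumed behavior of the stride helpers used above; their definitions are
// not part of this file's diff, so treat this as illustrative only.
int CalculateYStrideSize(int width, int /*height*/) {
  return width;  // one byte per luma sample
}
int CalculateUVStrideSize(int width, int /*height*/) {
  // I420 chroma is subsampled 2x2; round up so odd widths still fit.
  return (width + 1) / 2;
}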

 int32_t H264EncoderImpl::Release() {
-  if (openh264_encoder_) {
-    int uninit_ret = openh264_encoder_->Uninitialize();
-    if (uninit_ret != 0) {
-      LOG(LS_WARNING) << "OpenH264 encoder's Uninitialize() returned "
-                      << "unsuccessful: " << uninit_ret;
-    }
-    WelsDestroySVCEncoder(openh264_encoder_);
-    openh264_encoder_ = nullptr;
-  }
-  if (encoded_image_._buffer != nullptr) {
-    encoded_image_._buffer = nullptr;
-    encoded_image_buffer_.reset();
-  }
+  while (!encoders_.empty()) {
+    ISVCEncoder* openh264_encoder = encoders_.back();
+    if (openh264_encoder) {
+      int uninit_ret = openh264_encoder->Uninitialize();
+      if (uninit_ret != 0) {
+        LOG(LS_WARNING) << "OpenH264 encoder's Uninitialize() returned "
+                        << "unsuccessful: " << uninit_ret;
+      }
+      WelsDestroySVCEncoder(openh264_encoder);
+      openh264_encoder = nullptr;
+      encoders_.pop_back();
+      EncodedImage encoded_image = encoded_images_.back();
+      if (encoded_image._buffer != nullptr) {
+        encoded_image._buffer = nullptr;
+        encoded_image_buffers_.back().reset();
+      }
+      encoded_images_.pop_back();
+      encoded_image_buffers_.pop_back();
+    }
+  }
   return WEBRTC_VIDEO_CODEC_OK;
 }
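
For comparison only (not part of the patch): the per-encoder teardown above can also be expressed by giving each encoder a smart-pointer owner with a custom deleter, assuming the same WelsCreateSVCEncoder()/WelsDestroySVCEncoder() pairing from the OpenH264 API:

#include <memory>

#include "third_party/openh264/src/codec/api/svc/codec_api.h"

// Sketch: a deleter that mirrors the Uninitialize-then-destroy sequence in
// Release(), so clearing a std::vector<ScopedSvcEncoder> tears everything down.
struct SvcEncoderDeleter {
  void operator()(ISVCEncoder* encoder) const {
    if (encoder) {
      encoder->Uninitialize();
      WelsDestroySVCEncoder(encoder);
    }
  }
};
using ScopedSvcEncoder = std::unique_ptr<ISVCEncoder, SvcEncoderDeleter>;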

 int32_t H264EncoderImpl::RegisterEncodeCompleteCallback(
     EncodedImageCallback* callback) {
   encoded_image_callback_ = callback;
   return WEBRTC_VIDEO_CODEC_OK;
 }

 int32_t H264EncoderImpl::SetRates(uint32_t bitrate, uint32_t framerate) {

sprang_webrtc 2017/03/29 08:44:21: This is deprecated. Please use SetRateAllocation()

   if (bitrate <= 0 || framerate <= 0) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
+  if (codec_settings_.maxBitrate > 0 && bitrate > codec_settings_.maxBitrate) {
+    bitrate = codec_settings_.maxBitrate;
+  }
+  if (bitrate < codec_settings_.minBitrate) {
+    bitrate = codec_settings_.minBitrate;
+  }
+  if (codec_settings_.numberOfSimulcastStreams > 0 &&
+      bitrate < codec_settings_.simulcastStream[0].minBitrate) {
+    bitrate = codec_settings_.simulcastStream[0].minBitrate;
+  }
   codec_settings_.targetBitrate = bitrate;
   codec_settings_.maxFramerate = framerate;

-  SBitrateInfo target_bitrate;
-  memset(&target_bitrate, 0, sizeof(SBitrateInfo));
-  target_bitrate.iLayer = SPATIAL_LAYER_ALL,
-  target_bitrate.iBitrate = codec_settings_.targetBitrate * 1000;
-  openh264_encoder_->SetOption(ENCODER_OPTION_BITRATE,
-                               &target_bitrate);
-  float max_framerate = static_cast<float>(codec_settings_.maxFramerate);
-  openh264_encoder_->SetOption(ENCODER_OPTION_FRAME_RATE,
-                               &max_framerate);
+  std::vector<int> stream_bitrates =
+      GetStreamBitratesKbps(codec_settings_, bitrate);
+  for (size_t i = 0; i < encoders_.size(); ++i) {
+    SetStreamState(stream_bitrates[i] > 0, i);
+    if (send_streams_[i]) {
+      SBitrateInfo target_bitrate;
+      memset(&target_bitrate, 0, sizeof(SBitrateInfo));
+      target_bitrate.iLayer = SPATIAL_LAYER_ALL,
+      target_bitrate.iBitrate = stream_bitrates[i] * 1000;  // bps
+      encoders_[i]->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate);
+      float max_framerate = static_cast<float>(framerate);
+      encoders_[i]->SetOption(ENCODER_OPTION_FRAME_RATE, &max_framerate);
+    }
+  }
   return WEBRTC_VIDEO_CODEC_OK;
 }

+void H264EncoderImpl::Scale(const VideoFrame& input_frame,
+                            VideoFrame* output_frame) {
+  Scaler scaler;
+  scaler.Set(input_frame.width(), input_frame.height(), output_frame->width(),
+             output_frame->height(), webrtc::kI420, webrtc::kI420,
+             webrtc::kScaleBilinear);
+  scaler.Scale(input_frame, output_frame);
+}
+
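Scale() goes through WebRTC's Scaler wrapper; underneath, bilinear I420 scaling is what libyuv provides directly. A minimal sketch of the equivalent direct call (the plane pointers and strides are hypothetical stand-ins for whatever the caller holds):

#include <cstdint>

#include "libyuv/scale.h"

// Scale an I420 picture from src_w x src_h to dst_w x dst_h with the same
// bilinear filter the Scaler wrapper selects (webrtc::kScaleBilinear).
void ScaleI420(const uint8_t* src_y, int src_stride_y,
               const uint8_t* src_u, int src_stride_u,
               const uint8_t* src_v, int src_stride_v, int src_w, int src_h,
               uint8_t* dst_y, int dst_stride_y,
               uint8_t* dst_u, int dst_stride_u,
               uint8_t* dst_v, int dst_stride_v, int dst_w, int dst_h) {
  libyuv::I420Scale(src_y, src_stride_y, src_u, src_stride_u, src_v,
                    src_stride_v, src_w, src_h, dst_y, dst_stride_y, dst_u,
                    dst_stride_u, dst_v, dst_stride_v, dst_w, dst_h,
                    libyuv::kFilterBilinear);
}
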
 int32_t H264EncoderImpl::Encode(
     const VideoFrame& frame, const CodecSpecificInfo* codec_specific_info,
     const std::vector<FrameType>* frame_types) {
   if (!IsInitialized()) {
     ReportError();
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
   if (frame.IsZeroSize()) {
     ReportError();
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (!encoded_image_callback_) {
     LOG(LS_WARNING) << "InitEncode() has been called, but a callback function "
                     << "has not been set with RegisterEncodeCompleteCallback()";
     ReportError();
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  if (frame.width() != codec_settings_.width ||
-      frame.height() != codec_settings_.height) {
-    LOG(LS_WARNING) << "Encoder initialized for " << codec_settings_.width
-                    << "x" << codec_settings_.height << " but trying to encode "
-                    << frame.width() << "x" << frame.height() << " frame.";
-    ReportError();
-    return WEBRTC_VIDEO_CODEC_ERR_SIZE;
-  }
+
+  std::vector<bool> force_key_frame;
+  force_key_frame.resize(encoders_.size());
+  std::fill(force_key_frame.begin(), force_key_frame.end(), false);
+  for (size_t i = 0; i < key_frame_request_.size() && i < send_streams_.size();
+       ++i) {
+    if (key_frame_request_[i] && send_streams_[i]) {
+      force_key_frame[i] = true;
+    }
+  }
+  if (frame_types) {
+    for (size_t i = 0; i < frame_types->size(); ++i) {
+      if ((*frame_types)[i] == kVideoFrameKey) {
+        force_key_frame[i] = true;
+      }
+    }
+  }

-  bool force_key_frame = false;
-  if (frame_types != nullptr) {
-    // We only support a single stream.
-    RTC_DCHECK_EQ(frame_types->size(), static_cast<size_t>(1));
-    // Skip frame?
-    if ((*frame_types)[0] == kEmptyFrame) {
-      return WEBRTC_VIDEO_CODEC_OK;
-    }
-    // Force key frame?
-    force_key_frame = (*frame_types)[0] == kVideoFrameKey;
-  }
-  if (force_key_frame) {
-    // API doc says ForceIntraFrame(false) does nothing, but calling this
-    // function forces a key frame regardless of the |bIDR| argument's value.
-    // (If every frame is a key frame we get lag/delays.)
-    openh264_encoder_->ForceIntraFrame(true);
-  }
-
-  // EncodeFrame input.
-  SSourcePicture picture;
-  memset(&picture, 0, sizeof(SSourcePicture));
-  picture.iPicWidth = frame.width();
-  picture.iPicHeight = frame.height();
-  picture.iColorFormat = EVideoFormatType::videoFormatI420;
-  picture.uiTimeStamp = frame.ntp_time_ms();
-  picture.iStride[0] = frame.stride(kYPlane);
-  picture.iStride[1] = frame.stride(kUPlane);
-  picture.iStride[2] = frame.stride(kVPlane);
-  picture.pData[0] = const_cast<uint8_t*>(frame.buffer(kYPlane));
-  picture.pData[1] = const_cast<uint8_t*>(frame.buffer(kUPlane));
-  picture.pData[2] = const_cast<uint8_t*>(frame.buffer(kVPlane));
-
-  // EncodeFrame output.
-  SFrameBSInfo info;
-  memset(&info, 0, sizeof(SFrameBSInfo));
-
-  // Encode!
-  int enc_ret = openh264_encoder_->EncodeFrame(&picture, &info);
-  if (enc_ret != 0) {
-    LOG(LS_ERROR) << "OpenH264 frame encoding failed, EncodeFrame returned "
-                  << enc_ret << ".";
-    ReportError();
-    return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-
-  encoded_image_._encodedWidth = frame.width();
-  encoded_image_._encodedHeight = frame.height();
-  encoded_image_._timeStamp = frame.timestamp();
-  encoded_image_.ntp_time_ms_ = frame.ntp_time_ms();
-  encoded_image_.capture_time_ms_ = frame.render_time_ms();
-  encoded_image_.rotation_ = frame.rotation();
-  encoded_image_._frameType = EVideoFrameType_to_FrameType(info.eFrameType);
-
-  // Split encoded image up into fragments. This also updates |encoded_image_|.
-  RTPFragmentationHeader frag_header;
-  RtpFragmentize(&encoded_image_, &encoded_image_buffer_, frame, &info,
-                 &frag_header);
-
-  // Encoder can skip frames to save bandwidth in which case
-  // |encoded_image_._length| == 0.
-  if (encoded_image_._length > 0) {
-    // Deliver encoded image.
-    CodecSpecificInfo codec_specific;
-    codec_specific.codecType = kVideoCodecH264;
-    encoded_image_callback_->Encoded(encoded_image_,
-                                     &codec_specific,
-                                     &frag_header);
-  }
+  for (size_t i = 0; i < encoders_.size(); ++i) {
+    if (!send_streams_[i] || (*frame_types)[i] == kEmptyFrame) {
+      continue;
+    }
+    // Scale input to match encode dimensions.
+    Scale(frame, &scaled_input_frames_[i]);
+
+    if (scaled_input_frames_[i].width() !=
+            codec_settings_.simulcastStream[i].width ||
+        scaled_input_frames_[i].height() !=
+            codec_settings_.simulcastStream[i].height) {
+      LOG(LS_ERROR) << "Encoder initialized for "
+                    << codec_settings_.simulcastStream[i].width << "x"
+                    << codec_settings_.simulcastStream[i].height
+                    << " but trying to encode "
+                    << scaled_input_frames_[i].width() << "x"
+                    << scaled_input_frames_[i].height() << " frame.";
+      ReportError();
+      return WEBRTC_VIDEO_CODEC_ERR_SIZE;
+    }
+
+    SSourcePicture picture;
+    memset(&picture, 0, sizeof(SSourcePicture));
+    picture.iPicWidth = scaled_input_frames_[i].width();
+    picture.iPicHeight = scaled_input_frames_[i].height();
+    picture.iColorFormat = EVideoFormatType::videoFormatI420;
+    picture.uiTimeStamp = frame.ntp_time_ms();
+    picture.iStride[0] = scaled_input_frames_[i].stride(kYPlane);
+    picture.iStride[1] = scaled_input_frames_[i].stride(kUPlane);
+    picture.iStride[2] = scaled_input_frames_[i].stride(kVPlane);
+    picture.pData[0] =
+        const_cast<uint8_t*>(scaled_input_frames_[i].buffer(kYPlane));
+    picture.pData[1] =
+        const_cast<uint8_t*>(scaled_input_frames_[i].buffer(kUPlane));
+    picture.pData[2] =
+        const_cast<uint8_t*>(scaled_input_frames_[i].buffer(kVPlane));
+    if (force_key_frame[i]) {
+      // API doc says ForceIntraFrame(false) does nothing, but calling this
+      // function forces a key frame regardless of the |bIDR| argument's value.
+      // (If every frame is a key frame we get lag/delays.)
+      encoders_[i]->ForceIntraFrame(true);
+      std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);
+    }
+    // EncodeFrame output.
+    SFrameBSInfo info;
+    memset(&info, 0, sizeof(SFrameBSInfo));
+    int enc_ret = encoders_[i]->EncodeFrame(&picture, &info);
+    if (enc_ret != 0) {
+      LOG(LS_ERROR) << "OpenH264 frame encoding failed, EncodeFrame returned "
+                    << enc_ret << ".";
+      ReportError();
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    encoded_images_[i]._encodedWidth = codec_settings_.simulcastStream[i].width;
+    encoded_images_[i]._encodedHeight =
+        codec_settings_.simulcastStream[i].height;
+    encoded_images_[i]._timeStamp = frame.timestamp();
+    encoded_images_[i].ntp_time_ms_ = frame.ntp_time_ms();
+    encoded_images_[i].capture_time_ms_ = frame.render_time_ms();
+    encoded_images_[i]._frameType =
+        EVideoFrameType_to_FrameType(info.eFrameType);
+    // Split encoded image up into fragments. This also updates
+    // |encoded_images_[i]|.
+    RTPFragmentationHeader frag_header;
+    RtpFragmentize(&encoded_images_[i], &encoded_image_buffers_[i], frame,
+                   &info, &frag_header);
+    if (encoded_images_[i]._length > 0) {
+      // Deliver encoded image.
+      CodecSpecificInfo codec_specific;
+      CodecSpecificInfoH264* h264Info = &(codec_specific.codecSpecific.H264);
+      h264Info->simulcastIdx = i;
+      codec_specific.codecType = kVideoCodecH264;
+      encoded_image_callback_->Encoded(encoded_images_[i], &codec_specific,
+                                       &frag_header);
+    }
+  }
   return WEBRTC_VIDEO_CODEC_OK;
 }

 bool H264EncoderImpl::IsInitialized() const {
-  return openh264_encoder_ != nullptr;
+  for (auto openh264_encoder : encoders_) {
+    if (openh264_encoder == nullptr) {
+      return false;
+    }
+  }
+  return true;
 }

 void H264EncoderImpl::ReportInit() {
   if (has_reported_init_)
     return;
   RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event",
                             kH264EncoderEventInit,
                             kH264EncoderEventMax);
   has_reported_init_ = true;
 }

(...skipping 13 matching lines...)

 }

 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) {
   return WEBRTC_VIDEO_CODEC_OK;
 }

 void H264EncoderImpl::OnDroppedFrame() {
 }

 }  // namespace webrtc
