OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
126 VP8Encoder* VP8Encoder::Create() { | 126 VP8Encoder* VP8Encoder::Create() { |
127 return new VP8EncoderImpl(); | 127 return new VP8EncoderImpl(); |
128 } | 128 } |
129 | 129 |
130 VP8Decoder* VP8Decoder::Create() { | 130 VP8Decoder* VP8Decoder::Create() { |
131 return new VP8DecoderImpl(); | 131 return new VP8DecoderImpl(); |
132 } | 132 } |
133 | 133 |
134 VP8EncoderImpl::VP8EncoderImpl() | 134 VP8EncoderImpl::VP8EncoderImpl() |
135 : use_gf_boost_(webrtc::field_trial::IsEnabled(kVp8GfBoostFieldTrial)), | 135 : use_gf_boost_(webrtc::field_trial::IsEnabled(kVp8GfBoostFieldTrial)), |
136 random_(rtc::TimeMicros()), | |
stefan-webrtc
2017/04/20 09:28:09
Mostly FYI, but we discovered today that TimeMicros() […comment truncated]
brandtr
2017/04/24 07:53:47
Changed to TimeMillis(). That should be granular enough.
| |
136 encoded_complete_callback_(nullptr), | 137 encoded_complete_callback_(nullptr), |
137 inited_(false), | 138 inited_(false), |
138 timestamp_(0), | 139 timestamp_(0), |
139 qp_max_(56), // Setting for max quantizer. | 140 qp_max_(56), // Setting for max quantizer. |
140 cpu_speed_default_(-6), | 141 cpu_speed_default_(-6), |
141 number_of_cores_(0), | 142 number_of_cores_(0), |
142 rc_max_intra_target_(0), | 143 rc_max_intra_target_(0), |
143 key_frame_request_(kMaxSimulcastStreams, false) { | 144 key_frame_request_(kMaxSimulcastStreams, false) { |
144 uint32_t seed = rtc::Time32(); | |
145 srand(seed); | |
146 | |
147 picture_id_.reserve(kMaxSimulcastStreams); | 145 picture_id_.reserve(kMaxSimulcastStreams); |
146 for (int i = 0; i < kMaxSimulcastStreams; ++i) { | |
147 picture_id_.push_back(random_.Rand<uint16_t>() & 0x7FFF); | |
148 tl0_pic_idx_.push_back(random_.Rand<uint8_t>()); | |
149 } | |
148 last_key_frame_picture_id_.reserve(kMaxSimulcastStreams); | 150 last_key_frame_picture_id_.reserve(kMaxSimulcastStreams); |
149 temporal_layers_.reserve(kMaxSimulcastStreams); | 151 temporal_layers_.reserve(kMaxSimulcastStreams); |
150 raw_images_.reserve(kMaxSimulcastStreams); | 152 raw_images_.reserve(kMaxSimulcastStreams); |
151 encoded_images_.reserve(kMaxSimulcastStreams); | 153 encoded_images_.reserve(kMaxSimulcastStreams); |
152 send_stream_.reserve(kMaxSimulcastStreams); | 154 send_stream_.reserve(kMaxSimulcastStreams); |
153 cpu_speed_.assign(kMaxSimulcastStreams, cpu_speed_default_); | 155 cpu_speed_.assign(kMaxSimulcastStreams, cpu_speed_default_); |
154 encoders_.reserve(kMaxSimulcastStreams); | 156 encoders_.reserve(kMaxSimulcastStreams); |
155 configurations_.reserve(kMaxSimulcastStreams); | 157 configurations_.reserve(kMaxSimulcastStreams); |
156 downsampling_factors_.reserve(kMaxSimulcastStreams); | 158 downsampling_factors_.reserve(kMaxSimulcastStreams); |
157 } | 159 } |
(...skipping 17 matching lines...) Expand all Loading... | |
175 } | 177 } |
176 encoders_.pop_back(); | 178 encoders_.pop_back(); |
177 } | 179 } |
178 configurations_.clear(); | 180 configurations_.clear(); |
179 send_stream_.clear(); | 181 send_stream_.clear(); |
180 cpu_speed_.clear(); | 182 cpu_speed_.clear(); |
181 while (!raw_images_.empty()) { | 183 while (!raw_images_.empty()) { |
182 vpx_img_free(&raw_images_.back()); | 184 vpx_img_free(&raw_images_.back()); |
183 raw_images_.pop_back(); | 185 raw_images_.pop_back(); |
184 } | 186 } |
185 while (!temporal_layers_.empty()) { | 187 for (size_t i = 0; i < temporal_layers_.size(); ++i) { |
186 delete temporal_layers_.back(); | 188 tl0_pic_idx_[i] = temporal_layers_[i]->Tl0PicIdx(); |
187 temporal_layers_.pop_back(); | |
188 } | 189 } |
190 temporal_layers_.clear(); | |
189 inited_ = false; | 191 inited_ = false; |
190 return ret_val; | 192 return ret_val; |
191 } | 193 } |
192 | 194 |
193 int VP8EncoderImpl::SetRateAllocation(const BitrateAllocation& bitrate, | 195 int VP8EncoderImpl::SetRateAllocation(const BitrateAllocation& bitrate, |
194 uint32_t new_framerate) { | 196 uint32_t new_framerate) { |
195 if (!inited_) | 197 if (!inited_) |
196 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 198 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
197 | 199 |
198 if (encoders_[0].err) | 200 if (encoders_[0].err) |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
263 } | 265 } |
264 send_stream_[stream_idx] = send_stream; | 266 send_stream_[stream_idx] = send_stream; |
265 } | 267 } |
266 | 268 |
267 void VP8EncoderImpl::SetupTemporalLayers(int num_streams, | 269 void VP8EncoderImpl::SetupTemporalLayers(int num_streams, |
268 int num_temporal_layers, | 270 int num_temporal_layers, |
269 const VideoCodec& codec) { | 271 const VideoCodec& codec) { |
270 RTC_DCHECK(codec.VP8().tl_factory != nullptr); | 272 RTC_DCHECK(codec.VP8().tl_factory != nullptr); |
271 const TemporalLayersFactory* tl_factory = codec.VP8().tl_factory; | 273 const TemporalLayersFactory* tl_factory = codec.VP8().tl_factory; |
272 if (num_streams == 1) { | 274 if (num_streams == 1) { |
273 temporal_layers_.push_back( | 275 temporal_layers_.emplace_back( |
274 tl_factory->Create(0, num_temporal_layers, rand())); | 276 tl_factory->Create(0, num_temporal_layers, tl0_pic_idx_[0])); |
275 } else { | 277 } else { |
276 for (int i = 0; i < num_streams; ++i) { | 278 for (int i = 0; i < num_streams; ++i) { |
277 RTC_CHECK_GT(num_temporal_layers, 0); | 279 RTC_CHECK_GT(num_temporal_layers, 0); |
278 int layers = std::max(static_cast<uint8_t>(1), | 280 int layers = std::max(static_cast<uint8_t>(1), |
279 codec.simulcastStream[i].numberOfTemporalLayers); | 281 codec.simulcastStream[i].numberOfTemporalLayers); |
280 temporal_layers_.push_back(tl_factory->Create(i, layers, rand())); | 282 temporal_layers_.emplace_back( |
283 tl_factory->Create(i, layers, tl0_pic_idx_[i])); | |
281 } | 284 } |
282 } | 285 } |
283 } | 286 } |
284 | 287 |
285 int VP8EncoderImpl::InitEncode(const VideoCodec* inst, | 288 int VP8EncoderImpl::InitEncode(const VideoCodec* inst, |
286 int number_of_cores, | 289 int number_of_cores, |
287 size_t /*maxPayloadSize */) { | 290 size_t /*maxPayloadSize */) { |
288 if (inst == NULL) { | 291 if (inst == NULL) { |
289 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 292 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
290 } | 293 } |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
327 timestamp_ = 0; | 330 timestamp_ = 0; |
328 codec_ = *inst; | 331 codec_ = *inst; |
329 | 332 |
330 // Code expects simulcastStream resolutions to be correct, make sure they are | 333 // Code expects simulcastStream resolutions to be correct, make sure they are |
331 // filled even when there are no simulcast layers. | 334 // filled even when there are no simulcast layers. |
332 if (codec_.numberOfSimulcastStreams == 0) { | 335 if (codec_.numberOfSimulcastStreams == 0) { |
333 codec_.simulcastStream[0].width = codec_.width; | 336 codec_.simulcastStream[0].width = codec_.width; |
334 codec_.simulcastStream[0].height = codec_.height; | 337 codec_.simulcastStream[0].height = codec_.height; |
335 } | 338 } |
336 | 339 |
337 picture_id_.resize(number_of_streams); | |
338 last_key_frame_picture_id_.resize(number_of_streams); | 340 last_key_frame_picture_id_.resize(number_of_streams); |
339 encoded_images_.resize(number_of_streams); | 341 encoded_images_.resize(number_of_streams); |
340 encoders_.resize(number_of_streams); | 342 encoders_.resize(number_of_streams); |
341 configurations_.resize(number_of_streams); | 343 configurations_.resize(number_of_streams); |
342 downsampling_factors_.resize(number_of_streams); | 344 downsampling_factors_.resize(number_of_streams); |
343 raw_images_.resize(number_of_streams); | 345 raw_images_.resize(number_of_streams); |
344 send_stream_.resize(number_of_streams); | 346 send_stream_.resize(number_of_streams); |
345 send_stream_[0] = true; // For non-simulcast case. | 347 send_stream_[0] = true; // For non-simulcast case. |
346 cpu_speed_.resize(number_of_streams); | 348 cpu_speed_.resize(number_of_streams); |
347 std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); | 349 std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); |
348 | 350 |
349 int idx = number_of_streams - 1; | 351 int idx = number_of_streams - 1; |
350 for (int i = 0; i < (number_of_streams - 1); ++i, --idx) { | 352 for (int i = 0; i < (number_of_streams - 1); ++i, --idx) { |
351 int gcd = GCD(inst->simulcastStream[idx].width, | 353 int gcd = GCD(inst->simulcastStream[idx].width, |
352 inst->simulcastStream[idx - 1].width); | 354 inst->simulcastStream[idx - 1].width); |
353 downsampling_factors_[i].num = inst->simulcastStream[idx].width / gcd; | 355 downsampling_factors_[i].num = inst->simulcastStream[idx].width / gcd; |
354 downsampling_factors_[i].den = inst->simulcastStream[idx - 1].width / gcd; | 356 downsampling_factors_[i].den = inst->simulcastStream[idx - 1].width / gcd; |
355 send_stream_[i] = false; | 357 send_stream_[i] = false; |
356 } | 358 } |
357 if (number_of_streams > 1) { | 359 if (number_of_streams > 1) { |
358 send_stream_[number_of_streams - 1] = false; | 360 send_stream_[number_of_streams - 1] = false; |
359 downsampling_factors_[number_of_streams - 1].num = 1; | 361 downsampling_factors_[number_of_streams - 1].num = 1; |
360 downsampling_factors_[number_of_streams - 1].den = 1; | 362 downsampling_factors_[number_of_streams - 1].den = 1; |
361 } | 363 } |
362 for (int i = 0; i < number_of_streams; ++i) { | 364 for (int i = 0; i < number_of_streams; ++i) { |
363 // Random start, 16 bits is enough. | |
364 picture_id_[i] = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT | |
365 last_key_frame_picture_id_[i] = -1; | 365 last_key_frame_picture_id_[i] = -1; |
366 // allocate memory for encoded image | 366 // allocate memory for encoded image |
367 if (encoded_images_[i]._buffer != NULL) { | 367 if (encoded_images_[i]._buffer != NULL) { |
368 delete[] encoded_images_[i]._buffer; | 368 delete[] encoded_images_[i]._buffer; |
369 } | 369 } |
370 encoded_images_[i]._size = | 370 encoded_images_[i]._size = |
371 CalcBufferSize(kI420, codec_.width, codec_.height); | 371 CalcBufferSize(kI420, codec_.width, codec_.height); |
372 encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size]; | 372 encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size]; |
373 encoded_images_[i]._completeFrame = true; | 373 encoded_images_[i]._completeFrame = true; |
374 } | 374 } |
(...skipping 777 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1152 buffer_pool_.Release(); | 1152 buffer_pool_.Release(); |
1153 inited_ = false; | 1153 inited_ = false; |
1154 return WEBRTC_VIDEO_CODEC_OK; | 1154 return WEBRTC_VIDEO_CODEC_OK; |
1155 } | 1155 } |
1156 | 1156 |
1157 const char* VP8DecoderImpl::ImplementationName() const { | 1157 const char* VP8DecoderImpl::ImplementationName() const { |
1158 return "libvpx"; | 1158 return "libvpx"; |
1159 } | 1159 } |
1160 | 1160 |
1161 } // namespace webrtc | 1161 } // namespace webrtc |
OLD | NEW |