Index: webrtc/video/video_send_stream.cc
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index 31ba5ee31c0aaa537e51f6ea5f0b13f977748361..4487f83d8466caea034c17c478fe3c484d3a41ff 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -229,42 +229,38 @@ VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
   switch (video_codec.codecType) {
     case kVideoCodecVP8: {
       if (config.encoder_specific_settings) {
-        video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
+        *(video_codec.VP8()) = *reinterpret_cast<const VideoCodecVP8*>(
             config.encoder_specific_settings);
       } else {
-        video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
+        *(video_codec.VP8()) = VideoEncoder::GetDefaultVp8Settings();
       }
-      video_codec.codecSpecific.VP8.numberOfTemporalLayers =
-          static_cast<unsigned char>(
-              streams.back().temporal_layer_thresholds_bps.size() + 1);
+      video_codec.VP8()->numberOfTemporalLayers = static_cast<unsigned char>(
+          streams.back().temporal_layer_thresholds_bps.size() + 1);
       break;
     }
     case kVideoCodecVP9: {
       if (config.encoder_specific_settings) {
-        video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
+        *(video_codec.VP9()) = *reinterpret_cast<const VideoCodecVP9*>(
             config.encoder_specific_settings);
         if (video_codec.mode == kScreensharing) {
-          video_codec.codecSpecific.VP9.flexibleMode = true;
+          video_codec.VP9()->flexibleMode = true;
           // For now VP9 screensharing use 1 temporal and 2 spatial layers.
-          RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers,
-                        1);
-          RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
+          RTC_DCHECK_EQ(video_codec.VP9()->numberOfTemporalLayers, 1);
+          RTC_DCHECK_EQ(video_codec.VP9()->numberOfSpatialLayers, 2);
         }
       } else {
-        video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
+        *(video_codec.VP9()) = VideoEncoder::GetDefaultVp9Settings();
       }
-      video_codec.codecSpecific.VP9.numberOfTemporalLayers =
-          static_cast<unsigned char>(
-              streams.back().temporal_layer_thresholds_bps.size() + 1);
+      video_codec.VP9()->numberOfTemporalLayers = static_cast<unsigned char>(
+          streams.back().temporal_layer_thresholds_bps.size() + 1);
       break;
     }
     case kVideoCodecH264: {
       if (config.encoder_specific_settings) {
-        video_codec.codecSpecific.H264 =
-            *reinterpret_cast<const VideoCodecH264*>(
-                config.encoder_specific_settings);
+        *(video_codec.H264()) = *reinterpret_cast<const VideoCodecH264*>(
+            config.encoder_specific_settings);
       } else {
-        video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
+        *(video_codec.H264()) = VideoEncoder::GetDefaultH264Settings();
       }
       break;
     }
@@ -288,8 +284,8 @@ VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
   // If the vector is empty, bitrates will be configured automatically.
   RTC_DCHECK(config.spatial_layers.empty() ||
              config.spatial_layers.size() ==
-                 video_codec.codecSpecific.VP9.numberOfSpatialLayers);
-  RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
+                 video_codec.VP9()->numberOfSpatialLayers);
+  RTC_DCHECK_LE(video_codec.VP9()->numberOfSpatialLayers,
                 kMaxSimulcastStreams);
  for (size_t i = 0; i < config.spatial_layers.size(); ++i)
    video_codec.spatialLayers[i] = config.spatial_layers[i];
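
For context, the call sites above move from poking the codecSpecific union members directly to going through per-codec accessors on VideoCodec (VP8(), VP9(), H264()) that return a mutable pointer to the codec-specific settings, so a caller can either assign a whole settings struct or set individual fields through the pointer. The sketch below is only an assumption about the shape such accessors could take; the field lists, the unnamed union, and the codec_specific_ member name are illustrative and not the actual WebRTC declarations.

// Minimal sketch of the accessor shape the new call sites rely on.
// Field lists and member names here are illustrative only.
struct VideoCodecVP8 { unsigned char numberOfTemporalLayers; };
struct VideoCodecVP9 {
  bool flexibleMode;
  unsigned char numberOfTemporalLayers;
  unsigned char numberOfSpatialLayers;
};
struct VideoCodecH264 {};

class VideoCodec {
 public:
  // Each accessor hands out a pointer into the codec-specific storage, so a
  // caller can assign a whole settings struct (*codec.VP8() = defaults) or
  // tweak one field (codec.VP8()->numberOfTemporalLayers = 2).
  VideoCodecVP8* VP8() { return &codec_specific_.VP8; }
  VideoCodecVP9* VP9() { return &codec_specific_.VP9; }
  VideoCodecH264* H264() { return &codec_specific_.H264; }

 private:
  union {
    VideoCodecVP8 VP8;
    VideoCodecVP9 VP9;
    VideoCodecH264 H264;
  } codec_specific_;
};

Under that assumption, the diff is a mechanical rewrite: every read or write of video_codec.codecSpecific.X becomes a dereference of video_codec.X(), which also lets the longer statements fold back onto fewer wrapped lines.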