Index: webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 2fbceb1d7904b8a5ff7714a30fcce6afe1bf9266..99413ddc0cefd103ad7070ea71b5a8455db10398 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -289,7 +289,7 @@ void VP8EncoderImpl::SetupTemporalLayers(int num_streams,
 int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
                                int number_of_cores,
                                size_t /*maxPayloadSize */) {
-  if (inst == NULL) {
+  if (inst == nullptr) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (inst->maxFramerate < 1) {
@@ -373,7 +373,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
     picture_id_[i] = static_cast<uint16_t>(rand()) & 0x7FFF;  // NOLINT
     last_key_frame_picture_id_[i] = -1;
     // allocate memory for encoded image
-    if (encoded_images_[i]._buffer != NULL) {
+    if (encoded_images_[i]._buffer != nullptr) {
       delete[] encoded_images_[i]._buffer;
     }
     encoded_images_[i]._size =
@@ -476,11 +476,11 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
   configurations_[0].g_threads = NumberOfThreads(
       configurations_[0].g_w, configurations_[0].g_h, number_of_cores);
 
-  // Creating a wrapper to the image - setting image data to NULL.
+  // Creating a wrapper to the image - setting image data to null.
   // Actual pointer will be set in encode. Setting align to 1, as it
   // is meaningless (no memory allocation is done here).
   vpx_img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width, inst->height, 1,
-               NULL);
+               nullptr);
 
   // Note the order we use is different from webm, we have lowest resolution
   // at position 0 and they have highest resolution at position 0.
@@ -660,7 +660,7 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
   if (!inited_)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  if (encoded_complete_callback_ == NULL)
+  if (encoded_complete_callback_ == nullptr)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
 
   rtc::scoped_refptr<VideoFrameBuffer> input_image = frame.video_frame_buffer();
@@ -863,7 +863,7 @@ void VP8EncoderImpl::PopulateCodecSpecific(
     int stream_idx,
     uint32_t timestamp,
     bool only_predicting_from_key_frame) {
-  assert(codec_specific != NULL);
+  assert(codec_specific != nullptr);
   codec_specific->codecType = kVideoCodecVP8;
   codec_specific->codec_name = ImplementationName();
   CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8);
@@ -892,7 +892,7 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
   int result = WEBRTC_VIDEO_CODEC_OK;
   for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
        ++encoder_idx, --stream_idx) {
-    vpx_codec_iter_t iter = NULL;
+    vpx_codec_iter_t iter = nullptr;
     int part_idx = 0;
     encoded_images_[encoder_idx]._length = 0;
     encoded_images_[encoder_idx]._frameType = kVideoFrameDelta;
@@ -901,9 +901,9 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
     frag_info.VerifyAndAllocateFragmentationHeader((1 << token_partitions_) +
                                                    1);
     CodecSpecificInfo codec_specific;
-    const vpx_codec_cx_pkt_t* pkt = NULL;
+    const vpx_codec_cx_pkt_t* pkt = nullptr;
     while ((pkt = vpx_codec_get_cx_data(&encoders_[encoder_idx], &iter)) !=
-           NULL) {
+           nullptr) {
       switch (pkt->kind) {
         case VPX_CODEC_CX_FRAME_PKT: {
           size_t length = encoded_images_[encoder_idx]._length;
@@ -997,12 +997,12 @@ int VP8EncoderImpl::RegisterEncodeCompleteCallback(
 
 VP8DecoderImpl::VP8DecoderImpl()
     : buffer_pool_(false, 300 /* max_number_of_buffers*/),
-      decode_complete_callback_(NULL),
+      decode_complete_callback_(nullptr),
       inited_(false),
       feedback_mode_(false),
-      decoder_(NULL),
+      decoder_(nullptr),
       image_format_(VPX_IMG_FMT_NONE),
-      ref_frame_(NULL),
+      ref_frame_(nullptr),
       propagation_cnt_(-1),
       last_frame_width_(0),
       last_frame_height_(0),
@@ -1018,7 +1018,7 @@ int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
   if (ret_val < 0) {
     return ret_val;
   }
-  if (decoder_ == NULL) {
+  if (decoder_ == nullptr) {
     decoder_ = new vpx_codec_ctx_t;
     memset(decoder_, 0, sizeof(*decoder_));
   }
@@ -1062,10 +1062,10 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  if (decode_complete_callback_ == NULL) {
+  if (decode_complete_callback_ == nullptr) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  if (input_image._buffer == NULL && input_image._length > 0) {
+  if (input_image._buffer == nullptr && input_image._length > 0) {
     // Reset to avoid requesting key frames too often.
     if (propagation_cnt_ > 0)
       propagation_cnt_ = 0;
@@ -1114,26 +1114,26 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
     }
   }
 
-  vpx_codec_iter_t iter = NULL;
+  vpx_codec_iter_t iter = nullptr;
   vpx_image_t* img;
   int ret;
 
   // Check for missing frames.
   if (missing_frames) {
     // Call decoder with zero data length to signal missing frames.
-    if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME)) {
+    if (vpx_codec_decode(decoder_, nullptr, 0, 0, VPX_DL_REALTIME)) {
       // Reset to avoid requesting key frames too often.
       if (propagation_cnt_ > 0)
        propagation_cnt_ = 0;
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
     img = vpx_codec_get_frame(decoder_, &iter);
-    iter = NULL;
+    iter = nullptr;
   }
 
   uint8_t* buffer = input_image._buffer;
   if (input_image._length == 0) {
-    buffer = NULL;  // Triggers full frame concealment.
+    buffer = nullptr;  // Triggers full frame concealment.
   }
   if (vpx_codec_decode(decoder_, buffer, input_image._length, 0,
                        VPX_DL_REALTIME)) {
@@ -1206,8 +1206,8 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
 int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
                                 uint32_t timestamp,
                                 int64_t ntp_time_ms) {
-  if (img == NULL) {
-    // Decoder OK and NULL image => No show frame
+  if (img == nullptr) {
+    // Decoder OK and null image => No show frame
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
   }
   last_frame_width_ = img->d_w;
@@ -1248,17 +1248,17 @@ int VP8DecoderImpl::RegisterDecodeCompleteCallback(
 }
 
 int VP8DecoderImpl::Release() {
-  if (decoder_ != NULL) {
+  if (decoder_ != nullptr) {
     if (vpx_codec_destroy(decoder_)) {
       return WEBRTC_VIDEO_CODEC_MEMORY;
     }
     delete decoder_;
-    decoder_ = NULL;
+    decoder_ = nullptr;
   }
-  if (ref_frame_ != NULL) {
+  if (ref_frame_ != nullptr) {
     vpx_img_free(&ref_frame_->img);
     delete ref_frame_;
-    ref_frame_ = NULL;
+    ref_frame_ = nullptr;
   }
   buffer_pool_.Release();
   inited_ = false;