Index: webrtc/api/java/jni/androidvideocapturer_jni.cc
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
index 0abc9c0d51b9c987df660924b8b9a2a6f1006288..8fd145a1b031c14c524c75def3e5828619594453 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.cc
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -179,10 +179,14 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
   int crop_height;
   int crop_x;
   int crop_y;
+  int64_t translated_camera_time_us;
 
-  if (!capturer_->AdaptFrame(width, height, timestamp_ns,
+  if (!capturer_->AdaptFrame(width, height,
+                             timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+                             rtc::TimeMicros(),
                              &adapted_width, &adapted_height,
-                             &crop_width, &crop_height, &crop_x, &crop_y)) {
+                             &crop_width, &crop_height, &crop_x, &crop_y,
+                             &translated_camera_time_us)) {
     return;
   }
 
@@ -235,12 +239,12 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
     }
     buffer = scaled;
   }
-  // TODO(nisse): Use microsecond time instead.
   capturer_->OnFrame(cricket::WebRtcVideoFrame(
-                         buffer, timestamp_ns,
+                         buffer,
                          capturer_->apply_rotation()
                              ? webrtc::kVideoRotation_0
-                             : static_cast<webrtc::VideoRotation>(rotation)),
+                             : static_cast<webrtc::VideoRotation>(rotation),
+                         translated_camera_time_us),
                      width, height);
 }
 
@@ -259,10 +263,14 @@ void AndroidVideoCapturerJni::OnTextureFrame(int width,
   int crop_height;
   int crop_x;
   int crop_y;
+  int64_t translated_camera_time_us;
 
-  if (!capturer_->AdaptFrame(width, height, timestamp_ns,
+  if (!capturer_->AdaptFrame(width, height,
+                             timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+                             rtc::TimeMicros(),
                              &adapted_width, &adapted_height,
-                             &crop_width, &crop_height, &crop_x, &crop_y)) {
+                             &crop_width, &crop_height, &crop_x, &crop_y,
+                             &translated_camera_time_us)) {
     surface_texture_helper_->ReturnTextureFrame();
     return;
   }
@@ -282,15 +290,15 @@ void AndroidVideoCapturerJni::OnTextureFrame(int width,
     matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
   }
 
-  // TODO(nisse): Use microsecond time instead.
   capturer_->OnFrame(
       cricket::WebRtcVideoFrame(
           surface_texture_helper_->CreateTextureFrame(
              adapted_width, adapted_height,
              NativeHandleImpl(handle.oes_texture_id, matrix)),
-          timestamp_ns, capturer_->apply_rotation()
-                            ? webrtc::kVideoRotation_0
-                            : static_cast<webrtc::VideoRotation>(rotation)),
+          capturer_->apply_rotation()
+              ? webrtc::kVideoRotation_0
+              : static_cast<webrtc::VideoRotation>(rotation),
+          translated_camera_time_us),
       width, height);
 }
 
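Note (not part of the patch): a minimal, self-contained sketch of the timestamp flow the hunks above introduce. The camera timestamp arrives in nanoseconds, is converted to microseconds, and is passed to AdaptFrame() together with the current system time; AdaptFrame() then returns a translated capture time through an out parameter, and that value, rather than the raw nanosecond stamp, is used when constructing the frame. kNumNanosecsPerMicrosec, TimeMicros() and FakeAdaptFrame() below are local stand-ins for the WebRTC equivalents, and the anchoring logic inside FakeAdaptFrame() is an illustrative assumption, not WebRTC's actual clock translation.

#include <chrono>
#include <cstdint>
#include <cstdio>

// Local stand-ins for rtc::kNumNanosecsPerMicrosec and rtc::TimeMicros().
constexpr int64_t kNumNanosecsPerMicrosec = 1000;

int64_t TimeMicros() {
  using namespace std::chrono;
  return duration_cast<microseconds>(
             steady_clock::now().time_since_epoch()).count();
}

// Hypothetical adapter: translates the camera clock onto the local clock by
// anchoring the first camera timestamp to the first system timestamp. The
// real AdaptFrame() also handles cropping, scaling, and frame dropping.
bool FakeAdaptFrame(int64_t camera_time_us, int64_t system_time_us,
                    int64_t* translated_camera_time_us) {
  static const int64_t first_camera_us = camera_time_us;
  static const int64_t first_system_us = system_time_us;
  *translated_camera_time_us =
      first_system_us + (camera_time_us - first_camera_us);
  return true;  // A real adapter may instead drop the frame.
}

int main() {
  int64_t timestamp_ns = 1234567890123;  // Example camera timestamp (ns).
  int64_t translated_camera_time_us = 0;
  // Same shape as the patched call sites: nanoseconds -> microseconds,
  // paired with the current system time.
  if (FakeAdaptFrame(timestamp_ns / kNumNanosecsPerMicrosec, TimeMicros(),
                     &translated_camera_time_us)) {
    std::printf("translated capture time: %lld us\n",
                static_cast<long long>(translated_camera_time_us));
  }
  return 0;
}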