OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #include "webrtc/modules/video_render/android/video_render_android_impl.h" |
| 12 |
| 13 #include "webrtc/modules/video_render/video_render_internal.h" |
| 14 #include "webrtc/system_wrappers/include/critical_section_wrapper.h" |
| 15 #include "webrtc/system_wrappers/include/event_wrapper.h" |
| 16 #include "webrtc/system_wrappers/include/tick_util.h" |
| 17 |
| 18 #ifdef ANDROID |
| 19 #include <android/log.h> |
| 20 #include <stdio.h> |
| 21 |
| 22 #undef WEBRTC_TRACE |
| 23 #define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC
N*", __VA_ARGS__) |
| 24 #else |
| 25 #include "webrtc/system_wrappers/include/trace.h" |
| 26 #endif |
| 27 |
| 28 namespace webrtc { |
| 29 |
// Process-wide Java VM pointer; render threads use it to attach themselves
// to the JVM before making JNI calls.
JavaVM* VideoRenderAndroid::g_jvm = NULL;

// Stores the Java VM for all Android renderers. Must be called before any
// render thread is started. Always returns 0.
int32_t SetRenderAndroidVM(JavaVM* javaVM) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
  VideoRenderAndroid::g_jvm = javaVM;
  return 0;
}
| 37 |
// Constructs the renderer. |window| is expected to be a jobject handle to the
// Android rendering surface. The critical section and both events are
// heap-allocated here and stored as references; the destructor frees them.
// The fullscreen flag is ignored on Android.
VideoRenderAndroid::VideoRenderAndroid(
    const int32_t id,
    const VideoRenderType videoRenderType,
    void* window,
    const bool /*fullscreen*/):
    _id(id),
    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _renderType(videoRenderType),
    _ptrWindow((jobject)(window)),
    _javaShutDownFlag(false),
    _javaShutdownEvent(*EventWrapper::Create()),
    _javaRenderEvent(*EventWrapper::Create()),
    _lastJavaRenderEvent(0),
    _javaRenderJniEnv(NULL) {
}
| 53 |
// Destructor: stops the render thread if it is running, destroys all owned
// render streams, and frees the synchronization primitives created in the
// constructor.
VideoRenderAndroid::~VideoRenderAndroid() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
               "VideoRenderAndroid dtor");

  if (_javaRenderThread)
    StopRender();

  // The map owns its AndroidStream values; delete each one.
  for (AndroidStreamMap::iterator it = _streamsMap.begin();
       it != _streamsMap.end();
       ++it) {
    delete it->second;
  }
  // These members are references to heap objects allocated in the
  // constructor, so delete through their addresses.
  delete &_javaShutdownEvent;
  delete &_javaRenderEvent;
  delete &_critSect;
}
| 70 |
// Changing the render window after construction is not supported on
// Android; always fails.
int32_t VideoRenderAndroid::ChangeWindow(void* /*window*/) {
  return -1;
}
| 74 |
| 75 VideoRenderCallback* |
| 76 VideoRenderAndroid::AddIncomingRenderStream(const uint32_t streamId, |
| 77 const uint32_t zOrder, |
| 78 const float left, const float top, |
| 79 const float right, |
| 80 const float bottom) { |
| 81 CriticalSectionScoped cs(&_critSect); |
| 82 |
| 83 AndroidStream* renderStream = NULL; |
| 84 AndroidStreamMap::iterator item = _streamsMap.find(streamId); |
| 85 if (item != _streamsMap.end() && item->second != NULL) { |
| 86 WEBRTC_TRACE(kTraceInfo, |
| 87 kTraceVideoRenderer, |
| 88 -1, |
| 89 "%s: Render stream already exists", |
| 90 __FUNCTION__); |
| 91 return renderStream; |
| 92 } |
| 93 |
| 94 renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top, |
| 95 right, bottom, *this); |
| 96 if (renderStream) { |
| 97 _streamsMap[streamId] = renderStream; |
| 98 } |
| 99 else { |
| 100 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, |
| 101 "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__); |
| 102 return NULL; |
| 103 } |
| 104 return renderStream; |
| 105 } |
| 106 |
| 107 int32_t VideoRenderAndroid::DeleteIncomingRenderStream( |
| 108 const uint32_t streamId) { |
| 109 CriticalSectionScoped cs(&_critSect); |
| 110 |
| 111 AndroidStreamMap::iterator item = _streamsMap.find(streamId); |
| 112 if (item == _streamsMap.end()) { |
| 113 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, |
| 114 "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__); |
| 115 return -1; |
| 116 } |
| 117 delete item->second; |
| 118 _streamsMap.erase(item); |
| 119 return 0; |
| 120 } |
| 121 |
// Querying stream properties is not implemented on Android; the output
// parameters are left untouched and the call always fails.
int32_t VideoRenderAndroid::GetIncomingRenderStreamProperties(
    const uint32_t streamId,
    uint32_t& zOrder,
    float& left,
    float& top,
    float& right,
    float& bottom) const {
  return -1;
}
| 131 |
// Starts the shared Java render thread. Idempotent: if the thread already
// exists this is a no-op returning 0.
int32_t VideoRenderAndroid::StartRender() {
  CriticalSectionScoped cs(&_critSect);

  if (_javaRenderThread) {
    // StartRender is called when this stream should start render.
    // However StopRender is not called when the streams stop rendering.
    // Thus the thread is only deleted when the renderer is removed.
    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
                 "%s, Render thread already exist", __FUNCTION__);
    return 0;
  }

  _javaRenderThread.reset(new rtc::PlatformThread(JavaRenderThreadFun, this,
                                                  "AndroidRenderThread"));

  _javaRenderThread->Start();
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: thread started",
               __FUNCTION__);
  // Priority is raised after Start(); rendering is time-critical.
  _javaRenderThread->SetPriority(rtc::kRealtimePriority);
  return 0;
}
| 153 |
// Stops the Java render thread. Returns -1 if no thread is running.
// Locking is deliberate: the flag and wake-up are set under the lock, the
// lock is then RELEASED so the render thread can acquire it, run its
// shutdown path and signal _javaShutdownEvent; only afterwards is the lock
// re-taken to stop and reset the thread object.
int32_t VideoRenderAndroid::StopRender() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
  {
    CriticalSectionScoped cs(&_critSect);
    if (!_javaRenderThread)
    {
      return -1;
    }
    _javaShutDownFlag = true;
    _javaRenderEvent.Set();
  }

  // Wait (bounded, 3 s) for the render thread to detach from the JVM.
  _javaShutdownEvent.Wait(3000);
  CriticalSectionScoped cs(&_critSect);
  _javaRenderThread->Stop();
  _javaRenderThread.reset();

  return 0;
}
| 173 |
| 174 void VideoRenderAndroid::ReDraw() { |
| 175 CriticalSectionScoped cs(&_critSect); |
| 176 // Allow redraw if it was more than 20ms since last. |
| 177 if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) { |
| 178 _lastJavaRenderEvent = TickTime::MillisecondTimestamp(); |
| 179 _javaRenderEvent.Set(); |
| 180 } |
| 181 } |
| 182 |
| 183 bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) { |
| 184 return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess(); |
| 185 } |
| 186 |
// One iteration of the render-thread loop. Lazily attaches the thread to
// the JVM on first use, delivers a frame to every registered stream, and
// detaches/terminates when StopRender() has raised _javaShutDownFlag.
// Returns true to run again, false to end the thread.
bool VideoRenderAndroid::JavaRenderThreadProcess()
{
  // Wake on a render/redraw request, or at the latest after one second.
  _javaRenderEvent.Wait(1000);

  CriticalSectionScoped cs(&_critSect);
  if (!_javaRenderJniEnv) {
    // try to attach the thread and get the env
    // Attach this thread to JVM
    jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);

    // Get the JNI env for this thread
    if ((res < 0) || !_javaRenderJniEnv) {
      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, _javaRenderJniEnv);
      // Attach failed: end the thread (no JNIEnv means no rendering).
      return false;
    }
  }

  // Deliver one frame per registered stream using this thread's JNIEnv.
  for (AndroidStreamMap::iterator it = _streamsMap.begin();
       it != _streamsMap.end();
       ++it) {
    it->second->DeliverFrame(_javaRenderJniEnv);
  }

  if (_javaShutDownFlag) {
    // StopRender() requested shutdown: detach from the JVM, clear state and
    // signal the waiting thread before terminating.
    if (g_jvm->DetachCurrentThread() < 0)
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    else {
      WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
                   "%s: Java thread detached", __FUNCTION__);
    }
    _javaRenderJniEnv = NULL;
    _javaShutDownFlag = false;
    _javaShutdownEvent.Set();
    return false; // Do not run this thread again.
  }
  return true;
}
| 227 |
// Returns the render type this renderer was constructed with.
VideoRenderType VideoRenderAndroid::RenderType() {
  return _renderType;
}
| 231 |
// Preferred raw frame format for delivery on Android: I420.
// (Name keeps the historical "Perfered" spelling of the interface.)
RawVideoType VideoRenderAndroid::PerferedVideoType() {
  return kVideoI420;
}
| 235 |
// Full-screen rendering is never reported on Android.
bool VideoRenderAndroid::FullScreen() {
  return false;
}
| 239 |
// Not supported on Android; output parameters are untouched, always -1.
int32_t VideoRenderAndroid::GetGraphicsMemory(
    uint64_t& /*totalGraphicsMemory*/,
    uint64_t& /*availableGraphicsMemory*/) const {
  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
               "%s - not supported on Android", __FUNCTION__);
  return -1;
}
| 247 |
// Not supported on Android; output parameters are untouched, always -1.
int32_t VideoRenderAndroid::GetScreenResolution(
    uint32_t& /*screenWidth*/,
    uint32_t& /*screenHeight*/) const {
  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
               "%s - not supported on Android", __FUNCTION__);
  return -1;
}
| 255 |
// Not supported on Android. Note the return type is uint32_t, so the -1
// reaches callers as UINT32_MAX.
uint32_t VideoRenderAndroid::RenderFrameRate(
    const uint32_t /*streamId*/) {
  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
               "%s - not supported on Android", __FUNCTION__);
  return -1;
}
| 262 |
// Stream cropping is not supported on Android; always fails.
int32_t VideoRenderAndroid::SetStreamCropping(
    const uint32_t /*streamId*/,
    const float /*left*/,
    const float /*top*/,
    const float /*right*/,
    const float /*bottom*/) {
  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
               "%s - not supported on Android", __FUNCTION__);
  return -1;
}
| 273 |
// Transparent backgrounds are not supported on Android; always fails.
int32_t VideoRenderAndroid::SetTransparentBackground(const bool enable) {
  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
               "%s - not supported on Android", __FUNCTION__);
  return -1;
}
| 279 |
// Reconfiguring an existing stream is not supported on Android; always
// fails.
int32_t VideoRenderAndroid::ConfigureRenderer(
    const uint32_t streamId,
    const unsigned int zOrder,
    const float left,
    const float top,
    const float right,
    const float bottom) {
  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
               "%s - not supported on Android", __FUNCTION__);
  return -1;
}
| 291 |
// Text overlays are not supported on Android; always fails.
int32_t VideoRenderAndroid::SetText(
    const uint8_t textId,
    const uint8_t* text,
    const int32_t textLength,
    const uint32_t textColorRef,
    const uint32_t backgroundColorRef,
    const float left, const float top,
    const float rigth, const float bottom) {
  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
               "%s - not supported on Android", __FUNCTION__);
  return -1;
}
| 304 |
// Bitmap overlays are not supported on Android; always fails.
int32_t VideoRenderAndroid::SetBitmap(const void* bitMap,
                                      const uint8_t pictureId,
                                      const void* colorKey,
                                      const float left, const float top,
                                      const float right,
                                      const float bottom) {
  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
               "%s - not supported on Android", __FUNCTION__);
  return -1;
}
| 315 |
| 316 } // namespace webrtc |
OLD | NEW |