webrtc_video_encoder_gpu.cc

// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/codec/webrtc_video_encoder_gpu.h"

#include <limits>
#include <memory>
#include <utility>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/containers/flat_map.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/numerics/checked_math.h"
#include "base/task/bind_post_task.h"
#include "base/task/task_traits.h"
#include "base/task/thread_pool.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_preferences.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/media_log.h"
#include "media/base/video_frame.h"
#include "media/gpu/gpu_video_encode_accelerator_factory.h"
#include "media/video/video_encode_accelerator.h"
#include "remoting/base/constants.h"
#include "remoting/codec/encoder_bitrate_filter.h"
#include "third_party/libyuv/include/libyuv/convert_from_argb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"

#if BUILDFLAG(IS_WIN)
#include "base/win/scoped_com_initializer.h"
#include "media/gpu/windows/media_foundation_video_encode_accelerator_win.h"
#endif

namespace {

using media::VideoCodecProfile;
using media::VideoFrame;
using media::VideoPixelFormat;

// Currently, the WebrtcVideoEncoderWrapper only encodes a single frame at a
// time. Thus, there's no reason to have this set to anything greater than one.
const int kWebrtcVideoEncoderGpuOutputBufferCount = 1;

constexpr VideoCodecProfile kH264Profile = VideoCodecProfile::H264PROFILE_MAIN;

constexpr int kH264MinimumTargetBitrateKbpsPerMegapixel = 1800;

gpu::GpuPreferences CreateGpuPreferences() {
  gpu::GpuPreferences gpu_preferences;
#if BUILDFLAG(IS_WIN)
  gpu_preferences.enable_media_foundation_vea_on_windows7 = true;
#endif
  return gpu_preferences;
}

gpu::GpuDriverBugWorkarounds CreateGpuWorkarounds() {
  gpu::GpuDriverBugWorkarounds gpu_workarounds;
  return gpu_workarounds;
}

gpu::GPUInfo::GPUDevice CreateGpuDevice() {
  gpu::GPUInfo::GPUDevice device;
  return device;
}

struct OutputBuffer {
  base::UnsafeSharedMemoryRegion region;
  base::WritableSharedMemoryMapping mapping;

  bool IsValid();
};

bool OutputBuffer::IsValid() {
  return region.IsValid() && mapping.IsValid();
}

}  // namespace

namespace remoting {

// WebrtcVideoEncoderGpu::Core handles the initialization, usage, and teardown
// of a VideoEncodeAccelerator object which is used to encode desktop frames
// for presentation on the client.
//
// A brief explanation of how this class is initialized:
// 1. An instance of WebrtcVideoEncoderGpu is created using the static
//    CreateForH264() function. At this point its |core_| member (an instance
//    of this class) is created with a state of UNINITIALIZED. After this
//    point, WebrtcVideoEncoderGpu will forward all Encode calls to its
//    |core_| member.
// 2. On the first encode call, the incoming DesktopFrame's dimensions are
//    stored and the Encode params are saved in |pending_encode_|. Before
//    returning, BeginInitialization() is called.
// 3. In BeginInitialization(), the Core instance constructs the
//    VideoEncodeAccelerator using the saved dimensions from the DesktopFrame.
//    If the VideoEncodeAccelerator is constructed successfully, the state is
//    set to INITIALIZING. If not, the state is set to INITIALIZATION_ERROR.
// 4. Some time later, the VideoEncodeAccelerator sets itself up and is ready
//    to encode. At this point, it calls the Core instance's
//    RequireBitstreamBuffers() method. Once bitstream buffers are allocated,
//    the state is INITIALIZED.
class WebrtcVideoEncoderGpu::Core
    : public WebrtcVideoEncoder,
      public media::VideoEncodeAccelerator::Client {
 public:
  explicit Core(media::VideoCodecProfile codec_profile);
  Core(const Core&) = delete;
  Core& operator=(const Core&) = delete;
  ~Core() override;

  // WebrtcVideoEncoder interface.
  void Encode(std::unique_ptr<webrtc::DesktopFrame> frame,
              const FrameParams& params,
              WebrtcVideoEncoder::EncodeCallback done) override;

  // VideoEncodeAccelerator::Client interface.
  void RequireBitstreamBuffers(unsigned int input_count,
                               const gfx::Size& input_coded_size,
                               size_t output_buffer_size) override;
  void BitstreamBufferReady(
      int32_t bitstream_buffer_id,
      const media::BitstreamBufferMetadata& metadata) override;
  void NotifyError(media::VideoEncodeAccelerator::Error error) override;

 private:
  enum State { UNINITIALIZED, INITIALIZING, INITIALIZED, INITIALIZATION_ERROR };

  void BeginInitialization();
  void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id);
  void RunAnyPendingEncode();

#if BUILDFLAG(IS_WIN)
  // This object is required by Chromium to ensure proper init/uninit of COM on
  // this thread. The guidance is to match the lifetime of this object to the
  // lifetime of the thread if possible.
  std::unique_ptr<base::win::ScopedCOMInitializer> scoped_com_initializer_;
#endif

  State state_ = UNINITIALIZED;

  // Only after the first encode request do we know how large the incoming
  // frames will be. Thus, we initialize after the first encode request,
  // postponing the encode until the encoder has been initialized.
  base::OnceClosure pending_encode_;

  std::unique_ptr<media::VideoEncodeAccelerator> video_encode_accelerator_;

  base::TimeDelta previous_timestamp_;

  media::VideoCodecProfile codec_profile_;

  // Shared memory with which the VEA transfers output to us.
  std::vector<std::unique_ptr<OutputBuffer>> output_buffers_;

  gfx::Size input_coded_size_;
  gfx::Size input_visible_size_;

  size_t output_buffer_size_;

  base::flat_map<base::TimeDelta, WebrtcVideoEncoder::EncodeCallback>
      callbacks_;

  EncoderBitrateFilter bitrate_filter_{
      kH264MinimumTargetBitrateKbpsPerMegapixel};

  THREAD_CHECKER(thread_checker_);
};
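
// All real work happens on |hw_encode_task_runner_|, a dedicated thread
// created below; the public WebrtcVideoEncoderGpu methods simply forward to
// |core_|, which lives on (and is destroyed on) that thread.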
WebrtcVideoEncoderGpu::WebrtcVideoEncoderGpu(VideoCodecProfile codec_profile)
    : core_(std::make_unique<WebrtcVideoEncoderGpu::Core>(codec_profile)),
      hw_encode_task_runner_(base::ThreadPool::CreateSingleThreadTaskRunner(
          {base::MayBlock(), base::WithBaseSyncPrimitives(),
           base::TaskPriority::HIGHEST},
          base::SingleThreadTaskRunnerThreadMode::DEDICATED)) {}

WebrtcVideoEncoderGpu::~WebrtcVideoEncoderGpu() {
  hw_encode_task_runner_->DeleteSoon(FROM_HERE, core_.release());
}

void WebrtcVideoEncoderGpu::Encode(std::unique_ptr<webrtc::DesktopFrame> frame,
                                   const FrameParams& params,
                                   WebrtcVideoEncoder::EncodeCallback done) {
  DCHECK(core_);
  DCHECK(frame);
  DCHECK(done);
  DCHECK_GT(params.duration, base::Milliseconds(0));

  // |done| is wrapped with BindPostTask so that it is run back on the sequence
  // which called Encode(), not on the encode thread.
  hw_encode_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(&WebrtcVideoEncoderGpu::Core::Encode,
                     base::Unretained(core_.get()), std::move(frame), params,
                     base::BindPostTask(base::SequencedTaskRunnerHandle::Get(),
                                        std::move(done))));
}
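
// Core is constructed on the caller's sequence but used exclusively on
// |hw_encode_task_runner_|, so the thread checker is detached here and
// re-bound on the first call made on the encode thread.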
WebrtcVideoEncoderGpu::Core::Core(media::VideoCodecProfile codec_profile)
    : codec_profile_(codec_profile) {
  DETACH_FROM_THREAD(thread_checker_);
}

WebrtcVideoEncoderGpu::Core::~Core() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
}

void WebrtcVideoEncoderGpu::Core::Encode(
    std::unique_ptr<webrtc::DesktopFrame> frame,
    const FrameParams& params,
    WebrtcVideoEncoder::EncodeCallback done) {
  TRACE_EVENT0("media", "WebrtcVideoEncoderGpu::Core::Encode");
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);

  bitrate_filter_.SetFrameSize(frame->size().width(), frame->size().height());

  if (state_ == INITIALIZATION_ERROR) {
    // TODO(zijiehe): The screen resolution limitation of the H264 encoder is
    // much smaller (3840x2176) than VP8 (16k x 16k) or VP9 (65k x 65k), so
    // initialization is more likely to fail with the H264 encoder. We should
    // provide a way to tell the WebrtcVideoStream to stop the video stream.
    DLOG(ERROR) << "Encoder failed to initialize; dropping encode request";
    // Initialization fails only when the input frame size exceeds the
    // limitation.
    std::move(done).Run(EncodeResult::FRAME_SIZE_EXCEEDS_CAPABILITY, nullptr);
    return;
  }

  if (state_ == UNINITIALIZED ||
      input_visible_size_.width() != frame->size().width() ||
      input_visible_size_.height() != frame->size().height()) {
    input_visible_size_ =
        gfx::Size(frame->size().width(), frame->size().height());
    pending_encode_ = base::BindOnce(&WebrtcVideoEncoderGpu::Core::Encode,
                                     base::Unretained(this), std::move(frame),
                                     params, std::move(done));
    BeginInitialization();
    return;
  }

  // If we get to this point and state_ != INITIALIZED, we may be attempting to
  // have multiple outstanding encode requests, which is not currently
  // supported. The current assumption is that the WebrtcVideoEncoderWrapper
  // will wait for an Encode to finish before attempting another.
  DCHECK_EQ(state_, INITIALIZED);

  scoped_refptr<VideoFrame> video_frame = VideoFrame::CreateFrame(
      VideoPixelFormat::PIXEL_FORMAT_NV12, input_coded_size_,
      gfx::Rect(input_visible_size_), input_visible_size_, base::TimeDelta());
  base::TimeDelta new_timestamp = previous_timestamp_ + params.duration;
  video_frame->set_timestamp(new_timestamp);
  previous_timestamp_ = new_timestamp;

  // The H264 encoder on Windows uses NV12, so convert here.
  libyuv::ARGBToNV12(frame->data(), frame->stride(),
                     video_frame->data(VideoFrame::kYPlane),
                     video_frame->stride(VideoFrame::kYPlane),
                     video_frame->data(VideoFrame::kUVPlane),
                     video_frame->stride(VideoFrame::kUVPlane),
                     video_frame->visible_rect().width(),
                     video_frame->visible_rect().height());

  callbacks_[video_frame->timestamp()] = std::move(done);

  if (params.bitrate_kbps > 0 && params.fps > 0) {
    // TODO(zijiehe): Forward frame_rate from FrameParams.
    bitrate_filter_.SetBandwidthEstimateKbps(params.bitrate_kbps);
    base::CheckedNumeric<uint32_t> checked_bitrate = base::CheckMul<uint32_t>(
        std::max(bitrate_filter_.GetTargetBitrateKbps(), 0), 1000);
    uint32_t bitrate_bps =
        checked_bitrate.ValueOrDefault(std::numeric_limits<uint32_t>::max());
    video_encode_accelerator_->RequestEncodingParametersChange(
        media::Bitrate::ConstantBitrate(bitrate_bps), params.fps);
  }

  video_encode_accelerator_->Encode(video_frame, params.key_frame);
}
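
// VideoEncodeAccelerator::Client callback. Allocates the shared-memory output
// buffers at the size requested by the encoder, hands them to the VEA, and
// then runs any encode request that was postponed during initialization.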
void WebrtcVideoEncoderGpu::Core::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(state_ == INITIALIZING);

  input_coded_size_ = input_coded_size;
  output_buffer_size_ = output_buffer_size;

  output_buffers_.clear();

  for (unsigned int i = 0; i < kWebrtcVideoEncoderGpuOutputBufferCount; i++) {
    auto output_buffer = std::make_unique<OutputBuffer>();
    output_buffer->region =
        base::UnsafeSharedMemoryRegion::Create(output_buffer_size_);
    output_buffer->mapping = output_buffer->region.Map();
    // TODO(gusss): Do we need to handle mapping failure more gracefully?
    CHECK(output_buffer->IsValid());
    output_buffers_.push_back(std::move(output_buffer));
  }

  for (size_t i = 0; i < output_buffers_.size(); i++) {
    UseOutputBitstreamBufferId(i);
  }

  state_ = INITIALIZED;
  RunAnyPendingEncode();
}
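
// VideoEncodeAccelerator::Client callback. Copies the encoded bitstream out of
// the shared-memory output buffer, returns the buffer to the VEA, and runs the
// EncodeCallback that was stored for this frame's timestamp.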
void WebrtcVideoEncoderGpu::Core::BitstreamBufferReady(
    int32_t bitstream_buffer_id,
    const media::BitstreamBufferMetadata& metadata) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);

  auto encoded_frame = std::make_unique<EncodedFrame>();
  OutputBuffer* output_buffer = output_buffers_[bitstream_buffer_id].get();
  DCHECK(output_buffer->IsValid());
  base::span<uint8_t> data_span =
      output_buffer->mapping.GetMemoryAsSpan<uint8_t>(
          metadata.payload_size_bytes);
  encoded_frame->data =
      webrtc::EncodedImageBuffer::Create(data_span.data(), data_span.size());
  encoded_frame->key_frame = metadata.key_frame;
  encoded_frame->dimensions = {input_coded_size_.width(),
                               input_coded_size_.height()};
  encoded_frame->quantizer = 0;
  encoded_frame->codec = webrtc::kVideoCodecH264;

  UseOutputBitstreamBufferId(bitstream_buffer_id);

  auto callback_it = callbacks_.find(metadata.timestamp);
  DCHECK(callback_it != callbacks_.end())
      << "Callback not found for timestamp " << metadata.timestamp;
  std::move(callback_it->second)
      .Run(EncodeResult::SUCCEEDED, std::move(encoded_frame));
  callbacks_.erase(metadata.timestamp);
}
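
// VideoEncodeAccelerator::Client callback. The error is currently only logged;
// it is not otherwise propagated from here.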
void WebrtcVideoEncoderGpu::Core::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  LOG(ERROR) << __func__ << " error: " << error;
}
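
// Creates and configures the VideoEncodeAccelerator using the frame
// dimensions captured from the most recent DesktopFrame. On failure the state
// becomes INITIALIZATION_ERROR and any pending encode is re-run so that it can
// report the failure to its caller.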
void WebrtcVideoEncoderGpu::Core::BeginInitialization() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);

#if BUILDFLAG(IS_WIN)
  if (!scoped_com_initializer_) {
    scoped_com_initializer_ =
        std::make_unique<base::win::ScopedCOMInitializer>();
  }
#endif

  VideoPixelFormat input_format = VideoPixelFormat::PIXEL_FORMAT_NV12;
  // TODO(zijiehe): Implement some logical way to set an initial bitrate.
  // Currently we set the bitrate to 8M bits / 1M bytes per frame, and 30
  // frames per second.
  // TODO(joedow): Use the framerate from SessionOptions instead of the
  // constant framerate value if we decide to make H.264 generally available.
  media::Bitrate initial_bitrate = media::Bitrate::ConstantBitrate(
      static_cast<uint32_t>(kTargetFrameRate * 1024 * 1024 * 8));
  const media::VideoEncodeAccelerator::Config config(
      input_format, input_visible_size_, codec_profile_, initial_bitrate);
  video_encode_accelerator_ =
      media::GpuVideoEncodeAcceleratorFactory::CreateVEA(
          config, this, CreateGpuPreferences(), CreateGpuWorkarounds(),
          CreateGpuDevice());
  if (!video_encode_accelerator_) {
    LOG(ERROR) << "Could not create VideoEncodeAccelerator";
    state_ = INITIALIZATION_ERROR;
    RunAnyPendingEncode();
    return;
  }
  state_ = INITIALIZING;
}
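
// Hands output buffer |bitstream_buffer_id| (back) to the VEA so that the next
// encoded bitstream can be written into it.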
void WebrtcVideoEncoderGpu::Core::UseOutputBitstreamBufferId(
    int32_t bitstream_buffer_id) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  video_encode_accelerator_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
      bitstream_buffer_id,
      output_buffers_[bitstream_buffer_id]->region.Duplicate(),
      output_buffers_[bitstream_buffer_id]->region.GetSize()));
}

void WebrtcVideoEncoderGpu::Core::RunAnyPendingEncode() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  if (pending_encode_) {
    std::move(pending_encode_).Run();
  }
}

// static
std::unique_ptr<WebrtcVideoEncoder> WebrtcVideoEncoderGpu::CreateForH264() {
  LOG(WARNING) << "H264 video encoder is created.";
  // HIGH profile requires Windows 8 or later. Considering encoding latency,
  // frame size, and image quality, MAIN should be fine for us.
  return base::WrapUnique(new WebrtcVideoEncoderGpu(kH264Profile));
}

// static
bool WebrtcVideoEncoderGpu::IsSupportedByH264(const Profile& profile) {
#if BUILDFLAG(IS_WIN)
  // This object is required by Chromium to ensure proper init/uninit of COM on
  // this thread. The guidance is to match the lifetime of this object to the
  // lifetime of the thread if possible. Since we are still experimenting with
  // H.264 and run the encoder on a different thread, we use a locally scoped
  // object for now.
  base::win::ScopedCOMInitializer scoped_com_initializer;

  // Ensure the required MF DLLs are loaded before we call into the VEA below.
  media::MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization();
#endif

  media::VideoEncodeAccelerator::SupportedProfiles profiles =
      media::GpuVideoEncodeAcceleratorFactory::GetSupportedProfiles(
          CreateGpuPreferences(), CreateGpuWorkarounds(), CreateGpuDevice());
  for (const auto& supported_profile : profiles) {
    if (supported_profile.profile != kH264Profile) {
      continue;
    }

    double supported_framerate = supported_profile.max_framerate_numerator;
    supported_framerate /= supported_profile.max_framerate_denominator;
    if (profile.frame_rate > supported_framerate) {
      continue;
    }

    if (profile.resolution.GetArea() >
        supported_profile.max_resolution.GetArea()) {
      continue;
    }

    return true;
  }

  return false;
}

}  // namespace remoting