video_encode_accelerator_adapter.cc

// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/video/video_encode_accelerator_adapter.h"

#include <limits>
#include <vector>

#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/numerics/checked_math.h"
#include "base/numerics/safe_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "base/task/bind_post_task.h"
#include "base/task/sequenced_task_runner.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/media_log.h"
#include "media/base/svc_scalability_mode.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#include "media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.h"
#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)
#include "media/video/gpu_video_accelerator_factories.h"

namespace media {

namespace {

// HW encoders expect a nonzero bitrate, so |kVEADefaultBitratePerPixel| is
// used to estimate bits per second for ~30 fps with ~1/16 compression rate.
constexpr int kVEADefaultBitratePerPixel = 2;
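// For example, a 1920x1080 frame gets a default of 1920 * 1080 * 2 bits per
// second, i.e. roughly 4.1 Mbps; the checked math below clamps the product to
// uint32_t max for pathologically large frames.
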
uint32_t ComputeCheckedDefaultBitrate(const gfx::Size& frame_size) {
  base::CheckedNumeric<uint32_t> checked_bitrate_product =
      base::CheckMul<uint32_t>(frame_size.width(), frame_size.height(),
                               kVEADefaultBitratePerPixel);
  // If the product has overflowed, clamp it to uint32_t max.
  return checked_bitrate_product.ValueOrDefault(
      std::numeric_limits<uint32_t>::max());
}

uint32_t ComputeCheckedPeakBitrate(uint32_t target_bitrate) {
  // TODO(crbug.com/1342850): Reconsider whether this is good peak bps.
  base::CheckedNumeric<uint32_t> checked_bitrate_product =
      base::CheckMul<uint32_t>(target_bitrate, 10u);
  return checked_bitrate_product.ValueOrDefault(
      std::numeric_limits<uint32_t>::max());
}

Bitrate CreateBitrate(
    const absl::optional<Bitrate>& requested_bitrate,
    const gfx::Size& frame_size,
    VideoEncodeAccelerator::SupportedRateControlMode supported_rc_modes) {
  uint32_t default_bitrate = ComputeCheckedDefaultBitrate(frame_size);
  if (supported_rc_modes & VideoEncodeAccelerator::kVariableMode) {
    // The VEA supports VBR. Use |requested_bitrate|, or a default VBR bitrate
    // if no bitrate was specified.
    return requested_bitrate.value_or(Bitrate::VariableBitrate(
        default_bitrate, ComputeCheckedPeakBitrate(default_bitrate)));
  }
  // The VEA doesn't support VBR, so the bitrate configured on the VEA must be
  // CBR. In other words, if |requested_bitrate| is VBR, the bitrate mode falls
  // back to CBR.
  if (requested_bitrate &&
      requested_bitrate->mode() == Bitrate::Mode::kConstant) {
    return *requested_bitrate;
  }
  return Bitrate::ConstantBitrate(
      requested_bitrate ? requested_bitrate->target_bps() : default_bitrate);
}

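// Translates VideoEncoder::Options into a VideoEncodeAccelerator::Config:
// bitrate, temporal layer count, required latency, input pixel format and
// (on Linux/ChromeOS) the storage type the accelerator should expect.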
VideoEncodeAccelerator::Config SetUpVeaConfig(
    VideoCodecProfile profile,
    const VideoEncoder::Options& opts,
    VideoPixelFormat format,
    VideoFrame::StorageType storage_type,
    VideoEncodeAccelerator::SupportedRateControlMode supported_rc_modes) {
  absl::optional<uint32_t> initial_framerate;
  if (opts.framerate.has_value())
    initial_framerate = static_cast<uint32_t>(opts.framerate.value());

  Bitrate bitrate =
      CreateBitrate(opts.bitrate, opts.frame_size, supported_rc_modes);
  auto config =
      VideoEncodeAccelerator::Config(format, opts.frame_size, profile, bitrate,
                                     initial_framerate, opts.keyframe_interval);

  size_t num_temporal_layers = 1;
  if (opts.scalability_mode) {
    switch (opts.scalability_mode.value()) {
      case SVCScalabilityMode::kL1T2:
        num_temporal_layers = 2;
        break;
      case SVCScalabilityMode::kL1T3:
        num_temporal_layers = 3;
        break;
      default:
        NOTREACHED() << "Unsupported SVC: "
                     << GetScalabilityModeName(opts.scalability_mode.value());
    }
  }

  if (num_temporal_layers > 1) {
    VideoEncodeAccelerator::Config::SpatialLayer layer;
    layer.width = opts.frame_size.width();
    layer.height = opts.frame_size.height();
    layer.bitrate_bps = config.bitrate.target_bps();
    if (initial_framerate.has_value())
      layer.framerate = initial_framerate.value();
    layer.num_of_temporal_layers = num_temporal_layers;
    config.spatial_layers.push_back(layer);
  }

  config.require_low_delay =
      opts.latency_mode == VideoEncoder::LatencyMode::Realtime;

  const bool is_rgb =
      format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
      format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;

  // Override the provided format if incoming frames are RGB -- they'll be
  // converted to I420 or NV12 depending on the VEA configuration.
  if (is_rgb)
    config.input_format = PIXEL_FORMAT_I420;

#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
  if (storage_type == VideoFrame::STORAGE_DMABUFS ||
      storage_type == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
    if (is_rgb)
      config.input_format = PIXEL_FORMAT_NV12;
    config.storage_type =
        VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
  }
#endif

  return config;
}

}  // namespace

VideoEncodeAcceleratorAdapter::PendingOp::PendingOp() = default;
VideoEncodeAcceleratorAdapter::PendingOp::~PendingOp() = default;

VideoEncodeAcceleratorAdapter::VideoEncodeAcceleratorAdapter(
    GpuVideoAcceleratorFactories* gpu_factories,
    std::unique_ptr<MediaLog> media_log,
    scoped_refptr<base::SequencedTaskRunner> callback_task_runner)
    : output_pool_(base::MakeRefCounted<base::UnsafeSharedMemoryPool>()),
      input_pool_(base::MakeRefCounted<base::UnsafeSharedMemoryPool>()),
      gpu_factories_(gpu_factories),
      media_log_(std::move(media_log)),
      accelerator_task_runner_(gpu_factories_->GetTaskRunner()),
      callback_task_runner_(std::move(callback_task_runner)) {
  DETACH_FROM_SEQUENCE(accelerator_sequence_checker_);
}

VideoEncodeAcceleratorAdapter::~VideoEncodeAcceleratorAdapter() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  input_pool_->Shutdown();
  output_pool_->Shutdown();
}

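// Ensures the adapter is destroyed on the accelerator sequence. If called on
// any other sequence, ownership of |self| is handed to DeleteSoon(); otherwise
// |self| simply goes out of scope and is destroyed here.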
void VideoEncodeAcceleratorAdapter::DestroyAsync(
    std::unique_ptr<VideoEncodeAcceleratorAdapter> self) {
  DCHECK(self);
  auto runner = self->accelerator_task_runner_;
  DCHECK(runner);
  if (!runner->RunsTasksInCurrentSequence())
    runner->DeleteSoon(FROM_HERE, std::move(self));
}

void VideoEncodeAcceleratorAdapter::SetInputBufferPreferenceForTesting(
    InputBufferKind pref) {
  input_buffer_preference_ = pref;
}

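// Initialize(), Encode(), ChangeOptions() and Flush() are called on the
// client's sequence; each one posts its work to |accelerator_task_runner_| and
// wraps its callbacks so that results are reported on |callback_task_runner_|.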
void VideoEncodeAcceleratorAdapter::Initialize(VideoCodecProfile profile,
                                               const Options& options,
                                               OutputCB output_cb,
                                               EncoderStatusCB done_cb) {
  DCHECK(!accelerator_task_runner_->RunsTasksInCurrentSequence());
  accelerator_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(
          &VideoEncodeAcceleratorAdapter::InitializeOnAcceleratorThread,
          base::Unretained(this), profile, options,
          WrapCallback(std::move(output_cb)),
          WrapCallback(std::move(done_cb))));
}

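// Initialization is split in two stages: this method validates the options and
// enters kWaitingForFirstFrame, while the actual VEA initialization is
// deferred to InitializeInternalOnAcceleratorThread() so that the config can
// be derived from the first frame's format and storage type.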
void VideoEncodeAcceleratorAdapter::InitializeOnAcceleratorThread(
    VideoCodecProfile profile,
    const Options& options,
    OutputCB output_cb,
    EncoderStatusCB done_cb) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  if (state_ != State::kNotInitialized) {
    std::move(done_cb).Run(
        EncoderStatus(EncoderStatus::Codes::kEncoderInitializeTwice,
                      "Encoder has already been initialized."));
    return;
  }

  accelerator_ = gpu_factories_->CreateVideoEncodeAccelerator();
  if (!accelerator_) {
    std::move(done_cb).Run(
        EncoderStatus(EncoderStatus::Codes::kEncoderInitializationError,
                      "Failed to create video encode accelerator."));
    return;
  }

  if (options.frame_size.width() <= 0 || options.frame_size.height() <= 0) {
    std::move(done_cb).Run(
        EncoderStatus(EncoderStatus::Codes::kEncoderUnsupportedConfig,
                      "Negative width or height values."));
    return;
  }

  if (!options.frame_size.GetCheckedArea().IsValid()) {
    std::move(done_cb).Run(
        EncoderStatus(EncoderStatus::Codes::kEncoderUnsupportedConfig,
                      "Frame is too large."));
    return;
  }

  auto supported_profiles =
      gpu_factories_->GetVideoEncodeAcceleratorSupportedProfiles();
  if (!supported_profiles) {
    InitCompleted(
        EncoderStatus(EncoderStatus::Codes::kEncoderInitializationError,
                      "No profile is supported by video encode accelerator."));
    return;
  }

  auto supported_rc_modes =
      VideoEncodeAccelerator::SupportedRateControlMode::kNoMode;
  for (const auto& supported_profile : *supported_profiles) {
    if (supported_profile.profile == profile) {
      supported_rc_modes = supported_profile.rate_control_modes;
      break;
    }
  }
  if (supported_rc_modes ==
      VideoEncodeAccelerator::SupportedRateControlMode::kNoMode) {
    std::move(done_cb).Run(EncoderStatus(
        EncoderStatus::Codes::kEncoderInitializationError,
        "The profile is not supported by video encode accelerator."));
    return;
  }

  profile_ = profile;
  supported_rc_modes_ = supported_rc_modes;
  options_ = options;
  output_cb_ = std::move(output_cb);
  state_ = State::kWaitingForFirstFrame;

#if BUILDFLAG(USE_PROPRIETARY_CODECS)
  if (profile_ >= H264PROFILE_MIN && profile_ <= H264PROFILE_MAX &&
      !options_.avc.produce_annexb) {
    h264_converter_ = std::make_unique<H264AnnexBToAvcBitstreamConverter>();
  }
#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)

  std::move(done_cb).Run(EncoderStatus::Codes::kOk);
  // The accelerator will be initialized for real once we have the first frame.
}

void VideoEncodeAcceleratorAdapter::InitializeInternalOnAcceleratorThread() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  DCHECK_EQ(state_, State::kWaitingForFirstFrame);
  DCHECK(!pending_encodes_.empty());

  // We use the first frame to set up the VEA config so that we can ensure that
  // zero-copy hardware encoding from the camera can be used.
  const auto& first_frame = pending_encodes_.front()->frame;
  const auto format = first_frame->format();
  const bool is_rgb =
      format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
      format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;
  const bool supported_format =
      format == PIXEL_FORMAT_NV12 || format == PIXEL_FORMAT_I420 || is_rgb;
  if (!supported_format) {
    InitCompleted(EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode,
                                "Unexpected frame format.")
                      .WithData("frame", first_frame->AsHumanReadableString()));
    return;
  }

  auto vea_config =
      SetUpVeaConfig(profile_, options_, format, first_frame->storage_type(),
                     supported_rc_modes_);

#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
  // Linux/ChromeOS require a special configuration to use dmabuf storage.
  // We need to keep sending frames the same way the first frame was sent.
  // Other platforms will happily mix GpuMemoryBuffer storage with regular
  // storage, so we don't care about mismatches on other platforms.
  if (input_buffer_preference_ == InputBufferKind::Any) {
    if (vea_config.storage_type ==
        VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer) {
      input_buffer_preference_ = InputBufferKind::GpuMemBuf;
    } else {
      input_buffer_preference_ = InputBufferKind::CpuMemBuf;
    }
  }
#endif

  if (!accelerator_->Initialize(vea_config, this, media_log_->Clone())) {
    InitCompleted(
        EncoderStatus(EncoderStatus::Codes::kEncoderInitializationError,
                      "Failed to initialize video encode accelerator."));
    return;
  }

  state_ = State::kInitializing;
  format_ = vea_config.input_format;
}

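// Frames that arrive before initialization has finished are queued in
// |pending_encodes_| and replayed from InitCompleted(); afterwards each frame
// is prepared as either a CPU (shared memory) or GPU (GpuMemoryBuffer) input
// frame before being handed to |accelerator_|.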
void VideoEncodeAcceleratorAdapter::Encode(scoped_refptr<VideoFrame> frame,
                                           bool key_frame,
                                           EncoderStatusCB done_cb) {
  DCHECK(!accelerator_task_runner_->RunsTasksInCurrentSequence());
  accelerator_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(&VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread,
                     base::Unretained(this), std::move(frame), key_frame,
                     WrapCallback(std::move(done_cb))));
}

void VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread(
    scoped_refptr<VideoFrame> frame,
    bool key_frame,
    EncoderStatusCB done_cb) {
  TRACE_EVENT1("media",
               "VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread",
               "timestamp", frame->timestamp());
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  if (state_ == State::kWaitingForFirstFrame ||
      state_ == State::kInitializing) {
    auto pending_encode = std::make_unique<PendingEncode>();
    pending_encode->done_callback = std::move(done_cb);
    pending_encode->frame = std::move(frame);
    pending_encode->key_frame = key_frame;
    pending_encodes_.push_back(std::move(pending_encode));
    if (state_ == State::kWaitingForFirstFrame)
      InitializeInternalOnAcceleratorThread();
    return;
  }

  if (state_ != State::kReadyToEncode) {
    std::move(done_cb).Run(
        EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode,
                      "Encoder can't encode now."));
    return;
  }

  const bool frame_needs_resizing =
      frame->visible_rect().size() != options_.frame_size;

  // Try using a frame with a GPU buffer if both of the following are true:
  // 1. the frame already has a GPU buffer, and
  // 2. the frame doesn't need resizing, or it can be resized by the GPU
  //    encoder.
  bool use_gpu_buffer = frame->HasGpuMemoryBuffer() &&
                        (!frame_needs_resizing || gpu_resize_supported_);

  // The currently configured encoder's preference takes precedence over the
  // heuristic above.
  if (input_buffer_preference_ == InputBufferKind::GpuMemBuf)
    use_gpu_buffer = true;
  if (input_buffer_preference_ == InputBufferKind::CpuMemBuf)
    use_gpu_buffer = false;

  EncoderStatus::Or<scoped_refptr<VideoFrame>> result(nullptr);
  if (use_gpu_buffer)
    result = PrepareGpuFrame(input_coded_size_, frame);
  else
    result = PrepareCpuFrame(input_coded_size_, frame);

  if (result.has_error()) {
    std::move(done_cb).Run(
        std::move(result)
            .error()
            .WithData("frame", frame->AsHumanReadableString())
            .AddHere());
    return;
  }

  frame = std::move(result).value();

  if (last_frame_color_space_ != frame->ColorSpace()) {
    last_frame_color_space_ = frame->ColorSpace();
    key_frame = true;
  }

  auto active_encode = std::make_unique<PendingOp>();
  active_encode->done_callback = std::move(done_cb);
  active_encode->timestamp = frame->timestamp();
  active_encode->color_space = frame->ColorSpace();
  active_encodes_.push_back(std::move(active_encode));
  accelerator_->Encode(frame, key_frame);
}

void VideoEncodeAcceleratorAdapter::ChangeOptions(const Options& options,
                                                  OutputCB output_cb,
                                                  EncoderStatusCB done_cb) {
  DCHECK(!accelerator_task_runner_->RunsTasksInCurrentSequence());
  accelerator_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(
          &VideoEncodeAcceleratorAdapter::ChangeOptionsOnAcceleratorThread,
          base::Unretained(this), options, WrapCallback(std::move(output_cb)),
          WrapCallback(std::move(done_cb))));
}

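// Applies new options on the accelerator sequence; callers must ensure there
// are no active or pending encodes. Resolution and bitrate-mode changes are
// rejected; the remaining parameters are forwarded to the accelerator via
// RequestEncodingParametersChange().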
void VideoEncodeAcceleratorAdapter::ChangeOptionsOnAcceleratorThread(
    const Options options,
    OutputCB output_cb,
    EncoderStatusCB done_cb) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  DCHECK(active_encodes_.empty());
  DCHECK(pending_encodes_.empty());
  DCHECK_EQ(state_, State::kReadyToEncode);

  if (options.frame_size != options_.frame_size) {
    auto status =
        EncoderStatus(EncoderStatus::Codes::kEncoderInitializationError,
                      "Resolution change is not supported.");
    std::move(done_cb).Run(status);
    return;
  }

  if (options.bitrate && options_.bitrate &&
      options.bitrate->mode() != options_.bitrate->mode()) {
    std::move(done_cb).Run(
        EncoderStatus(EncoderStatus::Codes::kEncoderInitializationError,
                      "Bitrate mode change is not supported."));
    return;
  }

  Bitrate bitrate =
      CreateBitrate(options.bitrate, options.frame_size, supported_rc_modes_);
  uint32_t framerate = base::ClampRound<uint32_t>(
      options.framerate.value_or(VideoEncodeAccelerator::kDefaultFramerate));
  accelerator_->RequestEncodingParametersChange(bitrate, framerate);

#if BUILDFLAG(USE_PROPRIETARY_CODECS)
  if (profile_ >= H264PROFILE_MIN && profile_ <= H264PROFILE_MAX) {
    if (options.avc.produce_annexb) {
      h264_converter_.reset();
    } else if (!h264_converter_) {
      h264_converter_ = std::make_unique<H264AnnexBToAvcBitstreamConverter>();
    }
  }
#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)

  options_ = options;
  if (!output_cb.is_null())
    output_cb_ = std::move(output_cb);
  std::move(done_cb).Run(EncoderStatus::Codes::kOk);
}

void VideoEncodeAcceleratorAdapter::Flush(EncoderStatusCB done_cb) {
  DCHECK(!accelerator_task_runner_->RunsTasksInCurrentSequence());
  accelerator_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(&VideoEncodeAcceleratorAdapter::FlushOnAcceleratorThread,
                     base::Unretained(this), WrapCallback(std::move(done_cb))));
}

void VideoEncodeAcceleratorAdapter::FlushOnAcceleratorThread(
    EncoderStatusCB done_cb) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  if (state_ == State::kWaitingForFirstFrame) {
    // Nothing to do since we haven't actually initialized yet.
    std::move(done_cb).Run(EncoderStatus::Codes::kOk);
    return;
  }

  if (state_ != State::kReadyToEncode && state_ != State::kInitializing) {
    std::move(done_cb).Run(EncoderStatus(
        EncoderStatus::Codes::kEncoderFailedFlush, "Encoder can't flush now"));
    return;
  }

  if (active_encodes_.empty() && pending_encodes_.empty()) {
    // No active or pending encodes, nothing to flush.
    std::move(done_cb).Run(EncoderStatus::Codes::kOk);
    return;
  }

  // While initializing, the flush will be handled after the pending encodes
  // have been sent.
  if (state_ != State::kInitializing) {
    DCHECK_EQ(state_, State::kReadyToEncode);
    state_ = State::kFlushing;
  }

  pending_flush_ = std::make_unique<PendingOp>();
  pending_flush_->done_callback = std::move(done_cb);

  // If flush is not supported, FlushCompleted() will be called by
  // BitstreamBufferReady() when |active_encodes_| becomes empty.
  if (state_ == State::kFlushing && flush_support_.value()) {
    accelerator_->Flush(
        base::BindOnce(&VideoEncodeAcceleratorAdapter::FlushCompleted,
                       base::Unretained(this)));
  }
}

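// VideoEncodeAccelerator::Client implementation. RequireBitstreamBuffers() is
// called once the accelerator has accepted the config: it records the coded
// input size, hands the accelerator its single reusable output buffer, and
// completes the adapter's initialization.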
void VideoEncodeAcceleratorAdapter::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  input_coded_size_ = input_coded_size;
  input_buffer_size_ =
      VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_coded_size);

  output_handle_holder_ = output_pool_->MaybeAllocateBuffer(output_buffer_size);
  if (!output_handle_holder_) {
    InitCompleted(EncoderStatus::Codes::kEncoderInitializationError);
    return;
  }

  const base::UnsafeSharedMemoryRegion& region =
      output_handle_holder_->GetRegion();
  // There is always one output buffer.
  accelerator_->UseOutputBitstreamBuffer(
      BitstreamBuffer(0, region.Duplicate(), region.GetSize()));
  InitCompleted(EncoderStatus::Codes::kOk);
}

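// Called for every encoded chunk written to the shared output buffer. The
// payload is copied out (and converted from Annex B to AVCC when
// |h264_converter_| is set), the matching entry in |active_encodes_| is
// completed by timestamp, and the bitstream buffer is returned to
// |accelerator_| for reuse.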
void VideoEncodeAcceleratorAdapter::BitstreamBufferReady(
    int32_t buffer_id,
    const BitstreamBufferMetadata& metadata) {
  absl::optional<CodecDescription> desc;
  VideoEncoderOutput result;
  result.key_frame = metadata.key_frame;
  result.timestamp = metadata.timestamp;
  result.size = metadata.payload_size_bytes;
  if (metadata.h264.has_value())
    result.temporal_id = metadata.h264.value().temporal_idx;
  else if (metadata.vp9.has_value())
    result.temporal_id = metadata.vp9.value().temporal_idx;
  else if (metadata.vp8.has_value())
    result.temporal_id = metadata.vp8.value().temporal_idx;
  else if (metadata.av1.has_value())
    result.temporal_id = metadata.av1.value().temporal_idx;

  DCHECK_EQ(buffer_id, 0);
  // There is always one output buffer.
  const base::WritableSharedMemoryMapping& mapping =
      output_handle_holder_->GetMapping();
  DCHECK_LE(result.size, mapping.size());

  if (result.size > 0) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
    if (h264_converter_) {
      uint8_t* src = static_cast<uint8_t*>(mapping.memory());
      size_t dst_size = result.size;
      size_t actual_output_size = 0;
      bool config_changed = false;
      auto dst = std::make_unique<uint8_t[]>(dst_size);

      auto status = h264_converter_->ConvertChunk(
          base::span<uint8_t>(src, result.size),
          base::span<uint8_t>(dst.get(), dst_size), &config_changed,
          &actual_output_size);
      if (status.code() == MP4Status::Codes::kBufferTooSmall) {
        // Between the AnnexB and AVCC bitstream formats, the start code length
        // and the NAL size length can be different. See the H.264
        // specification at http://www.itu.int/rec/T-REC-H.264. Retry the
        // conversion if the output buffer size is too small.
        dst_size = actual_output_size;
        dst = std::make_unique<uint8_t[]>(dst_size);
        status = h264_converter_->ConvertChunk(
            base::span<uint8_t>(src, result.size),
            base::span<uint8_t>(dst.get(), dst_size), &config_changed,
            &actual_output_size);
      }

      if (!status.is_ok()) {
        LOG(ERROR) << status.message();
        NotifyError(VideoEncodeAccelerator::kPlatformFailureError);
        return;
      }

      result.size = actual_output_size;
      result.data = std::move(dst);

      if (config_changed) {
        const auto& config = h264_converter_->GetCurrentConfig();
        desc = CodecDescription();
        if (!config.Serialize(desc.value())) {
          NotifyError(VideoEncodeAccelerator::kPlatformFailureError);
          return;
        }
      }
    } else {
#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)
      result.data = std::make_unique<uint8_t[]>(result.size);
      memcpy(result.data.get(), mapping.memory(), result.size);
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
    }
#endif  // BUILDFLAG(USE_PROPRIETARY_CODECS)
  }

  // Give the buffer back to |accelerator_|.
  const base::UnsafeSharedMemoryRegion& region =
      output_handle_holder_->GetRegion();
  accelerator_->UseOutputBitstreamBuffer(
      BitstreamBuffer(buffer_id, region.Duplicate(), region.GetSize()));

  bool erased_active_encode = false;
  for (auto it = active_encodes_.begin(); it != active_encodes_.end(); ++it) {
    if ((*it)->timestamp == result.timestamp) {
      result.color_space = (*it)->color_space;
      std::move((*it)->done_callback).Run(EncoderStatus::Codes::kOk);
      active_encodes_.erase(it);
      erased_active_encode = true;
      break;
    }
  }
  DCHECK(erased_active_encode);

  if (result.size > 0) {
    // Size == 0 means the frame was dropped by the platform encoder; we don't
    // need to call the output callback in such cases.
    output_cb_.Run(std::move(result), std::move(desc));
  }

  if (active_encodes_.empty() && !flush_support_.value()) {
    // Manually call FlushCompleted(), since |accelerator_| won't do it for us.
    FlushCompleted(true);
  }
}

void VideoEncodeAcceleratorAdapter::NotifyError(
    VideoEncodeAccelerator::Error error) {
  if (state_ == State::kInitializing) {
    InitCompleted(
        EncoderStatus(EncoderStatus::Codes::kEncoderInitializationError,
                      "VideoEncodeAccelerator encountered an error")
            .WithData("VideoEncodeAccelerator::Error", int32_t{error}));
    return;
  }

  if (state_ == State::kFlushing)
    FlushCompleted(false);

  // Report the error to all encoding-done callbacks.
  for (auto& encode : active_encodes_) {
    auto status =
        EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode,
                      "VideoEncodeAccelerator encountered an error")
            .WithData("VideoEncodeAccelerator::Error", int32_t{error});
    std::move(encode->done_callback).Run(status);
  }
  active_encodes_.clear();
  state_ = State::kNotInitialized;
}

void VideoEncodeAcceleratorAdapter::NotifyEncoderInfoChange(
    const VideoEncoderInfo& info) {}

void VideoEncodeAcceleratorAdapter::InitCompleted(EncoderStatus status) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);

  if (!status.is_ok()) {
    // Report the error to all encoding-done callbacks.
    for (auto& encode : pending_encodes_)
      std::move(encode->done_callback).Run(status);

    if (pending_flush_)
      FlushCompleted(false);

    DCHECK(active_encodes_.empty());
    pending_encodes_.clear();
    state_ = State::kNotInitialized;
    return;
  }

  state_ = State::kReadyToEncode;
  flush_support_ = accelerator_->IsFlushSupported();
  gpu_resize_supported_ = accelerator_->IsGpuFrameResizeSupported();

  // Send off the encodes that came in while we were waiting for initialization.
  for (auto& encode : pending_encodes_) {
    EncodeOnAcceleratorThread(std::move(encode->frame), encode->key_frame,
                              std::move(encode->done_callback));
  }
  pending_encodes_.clear();

  // If a Flush() came in during initialization, transition to flushing now that
  // all the pending encodes have been sent.
  if (pending_flush_) {
    state_ = State::kFlushing;
    if (flush_support_.value()) {
      accelerator_->Flush(
          base::BindOnce(&VideoEncodeAcceleratorAdapter::FlushCompleted,
                         base::Unretained(this)));
    }
  }
}

void VideoEncodeAcceleratorAdapter::FlushCompleted(bool success) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  if (!pending_flush_)
    return;

  std::move(pending_flush_->done_callback)
      .Run(success ? EncoderStatus::Codes::kOk
                   : EncoderStatus::Codes::kEncoderFailedFlush);
  pending_flush_.reset();
  state_ = State::kReadyToEncode;
}

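// Wraps a callback so that it always runs on |callback_task_runner_|,
// regardless of which sequence invokes it.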
template <class T>
T VideoEncodeAcceleratorAdapter::WrapCallback(T cb) {
  DCHECK(callback_task_runner_);
  if (cb.is_null())
    return cb;
  return base::BindPostTask(callback_task_runner_, std::move(cb));
}

// Copies a frame into a shared memory buffer and resizes it at the same time.
// Input frames can be I420, NV12, or RGB -- they'll be converted to I420 if
// needed.
EncoderStatus::Or<scoped_refptr<VideoFrame>>
VideoEncodeAcceleratorAdapter::PrepareCpuFrame(
    const gfx::Size& size,
    scoped_refptr<VideoFrame> src_frame) {
  TRACE_EVENT0("media", "VideoEncodeAcceleratorAdapter::PrepareCpuFrame");
  auto handle = input_pool_->MaybeAllocateBuffer(input_buffer_size_);
  if (!handle)
    return EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode);

  const base::UnsafeSharedMemoryRegion& region = handle->GetRegion();
  const base::WritableSharedMemoryMapping& mapping = handle->GetMapping();

  auto mapped_src_frame = src_frame->HasGpuMemoryBuffer()
                              ? ConvertToMemoryMappedFrame(src_frame)
                              : src_frame;
  auto shared_frame = VideoFrame::WrapExternalData(
      PIXEL_FORMAT_I420, size, gfx::Rect(size), size,
      mapping.GetMemoryAsSpan<uint8_t>().data(), mapping.size(),
      src_frame->timestamp());
  if (!shared_frame || !mapped_src_frame)
    return EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode);

  shared_frame->BackWithSharedMemory(&region);
  // Keep the shared memory handle alive until the frame is destroyed so that
  // the memory is not freed prematurely.
  shared_frame->AddDestructionObserver(BindToCurrentLoop(base::BindOnce(
      [](std::unique_ptr<base::UnsafeSharedMemoryPool::Handle>) {},
      std::move(handle))));
  auto status =
      ConvertAndScaleFrame(*mapped_src_frame, *shared_frame, resize_buf_);
  if (!status.is_ok())
    return EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode)
        .AddCause(std::move(status));

  return shared_frame;
}

// Copies a frame into a GPU buffer and resizes it at the same time. Input
// frames can be I420, NV12, or RGB -- they'll be converted to NV12 if needed.
EncoderStatus::Or<scoped_refptr<VideoFrame>>
VideoEncodeAcceleratorAdapter::PrepareGpuFrame(
    const gfx::Size& size,
    scoped_refptr<VideoFrame> src_frame) {
  TRACE_EVENT0("media", "VideoEncodeAcceleratorAdapter::PrepareGpuFrame");
  DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
  DCHECK(src_frame);
  if (src_frame->HasGpuMemoryBuffer() &&
      src_frame->format() == PIXEL_FORMAT_NV12 &&
      (gpu_resize_supported_ || src_frame->visible_rect().size() == size)) {
    // Nothing to do here, the input frame is already what we need.
    return src_frame;
  }

  auto gmb = gpu_factories_->CreateGpuMemoryBuffer(
      size, gfx::BufferFormat::YUV_420_BIPLANAR,
      gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
  if (!gmb)
    return EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode);
  gmb->SetColorSpace(src_frame->ColorSpace());

  gpu::MailboxHolder empty_mailboxes[media::VideoFrame::kMaxPlanes];
  auto gpu_frame = VideoFrame::WrapExternalGpuMemoryBuffer(
      gfx::Rect(size), size, std::move(gmb), empty_mailboxes,
      base::NullCallback(), src_frame->timestamp());
  gpu_frame->set_color_space(src_frame->ColorSpace());
  gpu_frame->metadata().MergeMetadataFrom(src_frame->metadata());

  // Don't be scared. ConvertToMemoryMappedFrame() doesn't copy pixel data;
  // it just maps the GPU buffer owned by |gpu_frame| and presents it as a
  // mapped view in CPU memory. It allows us to use ConvertAndScaleFrame()
  // without having to tinker with libyuv and GpuMemoryBuffer memory views.
  // |mapped_gpu_frame| doesn't own anything, but it unmaps the buffer when
  // freed. This works because |gpu_frame| was created with the
  // VEA_READ_CAMERA_AND_CPU_READ_WRITE usage flag.
  auto mapped_gpu_frame = ConvertToMemoryMappedFrame(gpu_frame);
  auto mapped_src_frame = src_frame->HasGpuMemoryBuffer()
                              ? ConvertToMemoryMappedFrame(src_frame)
                              : src_frame;
  if (!mapped_gpu_frame || !mapped_src_frame)
    return EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode);

  auto status =
      ConvertAndScaleFrame(*mapped_src_frame, *mapped_gpu_frame, resize_buf_);
  if (!status.is_ok())
    return EncoderStatus(EncoderStatus::Codes::kEncoderFailedEncode)
        .AddCause(std::move(status));

  return gpu_frame;
}

}  // namespace media