vp9_vaapi_video_encoder_delegate.cc

// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.h"

#include <algorithm>
#include <numeric>

#include <va/va.h>

#include "base/bits.h"
#include "base/memory/ref_counted_memory.h"
#include "base/numerics/safe_conversions.h"
#include "base/strings/stringprintf.h"
#include "media/gpu/gpu_video_encode_accelerator_helpers.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/vaapi_common.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/gpu/video_rate_control.h"
#include "media/gpu/vp9_svc_layers.h"
#include "third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h"

namespace media {
namespace {
// Keyframe period.
constexpr size_t kKFPeriod = 3000;

// Quantization parameters. They are VP9 ac/dc indices and their range is
// 0-255. Based on WebRTC's defaults.
constexpr uint8_t kMinQP = 4;
constexpr uint8_t kMaxQP = 112;

// The upper limit of the quantization parameter for the software rate
// controller. This is larger than |kMaxQP| because a driver might ignore the
// specified maximum quantization parameter when the driver determines the
// value, but it doesn't ignore the quantization parameter computed by the
// software rate controller.
constexpr uint8_t kMaxQPForSoftwareRateCtrl = 224;

// This stands for 31 as a real ac value (see rfc 8.6.1 table
// ac_qlookup[3][256]). Note: This needs to be revisited once we have 10&12 bit
// encoder support.
constexpr uint8_t kDefaultQP = 24;

// The filter level may affect quality at lower bitrates; for now, we set a
// constant value (== 10), which is what other VA-API implementations like
// libyami and gstreamer-vaapi use.
constexpr uint8_t kDefaultLfLevel = 10;

// Convert Qindex, whose range is 0-255, to the quantizer parameter used in
// libvpx vp9 rate control, whose range is 0-63.
// Cited from //third_party/libvpx/source/libvpx/vp9/encoder/vp9_quantize.cc.
uint8_t QindexToQuantizer(uint8_t q_index) {
  constexpr uint8_t kQuantizerToQindex[] = {
      0,   4,   8,   12,  16,  20,  24,  28,  32,  36,  40,  44,  48,
      52,  56,  60,  64,  68,  72,  76,  80,  84,  88,  92,  96,  100,
      104, 108, 112, 116, 120, 124, 128, 132, 136, 140, 144, 148, 152,
      156, 160, 164, 168, 172, 176, 180, 184, 188, 192, 196, 200, 204,
      208, 212, 216, 220, 224, 228, 232, 236, 240, 244, 249, 255,
  };

  for (size_t q = 0; q < std::size(kQuantizerToQindex); ++q) {
    if (kQuantizerToQindex[q] >= q_index)
      return q;
  }
  return std::size(kQuantizerToQindex) - 1;
}

// TODO(crbug.com/752720): remove this in favor of std::gcd if c++17 is enabled
// to use.
int GCD(int a, int b) {
  return a == 0 ? b : GCD(b % a, a);
}

// The return value is expressed as a percentage of the average. For example,
// to allocate no more than 4.5 frames worth of bitrate to a keyframe, the
// return value is 450.
uint32_t MaxSizeOfKeyframeAsPercentage(uint32_t optimal_buffer_size,
                                       uint32_t max_framerate) {
  // Set max to the optimal buffer level (normalized by target BR),
  // and scaled by a scale_par.
  // Max target size = scale_par * optimal_buffer_size * targetBR[Kbps].
  // This value is presented in percentage of perFrameBw:
  // perFrameBw = targetBR[Kbps] * 1000 / framerate.
  // The target in % is as follows:
  const double target_size_byte_per_frame = optimal_buffer_size * 0.5;
  const uint32_t target_size_kbyte =
      target_size_byte_per_frame * max_framerate / 1000;
  const uint32_t target_size_kbyte_as_percent = target_size_kbyte * 100;

  // Don't go below 3 times the per frame bandwidth.
  constexpr uint32_t kMinIntraSizePercentage = 300u;
  return std::max(kMinIntraSizePercentage, target_size_kbyte_as_percent);
}
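
// Builds the configuration for the libvpx software rate controller from the
// current encode parameters, the bitrate allocation and the active
// spatial/temporal layer layout. Note that the per-layer target bitrates are
// cumulative: each temporal layer's target includes all lower temporal layers
// of the same spatial layer.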
libvpx::VP9RateControlRtcConfig CreateRateControlConfig(
    const VP9VaapiVideoEncoderDelegate::EncodeParams& encode_params,
    const VideoBitrateAllocation& bitrate_allocation,
    const size_t num_temporal_layers,
    const std::vector<gfx::Size>& spatial_layer_resolutions) {
  DCHECK(!spatial_layer_resolutions.empty());
  const gfx::Size& encode_size = spatial_layer_resolutions.back();
  const size_t num_spatial_layers = spatial_layer_resolutions.size();

  libvpx::VP9RateControlRtcConfig rc_cfg{};
  rc_cfg.rc_mode = VPX_CBR;
  rc_cfg.width = encode_size.width();
  rc_cfg.height = encode_size.height();
  rc_cfg.max_quantizer = QindexToQuantizer(encode_params.max_qp);
  rc_cfg.min_quantizer = QindexToQuantizer(encode_params.min_qp);
  // libvpx::VP9RateControlRtcConfig is kbps.
  rc_cfg.target_bandwidth = encode_params.bitrate_allocation.GetSumBps() / 1000;

  // These default values come from
  // //third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc.
  rc_cfg.buf_initial_sz = 500;
  rc_cfg.buf_optimal_sz = 600;
  rc_cfg.buf_sz = 1000;
  rc_cfg.undershoot_pct = 50;
  rc_cfg.overshoot_pct = 50;
  rc_cfg.max_intra_bitrate_pct = MaxSizeOfKeyframeAsPercentage(
      rc_cfg.buf_optimal_sz, encode_params.framerate);
  rc_cfg.framerate = encode_params.framerate;

  // Fill spatial/temporal layers variables.
  rc_cfg.ss_number_layers = num_spatial_layers;
  rc_cfg.ts_number_layers = num_temporal_layers;
  for (size_t sid = 0; sid < num_spatial_layers; ++sid) {
    int gcd =
        GCD(encode_size.height(), spatial_layer_resolutions[sid].height());
    rc_cfg.scaling_factor_num[sid] =
        spatial_layer_resolutions[sid].height() / gcd;
    rc_cfg.scaling_factor_den[sid] = encode_size.height() / gcd;
    int bitrate_sum = 0;
    for (size_t tid = 0; tid < num_temporal_layers; ++tid) {
      size_t idx = sid * num_temporal_layers + tid;
      rc_cfg.max_quantizers[idx] = rc_cfg.max_quantizer;
      rc_cfg.min_quantizers[idx] = rc_cfg.min_quantizer;
      bitrate_sum += bitrate_allocation.GetBitrateBps(sid, tid);
      rc_cfg.layer_target_bitrate[idx] = bitrate_sum / 1000;
      rc_cfg.ts_rate_decimator[tid] = 1u << (num_temporal_layers - tid - 1);
    }
  }
  return rc_cfg;
}
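
// Retrieves the VP9Picture attached to |job|.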
scoped_refptr<VP9Picture> GetVP9Picture(
    const VaapiVideoEncoderDelegate::EncodeJob& job) {
  return base::WrapRefCounted(
      reinterpret_cast<VP9Picture*>(job.picture().get()));
}
}  // namespace

VP9VaapiVideoEncoderDelegate::EncodeParams::EncodeParams()
    : kf_period_frames(kKFPeriod),
      framerate(0),
      min_qp(kMinQP),
      max_qp(kMaxQP) {}

void VP9VaapiVideoEncoderDelegate::set_rate_ctrl_for_testing(
    std::unique_ptr<VP9RateControl> rate_ctrl) {
  rate_ctrl_ = std::move(rate_ctrl);
}

VP9VaapiVideoEncoderDelegate::VP9VaapiVideoEncoderDelegate(
    scoped_refptr<VaapiWrapper> vaapi_wrapper,
    base::RepeatingClosure error_cb)
    : VaapiVideoEncoderDelegate(std::move(vaapi_wrapper), error_cb) {}

VP9VaapiVideoEncoderDelegate::~VP9VaapiVideoEncoderDelegate() {
  // VP9VaapiVideoEncoderDelegate can be destroyed on any thread.
}
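
// Validates |config|, sets up VP9SVCLayers when temporal/spatial layers are
// requested, creates the software rate controller (unless one was injected
// for testing) and applies the initial bitrate allocation and framerate.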
bool VP9VaapiVideoEncoderDelegate::Initialize(
    const VideoEncodeAccelerator::Config& config,
    const VaapiVideoEncoderDelegate::Config& ave_config) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  if (VideoCodecProfileToVideoCodec(config.output_profile) !=
      VideoCodec::kVP9) {
    DVLOGF(1) << "Invalid profile: " << GetProfileName(config.output_profile);
    return false;
  }

  if (config.input_visible_size.IsEmpty()) {
    DVLOGF(1) << "Input visible size must not be empty";
    return false;
  }

  if (config.bitrate.mode() == Bitrate::Mode::kVariable) {
    DVLOGF(1) << "Invalid configuration. VBR is not supported for VP9.";
    return false;
  }

  visible_size_ = config.input_visible_size;
  coded_size_ = gfx::Size(base::bits::AlignUp(visible_size_.width(), 16),
                          base::bits::AlignUp(visible_size_.height(), 16));
  current_params_ = EncodeParams();
  reference_frames_.Clear();
  frame_num_ = 0;

  size_t num_temporal_layers = 1;
  size_t num_spatial_layers = 1;
  std::vector<gfx::Size> spatial_layer_resolutions;
  if (config.HasTemporalLayer() || config.HasSpatialLayer()) {
    num_spatial_layers = config.spatial_layers.size();
    num_temporal_layers = config.spatial_layers[0].num_of_temporal_layers;
    DCHECK(num_spatial_layers != 1 || num_temporal_layers != 1);
    for (size_t sid = 1; sid < num_spatial_layers; ++sid) {
      if (num_temporal_layers !=
          config.spatial_layers[sid].num_of_temporal_layers) {
        VLOGF(1) << "The number of temporal layers must be identical across "
                    "spatial layers";
        return false;
      }
    }
    if (num_spatial_layers > VP9SVCLayers::kMaxSpatialLayers ||
        num_temporal_layers > VP9SVCLayers::kMaxSupportedTemporalLayers) {
      VLOGF(1) << "Unsupported number of spatial/temporal layers"
               << ", Spatial layer number: " << num_spatial_layers
               << ", Temporal layer number: " << num_temporal_layers;
      return false;
    }
    if (num_spatial_layers > 1 &&
        config.inter_layer_pred !=
            VideoEncodeAccelerator::Config::InterLayerPredMode::kOnKeyPic) {
      std::string inter_layer_pred;
      if (config.inter_layer_pred ==
          VideoEncodeAccelerator::Config::InterLayerPredMode::kOn)
        inter_layer_pred = base::StringPrintf("InterLayerPredMode::kOn");
      else
        inter_layer_pred = base::StringPrintf("InterLayerPredMode::kOff");
      VLOGF(1) << "Only k-SVC encoding is supported. inter_layer_pred="
               << inter_layer_pred;
      return false;
    }
    for (const auto& spatial_layer : config.spatial_layers) {
      spatial_layer_resolutions.emplace_back(
          gfx::Size(spatial_layer.width, spatial_layer.height));
    }
    svc_layers_ = std::make_unique<VP9SVCLayers>(config.spatial_layers);
  }

  current_params_.max_qp = kMaxQPForSoftwareRateCtrl;

  // Store the layer size for a VP9 simple stream.
  if (spatial_layer_resolutions.empty())
    spatial_layer_resolutions.push_back(visible_size_);

  auto initial_bitrate_allocation = AllocateBitrateForDefaultEncoding(config);

  // |rate_ctrl_| might be injected for tests.
  if (!rate_ctrl_) {
    rate_ctrl_ = VP9RateControl::Create(CreateRateControlConfig(
        current_params_, initial_bitrate_allocation, num_temporal_layers,
        spatial_layer_resolutions));
  }
  if (!rate_ctrl_)
    return false;

  DCHECK(!pending_update_rates_);
  pending_update_rates_ =
      std::make_pair(initial_bitrate_allocation,
                     config.initial_framerate.value_or(
                         VideoEncodeAccelerator::kDefaultFramerate));

  return ApplyPendingUpdateRates();
}

gfx::Size VP9VaapiVideoEncoderDelegate::GetCodedSize() const {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  DCHECK(!coded_size_.IsEmpty());

  return coded_size_;
}

size_t VP9VaapiVideoEncoderDelegate::GetMaxNumOfRefFrames() const {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);

  return kVp9NumRefFrames;
}
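
// Decides whether |encode_job| becomes a keyframe, fills the frame header and
// reference frame configuration, and submits the frame parameters to the
// driver via |vaapi_wrapper_|.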
bool VP9VaapiVideoEncoderDelegate::PrepareEncodeJob(EncodeJob& encode_job) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);

  if (svc_layers_) {
    if (svc_layers_->UpdateEncodeJob(encode_job.IsKeyframeRequested(),
                                     current_params_.kf_period_frames)) {
      encode_job.ProduceKeyframe();
    }
  } else {
    if (encode_job.IsKeyframeRequested())
      frame_num_ = 0;

    if (frame_num_ == 0)
      encode_job.ProduceKeyframe();

    frame_num_++;
    frame_num_ %= current_params_.kf_period_frames;
  }

  scoped_refptr<VP9Picture> picture = GetVP9Picture(encode_job);
  DCHECK(picture);

  std::array<bool, kVp9NumRefsPerFrame> ref_frames_used = {false, false, false};
  SetFrameHeader(encode_job.IsKeyframeRequested(), picture.get(),
                 &ref_frames_used);
  if (!SubmitFrameParameters(encode_job, current_params_, picture,
                             reference_frames_, ref_frames_used)) {
    LOG(ERROR) << "Failed submitting frame parameters";
    return false;
  }

  UpdateReferenceFrames(picture);
  return true;
}
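
// Attaches the VP9-specific metadata (SVC structure, if any) and the QP used
// for the frame to the generic bitstream buffer metadata.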
BitstreamBufferMetadata VP9VaapiVideoEncoderDelegate::GetMetadata(
    const EncodeJob& encode_job,
    size_t payload_size) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);

  auto metadata =
      VaapiVideoEncoderDelegate::GetMetadata(encode_job, payload_size);
  auto picture = GetVP9Picture(encode_job);
  DCHECK(picture);
  metadata.vp9 = picture->metadata_for_encoding;
  metadata.qp =
      base::strict_cast<int32_t>(picture->frame_hdr->quant_params.base_q_idx);
  return metadata;
}

std::vector<gfx::Size> VP9VaapiVideoEncoderDelegate::GetSVCLayerResolutions() {
  if (!ApplyPendingUpdateRates()) {
    DLOG(ERROR) << __func__ << " ApplyPendingUpdateRates failed";
    return {};
  }
  if (svc_layers_) {
    return svc_layers_->active_spatial_layer_resolutions();
  } else {
    return {visible_size_};
  }
}
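
// Feeds the size of the encoded chunk back into the software rate controller
// so that it can adjust the QP of subsequent frames.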
void VP9VaapiVideoEncoderDelegate::BitrateControlUpdate(
    uint64_t encoded_chunk_size_bytes) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  CHECK(rate_ctrl_);

  DVLOGF(4) << "|encoded_chunk_size_bytes|=" << encoded_chunk_size_bytes;
  rate_ctrl_->PostEncodeUpdate(encoded_chunk_size_bytes);
}
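
// Applies a pending bitrate/framerate change, updates the active layer status
// in |svc_layers_| and reconfigures the software rate controller accordingly.
// Returns true if there is nothing to apply or the update succeeds.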
bool VP9VaapiVideoEncoderDelegate::ApplyPendingUpdateRates() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  if (!pending_update_rates_)
    return true;

  VLOGF(2) << "New bitrate: " << pending_update_rates_->first.ToString()
           << ", New framerate: " << pending_update_rates_->second;

  current_params_.bitrate_allocation = pending_update_rates_->first;
  current_params_.framerate = pending_update_rates_->second;
  pending_update_rates_.reset();

  // Update the active layer status in |svc_layers_|; a keyframe is produced
  // when the active layers change.
  if (svc_layers_) {
    if (!svc_layers_->MaybeUpdateActiveLayer(
            &current_params_.bitrate_allocation)) {
      return false;
    }
  } else {
    // Simple stream encoding.
    if (current_params_.bitrate_allocation.GetSumBps() !=
        current_params_.bitrate_allocation.GetBitrateBps(0, 0)) {
      return false;
    }
  }

  CHECK(rate_ctrl_);

  const size_t num_temporal_layers =
      svc_layers_ ? svc_layers_->num_temporal_layers() : 1u;
  std::vector<gfx::Size> spatial_layer_resolutions = {visible_size_};
  if (svc_layers_)
    spatial_layer_resolutions = svc_layers_->active_spatial_layer_resolutions();

  rate_ctrl_->UpdateRateControl(CreateRateControlConfig(
      current_params_, current_params_.bitrate_allocation, num_temporal_layers,
      spatial_layer_resolutions));
  return true;
}
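
// Records a new bitrate allocation and framerate as pending; they are applied
// on the next GetSVCLayerResolutions() call via ApplyPendingUpdateRates().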
bool VP9VaapiVideoEncoderDelegate::UpdateRates(
    const VideoBitrateAllocation& bitrate_allocation,
    uint32_t framerate) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  if (bitrate_allocation.GetMode() != Bitrate::Mode::kConstant) {
    DLOG(ERROR) << "VBR is not supported for VP9 but was requested.";
    return false;
  }

  if (bitrate_allocation.GetSumBps() == 0u || framerate == 0)
    return false;

  pending_update_rates_ = std::make_pair(bitrate_allocation, framerate);
  if (current_params_.bitrate_allocation == pending_update_rates_->first &&
      current_params_.framerate == pending_update_rates_->second) {
    pending_update_rates_.reset();
  }

  return true;
}

Vp9FrameHeader VP9VaapiVideoEncoderDelegate::GetDefaultFrameHeader(
    const bool keyframe) const {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  Vp9FrameHeader hdr{};
  DCHECK(!visible_size_.IsEmpty());
  hdr.frame_width = visible_size_.width();
  hdr.frame_height = visible_size_.height();
  hdr.render_width = visible_size_.width();
  hdr.render_height = visible_size_.height();
  hdr.quant_params.base_q_idx = kDefaultQP;
  hdr.loop_filter.level = kDefaultLfLevel;
  hdr.show_frame = true;
  hdr.frame_type =
      keyframe ? Vp9FrameHeader::KEYFRAME : Vp9FrameHeader::INTERFRAME;
  return hdr;
}
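
// Fills |picture|'s frame header: the reference frame configuration (either
// from |svc_layers_| for k-SVC or the simple round-robin scheme below), and
// the QP and loop filter level computed by the software rate controller.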
void VP9VaapiVideoEncoderDelegate::SetFrameHeader(
    bool keyframe,
    VP9Picture* picture,
    std::array<bool, kVp9NumRefsPerFrame>* ref_frames_used) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  DCHECK(picture);
  DCHECK(ref_frames_used);

  *picture->frame_hdr = GetDefaultFrameHeader(keyframe);
  if (svc_layers_) {
    // Reference frame settings for a k-SVC stream.
    svc_layers_->FillUsedRefFramesAndMetadata(picture, ref_frames_used);
    // Enable error resilient mode so that the syntax of a frame can be decoded
    // independently of previous frames.
    picture->frame_hdr->error_resilient_mode = true;
  } else {
    // Reference frame settings for a simple stream.
    if (keyframe) {
      picture->frame_hdr->refresh_frame_flags = 0xff;
      ref_frame_index_ = 0;
    } else {
      picture->frame_hdr->ref_frame_idx[0] = ref_frame_index_;
      picture->frame_hdr->ref_frame_idx[1] =
          (ref_frame_index_ - 1) & (kVp9NumRefFrames - 1);
      picture->frame_hdr->ref_frame_idx[2] =
          (ref_frame_index_ - 2) & (kVp9NumRefFrames - 1);
      ref_frame_index_ = (ref_frame_index_ + 1) % kVp9NumRefFrames;
      picture->frame_hdr->refresh_frame_flags = 1 << ref_frame_index_;
      // Use the last, golden and alt frames.
      ref_frames_used->fill(true);
    }
  }

  CHECK(rate_ctrl_);
  libvpx::VP9FrameParamsQpRTC frame_params{};
  frame_params.frame_type =
      keyframe ? FRAME_TYPE::KEY_FRAME : FRAME_TYPE::INTER_FRAME;
  if (picture->metadata_for_encoding) {
    frame_params.temporal_layer_id =
        picture->metadata_for_encoding->temporal_idx;
    frame_params.spatial_layer_id = picture->metadata_for_encoding->spatial_idx;
  }
  rate_ctrl_->ComputeQP(frame_params);
  picture->frame_hdr->quant_params.base_q_idx = rate_ctrl_->GetQP();
  picture->frame_hdr->loop_filter.level = rate_ctrl_->GetLoopfilterLevel();
  DVLOGF(4) << "qp="
            << static_cast<int>(picture->frame_hdr->quant_params.base_q_idx)
            << ", filter_level="
            << static_cast<int>(picture->frame_hdr->loop_filter.level)
            << ", frame_params.temporal_layer_id:"
            << frame_params.temporal_layer_id
            << ", frame_params.spatial_layer_id:"
            << frame_params.spatial_layer_id;
}

void VP9VaapiVideoEncoderDelegate::UpdateReferenceFrames(
    scoped_refptr<VP9Picture> picture) {
  reference_frames_.Refresh(picture);
}
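
// Packs the frame header and encode parameters into VA-API sequence and
// picture parameter buffers and submits them to the driver.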
bool VP9VaapiVideoEncoderDelegate::SubmitFrameParameters(
    EncodeJob& job,
    const EncodeParams& encode_params,
    scoped_refptr<VP9Picture> pic,
    const Vp9ReferenceFrameVector& ref_frames,
    const std::array<bool, kVp9NumRefsPerFrame>& ref_frames_used) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  VAEncSequenceParameterBufferVP9 seq_param = {};

  const auto& frame_header = pic->frame_hdr;
  // TODO(crbug.com/811912): Double check whether the
  // max_frame_width or max_frame_height affects any of the memory
  // allocation and tighten these values based on that.
  constexpr gfx::Size kMaxFrameSize(4096, 4096);
  seq_param.max_frame_width = kMaxFrameSize.width();
  seq_param.max_frame_height = kMaxFrameSize.height();
  seq_param.bits_per_second = encode_params.bitrate_allocation.GetSumBps();
  seq_param.intra_period = encode_params.kf_period_frames;

  VAEncPictureParameterBufferVP9 pic_param = {};
  pic_param.frame_width_src = frame_header->frame_width;
  pic_param.frame_height_src = frame_header->frame_height;
  pic_param.frame_width_dst = frame_header->render_width;
  pic_param.frame_height_dst = frame_header->render_height;

  pic_param.reconstructed_frame = pic->AsVaapiVP9Picture()->GetVASurfaceID();
  DCHECK_NE(pic_param.reconstructed_frame, VA_INVALID_ID);

  for (size_t i = 0; i < kVp9NumRefFrames; i++) {
    auto ref_pic = ref_frames.GetFrame(i);
    pic_param.reference_frames[i] =
        ref_pic ? ref_pic->AsVaapiVP9Picture()->GetVASurfaceID()
                : VA_INVALID_ID;
  }

  pic_param.coded_buf = job.coded_buffer_id();
  DCHECK_NE(pic_param.coded_buf, VA_INVALID_ID);

  if (frame_header->IsKeyframe()) {
    pic_param.ref_flags.bits.force_kf = true;
  } else {
    // Non-keyframe mode: the frame has at least one reference frame.
    size_t first_used_ref_frame = 3;
    for (size_t i = 0; i < kVp9NumRefsPerFrame; i++) {
      if (ref_frames_used[i]) {
        first_used_ref_frame = std::min(first_used_ref_frame, i);
        pic_param.ref_flags.bits.ref_frame_ctrl_l0 |= (1 << i);
      }
    }
    CHECK_LT(first_used_ref_frame, 3u);

    pic_param.ref_flags.bits.ref_last_idx =
        ref_frames_used[0] ? frame_header->ref_frame_idx[0]
                           : frame_header->ref_frame_idx[first_used_ref_frame];
    pic_param.ref_flags.bits.ref_gf_idx =
        ref_frames_used[1] ? frame_header->ref_frame_idx[1]
                           : frame_header->ref_frame_idx[first_used_ref_frame];
    pic_param.ref_flags.bits.ref_arf_idx =
        ref_frames_used[2] ? frame_header->ref_frame_idx[2]
                           : frame_header->ref_frame_idx[first_used_ref_frame];
  }

  pic_param.pic_flags.bits.frame_type = frame_header->frame_type;
  pic_param.pic_flags.bits.show_frame = frame_header->show_frame;
  pic_param.pic_flags.bits.error_resilient_mode =
      frame_header->error_resilient_mode;
  pic_param.pic_flags.bits.intra_only = frame_header->intra_only;
  pic_param.pic_flags.bits.allow_high_precision_mv =
      frame_header->allow_high_precision_mv;
  pic_param.pic_flags.bits.mcomp_filter_type =
      frame_header->interpolation_filter;
  pic_param.pic_flags.bits.frame_parallel_decoding_mode =
      frame_header->frame_parallel_decoding_mode;
  pic_param.pic_flags.bits.reset_frame_context =
      frame_header->reset_frame_context;
  pic_param.pic_flags.bits.refresh_frame_context =
      frame_header->refresh_frame_context;
  pic_param.pic_flags.bits.frame_context_idx = frame_header->frame_context_idx;

  pic_param.refresh_frame_flags = frame_header->refresh_frame_flags;

  pic_param.luma_ac_qindex = frame_header->quant_params.base_q_idx;
  pic_param.luma_dc_qindex_delta = frame_header->quant_params.delta_q_y_dc;
  pic_param.chroma_ac_qindex_delta = frame_header->quant_params.delta_q_uv_ac;
  pic_param.chroma_dc_qindex_delta = frame_header->quant_params.delta_q_uv_dc;
  pic_param.filter_level = frame_header->loop_filter.level;

  pic_param.log2_tile_rows = frame_header->tile_rows_log2;
  pic_param.log2_tile_columns = frame_header->tile_cols_log2;

  return vaapi_wrapper_->SubmitBuffers(
      {{VAEncSequenceParameterBufferType, sizeof(seq_param), &seq_param},
       {VAEncPictureParameterBufferType, sizeof(pic_param), &pic_param}});
}

}  // namespace media