vaapi_video_encode_accelerator.cc

// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/gpu/vaapi/vaapi_video_encode_accelerator.h"

#include <string.h>
#include <va/va.h>

#include <algorithm>
#include <memory>
#include <type_traits>
#include <utility>

#include "base/bind.h"
#include "base/bits.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/containers/contains.h"
#include "base/cxx17_backports.h"
#include "base/feature_list.h"
#include "base/memory/ptr_util.h"
#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/numerics/safe_conversions.h"
#include "base/strings/stringprintf.h"
#include "base/task/task_traits.h"
#include "base/task/thread_pool.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/memory_dump_manager.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/format_utils.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/video_bitrate_allocation.h"
#include "media/gpu/chromeos/platform_video_frame_utils.h"
#include "media/gpu/gpu_video_encode_accelerator_helpers.h"
#include "media/gpu/h264_dpb.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/h264_vaapi_video_encoder_delegate.h"
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_common.h"
#include "media/gpu/vaapi/vaapi_utils.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.h"
#include "media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.h"
#include "media/gpu/vp8_reference_frame_vector.h"
#include "media/gpu/vp9_reference_frame_vector.h"
#include "media/gpu/vp9_svc_layers.h"

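// Transitions the encoder to the error state, logs |msg| and notifies the
// client through NotifyError(). Both SetState() and NotifyError() repost
// themselves to the appropriate task runner when invoked from the other
// sequence, so this macro is usable from either sequence.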
#define NOTIFY_ERROR(error, msg)                        \
  do {                                                  \
    SetState(kError);                                   \
    VLOGF(1) << msg;                                    \
    VLOGF(1) << "Calling NotifyError(" << error << ")"; \
    NotifyError(error);                                 \
  } while (0)

namespace media {

namespace {
// Minimum number of frames in flight for pipeline depth; adjust up to this
// number if the encoder requests fewer.
constexpr size_t kMinNumFramesInFlight = 4;

// The internal format of the VASurfaces.
constexpr unsigned int kVaSurfaceFormat = VA_RT_FORMAT_YUV420;

// Creates one |encode_size| ScopedVASurface using |vaapi_wrapper|.
std::unique_ptr<ScopedVASurface> CreateScopedSurface(
    VaapiWrapper& vaapi_wrapper,
    const gfx::Size& encode_size,
    const std::vector<VaapiWrapper::SurfaceUsageHint>& surface_usage_hints) {
  auto surfaces = vaapi_wrapper.CreateScopedVASurfaces(
      kVaSurfaceFormat, encode_size, surface_usage_hints, 1u,
      /*visible_size=*/absl::nullopt,
      /*va_fourcc=*/absl::nullopt);
  return surfaces.empty() ? nullptr : std::move(surfaces.front());
}
}  // namespace

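// An input VideoFrame queued for encoding, together with whether a keyframe
// was requested for it.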
struct VaapiVideoEncodeAccelerator::InputFrameRef {
  InputFrameRef(scoped_refptr<VideoFrame> frame, bool force_keyframe)
      : frame(frame), force_keyframe(force_keyframe) {}
  const scoped_refptr<VideoFrame> frame;
  const bool force_keyframe;
};

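// A client-provided output BitstreamBuffer. The backing shared memory region
// is mapped lazily, on the encoder sequence, in UseOutputBitstreamBufferTask().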
struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32_t id, BitstreamBuffer buffer)
      : id(id), shm_region(buffer.TakeRegion()), offset(buffer.offset()) {}
  const int32_t id;
  base::UnsafeSharedMemoryRegion shm_region;
  base::WritableSharedMemoryMapping shm_mapping;
  const off_t offset;
};

// static
base::AtomicRefCount VaapiVideoEncodeAccelerator::num_instances_(0);

VideoEncodeAccelerator::SupportedProfiles
VaapiVideoEncodeAccelerator::GetSupportedProfiles() {
  if (IsConfiguredForTesting())
    return supported_profiles_for_testing_;
  return VaapiWrapper::GetSupportedEncodeProfiles();
}

VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator()
    : can_use_encoder_(num_instances_.Increment() < kMaxNumOfInstances),
      output_buffer_byte_size_(0),
      state_(kUninitialized),
      child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      // TODO(akahuang): Change to use SequencedTaskRunner to see if the
      // performance is affected.
      encoder_task_runner_(base::ThreadPool::CreateSingleThreadTaskRunner(
          {base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN, base::MayBlock()},
          base::SingleThreadTaskRunnerThreadMode::DEDICATED)) {
  VLOGF(2);
  DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);
  DETACH_FROM_SEQUENCE(encoder_sequence_checker_);

  child_weak_this_ = child_weak_this_factory_.GetWeakPtr();
  encoder_weak_this_ = encoder_weak_this_factory_.GetWeakPtr();

  // The default values of VideoEncoderInfo for VaapiVideoEncodeAccelerator.
  encoder_info_.implementation_name = "VaapiVideoEncodeAccelerator";
  encoder_info_.has_trusted_rate_controller = true;
  DCHECK(encoder_info_.is_hardware_accelerated);
  DCHECK(encoder_info_.supports_native_handle);
  DCHECK(!encoder_info_.supports_simulcast);
}

VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
  VLOGF(2);
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);

  base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider(
      this);
  num_instances_.Decrement();
}

bool VaapiVideoEncodeAccelerator::Initialize(
    const Config& config,
    Client* client,
    std::unique_ptr<MediaLog> media_log) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);
  DCHECK_EQ(state_, kUninitialized);
  VLOGF(2) << "Initializing VAVEA, " << config.AsHumanReadableString();

  if (!can_use_encoder_) {
    MEDIA_LOG(ERROR, media_log.get()) << "Too many encoders are allocated";
    return false;
  }

  if (AttemptedInitialization()) {
    MEDIA_LOG(ERROR, media_log.get())
        << "Initialize() cannot be called more than once.";
    return false;
  }

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  if (config.HasSpatialLayer()) {
#if BUILDFLAG(IS_CHROMEOS)
    if (!base::FeatureList::IsEnabled(kVaapiVp9kSVCHWEncoding) &&
        !IsConfiguredForTesting()) {
      MEDIA_LOG(ERROR, media_log.get())
          << "Spatial layer encoding is not yet enabled by default";
      return false;
    }
#endif  // BUILDFLAG(IS_CHROMEOS)
    if (config.inter_layer_pred != Config::InterLayerPredMode::kOnKeyPic) {
      MEDIA_LOG(ERROR, media_log.get()) << "Only K-SVC encoding is supported.";
      return false;
    }
    if (config.output_profile != VideoCodecProfile::VP9PROFILE_PROFILE0) {
      MEDIA_LOG(ERROR, media_log.get())
          << "Spatial layers are only supported for VP9 encoding";
      return false;
    }
    // TODO(crbug.com/1186051): Remove this restriction.
    for (size_t i = 0; i < config.spatial_layers.size(); ++i) {
      for (size_t j = i + 1; j < config.spatial_layers.size(); ++j) {
        if (config.spatial_layers[i].width == config.spatial_layers[j].width &&
            config.spatial_layers[i].height ==
                config.spatial_layers[j].height) {
          MEDIA_LOG(ERROR, media_log.get())
              << "Doesn't support k-SVC encoding where spatial layers "
                 "have the same resolution";
          return false;
        }
      }
    }
    if (!IsConfiguredForTesting()) {
      VAProfile va_profile = VAProfileVP9Profile0;
      if (VaapiWrapper::GetDefaultVaEntryPoint(
              VaapiWrapper::kEncodeConstantQuantizationParameter,
              va_profile) != VAEntrypointEncSliceLP) {
        MEDIA_LOG(ERROR, media_log.get())
            << "Currently spatial layer encoding is only supported by "
               "VAEntrypointEncSliceLP";
        return false;
      }
    }
  }

  const VideoCodec codec =
      VideoCodecProfileToVideoCodec(config.output_profile);
  if (codec != VideoCodec::kH264 && codec != VideoCodec::kVP8 &&
      codec != VideoCodec::kVP9) {
    MEDIA_LOG(ERROR, media_log.get())
        << "Unsupported profile: " << GetProfileName(config.output_profile);
    return false;
  }

  if (config.bitrate.mode() == Bitrate::Mode::kVariable) {
    if (!base::FeatureList::IsEnabled(kChromeOSHWVBREncoding)) {
      MEDIA_LOG(ERROR, media_log.get()) << "Variable bitrate is disabled.";
      return false;
    }
    if (codec != VideoCodec::kH264) {
      MEDIA_LOG(ERROR, media_log.get())
          << "Variable bitrate is only supported with H264 encoding.";
      return false;
    }
  }

  if (config.input_format != PIXEL_FORMAT_I420 &&
      config.input_format != PIXEL_FORMAT_NV12) {
    MEDIA_LOG(ERROR, media_log.get())
        << "Unsupported input format: " << config.input_format;
    return false;
  }

  if (config.storage_type.value_or(Config::StorageType::kShmem) ==
      Config::StorageType::kGpuMemoryBuffer) {
#if !defined(USE_OZONE)
    MEDIA_LOG(ERROR, media_log.get())
        << "Native mode is only available on OZONE platform.";
    return false;
#else
    if (config.input_format != PIXEL_FORMAT_NV12) {
      // TODO(crbug.com/894381): Support other formats.
      MEDIA_LOG(ERROR, media_log.get())
          << "Unsupported format for native input mode: "
          << VideoPixelFormatToString(config.input_format);
      return false;
    }
    native_input_mode_ = true;
#endif  // USE_OZONE
  }

  if (config.HasSpatialLayer() && !native_input_mode_) {
    MEDIA_LOG(ERROR, media_log.get())
        << "Spatial scalability is only supported for native input now";
    return false;
  }

  const SupportedProfiles& profiles = GetSupportedProfiles();
  const auto profile = find_if(profiles.begin(), profiles.end(),
                               [output_profile = config.output_profile](
                                   const SupportedProfile& profile) {
                                 return profile.profile == output_profile;
                               });
  if (profile == profiles.end()) {
    MEDIA_LOG(ERROR, media_log.get()) << "Unsupported output profile "
                                      << GetProfileName(config.output_profile);
    return false;
  }

  if (config.input_visible_size.width() > profile->max_resolution.width() ||
      config.input_visible_size.height() > profile->max_resolution.height()) {
    MEDIA_LOG(ERROR, media_log.get())
        << "Input size too big: " << config.input_visible_size.ToString()
        << ", max supported size: " << profile->max_resolution.ToString();
    return false;
  }

  // Finish remaining initialization on the encoder thread.
  encoder_task_runner_->PostTask(
      FROM_HERE, base::BindOnce(&VaapiVideoEncodeAccelerator::InitializeTask,
                                encoder_weak_this_, config));
  return true;
}

void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK_EQ(state_, kUninitialized);
  VLOGF(2);

  output_codec_ = VideoCodecProfileToVideoCodec(config.output_profile);
  DCHECK_EQ(IsConfiguredForTesting(), !!vaapi_wrapper_);
  if (!IsConfiguredForTesting()) {
    VaapiWrapper::CodecMode mode;
    switch (output_codec_) {
      case VideoCodec::kH264:
        mode = config.bitrate.mode() == Bitrate::Mode::kConstant
                   ? VaapiWrapper::kEncodeConstantBitrate
                   : VaapiWrapper::kEncodeVariableBitrate;
        break;
      case VideoCodec::kVP8:
      case VideoCodec::kVP9:
        mode = VaapiWrapper::kEncodeConstantQuantizationParameter;
        break;
      default:
        NOTIFY_ERROR(kInvalidArgumentError,
                     "Unsupported codec: " + GetCodecName(output_codec_));
        return;
    }
    vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
        mode, config.output_profile, EncryptionScheme::kUnencrypted,
        base::BindRepeating(&ReportVaapiErrorToUMA,
                            "Media.VaapiVideoEncodeAccelerator.VAAPIError"));
    if (!vaapi_wrapper_) {
      NOTIFY_ERROR(kPlatformFailureError,
                   "Failed initializing VAAPI for profile " +
                       GetProfileName(config.output_profile));
      return;
    }
  }

  DCHECK_EQ(IsConfiguredForTesting(), !!encoder_);
  // base::Unretained(this) is safe because |error_cb| is called by
  // |encoder_| and |this| outlives |encoder_|.
  auto error_cb = base::BindRepeating(
      [](VaapiVideoEncodeAccelerator* const vea) {
        vea->SetState(kError);
        vea->NotifyError(kPlatformFailureError);
      },
      base::Unretained(this));

  VaapiVideoEncoderDelegate::Config ave_config{};
  switch (output_codec_) {
    case VideoCodec::kH264:
      if (!IsConfiguredForTesting()) {
        encoder_ = std::make_unique<H264VaapiVideoEncoderDelegate>(
            vaapi_wrapper_, error_cb);
        // HW encoders on Intel GPUs do not put the average QP in the
        // slice/tile header unless they work in CQP mode, and currently only
        // H264 runs in a non-CQP mode.
        if (VaapiWrapper::GetImplementationType() ==
                VAImplementation::kIntelI965 ||
            VaapiWrapper::GetImplementationType() ==
                VAImplementation::kIntelIHD) {
          encoder_info_.reports_average_qp = false;
        }
      }
      break;
    case VideoCodec::kVP8:
      if (!IsConfiguredForTesting()) {
        encoder_ = std::make_unique<VP8VaapiVideoEncoderDelegate>(
            vaapi_wrapper_, error_cb);
      }
      break;
    case VideoCodec::kVP9:
      if (!IsConfiguredForTesting()) {
        encoder_ = std::make_unique<VP9VaapiVideoEncoderDelegate>(
            vaapi_wrapper_, error_cb);
      }
      break;
    default:
      NOTREACHED() << "Unsupported codec type " << GetCodecName(output_codec_);
      return;
  }

  if (!vaapi_wrapper_->GetVAEncMaxNumOfRefFrames(
          config.output_profile, &ave_config.max_num_ref_frames)) {
    NOTIFY_ERROR(kPlatformFailureError,
                 "Failed getting max number of reference frames "
                 "supported by the driver");
    return;
  }
  DCHECK_GT(ave_config.max_num_ref_frames, 0u);
  if (!encoder_->Initialize(config, ave_config)) {
    NOTIFY_ERROR(kInvalidArgumentError, "Failed initializing encoder");
    return;
  }
  output_buffer_byte_size_ = encoder_->GetBitstreamBufferSize();

  visible_rect_ = gfx::Rect(config.input_visible_size);
  expected_input_coded_size_ = VideoFrame::DetermineAlignedSize(
      config.input_format, config.input_visible_size);
  DCHECK(
      expected_input_coded_size_.width() <= encoder_->GetCodedSize().width() &&
      expected_input_coded_size_.height() <=
          encoder_->GetCodedSize().height());

  // The number of required buffers is the number of required reference frames
  // + 1 for the current frame to be encoded.
  const size_t max_ref_frames = encoder_->GetMaxNumOfRefFrames();
  num_frames_in_flight_ = std::max(kMinNumFramesInFlight, max_ref_frames);
  DVLOGF(1) << "Frames in flight: " << num_frames_in_flight_;

  if (!vaapi_wrapper_->CreateContext(encoder_->GetCodedSize())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating VAContext");
    return;
  }

  child_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(&Client::RequireBitstreamBuffers, client_,
                     num_frames_in_flight_, expected_input_coded_size_,
                     output_buffer_byte_size_));

  if (config.HasSpatialLayer() || config.HasTemporalLayer()) {
    DCHECK(!config.spatial_layers.empty());
    for (size_t i = 0; i < config.spatial_layers.size(); ++i) {
      encoder_info_.fps_allocation[i] =
          GetFpsAllocation(config.spatial_layers[i].num_of_temporal_layers);
    }
  } else {
    constexpr uint8_t kFullFramerate = 255;
    encoder_info_.fps_allocation[0] = {kFullFramerate};
  }

  // Notify the client of the VideoEncoderInfo after initialization.
  child_task_runner_->PostTask(
      FROM_HERE, base::BindOnce(&Client::NotifyEncoderInfoChange, client_,
                                encoder_info_));

  SetState(kEncoding);
  base::trace_event::MemoryDumpManager::GetInstance()->RegisterDumpProvider(
      this, "media::VaapiVideoEncodeAccelerator", encoder_task_runner_);
}

void VaapiVideoEncodeAccelerator::RecycleVASurface(
    std::vector<std::unique_ptr<ScopedVASurface>>* va_surfaces,
    std::unique_ptr<ScopedVASurface> va_surface,
    VASurfaceID va_surface_id) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK(va_surface);
  DCHECK_EQ(va_surface_id, va_surface->id());
  DVLOGF(4) << "va_surface_id: " << va_surface_id;
  va_surfaces->push_back(std::move(va_surface));

  // At least one surface must be available in |available_encode_surfaces_|
  // for EncodePendingInputs() to succeed. Check here to avoid a redundant
  // EncodePendingInputs() call.
  for (const auto& surfaces : available_encode_surfaces_) {
    if (surfaces.second.empty())
      return;
  }
  if (!input_queue_.empty())
    EncodePendingInputs();
}

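// Pairs pending encode results with client-provided bitstream buffers in FIFO
// order, downloading each result into a buffer. A null result in the queue
// marks a completed flush and runs |flush_callback_|.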
void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffers() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);

  if (state_ != kEncoding)
    return;

  TRACE_EVENT2("media,gpu", "VAVEA::TryToReturnBitstreamBuffers",
               "pending encode results", pending_encode_results_.size(),
               "available bitstream buffers",
               available_bitstream_buffers_.size());
  while (!pending_encode_results_.empty()) {
    if (pending_encode_results_.front() == nullptr) {
      // A null job indicates a flush command.
      pending_encode_results_.pop();
      DVLOGF(2) << "FlushDone";
      DCHECK(flush_callback_);
      child_task_runner_->PostTask(
          FROM_HERE, base::BindOnce(std::move(flush_callback_), true));
      continue;
    }

    if (available_bitstream_buffers_.empty())
      return;

    auto buffer = std::move(available_bitstream_buffers_.front());
    available_bitstream_buffers_.pop();
    auto encode_result = std::move(pending_encode_results_.front());
    pending_encode_results_.pop();

    ReturnBitstreamBuffer(std::move(encode_result), std::move(buffer));
  }
}

void VaapiVideoEncodeAccelerator::ReturnBitstreamBuffer(
    std::unique_ptr<EncodeResult> encode_result,
    std::unique_ptr<BitstreamBufferRef> buffer) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);

  uint8_t* target_data = buffer->shm_mapping.GetMemoryAs<uint8_t>();
  size_t data_size = 0;
  // vaSyncSurface() is not necessary because GetEncodedChunkSize() has been
  // called in VaapiVideoEncoderDelegate::Encode().
  if (!vaapi_wrapper_->DownloadFromVABuffer(
          encode_result->coded_buffer_id(), /*sync_surface_id=*/absl::nullopt,
          target_data, buffer->shm_region.GetSize(), &data_size)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
    return;
  }

  auto metadata = encode_result->metadata();
  DCHECK_NE(metadata.payload_size_bytes, 0u);
  encode_result.reset();

  DVLOGF(4) << "Returning bitstream buffer "
            << (metadata.key_frame ? "(keyframe)" : "")
            << " id: " << buffer->id << " size: " << data_size;

  child_task_runner_->PostTask(
      FROM_HERE, base::BindOnce(&Client::BitstreamBufferReady, client_,
                                buffer->id, std::move(metadata)));
}

void VaapiVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
                                         bool force_keyframe) {
  DVLOGF(4) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
            << " force_keyframe: " << force_keyframe;
  DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);

  encoder_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(&VaapiVideoEncodeAccelerator::EncodeTask,
                     encoder_weak_this_, std::move(frame), force_keyframe));
}

void VaapiVideoEncodeAccelerator::EncodeTask(scoped_refptr<VideoFrame> frame,
                                             bool force_keyframe) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK_NE(state_, kUninitialized);

  // |frame| can be nullptr to indicate a flush.
  if (frame) {
    const bool is_expected_storage_type =
        native_input_mode_
            ? frame->storage_type() == VideoFrame::STORAGE_GPU_MEMORY_BUFFER
            : frame->IsMappable();
    if (!is_expected_storage_type) {
      NOTIFY_ERROR(kInvalidArgumentError,
                   "Unexpected storage: " << VideoFrame::StorageTypeToString(
                       frame->storage_type()));
      return;
    }
  }

  input_queue_.push(
      std::make_unique<InputFrameRef>(std::move(frame), force_keyframe));
  EncodePendingInputs();
}

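// Imports the GpuMemoryBuffer-backed |frame| as a source VASurface and, for
// every entry in |spatial_layer_resolutions|, produces an input surface
// (blitting through the VPP when cropping/scaling is needed) and a newly
// acquired reconstructed surface of the same size.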
bool VaapiVideoEncodeAccelerator::CreateSurfacesForGpuMemoryBufferEncoding(
    const VideoFrame& frame,
    const std::vector<gfx::Size>& spatial_layer_resolutions,
    std::vector<scoped_refptr<VASurface>>* input_surfaces,
    std::vector<scoped_refptr<VASurface>>* reconstructed_surfaces) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK(native_input_mode_);
  DCHECK_EQ(frame.storage_type(), VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
  TRACE_EVENT0("media,gpu", "VAVEA::CreateSurfacesForGpuMemoryBuffer");

  if (frame.format() != PIXEL_FORMAT_NV12) {
    NOTIFY_ERROR(
        kPlatformFailureError,
        "Expected NV12, got: " << VideoPixelFormatToString(frame.format()));
    return false;
  }

  scoped_refptr<VASurface> source_surface;
  {
    TRACE_EVENT0("media,gpu", "VAVEA::ImportGpuMemoryBufferToVASurface");
    // Create a VASurface from the GpuMemoryBuffer-backed VideoFrame.
    scoped_refptr<gfx::NativePixmap> pixmap = CreateNativePixmapDmaBuf(&frame);
    if (!pixmap) {
      NOTIFY_ERROR(kPlatformFailureError,
                   "Failed to create NativePixmap from VideoFrame");
      return false;
    }
    source_surface =
        vaapi_wrapper_->CreateVASurfaceForPixmap(std::move(pixmap));
    if (!source_surface) {
      NOTIFY_ERROR(kPlatformFailureError, "Failed to create VASurface");
      return false;
    }
  }

  // Create input and reconstructed surfaces.
  TRACE_EVENT1("media,gpu", "VAVEA::ConstructSurfaces", "layers",
               spatial_layer_resolutions.size());
  input_surfaces->reserve(spatial_layer_resolutions.size());
  reconstructed_surfaces->reserve(spatial_layer_resolutions.size());
  for (const gfx::Size& encode_size : spatial_layer_resolutions) {
    const bool engage_vpp = frame.visible_rect() != gfx::Rect(encode_size);
    // Crop and scale the input surface into a surface whose size is
    // |encode_size|. The size of a reconstructed surface is also
    // |encode_size|.
    if (engage_vpp) {
      auto blit_surface = ExecuteBlitSurface(
          *source_surface, frame.visible_rect(), encode_size);
      if (!blit_surface)
        return false;

      input_surfaces->push_back(std::move(blit_surface));
    } else {
      input_surfaces->emplace_back(source_surface);
    }

    reconstructed_surfaces->emplace_back(CreateEncodeSurface(encode_size));
    if (!reconstructed_surfaces->back())
      return false;
  }

  DCHECK(!base::Contains(*input_surfaces, nullptr));
  DCHECK(!base::Contains(*reconstructed_surfaces, nullptr));
  return true;
}

bool VaapiVideoEncodeAccelerator::CreateSurfacesForShmemEncoding(
    const VideoFrame& frame,
    scoped_refptr<VASurface>* input_surface,
    scoped_refptr<VASurface>* reconstructed_surface) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK(!native_input_mode_);
  DCHECK(frame.IsMappable());
  TRACE_EVENT0("media,gpu", "VAVEA::CreateSurfacesForShmem");

  if (expected_input_coded_size_ != frame.coded_size()) {
    // In non-zero copy mode, the coded size of the incoming frame should be
    // the same as the one we requested through
    // Client::RequireBitstreamBuffers().
    NOTIFY_ERROR(kPlatformFailureError,
                 "Expected frame coded size: "
                     << expected_input_coded_size_.ToString()
                     << ", but got: " << frame.coded_size().ToString());
    return false;
  }

  DCHECK(visible_rect_.origin().IsOrigin());
  if (visible_rect_ != frame.visible_rect()) {
    // In non-zero copy mode, the client is responsible for scaling and
    // cropping.
    NOTIFY_ERROR(kPlatformFailureError,
                 "Expected frame visible rectangle: "
                     << visible_rect_.ToString()
                     << ", but got: " << frame.visible_rect().ToString());
    return false;
  }

  const gfx::Size& encode_size = encoder_->GetCodedSize();
  *input_surface =
      CreateInputSurface(*vaapi_wrapper_, encode_size,
                         {VaapiWrapper::SurfaceUsageHint::kVideoEncoder});
  if (!*input_surface) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed to create input surface");
    return false;
  }

  if (!vaapi_wrapper_->UploadVideoFrameToSurface(frame, (*input_surface)->id(),
                                                 (*input_surface)->size())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed to upload frame");
    return false;
  }

  *reconstructed_surface = CreateEncodeSurface(encode_size);
  return !!*reconstructed_surface;
}

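// Returns an input VASurface of |encode_size|. One ScopedVASurface per
// resolution is cached in |input_surfaces_| and reused for subsequent frames;
// the returned VASurface wraps the cached surface without owning it.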
scoped_refptr<VASurface> VaapiVideoEncodeAccelerator::CreateInputSurface(
    VaapiWrapper& vaapi_wrapper,
    const gfx::Size& encode_size,
    const std::vector<VaapiWrapper::SurfaceUsageHint>& surface_usage_hints) {
  if (!base::Contains(input_surfaces_, encode_size)) {
    auto surface =
        CreateScopedSurface(vaapi_wrapper, encode_size, surface_usage_hints);
    if (!surface) {
      NOTIFY_ERROR(kPlatformFailureError, "Failed to create surface");
      return nullptr;
    }
    input_surfaces_[encode_size] = std::move(surface);
  }

  const ScopedVASurface& surface = *input_surfaces_[encode_size];
  return base::MakeRefCounted<VASurface>(surface.id(), surface.size(),
                                         surface.format(), base::DoNothing());
}

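// Returns a reconstructed-frame surface of |encode_size| from the
// per-resolution pool in |available_encode_surfaces_|, allocating a new one
// only while fewer than |num_frames_in_flight_| + 1 surfaces exist for that
// size. The surface is recycled back into the pool via RecycleVASurface()
// when the returned VASurface is released.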
scoped_refptr<VASurface> VaapiVideoEncodeAccelerator::CreateEncodeSurface(
    const gfx::Size& encode_size) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);

  const size_t max_allocated_surfaces = num_frames_in_flight_ + 1;
  const bool no_surfaces_available =
      !base::Contains(available_encode_surfaces_, encode_size) ||
      available_encode_surfaces_[encode_size].empty();
  if (no_surfaces_available &&
      encode_surfaces_count_[encode_size] >= max_allocated_surfaces) {
    DVLOGF(4) << "Not enough surfaces available";
    return nullptr;
  }

  if (no_surfaces_available) {
    auto surface =
        CreateScopedSurface(*vaapi_wrapper_, encode_size,
                            {VaapiWrapper::SurfaceUsageHint::kVideoEncoder});
    if (!surface) {
      NOTIFY_ERROR(kPlatformFailureError, "Failed creating surfaces");
      return nullptr;
    }
    available_encode_surfaces_[encode_size].push_back(std::move(surface));
    encode_surfaces_count_[encode_size] += 1;
  }

  auto& surfaces = available_encode_surfaces_[encode_size];
  auto scoped_va_surface = std::move(surfaces.back());
  surfaces.pop_back();

  const VASurfaceID id = scoped_va_surface->id();
  const gfx::Size& size = scoped_va_surface->size();
  const unsigned int format = scoped_va_surface->format();
  VASurface::ReleaseCB release_cb = BindToCurrentLoop(base::BindOnce(
      &VaapiVideoEncodeAccelerator::RecycleVASurface, encoder_weak_this_,
      &surfaces, std::move(scoped_va_surface)));

  return base::MakeRefCounted<VASurface>(id, size, format,
                                         std::move(release_cb));
}

scoped_refptr<VaapiWrapper>
VaapiVideoEncodeAccelerator::CreateVppVaapiWrapper() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK(!vpp_vaapi_wrapper_);
  auto vpp_vaapi_wrapper = VaapiWrapper::Create(
      VaapiWrapper::kVideoProcess, VAProfileNone,
      EncryptionScheme::kUnencrypted,
      base::BindRepeating(&ReportVaapiErrorToUMA,
                          "Media.VaapiVideoEncodeAccelerator.Vpp.VAAPIError"));
  if (!vpp_vaapi_wrapper) {
    NOTIFY_ERROR(kPlatformFailureError,
                 "Failed to initialize VppVaapiWrapper");
    return nullptr;
  }
  // VA context for VPP is not associated with a specific resolution.
  if (!vpp_vaapi_wrapper->CreateContext(gfx::Size())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating Context for VPP");
    return nullptr;
  }

  return vpp_vaapi_wrapper;
}

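// Crops and scales |source_visible_rect| of |source_surface| into a cached
// input surface of |encode_size| using the VPP, lazily creating the VPP
// VaapiWrapper on first use.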
scoped_refptr<VASurface> VaapiVideoEncodeAccelerator::ExecuteBlitSurface(
    const VASurface& source_surface,
    const gfx::Rect source_visible_rect,
    const gfx::Size& encode_size) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  if (!vpp_vaapi_wrapper_) {
    vpp_vaapi_wrapper_ = CreateVppVaapiWrapper();
    if (!vpp_vaapi_wrapper_) {
      NOTIFY_ERROR(kPlatformFailureError, "Failed to create Vpp");
      return nullptr;
    }
  }

  auto blit_surface =
      CreateInputSurface(*vpp_vaapi_wrapper_, encode_size,
                         {VaapiWrapper::SurfaceUsageHint::kVideoProcessWrite,
                          VaapiWrapper::SurfaceUsageHint::kVideoEncoder});
  if (!blit_surface)
    return nullptr;

  DCHECK(vpp_vaapi_wrapper_);
  if (!vpp_vaapi_wrapper_->BlitSurface(source_surface, *blit_surface,
                                       source_visible_rect,
                                       gfx::Rect(encode_size))) {
    NOTIFY_ERROR(kPlatformFailureError,
                 "Failed BlitSurface on frame size: "
                     << source_surface.size().ToString()
                     << " (visible rect: " << source_visible_rect.ToString()
                     << ") -> encode size: " << encode_size.ToString());
    return nullptr;
  }

  return blit_surface;
}

std::unique_ptr<VaapiVideoEncoderDelegate::EncodeJob>
VaapiVideoEncodeAccelerator::CreateEncodeJob(
    bool force_keyframe,
    base::TimeDelta frame_timestamp,
    const VASurface& input_surface,
    scoped_refptr<VASurface> reconstructed_surface) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK_NE(input_surface.id(), VA_INVALID_ID);
  DCHECK(!input_surface.size().IsEmpty());
  DCHECK(reconstructed_surface);

  std::unique_ptr<ScopedVABuffer> coded_buffer;
  {
    TRACE_EVENT1("media,gpu", "VAVEA::CreateVABuffer", "buffer size",
                 output_buffer_byte_size_);
    coded_buffer = vaapi_wrapper_->CreateVABuffer(VAEncCodedBufferType,
                                                  output_buffer_byte_size_);
    if (!coded_buffer) {
      NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
      return nullptr;
    }
  }

  scoped_refptr<CodecPicture> picture;
  switch (output_codec_) {
    case VideoCodec::kH264:
      picture = new VaapiH264Picture(std::move(reconstructed_surface));
      break;
    case VideoCodec::kVP8:
      picture = new VaapiVP8Picture(std::move(reconstructed_surface));
      break;
    case VideoCodec::kVP9:
      picture = new VaapiVP9Picture(std::move(reconstructed_surface));
      break;
    default:
      return nullptr;
  }

  return std::make_unique<EncodeJob>(force_keyframe, frame_timestamp,
                                     input_surface.id(), std::move(picture),
                                     std::move(coded_buffer));
}

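// Drains |input_queue_| while in the kEncoding state: for each frame it
// creates input and reconstructed surfaces (one pair per spatial layer),
// builds and submits EncodeJobs to |encoder_|, queues the resulting
// EncodeResults in |pending_encode_results_| and finally tries to hand them
// back to the client in bitstream buffers. A null queue entry is a flush
// marker and is forwarded as a null encode result.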
void VaapiVideoEncodeAccelerator::EncodePendingInputs() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DVLOGF(4);

  std::vector<gfx::Size> spatial_layer_resolutions =
      encoder_->GetSVCLayerResolutions();
  if (spatial_layer_resolutions.empty()) {
    VLOGF(1) << " Failed to get SVC layer resolutions";
    return;
  }

  TRACE_EVENT1("media,gpu", "VAVEA::EncodePendingInputs",
               "pending input frames", input_queue_.size());
  while (state_ == kEncoding && !input_queue_.empty()) {
    std::unique_ptr<InputFrameRef>& input_frame = input_queue_.front();
    if (!input_frame) {
      // If this is a flush (null) frame, don't create/submit a new encode
      // result for it, but forward a null result to the
      // |pending_encode_results_| queue.
      pending_encode_results_.push(nullptr);
      input_queue_.pop();
      TryToReturnBitstreamBuffers();
      continue;
    }

    TRACE_EVENT0("media,gpu",
                 "VAVEA::EncodeOneInputFrameAndReturnEncodedChunks");
    const size_t num_spatial_layers = spatial_layer_resolutions.size();
    std::vector<scoped_refptr<VASurface>> input_surfaces;
    std::vector<scoped_refptr<VASurface>> reconstructed_surfaces;
    if (native_input_mode_) {
      if (!CreateSurfacesForGpuMemoryBufferEncoding(
              *input_frame->frame, spatial_layer_resolutions, &input_surfaces,
              &reconstructed_surfaces)) {
        return;
      }
    } else {
      DCHECK_EQ(num_spatial_layers, 1u);
      input_surfaces.resize(1u);
      reconstructed_surfaces.resize(1u);
      if (!CreateSurfacesForShmemEncoding(*input_frame->frame,
                                          &input_surfaces[0],
                                          &reconstructed_surfaces[0])) {
        return;
      }
    }

    // Encode the different spatial layers for |input_frame|.
    std::vector<std::unique_ptr<EncodeJob>> jobs;
    for (size_t spatial_idx = 0; spatial_idx < num_spatial_layers;
         ++spatial_idx) {
      std::unique_ptr<EncodeJob> job;
      TRACE_EVENT0("media,gpu", "VAVEA::FromCreateEncodeJobToReturn");
      const bool force_key =
          (spatial_idx == 0 ? input_frame->force_keyframe : false);
      job = CreateEncodeJob(force_key, input_frame->frame->timestamp(),
                            *input_surfaces[spatial_idx],
                            std::move(reconstructed_surfaces[spatial_idx]));
      if (!job)
        return;

      jobs.emplace_back(std::move(job));
    }

    for (auto& job : jobs) {
      TRACE_EVENT0("media,gpu", "VAVEA::Encode");
      if (!encoder_->Encode(*job)) {
        NOTIFY_ERROR(kPlatformFailureError, "Failed encoding job");
        return;
      }
    }

    // Invalidate |input_frame| here; this notifies the client that
    // |input_frame->frame| can be reused for future encoding.
    // If the frame is copied (|native_input_mode_| == false), releasing
    // |input_frame| is clearly safe. If the frame is imported
    // (|native_input_mode_| == true), any write to the frame is blocked on
    // DMA_BUF_IOCTL_SYNC because the VA-API driver protects the buffer
    // through the DRM driver until encoding is complete, that is, until
    // vaMapBuffer() on the coded buffer returns.
    input_frame.reset();
    input_queue_.pop();

    for (auto&& job : jobs) {
      TRACE_EVENT0("media,gpu", "VAVEA::GetEncodeResult");
      std::unique_ptr<EncodeResult> result =
          encoder_->GetEncodeResult(std::move(job));
      if (!result) {
        NOTIFY_ERROR(kPlatformFailureError, "Failed getting encode result");
        return;
      }
      pending_encode_results_.push(std::move(result));
    }
    TryToReturnBitstreamBuffers();
  }
}

void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    BitstreamBuffer buffer) {
  DVLOGF(4) << "id: " << buffer.id();
  DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small");
    return;
  }

  auto buffer_ref =
      std::make_unique<BitstreamBufferRef>(buffer.id(), std::move(buffer));
  encoder_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(
          &VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
          encoder_weak_this_, std::move(buffer_ref)));
}

void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    std::unique_ptr<BitstreamBufferRef> buffer_ref) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK_NE(state_, kUninitialized);

  buffer_ref->shm_mapping = buffer_ref->shm_region.MapAt(
      buffer_ref->offset, buffer_ref->shm_region.GetSize());
  if (!buffer_ref->shm_mapping.IsValid()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
    return;
  }

  available_bitstream_buffers_.push(std::move(buffer_ref));
  TryToReturnBitstreamBuffers();
}

void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
    const Bitrate& bitrate,
    uint32_t framerate) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);

  VideoBitrateAllocation allocation;
  allocation.SetBitrate(0, 0, bitrate.target_bps());
  encoder_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(
          &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          encoder_weak_this_, allocation, framerate));
}

void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
    const VideoBitrateAllocation& bitrate_allocation,
    uint32_t framerate) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);

  encoder_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(
          &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          encoder_weak_this_, bitrate_allocation, framerate));
}

void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    VideoBitrateAllocation bitrate_allocation,
    uint32_t framerate) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
  DCHECK_NE(state_, kUninitialized);

  if (!encoder_->UpdateRates(bitrate_allocation, framerate)) {
    VLOGF(1) << "Failed to update rates to " << bitrate_allocation.GetSumBps()
             << " " << framerate;
  }
}

void VaapiVideoEncodeAccelerator::Flush(FlushCallback flush_callback) {
  DVLOGF(2);
  DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);

  encoder_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(&VaapiVideoEncodeAccelerator::FlushTask,
                     encoder_weak_this_, std::move(flush_callback)));
}

void VaapiVideoEncodeAccelerator::FlushTask(FlushCallback flush_callback) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);

  if (flush_callback_) {
    NOTIFY_ERROR(kIllegalStateError, "There is a pending flush");
    child_task_runner_->PostTask(
        FROM_HERE, base::BindOnce(std::move(flush_callback), false));
    return;
  }
  flush_callback_ = std::move(flush_callback);

  // Insert a null job to indicate a flush command.
  input_queue_.push(std::unique_ptr<InputFrameRef>(nullptr));
  EncodePendingInputs();
}

bool VaapiVideoEncodeAccelerator::IsFlushSupported() {
  return true;
}

void VaapiVideoEncodeAccelerator::Destroy() {
  DVLOGF(2);
  DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);

  child_weak_this_factory_.InvalidateWeakPtrs();

  // We're destroying; cancel all callbacks.
  if (client_ptr_factory_)
    client_ptr_factory_->InvalidateWeakPtrs();

  encoder_task_runner_->PostTask(
      FROM_HERE, base::BindOnce(&VaapiVideoEncodeAccelerator::DestroyTask,
                                encoder_weak_this_));
}

void VaapiVideoEncodeAccelerator::DestroyTask() {
  VLOGF(2);
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);

  encoder_weak_this_factory_.InvalidateWeakPtrs();

  if (flush_callback_) {
    child_task_runner_->PostTask(
        FROM_HERE, base::BindOnce(std::move(flush_callback_), false));
  }

  // Clean up members that are to be accessed on the encoder thread only.
  // Call DestroyContext() explicitly to make sure it's destroyed before
  // VA surfaces.
  if (vaapi_wrapper_)
    vaapi_wrapper_->DestroyContext();

  available_encode_surfaces_.clear();

  if (vpp_vaapi_wrapper_)
    vpp_vaapi_wrapper_->DestroyContext();
  input_surfaces_.clear();

  while (!available_bitstream_buffers_.empty())
    available_bitstream_buffers_.pop();
  while (!input_queue_.empty())
    input_queue_.pop();

  // Note: the ScopedVABuffers owned by the EncodeResults must be destroyed
  // before |vaapi_wrapper_| so that the VADisplay is still valid on the
  // ScopedVABuffer's destruction.
  DCHECK(vaapi_wrapper_ || pending_encode_results_.empty());
  while (!pending_encode_results_.empty())
    pending_encode_results_.pop();

  encoder_ = nullptr;
  delete this;
}

void VaapiVideoEncodeAccelerator::SetState(State state) {
  // Only touch state on encoder thread, unless it's not running.
  if (!encoder_task_runner_->BelongsToCurrentThread()) {
    encoder_task_runner_->PostTask(
        FROM_HERE, base::BindOnce(&VaapiVideoEncodeAccelerator::SetState,
                                  encoder_weak_this_, state));
    return;
  }

  VLOGF(2) << "setting state to: " << state;
  state_ = state;
}

void VaapiVideoEncodeAccelerator::NotifyError(Error error) {
  if (!child_task_runner_->BelongsToCurrentThread()) {
    child_task_runner_->PostTask(
        FROM_HERE, base::BindOnce(&VaapiVideoEncodeAccelerator::NotifyError,
                                  child_weak_this_, error));
    return;
  }

  if (client_) {
    client_->NotifyError(error);
    client_ptr_factory_->InvalidateWeakPtrs();
  }
}

bool VaapiVideoEncodeAccelerator::OnMemoryDump(
    const base::trace_event::MemoryDumpArgs& args,
    base::trace_event::ProcessMemoryDump* pmd) {
  using base::trace_event::MemoryAllocatorDump;
  DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);

  auto dump_name = base::StringPrintf("gpu/vaapi/encoder/0x%" PRIxPTR,
                                      reinterpret_cast<uintptr_t>(this));
  MemoryAllocatorDump* dump = pmd->CreateAllocatorDump(dump_name);
  dump->AddString("encoder native input mode", "",
                  native_input_mode_ ? "true" : "false");

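  // A YUV 4:2:0 frame occupies 12 bits (1.5 bytes) per pixel: 8 bits of luma
  // plus 8 bits of chroma shared across each 2x2 block of pixels.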
  constexpr double kNumBytesPerPixelYUV420 = 12.0 / 8;
  for (const auto& surface : encode_surfaces_count_) {
    const gfx::Size& resolution = surface.first;
    const size_t count = surface.second;
    MemoryAllocatorDump* sub_dump = pmd->CreateAllocatorDump(
        dump_name + "/encode surface/" + resolution.ToString());
    sub_dump->AddScalar(MemoryAllocatorDump::kNameObjectCount,
                        MemoryAllocatorDump::kUnitsObjects,
                        static_cast<uint64_t>(count));

    const uint64_t surfaces_packed_size = static_cast<uint64_t>(
        resolution.GetArea() * kNumBytesPerPixelYUV420 * count);
    sub_dump->AddScalar(MemoryAllocatorDump::kNameSize,
                        MemoryAllocatorDump::kUnitsBytes,
                        surfaces_packed_size);
  }

  for (const auto& surface : input_surfaces_) {
    const gfx::Size& resolution = surface.first;
    MemoryAllocatorDump* sub_dump = pmd->CreateAllocatorDump(
        dump_name + "/input surface/" + resolution.ToString());
    const uint64_t surfaces_packed_size =
        static_cast<uint64_t>(resolution.GetArea() * kNumBytesPerPixelYUV420);
    sub_dump->AddScalar(MemoryAllocatorDump::kNameSize,
                        MemoryAllocatorDump::kUnitsBytes,
                        surfaces_packed_size);
  }

  return true;
}

}  // namespace media