libyuv_image_processor_backend.cc

// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/gpu/chromeos/libyuv_image_processor_backend.h"

#include <sys/mman.h>

#include "base/containers/contains.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/numerics/checked_math.h"
#include "base/trace_event/trace_event.h"
#include "media/gpu/chromeos/fourcc.h"
#include "media/gpu/macros.h"
#include "media/gpu/video_frame_mapper.h"
#include "media/gpu/video_frame_mapper_factory.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/convert_from_argb.h"
#include "third_party/libyuv/include/libyuv/rotate.h"
#include "third_party/libyuv/include/libyuv/scale.h"

namespace media {

namespace {

// TODO(https://bugs.chromium.org/p/libyuv/issues/detail?id=840): Remove
// this once libyuv implements NV12Rotate(), and use libyuv::NV12Rotate()
// instead.
int NV12Rotate(uint8_t* tmp_buffer,
               const uint8_t* src_y,
               int src_stride_y,
               const uint8_t* src_uv,
               int src_stride_uv,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_uv,
               int dst_stride_uv,
               int width,
               int height,
               VideoRotation relative_rotation) {
  libyuv::RotationModeEnum rotation = libyuv::kRotate0;
  int tmp_width = width;
  int tmp_height = height;
  switch (relative_rotation) {
    case VIDEO_ROTATION_0:
      NOTREACHED() << "Unexpected rotation: " << rotation;
      return -1;
    case VIDEO_ROTATION_90:
      rotation = libyuv::kRotate90;
      tmp_width = height;
      tmp_height = width;
      break;
    case VIDEO_ROTATION_180:
      rotation = libyuv::kRotate180;
      tmp_width = width;
      tmp_height = height;
      break;
    case VIDEO_ROTATION_270:
      rotation = libyuv::kRotate270;
      tmp_width = height;
      tmp_height = width;
      break;
  }

  // Compute the dimensions of the temporary U and V planes, rounding up.
  int tmp_uv_width = 0;
  int tmp_uv_height = 0;
  if (!(base::CheckAdd<int>(tmp_width, 1) / 2).AssignIfValid(&tmp_uv_width) ||
      !(base::CheckAdd<int>(tmp_height, 1) / 2)
           .AssignIfValid(&tmp_uv_height)) {
    VLOGF(1) << "Overflow occurred for " << tmp_width << "x" << tmp_height;
    return -1;
  }
  uint8_t* const tmp_u = tmp_buffer;
  uint8_t* const tmp_v = tmp_u + tmp_uv_width * tmp_uv_height;

  // Rotate the NV12 planes to I420.
  int ret = libyuv::NV12ToI420Rotate(
      src_y, src_stride_y, src_uv, src_stride_uv, dst_y, dst_stride_y, tmp_u,
      tmp_uv_width, tmp_v, tmp_uv_width, width, height, rotation);
  if (ret != 0)
    return ret;

  // Merge the temporary U and V planes into the destination UV plane.
  libyuv::MergeUVPlane(tmp_u, tmp_uv_width, tmp_v, tmp_uv_width, dst_uv,
                       dst_stride_uv, tmp_uv_width, tmp_uv_height);
  return 0;
}
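
// Indicates whether a conversion is handled by libyuv directly, requires an
// intermediate ("pivot") frame in I420 or NV12 format, or is not supported.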
enum class SupportResult {
  Supported,
  SupportedWithI420Pivot,
  SupportedWithNV12Pivot,
  Unsupported,
};

enum class Transform {
  kConversion,
  kScaling,
  kRotation,
};
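
// Supported (input fourcc, output fourcc, transform) combinations. Entries
// marked SupportedWith*Pivot go through the intermediate frame allocated in
// Create().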
static constexpr struct {
  uint32_t input;
  uint32_t output;
  Transform transform;
  SupportResult support_result;
} kSupportFormatConversionArray[] = {
#define CONV(in, out, trans, result) \
  {Fourcc::in, Fourcc::out, Transform::trans, SupportResult::result}
    // Conversion.
    CONV(NV12, NV12, kConversion, Supported),
    CONV(YM16, NV12, kConversion, Supported),
    CONV(YM16, YU12, kConversion, Supported),
    CONV(YU12, NV12, kConversion, Supported),
    CONV(YU12, YU12, kConversion, Supported),
    CONV(YUYV, NV12, kConversion, Supported),
    CONV(YUYV, YU12, kConversion, Supported),
    CONV(YV12, NV12, kConversion, Supported),
    CONV(MM21, NV12, kConversion, Supported),
    // Scaling.
    CONV(NV12, NV12, kScaling, Supported),
    CONV(YM16, NV12, kScaling, SupportedWithNV12Pivot),
    CONV(YM16, YU12, kScaling, SupportedWithI420Pivot),
    CONV(YU12, YU12, kScaling, Supported),
    CONV(YUYV, NV12, kScaling, SupportedWithNV12Pivot),
    CONV(YUYV, YU12, kScaling, SupportedWithI420Pivot),
    // Rotating.
    CONV(NV12, NV12, kRotation, SupportedWithI420Pivot),
#undef CONV
};

SupportResult IsConversionSupported(Fourcc input_fourcc,
                                    Fourcc output_fourcc,
                                    Transform transform) {
  const auto single_input_fourcc = input_fourcc.ToSinglePlanar();
  const auto single_output_fourcc = output_fourcc.ToSinglePlanar();
  if (!single_input_fourcc || !single_output_fourcc)
    return SupportResult::Unsupported;

  // Compare fourccs by their single-planar equivalents because LibyuvIP can
  // process either single- or multi-planar formats.
  for (const auto& conv : kSupportFormatConversionArray) {
    const auto conv_input_fourcc = Fourcc::FromUint32(conv.input);
    const auto conv_output_fourcc = Fourcc::FromUint32(conv.output);
    if (!conv_input_fourcc || !conv_output_fourcc)
      continue;
    const auto single_conv_input_fourcc = conv_input_fourcc->ToSinglePlanar();
    const auto single_conv_output_fourcc =
        conv_output_fourcc->ToSinglePlanar();
    if (!single_conv_input_fourcc || !single_conv_output_fourcc)
      continue;
    if (single_input_fourcc == single_conv_input_fourcc &&
        single_output_fourcc == single_conv_output_fourcc &&
        transform == conv.transform) {
      return conv.support_result;
    }
  }
  return SupportResult::Unsupported;
}

}  // namespace
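
// Creates a LibYUVImageProcessorBackend for the given input/output
// configuration, or returns nullptr if the configuration is not supported.
// Only OutputMode::IMPORT is accepted; DMABUF- and GpuMemoryBuffer-backed
// storage types are usable only when a VideoFrameMapper can be created for
// them.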
// static
std::unique_ptr<ImageProcessorBackend> LibYUVImageProcessorBackend::Create(
    const PortConfig& input_config,
    const PortConfig& output_config,
    OutputMode output_mode,
    VideoRotation relative_rotation,
    ErrorCB error_cb,
    scoped_refptr<base::SequencedTaskRunner> backend_task_runner) {
  VLOGF(2);
  DCHECK_EQ(output_mode, OutputMode::IMPORT)
      << "Only OutputMode::IMPORT supported";

  std::unique_ptr<VideoFrameMapper> input_frame_mapper;
  // LibYUVImageProcessorBackend supports only memory-based video frames for
  // input.
  VideoFrame::StorageType input_storage_type = VideoFrame::STORAGE_UNKNOWN;
  for (auto input_type : input_config.preferred_storage_types) {
    if (input_type == VideoFrame::STORAGE_DMABUFS ||
        input_type == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
      input_frame_mapper = VideoFrameMapperFactory::CreateMapper(
          input_config.fourcc.ToVideoPixelFormat(), input_type, true);
      if (input_frame_mapper) {
        input_storage_type = input_type;
        break;
      }
    }
    if (VideoFrame::IsStorageTypeMappable(input_type)) {
      input_storage_type = input_type;
      break;
    }
  }
  if (input_storage_type == VideoFrame::STORAGE_UNKNOWN) {
    VLOGF(2) << "Unsupported input storage type";
    return nullptr;
  }

  std::unique_ptr<VideoFrameMapper> output_frame_mapper;
  VideoFrame::StorageType output_storage_type = VideoFrame::STORAGE_UNKNOWN;
  for (auto output_type : output_config.preferred_storage_types) {
    if (output_type == VideoFrame::STORAGE_DMABUFS ||
        output_type == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
      output_frame_mapper = VideoFrameMapperFactory::CreateMapper(
          output_config.fourcc.ToVideoPixelFormat(), output_type, true);
      if (output_frame_mapper) {
        output_storage_type = output_type;
        break;
      }
    }
    if (VideoFrame::IsStorageTypeMappable(output_type)) {
      output_storage_type = output_type;
      break;
    }
  }
  if (output_storage_type == VideoFrame::STORAGE_UNKNOWN) {
    VLOGF(2) << "Unsupported output storage type";
    return nullptr;
  }

  const gfx::Size& input_size = input_config.visible_rect.size();
  const gfx::Size& output_size = output_config.visible_rect.size();
  Transform transform = Transform::kConversion;
  if (relative_rotation != VIDEO_ROTATION_0) {
    transform = Transform::kRotation;
    bool size_mismatch = false;
    if (relative_rotation == VIDEO_ROTATION_180) {
      size_mismatch = input_size.width() != output_size.width() ||
                      input_size.height() != output_size.height();
    } else {  // For VIDEO_ROTATION_90 and 270.
      size_mismatch = input_size.width() != output_size.height() ||
                      input_size.height() != output_size.width();
    }
    if (size_mismatch) {
      VLOGF(1) << "input and output resolution mismatch: "
               << "input=" << input_size.ToString()
               << ", output=" << output_size.ToString();
      return nullptr;
    }
  } else if (input_size.width() != output_size.width() ||
             input_size.height() != output_size.height()) {
    transform = Transform::kScaling;
  }

  SupportResult res = IsConversionSupported(input_config.fourcc,
                                            output_config.fourcc, transform);
  if (res == SupportResult::Unsupported) {
    VLOGF(2) << "Conversion from " << input_size.ToString() << "/"
             << input_config.fourcc.ToString() << " to "
             << output_size.ToString() << "/"
             << output_config.fourcc.ToString() << " with rotation "
             << relative_rotation << " is not supported";
    return nullptr;
  }

  if (input_config.fourcc.ToVideoPixelFormat() ==
      output_config.fourcc.ToVideoPixelFormat()) {
    if (output_config.visible_rect.origin() != gfx::Point(0, 0)) {
      VLOGF(2) << "Output visible rectangle is not (0, 0), "
               << "output_config.visible_rect="
               << output_config.visible_rect.ToString();
      return nullptr;
    }
  }

  scoped_refptr<VideoFrame> intermediate_frame;
  if (res == SupportResult::SupportedWithI420Pivot ||
      res == SupportResult::SupportedWithNV12Pivot) {
    intermediate_frame = VideoFrame::CreateFrame(
        res == SupportResult::SupportedWithI420Pivot ? PIXEL_FORMAT_I420
                                                     : PIXEL_FORMAT_NV12,
        input_config.visible_rect.size(),
        gfx::Rect(input_config.visible_rect.size()),
        input_config.visible_rect.size(), base::TimeDelta());
    if (!intermediate_frame) {
      VLOGF(1) << "Failed to create intermediate frame";
      return nullptr;
    }
  }

  auto processor =
      base::WrapUnique<ImageProcessorBackend>(new LibYUVImageProcessorBackend(
          std::move(input_frame_mapper), std::move(output_frame_mapper),
          std::move(intermediate_frame),
          PortConfig(input_config.fourcc, input_config.size,
                     input_config.planes, input_config.visible_rect,
                     {input_storage_type}),
          PortConfig(output_config.fourcc, output_config.size,
                     output_config.planes, output_config.visible_rect,
                     {output_storage_type}),
          OutputMode::IMPORT, relative_rotation, std::move(error_cb),
          std::move(backend_task_runner)));
  VLOGF(2) << "LibYUVImageProcessorBackend created for converting from "
           << input_config.ToString() << " to " << output_config.ToString();
  return processor;
}

LibYUVImageProcessorBackend::LibYUVImageProcessorBackend(
    std::unique_ptr<VideoFrameMapper> input_frame_mapper,
    std::unique_ptr<VideoFrameMapper> output_frame_mapper,
    scoped_refptr<VideoFrame> intermediate_frame,
    const PortConfig& input_config,
    const PortConfig& output_config,
    OutputMode output_mode,
    VideoRotation relative_rotation,
    ErrorCB error_cb,
    scoped_refptr<base::SequencedTaskRunner> backend_task_runner)
    : ImageProcessorBackend(input_config,
                            output_config,
                            output_mode,
                            relative_rotation,
                            std::move(error_cb),
                            std::move(backend_task_runner)),
      input_frame_mapper_(std::move(input_frame_mapper)),
      output_frame_mapper_(std::move(output_frame_mapper)),
      intermediate_frame_(std::move(intermediate_frame)) {}

LibYUVImageProcessorBackend::~LibYUVImageProcessorBackend() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(backend_sequence_checker_);
}
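
// Maps DMABUF- or GpuMemoryBuffer-backed frames into CPU-accessible memory,
// performs the conversion, and copies the timestamp and color space from the
// input frame to the output frame before running |cb|. Any failure is
// reported through |error_cb_|.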
void LibYUVImageProcessorBackend::Process(
    scoped_refptr<VideoFrame> input_frame,
    scoped_refptr<VideoFrame> output_frame,
    FrameReadyCB cb) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(backend_sequence_checker_);
  DVLOGF(4);
  if (input_frame->storage_type() == VideoFrame::STORAGE_DMABUFS ||
      input_frame->storage_type() == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
    DCHECK_NE(input_frame_mapper_.get(), nullptr);
    int mapping_permissions = PROT_READ;
    if (input_frame->storage_type() != VideoFrame::STORAGE_DMABUFS)
      mapping_permissions |= PROT_WRITE;
    input_frame =
        input_frame_mapper_->Map(std::move(input_frame), mapping_permissions);
    if (!input_frame) {
      VLOGF(1) << "Failed to map input VideoFrame";
      error_cb_.Run();
      return;
    }
  }

  // We don't replace |output_frame| with a mapped frame, because
  // |output_frame| is the output of ImageProcessor.
  scoped_refptr<VideoFrame> mapped_frame = output_frame;
  if (output_frame->storage_type() == VideoFrame::STORAGE_DMABUFS ||
      output_frame->storage_type() == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
    DCHECK_NE(output_frame_mapper_.get(), nullptr);
    mapped_frame =
        output_frame_mapper_->Map(output_frame, PROT_READ | PROT_WRITE);
    if (!mapped_frame) {
      VLOGF(1) << "Failed to map output VideoFrame";
      error_cb_.Run();
      return;
    }
  }

  int res;
  {
    TRACE_EVENT0("media", "LibYUVImageProcessorBackend::Process");
    SCOPED_UMA_HISTOGRAM_TIMER("LibYUVImageProcessorBackend::Process");
    res = DoConversion(input_frame.get(), mapped_frame.get());
  }
  if (res != 0) {
    VLOGF(1) << "libyuv returns non-zero code: " << res;
    error_cb_.Run();
    return;
  }
  output_frame->set_timestamp(input_frame->timestamp());
  output_frame->set_color_space(input_frame->ColorSpace());

  std::move(cb).Run(std::move(output_frame));
}
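
// Dispatches to the appropriate libyuv routine based on the input and output
// pixel formats. The helper macros below expand to the visible-region plane
// pointer and stride argument lists that the libyuv calls expect.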
int LibYUVImageProcessorBackend::DoConversion(const VideoFrame* const input,
                                              VideoFrame* const output) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(backend_sequence_checker_);

#define Y_U_V_DATA(fr)                                                        \
  fr->visible_data(VideoFrame::kYPlane), fr->stride(VideoFrame::kYPlane),     \
      fr->visible_data(VideoFrame::kUPlane), fr->stride(VideoFrame::kUPlane), \
      fr->visible_data(VideoFrame::kVPlane), fr->stride(VideoFrame::kVPlane)

#define Y_V_U_DATA(fr)                                                        \
  fr->visible_data(VideoFrame::kYPlane), fr->stride(VideoFrame::kYPlane),     \
      fr->visible_data(VideoFrame::kVPlane), fr->stride(VideoFrame::kVPlane), \
      fr->visible_data(VideoFrame::kUPlane), fr->stride(VideoFrame::kUPlane)

#define Y_UV_DATA(fr)                                                     \
  fr->visible_data(VideoFrame::kYPlane), fr->stride(VideoFrame::kYPlane), \
      fr->visible_data(VideoFrame::kUVPlane), fr->stride(VideoFrame::kUVPlane)

#define YUY2_DATA(fr) \
  fr->visible_data(VideoFrame::kYPlane), fr->stride(VideoFrame::kYPlane)

#define LIBYUV_FUNC(func, i, o)                      \
  libyuv::func(i, o, output->visible_rect().width(), \
               output->visible_rect().height())

  if (output->format() == PIXEL_FORMAT_NV12) {
    switch (input->format()) {
      case PIXEL_FORMAT_I420:
        return LIBYUV_FUNC(I420ToNV12, Y_U_V_DATA(input), Y_UV_DATA(output));
      case PIXEL_FORMAT_YV12:
        return LIBYUV_FUNC(I420ToNV12, Y_V_U_DATA(input), Y_UV_DATA(output));
      case PIXEL_FORMAT_NV12:
        // MM21 mode.
        if (input_config_.fourcc == Fourcc(Fourcc::MM21))
          return LIBYUV_FUNC(MM21ToNV12, Y_UV_DATA(input), Y_UV_DATA(output));
        // Rotation mode.
        if (relative_rotation_ != VIDEO_ROTATION_0) {
          // The size of |tmp_buffer| passed to NV12Rotate() should be
          // 2 * ceil(|output_visible_rect_.width()| / 2) *
          //     ceil(|output_visible_rect_.height()| / 2),
          // which is used to store the temporary U and V planes of I420 data.
          // Although |intermediate_frame_->data(0)| is much larger than the
          // required size, we use the frame to simplify the code.
          return NV12Rotate(intermediate_frame_->data(0), Y_UV_DATA(input),
                            Y_UV_DATA(output), input->visible_rect().width(),
                            input->visible_rect().height(),
                            relative_rotation_);
        }
        // Scaling mode.
        return libyuv::NV12Scale(
            Y_UV_DATA(input), input->visible_rect().width(),
            input->visible_rect().height(), Y_UV_DATA(output),
            output->visible_rect().width(), output->visible_rect().height(),
            libyuv::kFilterBilinear);
      case PIXEL_FORMAT_YUY2:
        if (input->visible_rect().size() == output->visible_rect().size()) {
          return LIBYUV_FUNC(YUY2ToNV12, YUY2_DATA(input), Y_UV_DATA(output));
        } else {
          DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_NV12);
          int ret = libyuv::YUY2ToNV12(
              YUY2_DATA(input), Y_UV_DATA(intermediate_frame_),
              intermediate_frame_->visible_rect().width(),
              intermediate_frame_->visible_rect().height());
          if (ret != 0)
            return ret;
          return libyuv::NV12Scale(
              Y_UV_DATA(intermediate_frame_),
              intermediate_frame_->visible_rect().width(),
              intermediate_frame_->visible_rect().height(), Y_UV_DATA(output),
              output->visible_rect().width(), output->visible_rect().height(),
              libyuv::kFilterBilinear);
        }
      case PIXEL_FORMAT_I422:
        if (input->visible_rect().size() == output->visible_rect().size()) {
          return LIBYUV_FUNC(I422ToNV21, Y_V_U_DATA(input), Y_UV_DATA(output));
        } else {
          DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_NV12);
          int ret = libyuv::I422ToNV21(
              Y_V_U_DATA(input), Y_UV_DATA(intermediate_frame_),
              intermediate_frame_->visible_rect().width(),
              intermediate_frame_->visible_rect().height());
          if (ret != 0)
            return ret;
          return libyuv::NV12Scale(
              Y_UV_DATA(intermediate_frame_),
              intermediate_frame_->visible_rect().width(),
              intermediate_frame_->visible_rect().height(), Y_UV_DATA(output),
              output->visible_rect().width(), output->visible_rect().height(),
              libyuv::kFilterBilinear);
        }
      default:
        VLOGF(1) << "Unexpected input format: " << input->format();
        return -1;
    }
  }

  if (output->format() == PIXEL_FORMAT_I420) {
    switch (input->format()) {
      case PIXEL_FORMAT_I420:
        return libyuv::I420Scale(
            Y_U_V_DATA(input), input->visible_rect().width(),
            input->visible_rect().height(), Y_U_V_DATA(output),
            output->visible_rect().width(), output->visible_rect().height(),
            libyuv::kFilterBilinear);
      case PIXEL_FORMAT_YUY2:
        if (input->visible_rect().size() == output->visible_rect().size()) {
          return LIBYUV_FUNC(YUY2ToI420, YUY2_DATA(input), Y_U_V_DATA(output));
        } else {
          DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_I420);
          int ret = libyuv::YUY2ToI420(
              YUY2_DATA(input), Y_U_V_DATA(intermediate_frame_),
              intermediate_frame_->visible_rect().width(),
              intermediate_frame_->visible_rect().height());
          if (ret != 0)
            return ret;
          return libyuv::I420Scale(
              Y_U_V_DATA(intermediate_frame_),
              intermediate_frame_->visible_rect().width(),
              intermediate_frame_->visible_rect().height(),
              Y_U_V_DATA(output), output->visible_rect().width(),
              output->visible_rect().height(), libyuv::kFilterBilinear);
        }
      case PIXEL_FORMAT_I422:
        if (input->visible_rect().size() == output->visible_rect().size()) {
          return LIBYUV_FUNC(I422ToI420, Y_U_V_DATA(input),
                             Y_U_V_DATA(output));
        } else {
          DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_I420);
          int ret = libyuv::I422ToI420(
              Y_U_V_DATA(input), Y_U_V_DATA(intermediate_frame_),
              intermediate_frame_->visible_rect().width(),
              intermediate_frame_->visible_rect().height());
          if (ret != 0)
            return ret;
          return libyuv::I420Scale(
              Y_U_V_DATA(intermediate_frame_),
              intermediate_frame_->visible_rect().width(),
              intermediate_frame_->visible_rect().height(),
              Y_U_V_DATA(output), output->visible_rect().width(),
              output->visible_rect().height(), libyuv::kFilterBilinear);
        }
      default:
        VLOGF(1) << "Unexpected input format: " << input->format();
        return -1;
    }
  }

#undef Y_U_V_DATA
#undef Y_V_U_DATA
#undef Y_UV_DATA
#undef YUY2_DATA
#undef LIBYUV_FUNC

  VLOGF(1) << "Unexpected output format: " << output->format();
  return -1;
}

bool LibYUVImageProcessorBackend::needs_linear_output_buffers() const {
  return true;
}

std::vector<Fourcc> LibYUVImageProcessorBackend::GetSupportedOutputFormats(
    Fourcc input_format) {
  std::vector<Fourcc> supported_formats;
  for (const auto& conv : kSupportFormatConversionArray) {
    if (Fourcc::FromUint32(conv.input) &&
        *Fourcc::FromUint32(conv.input) == input_format &&
        Fourcc::FromUint32(conv.output))
      supported_formats.emplace_back(*Fourcc::FromUint32(conv.output));
  }
  return supported_formats;
}

bool LibYUVImageProcessorBackend::supports_incoherent_buffers() const {
  return true;
}

}  // namespace media