malidp_planes.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * (C) COPYRIGHT 2016 ARM Limited. All rights reserved.
 * Author: Liviu Dudau <Liviu.Dudau@arm.com>
 *
 * ARM Mali DP plane manipulation routines.
 */

#include <linux/iommu.h>
#include <linux/platform_device.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_drv.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_print.h>

#include "malidp_hw.h"
#include "malidp_drv.h"
/* Layer specific register offsets */
#define MALIDP_LAYER_FORMAT 0x000
#define LAYER_FORMAT_MASK 0x3f
#define MALIDP_LAYER_CONTROL 0x004
#define LAYER_ENABLE (1 << 0)
#define LAYER_FLOWCFG_MASK 7
#define LAYER_FLOWCFG(x) (((x) & LAYER_FLOWCFG_MASK) << 1)
#define LAYER_FLOWCFG_SCALE_SE 3
#define LAYER_ROT_OFFSET 8
#define LAYER_H_FLIP (1 << 10)
#define LAYER_V_FLIP (1 << 11)
#define LAYER_ROT_MASK (0xf << 8)
#define LAYER_COMP_MASK (0x3 << 12)
#define LAYER_COMP_PIXEL (0x3 << 12)
#define LAYER_COMP_PLANE (0x2 << 12)
#define LAYER_PMUL_ENABLE (0x1 << 14)
#define LAYER_ALPHA_OFFSET (16)
#define LAYER_ALPHA_MASK (0xff)
#define LAYER_ALPHA(x) (((x) & LAYER_ALPHA_MASK) << LAYER_ALPHA_OFFSET)
#define MALIDP_LAYER_COMPOSE 0x008
#define MALIDP_LAYER_SIZE 0x00c
#define LAYER_H_VAL(x) (((x) & 0x1fff) << 0)
#define LAYER_V_VAL(x) (((x) & 0x1fff) << 16)
#define MALIDP_LAYER_COMP_SIZE 0x010
#define MALIDP_LAYER_OFFSET 0x014
#define MALIDP550_LS_ENABLE 0x01c
#define MALIDP550_LS_R1_IN_SIZE 0x020

#define MODIFIERS_COUNT_MAX 15

/*
 * This 4-entry look-up-table is used to determine the full 8-bit alpha value
 * for formats with 1- or 2-bit alpha channels.
 * We set it to give 100%/0% opacity for 1-bit formats and 100%/66%/33%/0%
 * opacity for 2-bit formats.
 */
#define MALIDP_ALPHA_LUT 0xffaa5500
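/*
 * Byte-wise, 0xffaa5500 encodes the LUT entries {0x00, 0x55, 0xaa, 0xff} for
 * 2-bit alpha values 0..3, i.e. roughly 0%, 33%, 66% and 100% opacity as
 * described above; 1-bit alpha formats end up using only the 0x00/0xff
 * entries (presumably by replicating the single alpha bit).
 */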
/* page sizes the MMU prefetcher can support */
#define MALIDP_MMU_PREFETCH_PARTIAL_PGSIZES (SZ_4K | SZ_64K)
#define MALIDP_MMU_PREFETCH_FULL_PGSIZES (SZ_1M | SZ_2M)

/* readahead for partial-frame prefetch */
#define MALIDP_MMU_PREFETCH_READAHEAD 8
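/*
 * The readahead value is the number of outstanding requests programmed into
 * the partial-prefetch control field; see malidp_calc_mmu_control_value(),
 * which feeds it into MALIDP_MMU_CTRL_PP_NUM_REQ().
 */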
static void malidp_de_plane_destroy(struct drm_plane *plane)
{
    struct malidp_plane *mp = to_malidp_plane(plane);

    drm_plane_cleanup(plane);
    kfree(mp);
}

/*
 * Replicate what the default ->reset hook does: free the state pointer and
 * allocate a new empty object. We just need enough space to store
 * a malidp_plane_state instead of a drm_plane_state.
 */
static void malidp_plane_reset(struct drm_plane *plane)
{
    struct malidp_plane_state *state = to_malidp_plane_state(plane->state);

    if (state)
        __drm_atomic_helper_plane_destroy_state(&state->base);
    kfree(state);
    plane->state = NULL;
    state = kzalloc(sizeof(*state), GFP_KERNEL);
    if (state)
        __drm_atomic_helper_plane_reset(plane, &state->base);
}

static struct
drm_plane_state *malidp_duplicate_plane_state(struct drm_plane *plane)
{
    struct malidp_plane_state *state, *m_state;

    if (!plane->state)
        return NULL;

    state = kmalloc(sizeof(*state), GFP_KERNEL);
    if (!state)
        return NULL;

    m_state = to_malidp_plane_state(plane->state);
    __drm_atomic_helper_plane_duplicate_state(plane, &state->base);
    state->rotmem_size = m_state->rotmem_size;
    state->format = m_state->format;
    state->n_planes = m_state->n_planes;
    state->mmu_prefetch_mode = m_state->mmu_prefetch_mode;
    state->mmu_prefetch_pgsize = m_state->mmu_prefetch_pgsize;

    return &state->base;
}

static void malidp_destroy_plane_state(struct drm_plane *plane,
                                       struct drm_plane_state *state)
{
    struct malidp_plane_state *m_state = to_malidp_plane_state(state);

    __drm_atomic_helper_plane_destroy_state(state);
    kfree(m_state);
}
static const char * const prefetch_mode_names[] = {
    [MALIDP_PREFETCH_MODE_NONE] = "MMU_PREFETCH_NONE",
    [MALIDP_PREFETCH_MODE_PARTIAL] = "MMU_PREFETCH_PARTIAL",
    [MALIDP_PREFETCH_MODE_FULL] = "MMU_PREFETCH_FULL",
};

static void malidp_plane_atomic_print_state(struct drm_printer *p,
                                            const struct drm_plane_state *state)
{
    struct malidp_plane_state *ms = to_malidp_plane_state(state);

    drm_printf(p, "\trotmem_size=%u\n", ms->rotmem_size);
    drm_printf(p, "\tformat_id=%u\n", ms->format);
    drm_printf(p, "\tn_planes=%u\n", ms->n_planes);
    drm_printf(p, "\tmmu_prefetch_mode=%s\n",
               prefetch_mode_names[ms->mmu_prefetch_mode]);
    drm_printf(p, "\tmmu_prefetch_pgsize=%d\n", ms->mmu_prefetch_pgsize);
}
bool malidp_format_mod_supported(struct drm_device *drm,
                                 u32 format, u64 modifier)
{
    const struct drm_format_info *info;
    const u64 *modifiers;
    struct malidp_drm *malidp = drm->dev_private;
    const struct malidp_hw_regmap *map = &malidp->dev->hw->map;

    if (WARN_ON(modifier == DRM_FORMAT_MOD_INVALID))
        return false;

    /* Some pixel formats are supported without any modifier */
    if (modifier == DRM_FORMAT_MOD_LINEAR) {
        /*
         * However, some pixel formats are supported only with
         * modifiers, so reject a linear layout for those.
         */
        return !malidp_hw_format_is_afbc_only(format);
    }

    if ((modifier >> 56) != DRM_FORMAT_MOD_VENDOR_ARM) {
        DRM_ERROR("Unknown modifier (not Arm)\n");
        return false;
    }

    if (modifier &
        ~DRM_FORMAT_MOD_ARM_AFBC(AFBC_MOD_VALID_BITS)) {
        DRM_DEBUG_KMS("Unsupported modifiers\n");
        return false;
    }

    modifiers = malidp_format_modifiers;

    /* SPLIT buffers must use SPARSE layout */
    if (WARN_ON_ONCE((modifier & AFBC_SPLIT) && !(modifier & AFBC_SPARSE)))
        return false;

    /* CBR only applies to YUV formats, where YTR should always be 0 */
    if (WARN_ON_ONCE((modifier & AFBC_CBR) && (modifier & AFBC_YTR)))
        return false;

    while (*modifiers != DRM_FORMAT_MOD_INVALID) {
        if (*modifiers == modifier)
            break;

        modifiers++;
    }

    /* return false if the modifier was not found */
    if (*modifiers == DRM_FORMAT_MOD_INVALID) {
        DRM_DEBUG_KMS("Unsupported modifier\n");
        return false;
    }

    info = drm_format_info(format);

    if (info->num_planes != 1) {
        DRM_DEBUG_KMS("AFBC buffers expect one plane\n");
        return false;
    }

    if (malidp_hw_format_is_linear_only(format) == true) {
        DRM_DEBUG_KMS("Given format (0x%x) is supported in linear mode only\n",
                      format);
        return false;
    }

    /*
     * RGB formats need to provide the YTR modifier and YUV formats should
     * not provide the YTR modifier.
     */
    if (!(info->is_yuv) != !!(modifier & AFBC_FORMAT_MOD_YTR)) {
        DRM_DEBUG_KMS("AFBC_FORMAT_MOD_YTR is %s for %s formats\n",
                      info->is_yuv ? "disallowed" : "mandatory",
                      info->is_yuv ? "YUV" : "RGB");
        return false;
    }

    if (modifier & AFBC_SPLIT) {
        if (!info->is_yuv) {
            if (info->cpp[0] <= 2) {
                DRM_DEBUG_KMS("RGB formats <= 16bpp are not supported with SPLIT\n");
                return false;
            }
        }

        if ((info->hsub != 1) || (info->vsub != 1)) {
            if (!(format == DRM_FORMAT_YUV420_10BIT &&
                  (map->features & MALIDP_DEVICE_AFBC_YUV_420_10_SUPPORT_SPLIT))) {
                DRM_DEBUG_KMS("Formats which are sub-sampled should never be split\n");
                return false;
            }
        }
    }

    if (modifier & AFBC_CBR) {
        if ((info->hsub == 1) || (info->vsub == 1)) {
            DRM_DEBUG_KMS("Formats which are not sub-sampled should not have CBR set\n");
            return false;
        }
    }

    return true;
}

static bool malidp_format_mod_supported_per_plane(struct drm_plane *plane,
                                                  u32 format, u64 modifier)
{
    return malidp_format_mod_supported(plane->dev, format, modifier);
}
static const struct drm_plane_funcs malidp_de_plane_funcs = {
    .update_plane = drm_atomic_helper_update_plane,
    .disable_plane = drm_atomic_helper_disable_plane,
    .destroy = malidp_de_plane_destroy,
    .reset = malidp_plane_reset,
    .atomic_duplicate_state = malidp_duplicate_plane_state,
    .atomic_destroy_state = malidp_destroy_plane_state,
    .atomic_print_state = malidp_plane_atomic_print_state,
    .format_mod_supported = malidp_format_mod_supported_per_plane,
};
static int malidp_se_check_scaling(struct malidp_plane *mp,
                                   struct drm_plane_state *state)
{
    struct drm_crtc_state *crtc_state =
        drm_atomic_get_existing_crtc_state(state->state, state->crtc);
    struct malidp_crtc_state *mc;
    u32 src_w, src_h;
    int ret;

    if (!crtc_state)
        return -EINVAL;

    mc = to_malidp_crtc_state(crtc_state);

    ret = drm_atomic_helper_check_plane_state(state, crtc_state,
                                              0, INT_MAX, true, true);
    if (ret)
        return ret;

    if (state->rotation & MALIDP_ROTATED_MASK) {
        src_w = state->src_h >> 16;
        src_h = state->src_w >> 16;
    } else {
        src_w = state->src_w >> 16;
        src_h = state->src_h >> 16;
    }

    if ((state->crtc_w == src_w) && (state->crtc_h == src_h)) {
        /* Scaling not necessary for this plane. */
        mc->scaled_planes_mask &= ~(mp->layer->id);
        return 0;
    }

    if (mp->layer->id & (DE_SMART | DE_GRAPHICS2))
        return -EINVAL;

    mc->scaled_planes_mask |= mp->layer->id;
    /* Defer scaling requirements calculation to the crtc check. */
    return 0;
}
static u32 malidp_get_pgsize_bitmap(struct malidp_plane *mp)
{
    u32 pgsize_bitmap = 0;

    if (iommu_present(&platform_bus_type)) {
        struct iommu_domain *mmu_dom =
            iommu_get_domain_for_dev(mp->base.dev->dev);

        if (mmu_dom)
            pgsize_bitmap = mmu_dom->pgsize_bitmap;
    }

    return pgsize_bitmap;
}
/*
 * Check if the framebuffer is entirely made up of pages at least pgsize in
 * size. Only a heuristic: assumes that each scatterlist entry has been aligned
 * to the largest page size smaller than its length and that the MMU maps to
 * the largest page size possible.
 */
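/*
 * For example (illustrative only): with pgsize == SZ_1M, a framebuffer whose
 * scatterlist consists of four 1 MiB entries passes the check, while a
 * framebuffer containing any entry shorter than 1 MiB makes it return false.
 */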
static bool malidp_check_pages_threshold(struct malidp_plane_state *ms,
                                         u32 pgsize)
{
    int i;

    for (i = 0; i < ms->n_planes; i++) {
        struct drm_gem_object *obj;
        struct drm_gem_cma_object *cma_obj;
        struct sg_table *sgt;
        struct scatterlist *sgl;

        obj = drm_gem_fb_get_obj(ms->base.fb, i);
        cma_obj = to_drm_gem_cma_obj(obj);

        if (cma_obj->sgt)
            sgt = cma_obj->sgt;
        else
            sgt = obj->funcs->get_sg_table(obj);

        if (!sgt)
            return false;

        sgl = sgt->sgl;

        while (sgl) {
            if (sgl->length < pgsize) {
                if (!cma_obj->sgt)
                    kfree(sgt);
                return false;
            }

            sgl = sg_next(sgl);
        }

        if (!cma_obj->sgt)
            kfree(sgt);
    }

    return true;
}
/*
 * Check if it is possible to enable partial-frame MMU prefetch given the
 * current format, AFBC state and rotation.
 */
static bool malidp_partial_prefetch_supported(u32 format, u64 modifier,
                                              unsigned int rotation)
{
    bool afbc, sparse;

    /* rotation and horizontal flip not supported for partial prefetch */
    if (rotation & (DRM_MODE_ROTATE_90 | DRM_MODE_ROTATE_180 |
                    DRM_MODE_ROTATE_270 | DRM_MODE_REFLECT_X))
        return false;

    afbc = modifier & DRM_FORMAT_MOD_ARM_AFBC(0);
    sparse = modifier & AFBC_FORMAT_MOD_SPARSE;

    switch (format) {
    case DRM_FORMAT_ARGB2101010:
    case DRM_FORMAT_RGBA1010102:
    case DRM_FORMAT_BGRA1010102:
    case DRM_FORMAT_ARGB8888:
    case DRM_FORMAT_RGBA8888:
    case DRM_FORMAT_BGRA8888:
    case DRM_FORMAT_XRGB8888:
    case DRM_FORMAT_XBGR8888:
    case DRM_FORMAT_RGBX8888:
    case DRM_FORMAT_BGRX8888:
    case DRM_FORMAT_RGB888:
    case DRM_FORMAT_RGBA5551:
    case DRM_FORMAT_RGB565:
        /* always supported */
        return true;

    case DRM_FORMAT_ABGR2101010:
    case DRM_FORMAT_ABGR8888:
    case DRM_FORMAT_ABGR1555:
    case DRM_FORMAT_BGR565:
        /* supported, but if AFBC then must be sparse mode */
        return (!afbc) || (afbc && sparse);

    case DRM_FORMAT_BGR888:
        /* supported, but not for AFBC */
        return !afbc;

    case DRM_FORMAT_YUYV:
    case DRM_FORMAT_UYVY:
    case DRM_FORMAT_NV12:
    case DRM_FORMAT_YUV420:
        /* not supported */
        return false;

    default:
        return false;
    }
}
/*
 * Select the preferred MMU prefetch mode. Full-frame prefetch is preferred as
 * long as the framebuffer is all large pages. Otherwise partial-frame prefetch
 * is selected as long as it is supported for the current format. The selected
 * page size for prefetch is returned in pgsize_bitmap.
 */
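/*
 * In practice this means trying full-frame prefetch with the largest
 * candidate page size first (SZ_2M before SZ_1M, via __fls()), then falling
 * back to partial-frame prefetch with the smallest supported page size
 * (via __ffs()); if neither applies, prefetch is left disabled.
 */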
static enum mmu_prefetch_mode malidp_mmu_prefetch_select_mode
        (struct malidp_plane_state *ms, u32 *pgsize_bitmap)
{
    u32 pgsizes;

    /* get the full-frame prefetch page size(s) supported by the MMU */
    pgsizes = *pgsize_bitmap & MALIDP_MMU_PREFETCH_FULL_PGSIZES;

    while (pgsizes) {
        u32 largest_pgsize = 1 << __fls(pgsizes);

        if (malidp_check_pages_threshold(ms, largest_pgsize)) {
            *pgsize_bitmap = largest_pgsize;
            return MALIDP_PREFETCH_MODE_FULL;
        }

        pgsizes -= largest_pgsize;
    }

    /* get the partial-frame prefetch page size(s) supported by the MMU */
    pgsizes = *pgsize_bitmap & MALIDP_MMU_PREFETCH_PARTIAL_PGSIZES;

    if (malidp_partial_prefetch_supported(ms->base.fb->format->format,
                                          ms->base.fb->modifier,
                                          ms->base.rotation)) {
        /* partial prefetch using the smallest page size */
        *pgsize_bitmap = 1 << __ffs(pgsizes);
        return MALIDP_PREFETCH_MODE_PARTIAL;
    }

    *pgsize_bitmap = 0;
    return MALIDP_PREFETCH_MODE_NONE;
}
static u32 malidp_calc_mmu_control_value(enum mmu_prefetch_mode mode,
                                         u8 readahead, u8 n_planes, u32 pgsize)
{
    u32 mmu_ctrl = 0;

    if (mode != MALIDP_PREFETCH_MODE_NONE) {
        mmu_ctrl |= MALIDP_MMU_CTRL_EN;

        if (mode == MALIDP_PREFETCH_MODE_PARTIAL) {
            mmu_ctrl |= MALIDP_MMU_CTRL_MODE;
            mmu_ctrl |= MALIDP_MMU_CTRL_PP_NUM_REQ(readahead);
        }

        if (pgsize == SZ_64K || pgsize == SZ_2M) {
            int i;

            for (i = 0; i < n_planes; i++)
                mmu_ctrl |= MALIDP_MMU_CTRL_PX_PS(i);
        }
    }

    return mmu_ctrl;
}
static void malidp_de_prefetch_settings(struct malidp_plane *mp,
                                        struct malidp_plane_state *ms)
{
    if (!mp->layer->mmu_ctrl_offset)
        return;

    /* get the page sizes supported by the MMU */
    ms->mmu_prefetch_pgsize = malidp_get_pgsize_bitmap(mp);
    ms->mmu_prefetch_mode =
        malidp_mmu_prefetch_select_mode(ms, &ms->mmu_prefetch_pgsize);
}
static int malidp_de_plane_check(struct drm_plane *plane,
                                 struct drm_plane_state *state)
{
    struct malidp_plane *mp = to_malidp_plane(plane);
    struct malidp_plane_state *ms = to_malidp_plane_state(state);
    bool rotated = state->rotation & MALIDP_ROTATED_MASK;
    struct drm_framebuffer *fb;
    u16 pixel_alpha = state->pixel_blend_mode;
    int i, ret;
    unsigned int block_w, block_h;

    if (!state->crtc || WARN_ON(!state->fb))
        return 0;

    fb = state->fb;

    ms->format = malidp_hw_get_format_id(&mp->hwdev->hw->map,
                                         mp->layer->id, fb->format->format,
                                         !!fb->modifier);
    if (ms->format == MALIDP_INVALID_FORMAT_ID)
        return -EINVAL;

    ms->n_planes = fb->format->num_planes;
    for (i = 0; i < ms->n_planes; i++) {
        u8 alignment = malidp_hw_get_pitch_align(mp->hwdev, rotated);

        if (((fb->pitches[i] * drm_format_info_block_height(fb->format, i))
             & (alignment - 1)) && !(fb->modifier)) {
            DRM_DEBUG_KMS("Invalid pitch %u for plane %d\n",
                          fb->pitches[i], i);
            return -EINVAL;
        }
    }

    block_w = drm_format_info_block_width(fb->format, 0);
    block_h = drm_format_info_block_height(fb->format, 0);
    if (fb->width % block_w || fb->height % block_h) {
        DRM_DEBUG_KMS("Buffer width/height needs to be a multiple of tile sizes");
        return -EINVAL;
    }
    if ((state->src_x >> 16) % block_w || (state->src_y >> 16) % block_h) {
        DRM_DEBUG_KMS("Plane src_x/src_y needs to be a multiple of tile sizes");
        return -EINVAL;
    }

    if ((state->crtc_w > mp->hwdev->max_line_size) ||
        (state->crtc_h > mp->hwdev->max_line_size) ||
        (state->crtc_w < mp->hwdev->min_line_size) ||
        (state->crtc_h < mp->hwdev->min_line_size))
        return -EINVAL;

    /*
     * DP550/650 video layers can accept 3 plane formats only if
     * fb->pitches[1] == fb->pitches[2] since they don't have a
     * third plane stride register.
     */
    if (ms->n_planes == 3 &&
        !(mp->hwdev->hw->features & MALIDP_DEVICE_LV_HAS_3_STRIDES) &&
        (state->fb->pitches[1] != state->fb->pitches[2]))
        return -EINVAL;

    ret = malidp_se_check_scaling(mp, state);
    if (ret)
        return ret;

    /* validate the rotation constraints for each layer */
    if (state->rotation != DRM_MODE_ROTATE_0) {
        if (mp->layer->rot == ROTATE_NONE)
            return -EINVAL;
        if ((mp->layer->rot == ROTATE_COMPRESSED) && !(fb->modifier))
            return -EINVAL;
        /*
         * packed RGB888 / BGR888 can't be rotated or flipped
         * unless they are stored in a compressed way
         */
        if ((fb->format->format == DRM_FORMAT_RGB888 ||
             fb->format->format == DRM_FORMAT_BGR888) && !(fb->modifier))
            return -EINVAL;
    }

    /* SMART layer does not support AFBC */
    if (mp->layer->id == DE_SMART && fb->modifier) {
        DRM_ERROR("AFBC framebuffer not supported in SMART layer");
        return -EINVAL;
    }

    ms->rotmem_size = 0;
    if (state->rotation & MALIDP_ROTATED_MASK) {
        int val;

        val = mp->hwdev->hw->rotmem_required(mp->hwdev, state->crtc_w,
                                             state->crtc_h,
                                             fb->format->format,
                                             !!(fb->modifier));
        if (val < 0)
            return val;

        ms->rotmem_size = val;
    }

    /* HW can't support plane + pixel blending */
    if ((state->alpha != DRM_BLEND_ALPHA_OPAQUE) &&
        (pixel_alpha != DRM_MODE_BLEND_PIXEL_NONE) &&
        fb->format->has_alpha)
        return -EINVAL;

    malidp_de_prefetch_settings(mp, ms);

    return 0;
}
static void malidp_de_set_plane_pitches(struct malidp_plane *mp,
                                        int num_planes, unsigned int pitches[3])
{
    int i;
    int num_strides = num_planes;

    if (!mp->layer->stride_offset)
        return;

    if (num_planes == 3)
        num_strides = (mp->hwdev->hw->features &
                       MALIDP_DEVICE_LV_HAS_3_STRIDES) ? 3 : 2;

    /*
     * The drm convention for pitch is that it needs to cover width * cpp,
     * but our hardware wants the pitch/stride to cover all rows included
     * in a tile.
     */
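    /*
     * For formats with a 1x1 block size (the common, non-tiled case),
     * drm_format_info_block_height() returns 1, so the value written below
     * is simply the DRM pitch; only block-based formats get the multiplied
     * value.
     */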
    for (i = 0; i < num_strides; ++i) {
        unsigned int block_h =
            drm_format_info_block_height(mp->base.state->fb->format, i);

        malidp_hw_write(mp->hwdev, pitches[i] * block_h,
                        mp->layer->base +
                        mp->layer->stride_offset + i * 4);
    }
}
static const s16
malidp_yuv2rgb_coeffs[][DRM_COLOR_RANGE_MAX][MALIDP_COLORADJ_NUM_COEFFS] = {
    [DRM_COLOR_YCBCR_BT601][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
        1192,    0, 1634,
        1192, -401, -832,
        1192, 2066,    0,
          64,  512,  512
    },
    [DRM_COLOR_YCBCR_BT601][DRM_COLOR_YCBCR_FULL_RANGE] = {
        1024,    0, 1436,
        1024, -352, -731,
        1024, 1815,    0,
           0,  512,  512
    },
    [DRM_COLOR_YCBCR_BT709][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
        1192,    0, 1836,
        1192, -218, -546,
        1192, 2163,    0,
          64,  512,  512
    },
    [DRM_COLOR_YCBCR_BT709][DRM_COLOR_YCBCR_FULL_RANGE] = {
        1024,    0, 1613,
        1024, -192, -479,
        1024, 1900,    0,
           0,  512,  512
    },
    [DRM_COLOR_YCBCR_BT2020][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
        1024,    0, 1476,
        1024, -165, -572,
        1024, 1884,    0,
           0,  512,  512
    },
    [DRM_COLOR_YCBCR_BT2020][DRM_COLOR_YCBCR_FULL_RANGE] = {
        1024,    0, 1510,
        1024, -168, -585,
        1024, 1927,    0,
           0,  512,  512
    }
};
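/*
 * Each 3x3 block above is a YCbCr->RGB matrix in a signed fixed-point
 * representation where 1024 stands for 1.0 (e.g. 1192/1024 ~= 1.164, the
 * usual limited-range luma expansion), and the final row presumably holds
 * the Y/Cb/Cr input offsets (64, 512, 512 for limited range; 0, 512, 512
 * for full range).
 */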
static void malidp_de_set_color_encoding(struct malidp_plane *plane,
                                         enum drm_color_encoding enc,
                                         enum drm_color_range range)
{
    unsigned int i;

    for (i = 0; i < MALIDP_COLORADJ_NUM_COEFFS; i++) {
        /* coefficients are signed, two's complement values */
        malidp_hw_write(plane->hwdev, malidp_yuv2rgb_coeffs[enc][range][i],
                        plane->layer->base + plane->layer->yuv2rgb_offset +
                        i * 4);
    }
}
static void malidp_de_set_mmu_control(struct malidp_plane *mp,
                                      struct malidp_plane_state *ms)
{
    u32 mmu_ctrl;

    /* check hardware supports MMU prefetch */
    if (!mp->layer->mmu_ctrl_offset)
        return;

    mmu_ctrl = malidp_calc_mmu_control_value(ms->mmu_prefetch_mode,
                                             MALIDP_MMU_PREFETCH_READAHEAD,
                                             ms->n_planes,
                                             ms->mmu_prefetch_pgsize);

    malidp_hw_write(mp->hwdev, mmu_ctrl,
                    mp->layer->base + mp->layer->mmu_ctrl_offset);
}
static void malidp_set_plane_base_addr(struct drm_framebuffer *fb,
                                       struct malidp_plane *mp,
                                       int plane_index)
{
    dma_addr_t paddr;
    u16 ptr;
    struct drm_plane *plane = &mp->base;
    bool afbc = fb->modifier ? true : false;

    ptr = mp->layer->ptr + (plane_index << 4);

    /*
     * drm_fb_cma_get_gem_addr() alters the physical base address of the
     * framebuffer as per the plane's src_x, src_y co-ordinates (i.e. to
     * take care of source cropping).
     * For AFBC, this is not needed as the cropping is handled by the
     * _AD_CROP_H and _AD_CROP_V registers.
     */
    if (!afbc) {
        paddr = drm_fb_cma_get_gem_addr(fb, plane->state,
                                        plane_index);
    } else {
        struct drm_gem_cma_object *obj;

        obj = drm_fb_cma_get_gem_obj(fb, plane_index);

        if (WARN_ON(!obj))
            return;

        paddr = obj->paddr;
    }

    malidp_hw_write(mp->hwdev, lower_32_bits(paddr), ptr);
    malidp_hw_write(mp->hwdev, upper_32_bits(paddr), ptr + 4);
}
static void malidp_de_set_plane_afbc(struct drm_plane *plane)
{
    struct malidp_plane *mp;
    u32 src_w, src_h, val = 0, src_x, src_y;
    struct drm_framebuffer *fb = plane->state->fb;

    mp = to_malidp_plane(plane);

    /* no afbc_decoder_offset means AFBC is not supported on this plane */
    if (!mp->layer->afbc_decoder_offset)
        return;

    if (!fb->modifier) {
        malidp_hw_write(mp->hwdev, 0, mp->layer->afbc_decoder_offset);
        return;
    }

    /* convert src values from Q16 fixed point to integer */
    src_w = plane->state->src_w >> 16;
    src_h = plane->state->src_h >> 16;
    src_x = plane->state->src_x >> 16;
    src_y = plane->state->src_y >> 16;

    val = ((fb->width - (src_x + src_w)) << MALIDP_AD_CROP_RIGHT_OFFSET) |
          src_x;
    malidp_hw_write(mp->hwdev, val,
                    mp->layer->afbc_decoder_offset + MALIDP_AD_CROP_H);

    val = ((fb->height - (src_y + src_h)) << MALIDP_AD_CROP_BOTTOM_OFFSET) |
          src_y;
    malidp_hw_write(mp->hwdev, val,
                    mp->layer->afbc_decoder_offset + MALIDP_AD_CROP_V);

    val = MALIDP_AD_EN;
    if (fb->modifier & AFBC_FORMAT_MOD_SPLIT)
        val |= MALIDP_AD_BS;
    if (fb->modifier & AFBC_FORMAT_MOD_YTR)
        val |= MALIDP_AD_YTR;

    malidp_hw_write(mp->hwdev, val, mp->layer->afbc_decoder_offset);
}
static void malidp_de_plane_update(struct drm_plane *plane,
                                   struct drm_plane_state *old_state)
{
    struct malidp_plane *mp;
    struct malidp_plane_state *ms = to_malidp_plane_state(plane->state);
    struct drm_plane_state *state = plane->state;
    u16 pixel_alpha = state->pixel_blend_mode;
    u8 plane_alpha = state->alpha >> 8;
    u32 src_w, src_h, dest_w, dest_h, val;
    int i;
    struct drm_framebuffer *fb = plane->state->fb;

    mp = to_malidp_plane(plane);

    /*
     * For an AFBC framebuffer, use the framebuffer width and height when
     * configuring the layer input size register.
     */
    if (fb->modifier) {
        src_w = fb->width;
        src_h = fb->height;
    } else {
        /* convert src values from Q16 fixed point to integer */
        src_w = state->src_w >> 16;
        src_h = state->src_h >> 16;
    }

    dest_w = state->crtc_w;
    dest_h = state->crtc_h;

    val = malidp_hw_read(mp->hwdev, mp->layer->base);
    val = (val & ~LAYER_FORMAT_MASK) | ms->format;
    malidp_hw_write(mp->hwdev, val, mp->layer->base);

    for (i = 0; i < ms->n_planes; i++)
        malidp_set_plane_base_addr(fb, mp, i);

    malidp_de_set_mmu_control(mp, ms);

    malidp_de_set_plane_pitches(mp, ms->n_planes,
                                state->fb->pitches);

    if ((plane->state->color_encoding != old_state->color_encoding) ||
        (plane->state->color_range != old_state->color_range))
        malidp_de_set_color_encoding(mp, plane->state->color_encoding,
                                     plane->state->color_range);

    malidp_hw_write(mp->hwdev, LAYER_H_VAL(src_w) | LAYER_V_VAL(src_h),
                    mp->layer->base + MALIDP_LAYER_SIZE);

    malidp_hw_write(mp->hwdev, LAYER_H_VAL(dest_w) | LAYER_V_VAL(dest_h),
                    mp->layer->base + MALIDP_LAYER_COMP_SIZE);

    malidp_hw_write(mp->hwdev, LAYER_H_VAL(state->crtc_x) |
                    LAYER_V_VAL(state->crtc_y),
                    mp->layer->base + MALIDP_LAYER_OFFSET);

    if (mp->layer->id == DE_SMART) {
        /*
         * Enable the first rectangle in the SMART layer to be
         * able to use it as a drm plane.
         */
        malidp_hw_write(mp->hwdev, 1,
                        mp->layer->base + MALIDP550_LS_ENABLE);
        malidp_hw_write(mp->hwdev,
                        LAYER_H_VAL(src_w) | LAYER_V_VAL(src_h),
                        mp->layer->base + MALIDP550_LS_R1_IN_SIZE);
    }

    malidp_de_set_plane_afbc(plane);

    /* first clear the rotation bits */
    val = malidp_hw_read(mp->hwdev, mp->layer->base + MALIDP_LAYER_CONTROL);
    val &= ~LAYER_ROT_MASK;

    /* setup the rotation and axis flip bits */
    if (state->rotation & DRM_MODE_ROTATE_MASK)
        val |= ilog2(plane->state->rotation & DRM_MODE_ROTATE_MASK) <<
               LAYER_ROT_OFFSET;
    if (state->rotation & DRM_MODE_REFLECT_X)
        val |= LAYER_H_FLIP;
    if (state->rotation & DRM_MODE_REFLECT_Y)
        val |= LAYER_V_FLIP;

    val &= ~(LAYER_COMP_MASK | LAYER_PMUL_ENABLE | LAYER_ALPHA(0xff));
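    /*
     * Composition selection: a non-opaque plane-wide alpha forces plane
     * composition; otherwise, for formats with an alpha channel, the
     * requested blend mode selects pixel composition, optionally with
     * pre-multiplied alpha.
     */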
    if (state->alpha != DRM_BLEND_ALPHA_OPAQUE) {
        val |= LAYER_COMP_PLANE;
    } else if (state->fb->format->has_alpha) {
        /* We only care about blend mode if the format has alpha */
        switch (pixel_alpha) {
        case DRM_MODE_BLEND_PREMULTI:
            val |= LAYER_COMP_PIXEL | LAYER_PMUL_ENABLE;
            break;
        case DRM_MODE_BLEND_COVERAGE:
            val |= LAYER_COMP_PIXEL;
            break;
        }
    }
    val |= LAYER_ALPHA(plane_alpha);

    val &= ~LAYER_FLOWCFG(LAYER_FLOWCFG_MASK);
    if (state->crtc) {
        struct malidp_crtc_state *m =
            to_malidp_crtc_state(state->crtc->state);

        if (m->scaler_config.scale_enable &&
            m->scaler_config.plane_src_id == mp->layer->id)
            val |= LAYER_FLOWCFG(LAYER_FLOWCFG_SCALE_SE);
    }

    /* set the 'enable layer' bit */
    val |= LAYER_ENABLE;

    malidp_hw_write(mp->hwdev, val,
                    mp->layer->base + MALIDP_LAYER_CONTROL);
}
static void malidp_de_plane_disable(struct drm_plane *plane,
                                    struct drm_plane_state *state)
{
    struct malidp_plane *mp = to_malidp_plane(plane);

    malidp_hw_clearbits(mp->hwdev,
                        LAYER_ENABLE | LAYER_FLOWCFG(LAYER_FLOWCFG_MASK),
                        mp->layer->base + MALIDP_LAYER_CONTROL);
}

static const struct drm_plane_helper_funcs malidp_de_plane_helper_funcs = {
    .atomic_check = malidp_de_plane_check,
    .atomic_update = malidp_de_plane_update,
    .atomic_disable = malidp_de_plane_disable,
};

static const uint64_t linear_only_modifiers[] = {
    DRM_FORMAT_MOD_LINEAR,
    DRM_FORMAT_MOD_INVALID
};
int malidp_de_planes_init(struct drm_device *drm)
{
    struct malidp_drm *malidp = drm->dev_private;
    const struct malidp_hw_regmap *map = &malidp->dev->hw->map;
    struct malidp_plane *plane = NULL;
    enum drm_plane_type plane_type;
    unsigned long crtcs = BIT(drm->mode_config.num_crtc);
    unsigned long flags = DRM_MODE_ROTATE_0 | DRM_MODE_ROTATE_90 |
                          DRM_MODE_ROTATE_180 | DRM_MODE_ROTATE_270 |
                          DRM_MODE_REFLECT_X | DRM_MODE_REFLECT_Y;
    unsigned int blend_caps = BIT(DRM_MODE_BLEND_PIXEL_NONE) |
                              BIT(DRM_MODE_BLEND_PREMULTI) |
                              BIT(DRM_MODE_BLEND_COVERAGE);
    u32 *formats;
    int ret, i = 0, j = 0, n;
    u64 supported_modifiers[MODIFIERS_COUNT_MAX];
    const u64 *modifiers;
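    /*
     * supported_modifiers holds a filtered copy of malidp_format_modifiers
     * (terminated by DRM_FORMAT_MOD_INVALID); MODIFIERS_COUNT_MAX bounds
     * its size.
     */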
    modifiers = malidp_format_modifiers;

    if (!(map->features & MALIDP_DEVICE_AFBC_SUPPORT_SPLIT)) {
        /*
         * Our hardware does not support SPLIT, so build the list of
         * supported modifiers excluding the SPLIT ones.
         */
        while (*modifiers != DRM_FORMAT_MOD_INVALID) {
            if (!(*modifiers & AFBC_SPLIT))
                supported_modifiers[j++] = *modifiers;

            modifiers++;
        }
        supported_modifiers[j++] = DRM_FORMAT_MOD_INVALID;
        modifiers = supported_modifiers;
    }

    formats = kcalloc(map->n_pixel_formats, sizeof(*formats), GFP_KERNEL);
    if (!formats) {
        ret = -ENOMEM;
        goto cleanup;
    }

    for (i = 0; i < map->n_layers; i++) {
        u8 id = map->layers[i].id;

        plane = kzalloc(sizeof(*plane), GFP_KERNEL);
        if (!plane) {
            ret = -ENOMEM;
            goto cleanup;
        }

        /* build the list of DRM supported formats based on the map */
        for (n = 0, j = 0; j < map->n_pixel_formats; j++) {
            if ((map->pixel_formats[j].layer & id) == id)
                formats[n++] = map->pixel_formats[j].format;
        }

        plane_type = (i == 0) ? DRM_PLANE_TYPE_PRIMARY :
                                DRM_PLANE_TYPE_OVERLAY;

        /*
         * All layers except the SMART layer support AFBC modifiers.
         */
        ret = drm_universal_plane_init(drm, &plane->base, crtcs,
                                       &malidp_de_plane_funcs, formats, n,
                                       (id == DE_SMART) ? linear_only_modifiers : modifiers,
                                       plane_type, NULL);
        if (ret < 0)
            goto cleanup;

        drm_plane_helper_add(&plane->base,
                             &malidp_de_plane_helper_funcs);

        plane->hwdev = malidp->dev;
        plane->layer = &map->layers[i];

        drm_plane_create_alpha_property(&plane->base);
        drm_plane_create_blend_mode_property(&plane->base, blend_caps);

        if (id == DE_SMART) {
            /* Skip the features which the SMART layer doesn't have. */
            continue;
        }

        drm_plane_create_rotation_property(&plane->base, DRM_MODE_ROTATE_0, flags);
        malidp_hw_write(malidp->dev, MALIDP_ALPHA_LUT,
                        plane->layer->base + MALIDP_LAYER_COMPOSE);

        /* Attach the YUV->RGB property only to video layers */
        if (id & (DE_VIDEO1 | DE_VIDEO2)) {
            /* default encoding for YUV->RGB is BT601 NARROW */
            enum drm_color_encoding enc = DRM_COLOR_YCBCR_BT601;
            enum drm_color_range range = DRM_COLOR_YCBCR_LIMITED_RANGE;

            ret = drm_plane_create_color_properties(&plane->base,
                                                    BIT(DRM_COLOR_YCBCR_BT601) |
                                                    BIT(DRM_COLOR_YCBCR_BT709) |
                                                    BIT(DRM_COLOR_YCBCR_BT2020),
                                                    BIT(DRM_COLOR_YCBCR_LIMITED_RANGE) |
                                                    BIT(DRM_COLOR_YCBCR_FULL_RANGE),
                                                    enc, range);
            if (!ret)
                /* program the HW registers */
                malidp_de_set_color_encoding(plane, enc, range);
            else
                DRM_WARN("Failed to create video layer %d color properties\n", id);
        }
    }

    kfree(formats);

    return 0;

cleanup:
    kfree(formats);

    return ret;
}